feat: log search queries and results (#166)

* feat: log search queries and results

* fix: address coderabbit review comments

* fix: parse UUID when logging search results

* fix: remove custom UUID type and use DB agnostic UUID from sqlalchemy

* Add new cognee_db

---------

Co-authored-by: Leon Luithlen <leon@topoteretes.com>
This commit is contained in:
Boris 2024-11-17 11:59:10 +01:00 committed by GitHub
parent d30adb53f3
commit d8b6eeded5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
39 changed files with 267 additions and 110 deletions

View file

@ -0,0 +1,8 @@
import { fetch } from '@/utils';
// Fetch the persisted search history (queries and results) from the backend.
export default async function getHistory() {
  const response = await fetch('/v1/search');
  return response.json();
}

View file

@ -1,9 +1,12 @@
'use client';
import { v4 } from 'uuid';
import classNames from 'classnames';
import { useCallback, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { CTAButton, Stack, Text, DropdownSelect, TextArea, useBoolean } from 'ohmy-ui';
import { fetch } from '@/utils';
import styles from './SearchView.module.css';
import getHistory from '@/modules/chat/getHistory';
interface Message {
id: string;
@ -52,6 +55,14 @@ export default function SearchView() {
}, 300);
}, []);
useEffect(() => {
getHistory()
.then((history) => {
setMessages(history);
scrollToBottom();
});
}, [scrollToBottom]);
const handleSearchSubmit = useCallback((event: React.FormEvent<HTMLFormElement>) => {
event.preventDefault();
@ -78,7 +89,7 @@ export default function SearchView() {
'Content-Type': 'application/json',
},
body: JSON.stringify({
query: inputValue,
query: inputValue.trim(),
searchType: searchTypeValue,
}),
})

View file

@ -2,7 +2,7 @@ from .api.v1.config.config import config
from .api.v1.add import add
from .api.v1.cognify import cognify
from .api.v1.datasets.datasets import datasets
from .api.v1.search import search, SearchType
from .api.v1.search import search, SearchType, get_search_history
from .api.v1.prune import prune
# Pipelines

View file

@ -1 +1,2 @@
from .search_v2 import search, SearchType
from .get_search_history import get_search_history

View file

@ -0,0 +1,9 @@
from cognee.modules.search.operations import get_history
from cognee.modules.users.methods import get_default_user
from cognee.modules.users.models import User
async def get_search_history(user: User = None) -> list:
    """Return the stored search history for *user*.

    Falls back to the system default user when no user is supplied.
    """
    resolved_user = user if user else await get_default_user()
    return await get_history(resolved_user.id)

View file

@ -1,8 +1,11 @@
from cognee.api.v1.search import SearchType
from fastapi.responses import JSONResponse
from cognee.modules.users.models import User
from uuid import UUID
from datetime import datetime
from fastapi import Depends, APIRouter
from cognee.api.DTO import InDTO
from fastapi.responses import JSONResponse
from cognee.api.v1.search import SearchType
from cognee.api.DTO import InDTO, OutDTO
from cognee.modules.users.models import User
from cognee.modules.search.operations import get_history
from cognee.modules.users.methods import get_authenticated_user
@ -13,6 +16,24 @@ class SearchPayloadDTO(InDTO):
def get_search_router() -> APIRouter:
router = APIRouter()
class SearchHistoryItem(OutDTO):
id: UUID
text: str
user: str
created_at: datetime
@router.get("/", response_model = list[SearchHistoryItem])
async def get_search_history(user: User = Depends(get_authenticated_user)):
try:
history = await get_history(user.id)
return history
except Exception as error:
return JSONResponse(
status_code = 500,
content = {"error": str(error)}
)
@router.post("/", response_model = list)
async def search(payload: SearchPayloadDTO, user: User = Depends(get_authenticated_user)):
""" This endpoint is responsible for searching for nodes in the graph."""
@ -28,4 +49,4 @@ def get_search_router() -> APIRouter:
content = {"error": str(error)}
)
return router
return router

View file

@ -1,6 +1,9 @@
import json
from uuid import UUID
from enum import Enum
from typing import Callable, Dict
from cognee.modules.search.operations import log_query, log_result
from cognee.modules.storage.utils import JSONEncoder
from cognee.shared.utils import send_telemetry
from cognee.modules.users.models import User
from cognee.modules.users.methods import get_default_user
@ -14,15 +17,17 @@ class SearchType(Enum):
INSIGHTS = "INSIGHTS"
CHUNKS = "CHUNKS"
async def search(search_type: SearchType, query: str, user: User = None) -> list:
async def search(query_type: SearchType, query_text: str, user: User = None) -> list:
if user is None:
user = await get_default_user()
if user is None:
raise PermissionError("No user found in the system. Please create a user.")
query = await log_query(query_text, str(query_type), user.id)
own_document_ids = await get_document_ids_for_user(user.id)
search_results = await specific_search(search_type, query, user)
search_results = await specific_search(query_type, query_text, user)
filtered_search_results = []
@ -33,19 +38,21 @@ async def search(search_type: SearchType, query: str, user: User = None) -> list
if document_id is None or document_id in own_document_ids:
filtered_search_results.append(search_result)
await log_result(query.id, json.dumps(filtered_search_results, cls = JSONEncoder), user.id)
return filtered_search_results
async def specific_search(search_type: SearchType, query: str, user) -> list:
async def specific_search(query_type: SearchType, query: str, user) -> list:
search_tasks: Dict[SearchType, Callable] = {
SearchType.SUMMARIES: query_summaries,
SearchType.INSIGHTS: query_graph_connections,
SearchType.CHUNKS: query_chunks,
}
search_task = search_tasks.get(search_type)
search_task = search_tasks.get(query_type)
if search_task is None:
raise ValueError(f"Unsupported search type: {search_type}")
raise ValueError(f"Unsupported search type: {query_type}")
send_telemetry("cognee.search EXECUTION STARTED", user.id)

View file

@ -2,6 +2,3 @@ from .ModelBase import Base
from .config import get_relational_config
from .create_db_and_tables import create_db_and_tables
from .get_relational_engine import get_relational_engine
# Global data types
from .data_types.UUID import UUID

View file

@ -1,45 +0,0 @@
import uuid
from sqlalchemy.types import TypeDecorator, BINARY
from sqlalchemy.dialects.postgresql import UUID as psqlUUID
class UUID(TypeDecorator):
    """Platform-independent GUID type.

    Uses PostgreSQL's native UUID type; on every other dialect the value
    is stored as BINARY(16).
    """
    impl = BINARY
    # This type's behavior depends only on the dialect, never on per-instance
    # state, so it is safe to cache. Without cache_ok SQLAlchemy 1.4+ emits a
    # warning and disables statement caching for queries using this type.
    cache_ok = True

    def load_dialect_impl(self, dialect):
        """Pick the concrete storage type for the active dialect."""
        if dialect.name == 'postgresql':
            return dialect.type_descriptor(psqlUUID())
        else:
            return dialect.type_descriptor(BINARY(16))

    def process_bind_param(self, value, dialect):
        """Coerce inbound values (bytes / int / str / UUID) to storage form."""
        if value is None:
            return value
        # Normalize any accepted representation to a uuid.UUID first.
        if not isinstance(value, uuid.UUID):
            if isinstance(value, bytes):
                value = uuid.UUID(bytes = value)
            elif isinstance(value, int):
                value = uuid.UUID(int = value)
            elif isinstance(value, str):
                value = uuid.UUID(value)
        # PostgreSQL stores the canonical string; other dialects raw 16 bytes.
        if dialect.name == 'postgresql':
            return str(value)
        return value.bytes

    def process_result_value(self, value, dialect):
        """Convert the stored representation back into a uuid.UUID."""
        if value is None:
            return value
        if dialect.name == 'postgresql':
            # Some drivers already return uuid.UUID; others return str.
            if isinstance(value, uuid.UUID):
                return value
            return uuid.UUID(value)
        else:
            return uuid.UUID(bytes = value)

View file

@ -1,4 +1,4 @@
from datetime import datetime
from datetime import datetime, timezone
from sqlalchemy.orm import Mapped, MappedColumn
from sqlalchemy import Column, DateTime, ForeignKey, Enum, JSON
from cognee.infrastructure.databases.relational import Base, UUID
@ -24,4 +24,4 @@ class Operation(Base):
data_id = Column(UUID, ForeignKey("data.id"))
meta_data: Mapped[dict] = MappedColumn(type_ = JSON)
created_at = Column(DateTime, default = datetime.utcnow)
created_at = Column(DateTime, default = datetime.now(timezone.utc))

View file

@ -2,8 +2,8 @@ from uuid import uuid4
from typing import List
from datetime import datetime, timezone
from sqlalchemy.orm import relationship, Mapped
from sqlalchemy import Column, String, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, String, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
from .DatasetData import DatasetData
class Data(Base):

View file

@ -2,8 +2,8 @@ from uuid import uuid4
from typing import List
from datetime import datetime, timezone
from sqlalchemy.orm import relationship, Mapped
from sqlalchemy import Column, Text, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, Text, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
from .DatasetData import DatasetData
class Dataset(Base):

View file

@ -1,6 +1,6 @@
from datetime import datetime, timezone
from sqlalchemy import Column, DateTime, ForeignKey
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, DateTime, ForeignKey, UUID
from cognee.infrastructure.databases.relational import Base
class DatasetData(Base):
__tablename__ = "dataset_data"

View file

@ -1,9 +1,10 @@
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy import Column, DateTime, String, Text
from sqlalchemy import Column, DateTime, String, Text, UUID
from sqlalchemy.orm import relationship, Mapped
from cognee.infrastructure.databases.relational import Base, UUID
from cognee.infrastructure.databases.relational import Base
from .PipelineTask import PipelineTask
from .Task import Task
class Pipeline(Base):
__tablename__ = "pipelines"

View file

@ -1,8 +1,8 @@
import enum
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy import Column, DateTime, JSON, Enum
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, DateTime, JSON, Enum, UUID
from cognee.infrastructure.databases.relational import Base
class PipelineRunStatus(enum.Enum):
DATASET_PROCESSING_STARTED = "DATASET_PROCESSING_STARTED"

View file

@ -1,6 +1,6 @@
from datetime import datetime, timezone
from sqlalchemy import Column, DateTime, ForeignKey
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, DateTime, ForeignKey, UUID
from cognee.infrastructure.databases.relational import Base
class PipelineTask(Base):
__tablename__ = "pipeline_task"

View file

@ -0,0 +1,16 @@
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy import Column, DateTime, String, UUID
from cognee.infrastructure.databases.relational import Base
class Query(Base):
    """A search query logged for a user (see log_query / get_history)."""
    __tablename__ = "queries"

    id = Column(UUID, primary_key = True, default = uuid4)

    # Raw query text and the name of the SearchType it was executed with.
    text = Column(String)
    query_type = Column(String)

    # Indexed: get_history filters queries by user_id, matching the index
    # already present on Result.user_id.
    user_id = Column(UUID, index = True)

    created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc))

View file

@ -0,0 +1,16 @@
from datetime import datetime, timezone
from uuid import uuid4
from sqlalchemy import Column, DateTime, Text, UUID
from cognee.infrastructure.databases.relational import Base
class Result(Base):
    """A serialized search result linked back to the query that produced it."""
    __tablename__ = "results"

    id = Column(UUID, primary_key = True, default = uuid4)

    # JSON-encoded search results (serialized by the caller, see log_result).
    value = Column(Text)

    # Id of the originating Query row; plain column, no FK constraint declared.
    query_id = Column(UUID)
    # Indexed because get_history / get_results filter by user.
    user_id = Column(UUID, index = True)

    created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc))

View file

@ -0,0 +1,3 @@
from .log_query import log_query
from .log_result import log_result
from .get_history import get_history

View file

@ -0,0 +1,31 @@
from uuid import UUID
from sqlalchemy import literal, select
from cognee.infrastructure.databases.relational import get_relational_engine
from ..models.Query import Query
from ..models.Result import Result
async def get_history(user_id: UUID, limit: int = 10) -> list[Result]:
    """Return a merged chronological log of a user's queries and results.

    Each row exposes (id, text, created_at, user), where ``user`` is the
    literal "user" for logged queries and "system" for logged results.
    """
    engine = get_relational_engine()

    user_rows = select(
        Query.id,
        Query.text.label("text"),
        Query.created_at,
        literal("user").label("user"),
    ).where(Query.user_id == user_id)

    system_rows = select(
        Result.id,
        Result.value.label("text"),
        Result.created_at,
        literal("system").label("user"),
    ).where(Result.user_id == user_id)

    combined = user_rows.union(system_rows).order_by("created_at").limit(limit)

    async with engine.get_async_session() as session:
        rows = (await session.execute(combined)).all()

    return rows

View file

@ -0,0 +1,17 @@
from uuid import UUID
from sqlalchemy import select
from cognee.infrastructure.databases.relational import get_relational_engine
from ..models.Query import Query
async def get_queries(user_id: UUID, limit: int) -> list[Query]:
    """Return the *limit* most recent queries logged for *user_id*."""
    engine = get_relational_engine()

    statement = (
        select(Query)
        .where(Query.user_id == user_id)
        .order_by(Query.created_at.desc())
        .limit(limit)
    )

    async with engine.get_async_session() as session:
        return (await session.scalars(statement)).all()

View file

@ -0,0 +1,17 @@
from uuid import UUID
from sqlalchemy import select
from cognee.infrastructure.databases.relational import get_relational_engine
from ..models.Result import Result
async def get_results(user_id: UUID, limit: int = 10) -> list[Result]:
    """Return the *limit* most recent results logged for *user_id*."""
    engine = get_relational_engine()

    statement = (
        select(Result)
        .where(Result.user_id == user_id)
        .order_by(Result.created_at.desc())
        .limit(limit)
    )

    async with engine.get_async_session() as session:
        return (await session.scalars(statement)).all()

View file

@ -0,0 +1,19 @@
from uuid import UUID
from cognee.infrastructure.databases.relational import get_relational_engine
from ..models.Query import Query
async def log_query(query_text: str, query_type: str, user_id: UUID) -> Query:
    """Persist a search query and return the stored row.

    Args:
        query_text: The raw query string the user submitted.
        query_type: Name of the SearchType the query was executed with.
        user_id: Id of the user who issued the query.

    Returns:
        The committed Query instance with its generated fields loaded.
    """
    db_engine = get_relational_engine()

    async with db_engine.get_async_session() as session:
        query = Query(
            text = query_text,
            query_type = query_type,
            user_id = user_id,
        )

        session.add(query)
        await session.commit()
        # Re-load attributes before the session closes: with the default
        # expire_on_commit=True, a caller reading query.id after this session
        # is gone would hit an expired/detached instance instead of the value.
        await session.refresh(query)

    return query

View file

@ -0,0 +1,15 @@
from uuid import UUID
from cognee.infrastructure.databases.relational import get_relational_engine
from ..models.Result import Result
async def log_result(query_id: UUID, result: str, user_id: UUID):
    """Store a serialized search result linked to the originating query."""
    engine = get_relational_engine()

    entry = Result(
        value = result,
        query_id = query_id,
        user_id = user_id,
    )

    async with engine.get_async_session() as session:
        session.add(entry)
        await session.commit()

View file

@ -1,8 +1,8 @@
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy.orm import relationship, Mapped
from sqlalchemy import Column, ForeignKey, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, ForeignKey, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
from .ACLResources import ACLResources
class ACL(Base):

View file

@ -1,6 +1,6 @@
from datetime import datetime, timezone
from sqlalchemy import Column, ForeignKey, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, ForeignKey, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
class ACLResources(Base):
__tablename__ = "acl_resources"

View file

@ -1,6 +1,5 @@
from sqlalchemy.orm import relationship, Mapped
from sqlalchemy import Column, String, ForeignKey
from cognee.infrastructure.databases.relational import UUID
from sqlalchemy import Column, String, ForeignKey, UUID
from .Principal import Principal
from .UserGroup import UserGroup

View file

@ -1,8 +1,8 @@
from uuid import uuid4
from datetime import datetime, timezone
# from sqlalchemy.orm import relationship
from sqlalchemy import Column, DateTime, String
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, DateTime, String, UUID
from cognee.infrastructure.databases.relational import Base
class Permission(Base):
__tablename__ = "permissions"

View file

@ -1,7 +1,7 @@
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy import Column, String, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, String, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
class Principal(Base):
__tablename__ = "principals"

View file

@ -1,8 +1,8 @@
from uuid import uuid4
from datetime import datetime, timezone
from sqlalchemy.orm import relationship
from sqlalchemy import Column, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
from .ACLResources import ACLResources
class Resource(Base):

View file

@ -1,10 +1,10 @@
from uuid import UUID as uuid_UUID
from sqlalchemy import ForeignKey, Column
from sqlalchemy import ForeignKey, Column, UUID
from sqlalchemy.orm import relationship, Mapped
from fastapi_users.db import SQLAlchemyBaseUserTableUUID
from cognee.infrastructure.databases.relational import UUID
from .Principal import Principal
from .UserGroup import UserGroup
from .Group import Group
class User(SQLAlchemyBaseUserTableUUID, Principal):
__tablename__ = "users"
@ -25,7 +25,6 @@ class User(SQLAlchemyBaseUserTableUUID, Principal):
from fastapi_users import schemas
class UserRead(schemas.BaseUser[uuid_UUID]):
# groups: list[uuid_UUID] # Add groups attribute
pass
class UserCreate(schemas.BaseUserCreate):

View file

@ -1,6 +1,6 @@
from datetime import datetime, timezone
from sqlalchemy import Column, ForeignKey, DateTime
from cognee.infrastructure.databases.relational import Base, UUID
from sqlalchemy import Column, ForeignKey, DateTime, UUID
from cognee.infrastructure.databases.relational import Base
class UserGroup(Base):
__tablename__ = "user_groups"

View file

@ -35,24 +35,27 @@ async def main():
random_node = (await vector_engine.search("Entity_name", "AI"))[0]
random_node_name = random_node.payload["text"]
search_results = await cognee.search(SearchType.INSIGHTS, query = random_node_name)
search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted sentences are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.CHUNKS, query = random_node_name)
search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted chunks are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.SUMMARIES, query = random_node_name)
search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name)
assert len(search_results) != 0, "Query related summaries don't exist."
print("\nExtracted summaries are:\n")
for result in search_results:
print(f"{result}\n")
history = await cognee.get_search_history()
assert len(history) == 6, "Search history is not correct."
if __name__ == "__main__":
import asyncio

View file

@ -39,24 +39,27 @@ async def main():
random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
random_node_name = random_node.payload["text"]
search_results = await cognee.search(SearchType.INSIGHTS, query = random_node_name)
search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted sentences are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.CHUNKS, query = random_node_name)
search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted chunks are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.SUMMARIES, query = random_node_name)
search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name)
assert len(search_results) != 0, "Query related summaries don't exist."
print("\nExtracted summaries are:\n")
for result in search_results:
print(f"{result}\n")
history = await cognee.get_search_history()
assert len(history) == 6, "Search history is not correct."
if __name__ == "__main__":
import asyncio

View file

@ -68,24 +68,27 @@ async def main():
random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
random_node_name = random_node.payload["text"]
search_results = await cognee.search(SearchType.INSIGHTS, query=random_node_name)
search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted sentences are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.CHUNKS, query=random_node_name)
search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted chunks are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.SUMMARIES, query=random_node_name)
search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name)
assert len(search_results) != 0, "Query related summaries don't exist."
print("\n\nExtracted summaries are:\n")
for result in search_results:
print(f"{result}\n")
history = await cognee.get_search_history()
assert len(history) == 6, "Search history is not correct."
if __name__ == "__main__":
import asyncio

View file

@ -40,24 +40,27 @@ async def main():
random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
random_node_name = random_node.payload["text"]
search_results = await cognee.search(SearchType.INSIGHTS, query = random_node_name)
search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted sentences are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.CHUNKS, query = random_node_name)
search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted chunks are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.SUMMARIES, query = random_node_name)
search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name)
assert len(search_results) != 0, "Query related summaries don't exist."
print("\nExtracted summaries are:\n")
for result in search_results:
print(f"{result}\n")
history = await cognee.get_search_history()
assert len(history) == 6, "Search history is not correct."
if __name__ == "__main__":
import asyncio

View file

@ -38,24 +38,27 @@ async def main():
random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
random_node_name = random_node.payload["text"]
search_results = await cognee.search(SearchType.INSIGHTS, query = random_node_name)
search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted sentences are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.CHUNKS, query = random_node_name)
search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name)
assert len(search_results) != 0, "The search results list is empty."
print("\n\nExtracted chunks are:\n")
for result in search_results:
print(f"{result}\n")
search_results = await cognee.search(SearchType.SUMMARIES, query = random_node_name)
search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name)
assert len(search_results) != 0, "Query related summaries don't exist."
print("\nExtracted summaries are:\n")
for result in search_results:
print(f"{result}\n")
history = await cognee.get_search_history()
assert len(history) == 6, "Search history is not correct."
if __name__ == "__main__":
import asyncio

View file

@ -791,7 +791,7 @@
"node = (await vector_engine.search(\"Entity_name\", \"sarah.nguyen@example.com\"))[0]\n",
"node_name = node.payload[\"text\"]\n",
"\n",
"search_results = await cognee.search(SearchType.SUMMARIES, query = node_name)\n",
"search_results = await cognee.search(SearchType.SUMMARIES, query_text = node_name)\n",
"print(\"\\n\\Extracted summaries are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"
@ -812,7 +812,7 @@
"metadata": {},
"outputs": [],
"source": [
"search_results = await cognee.search(SearchType.CHUNKS, query = node_name)\n",
"search_results = await cognee.search(SearchType.CHUNKS, query_text = node_name)\n",
"print(\"\\n\\nExtracted chunks are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"
@ -833,7 +833,7 @@
"metadata": {},
"outputs": [],
"source": [
"search_results = await cognee.search(SearchType.INSIGHTS, query = node_name)\n",
"search_results = await cognee.search(SearchType.INSIGHTS, query_text = node_name)\n",
"print(\"\\n\\nExtracted sentences are:\\n\")\n",
"for result in search_results:\n",
" print(f\"{result}\\n\")"