feat: Add compute search to cognee

Add compute search to cognee, which makes search results human-readable

Feature COG-656
This commit is contained in:
Igor Ilic 2024-12-13 15:18:33 +01:00
parent 43187e4d63
commit 9c3e2422f3
5 changed files with 60 additions and 0 deletions

View file

@ -14,11 +14,13 @@ from cognee.modules.users.permissions.methods import get_document_ids_for_user
from cognee.tasks.chunks import query_chunks
from cognee.tasks.graph import query_graph_connections
from cognee.tasks.summarization import query_summaries
from cognee.tasks.compute import query_compute
class SearchType(Enum):
    """Search modes supported by the cognee search API.

    Each member's value is identical to its name, so the enum can be
    looked up from the wire-format string via ``SearchType("CHUNKS")``.
    """

    SUMMARIES = "SUMMARIES"  # pre-computed summary lookup
    INSIGHTS = "INSIGHTS"    # graph-connection exploration
    CHUNKS = "CHUNKS"        # raw document-chunk retrieval
    COMPUTE = "COMPUTE"      # LLM-computed, human-readable answer
async def search(query_type: SearchType, query_text: str, user: User = None) -> list:
if user is None:
@ -50,6 +52,7 @@ async def specific_search(query_type: SearchType, query: str, user) -> list:
SearchType.SUMMARIES: query_summaries,
SearchType.INSIGHTS: query_graph_connections,
SearchType.CHUNKS: query_chunks,
SearchType.COMPUTE: query_compute,
}
search_task = search_tasks.get(query_type)

View file

@ -0,0 +1 @@
from .query_compute import query_compute

View file

@ -0,0 +1,9 @@
"""
Custom exceptions for the Cognee API.
This module defines a set of exceptions for handling various compute errors
"""
from .exceptions import (
NoRelevantDataFound,
)

View file

@ -0,0 +1,11 @@
from cognee.exceptions import CogneeApiError
from fastapi import status
class NoRelevantDataFound(CogneeApiError):
    """Raised when a search query matches no stored data.

    Carries an HTTP 404 status code so the API layer can translate it
    into a not-found response.
    """

    def __init__(self,
                 message: str = "Search did not find any data.",
                 name: str = "NoRelevantDataFound",
                 status_code=status.HTTP_404_NOT_FOUND):
        # Delegate to the shared Cognee error base so error formatting
        # stays consistent across the project.
        super().__init__(message, name, status_code)

View file

@ -0,0 +1,36 @@
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.tasks.compute.exceptions import NoRelevantDataFound
from cognee.infrastructure.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.prompts import read_query_prompt, render_prompt
async def query_compute(query: str) -> list:
    """Compute a human-readable answer for ``query``.

    Retrieves the single most relevant document chunk from the vector
    store and asks the LLM to answer the question using that chunk as
    context.

    Parameters:
    - query (str): The query string to compute an answer for.

    Returns:
    - list: A single-element list containing the computed answer string.

    Raises:
    - NoRelevantDataFound: If the vector search returns no chunks.
    """
    vector_engine = get_vector_engine()

    # Only the top-ranked chunk is used as LLM context.
    found_chunks = await vector_engine.search("document_chunk_text", query, limit=1)

    if not found_chunks:
        raise NoRelevantDataFound()

    args = {
        "question": query,
        "context": found_chunks[0].payload["text"],
    }
    user_prompt = render_prompt("context_for_question.txt", args)
    system_prompt = read_query_prompt("answer_hotpot_using_cognee_search.txt")

    llm_client = get_llm_client()
    computed_answer = await llm_client.acreate_structured_output(
        text_input=user_prompt,
        system_prompt=system_prompt,
        response_model=str,
    )

    return [computed_answer]