feat: Add compute search to cognee
Add compute search to cognee, which makes search results human-readable. Feature: COG-656
This commit is contained in:
parent
43187e4d63
commit
9c3e2422f3
5 changed files with 60 additions and 0 deletions
|
|
@ -14,11 +14,13 @@ from cognee.modules.users.permissions.methods import get_document_ids_for_user
|
||||||
from cognee.tasks.chunks import query_chunks
|
from cognee.tasks.chunks import query_chunks
|
||||||
from cognee.tasks.graph import query_graph_connections
|
from cognee.tasks.graph import query_graph_connections
|
||||||
from cognee.tasks.summarization import query_summaries
|
from cognee.tasks.summarization import query_summaries
|
||||||
|
from cognee.tasks.compute import query_compute
|
||||||
|
|
||||||
class SearchType(Enum):
|
class SearchType(Enum):
|
||||||
SUMMARIES = "SUMMARIES"
|
SUMMARIES = "SUMMARIES"
|
||||||
INSIGHTS = "INSIGHTS"
|
INSIGHTS = "INSIGHTS"
|
||||||
CHUNKS = "CHUNKS"
|
CHUNKS = "CHUNKS"
|
||||||
|
COMPUTE = "COMPUTE"
|
||||||
|
|
||||||
async def search(query_type: SearchType, query_text: str, user: User = None) -> list:
|
async def search(query_type: SearchType, query_text: str, user: User = None) -> list:
|
||||||
if user is None:
|
if user is None:
|
||||||
|
|
@ -50,6 +52,7 @@ async def specific_search(query_type: SearchType, query: str, user) -> list:
|
||||||
SearchType.SUMMARIES: query_summaries,
|
SearchType.SUMMARIES: query_summaries,
|
||||||
SearchType.INSIGHTS: query_graph_connections,
|
SearchType.INSIGHTS: query_graph_connections,
|
||||||
SearchType.CHUNKS: query_chunks,
|
SearchType.CHUNKS: query_chunks,
|
||||||
|
SearchType.COMPUTE: query_compute,
|
||||||
}
|
}
|
||||||
|
|
||||||
search_task = search_tasks.get(query_type)
|
search_task = search_tasks.get(query_type)
|
||||||
|
|
|
||||||
1
cognee/tasks/compute/__init__.py
Normal file
1
cognee/tasks/compute/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
from .query_compute import query_compute
|
||||||
9
cognee/tasks/compute/exceptions/__init__.py
Normal file
9
cognee/tasks/compute/exceptions/__init__.py
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
"""
|
||||||
|
Custom exceptions for the Cognee API.
|
||||||
|
|
||||||
|
This module defines a set of exceptions for handling compute-related search errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .exceptions import (
|
||||||
|
NoRelevantDataFound,
|
||||||
|
)
|
||||||
11
cognee/tasks/compute/exceptions/exceptions.py
Normal file
11
cognee/tasks/compute/exceptions/exceptions.py
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
from cognee.exceptions import CogneeApiError
|
||||||
|
from fastapi import status
|
||||||
|
|
||||||
|
class NoRelevantDataFound(CogneeApiError):
    """Raised when a compute search finds no matching data.

    Carries a default HTTP 404 status code; message, error name, and
    status are all forwarded to the shared ``CogneeApiError`` base.
    """

    def __init__(
        self,
        message: str = "Search did not find any data.",
        name: str = "NoRelevantDataFound",
        status_code=status.HTTP_404_NOT_FOUND,
    ):
        # Delegate entirely to the base class so error handling stays
        # uniform with the rest of the API's exception hierarchy.
        super().__init__(message, name, status_code)
|
||||||
36
cognee/tasks/compute/query_compute.py
Normal file
36
cognee/tasks/compute/query_compute.py
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
from cognee.infrastructure.databases.vector import get_vector_engine
|
||||||
|
from cognee.tasks.compute.exceptions import NoRelevantDataFound
|
||||||
|
from cognee.infrastructure.llm.get_llm_client import get_llm_client
|
||||||
|
from cognee.infrastructure.llm.prompts import read_query_prompt, render_prompt
|
||||||
|
|
||||||
|
|
||||||
|
async def query_compute(query: str) -> list:
    """Compute a human-readable answer to a query from retrieved context.

    Fetches the single most relevant document chunk from the vector
    store, then asks the LLM to answer the query using that chunk as
    context.

    Parameters:
    - query (str): The query string to compute an answer for.

    Returns:
    - list: A single-element list containing the computed answer.

    Raises:
    - NoRelevantDataFound: If the vector search returns no chunks.
    """
    vector_engine = get_vector_engine()

    # Only the top-scoring chunk is used as context for the answer.
    found_chunks = await vector_engine.search("document_chunk_text", query, limit=1)

    if not found_chunks:
        raise NoRelevantDataFound

    args = {
        "question": query,
        "context": found_chunks[0].payload["text"],
    }
    user_prompt = render_prompt("context_for_question.txt", args)
    system_prompt = read_query_prompt("answer_hotpot_using_cognee_search.txt")

    llm_client = get_llm_client()
    computed_answer = await llm_client.acreate_structured_output(
        text_input=user_prompt,
        system_prompt=system_prompt,
        response_model=str,
    )

    # Wrapped in a list to match the return shape of the other
    # search task callables dispatched by specific_search.
    return [computed_answer]
|
||||||
Loading…
Add table
Reference in a new issue