fix: hotfix 0.1.38 (#765)


## Description
- db_engine was not gathered dynamically; it is now resolved inside the
decorator's wrapper, so a change of the system directory is handled correctly
- Added top_k to all search types
- Reduced the node/edge count threshold in the delete test
- Updated the MCP version and package info

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.

---------

Co-authored-by: Boris <boris@topoteretes.com>
Commit f404386df5 (parent 60da1c899e), authored by Igor Ilic on 2025-04-23 12:04:48 +02:00, committed by GitHub.
7 changed files with 1108 additions and 749 deletions.

cognee-mcp/pyproject.toml

@@ -1,19 +1,19 @@
 [project]
 name = "cognee-mcp"
-version = "0.2.2"
+version = "0.2.3"
 description = "A MCP server project"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = [
-    "cognee[postgres,codegraph,gemini,huggingface]==0.1.37",
+    "cognee[postgres,codegraph,gemini,huggingface]==0.1.38",
     "mcp==1.5.0",
     "uv>=0.6.3",
 ]
 [[project.authors]]
-name = "Rita Aleksziev"
-email = "rita@topoteretes.com"
+name = "Boris Arzentar"
+email = "boris@topoteretes.com"
 [build-system]
 requires = [ "hatchling", ]
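As a quick post-upgrade sanity check, the installed versions can be read back with importlib.metadata. This is only an illustrative snippet; the "cognee-mcp" distribution name is an assumption, since the MCP server may be run from the project directory rather than installed as a package.

```python
# Illustrative check only: confirms the version bumps in this hotfix took effect locally.
# The "cognee-mcp" distribution name is an assumption; it may not be installed as a package.
from importlib.metadata import PackageNotFoundError, version

for dist in ("cognee", "cognee-mcp"):
    try:
        print(dist, version(dist))  # expected after this PR: cognee 0.1.38, cognee-mcp 0.2.3
    except PackageNotFoundError:
        print(dist, "not installed in this environment")
```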

cognee-mcp/uv.lock (generated, 1819 changed lines): file diff suppressed because it is too large.


@@ -21,10 +21,10 @@ Node = Tuple[str, NodeData]  # (node_id, properties)
 def record_graph_changes(func):
     """Decorator to record graph changes in the relationship database."""
-    db_engine = get_relational_engine()
     @wraps(func)
     async def wrapper(self, *args, **kwargs):
+        db_engine = get_relational_engine()
         frame = inspect.currentframe()
         while frame:
             if frame.f_back and frame.f_back.f_code.co_name != "wrapper":
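
To see why this one-line move matters, here is a minimal, standalone sketch of the difference between resolving the engine at decoration time and at call time; the names below are placeholders for illustration, not cognee's actual module layout.

```python
import asyncio
import functools

# Placeholder for cognee's get_relational_engine(): returns whatever engine the
# current configuration (e.g. the system directory) points at.
_current_engine = "engine-configured-at-import"

def get_relational_engine():
    return _current_engine

def record_graph_changes(func):
    """Sketch of the decorator: the engine is fetched per call, inside the wrapper."""
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        db_engine = get_relational_engine()  # call time: picks up config changes
        print("using", db_engine)
        return await func(*args, **kwargs)
    return wrapper

@record_graph_changes
async def add_nodes():
    return "ok"

asyncio.run(add_nodes())  # uses engine-configured-at-import
_current_engine = "engine-after-system-directory-change"
asyncio.run(add_nodes())  # sees the new engine; a decoration-time lookup would not
```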


@@ -19,9 +19,9 @@ class CodeRetriever(BaseRetriever):
         filenames: List[str] = []
         sourcecode: str
-    def __init__(self, limit: int = 3):
+    def __init__(self, top_k: int = 3):
         """Initialize retriever with search parameters."""
-        self.limit = limit
+        self.top_k = top_k
         self.file_name_collections = ["CodeFile_name"]
         self.classes_and_functions_collections = [
             "ClassDefinition_source_code",
@@ -60,7 +60,7 @@ class CodeRetriever(BaseRetriever):
         if not files_and_codeparts.filenames or not files_and_codeparts.sourcecode:
             for collection in self.file_name_collections:
                 search_results_file = await vector_engine.search(
-                    collection, query, limit=self.limit
+                    collection, query, limit=self.top_k
                 )
                 for res in search_results_file:
                     similar_filenames.append(
@@ -69,7 +69,7 @@ class CodeRetriever(BaseRetriever):
             for collection in self.classes_and_functions_collections:
                 search_results_code = await vector_engine.search(
-                    collection, query, limit=self.limit
+                    collection, query, limit=self.top_k
                 )
                 for res in search_results_code:
                     similar_codepieces.append(
@@ -79,7 +79,7 @@ class CodeRetriever(BaseRetriever):
             for collection in self.file_name_collections:
                 for file_from_query in files_and_codeparts.filenames:
                     search_results_file = await vector_engine.search(
-                        collection, file_from_query, limit=self.limit
+                        collection, file_from_query, limit=self.top_k
                     )
                     for res in search_results_file:
                         similar_filenames.append(
@@ -88,7 +88,7 @@ class CodeRetriever(BaseRetriever):
             for collection in self.classes_and_functions_collections:
                 search_results_code = await vector_engine.search(
-                    collection, files_and_codeparts.sourcecode, limit=self.limit
+                    collection, files_and_codeparts.sourcecode, limit=self.top_k
                 )
                 for res in search_results_code:
                     similar_codepieces.append(


@@ -9,9 +9,9 @@ from cognee.infrastructure.databases.vector.exceptions.exceptions import Collect
 class SummariesRetriever(BaseRetriever):
     """Retriever for handling summary-based searches."""
-    def __init__(self, limit: int = 5):
+    def __init__(self, top_k: int = 5):
         """Initialize retriever with search parameters."""
-        self.limit = limit
+        self.top_k = top_k
     async def get_context(self, query: str) -> Any:
         """Retrieves summary context based on the query."""
@@ -19,7 +19,7 @@ class SummariesRetriever(BaseRetriever):
         try:
             summaries_results = await vector_engine.search(
-                "TextSummary_text", query, limit=self.limit
+                "TextSummary_text", query, limit=self.top_k
             )
         except CollectionNotFoundError as error:
             raise NoDataError("No data found in the system, please add data first.") from error
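
For callers that construct retrievers directly, the visible change is the renamed constructor parameter (limit becomes top_k). A hedged usage sketch, assuming cognee==0.1.38 is installed, data has already been added and cognified, and that the class is importable from cognee's retrieval module (the exact import path below is an assumption):

```python
import asyncio

# Assumed import path -- adjust to wherever SummariesRetriever lives in your cognee version.
from cognee.modules.retrieval.summaries_retriever import SummariesRetriever

async def main():
    # 0.1.37 spelled this SummariesRetriever(limit=5); in 0.1.38 the parameter is top_k.
    retriever = SummariesRetriever(top_k=5)
    context = await retriever.get_context("What did the hotfix change?")
    print(context)

asyncio.run(main())
```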


@@ -59,9 +59,9 @@ async def specific_search(
     top_k: int = 10,
 ) -> list:
     search_tasks: dict[SearchType, Callable] = {
-        SearchType.SUMMARIES: SummariesRetriever().get_completion,
+        SearchType.SUMMARIES: SummariesRetriever(top_k=top_k).get_completion,
         SearchType.INSIGHTS: InsightsRetriever(top_k=top_k).get_completion,
-        SearchType.CHUNKS: ChunksRetriever().get_completion,
+        SearchType.CHUNKS: ChunksRetriever(top_k=top_k).get_completion,
         SearchType.RAG_COMPLETION: CompletionRetriever(
             system_prompt_path=system_prompt_path,
             top_k=top_k,
@@ -71,9 +71,9 @@ async def specific_search(
             top_k=top_k,
         ).get_completion,
         SearchType.GRAPH_SUMMARY_COMPLETION: GraphSummaryCompletionRetriever(
-            system_prompt_path=system_prompt_path,
+            system_prompt_path=system_prompt_path, top_k=top_k
         ).get_completion,
-        SearchType.CODE: CodeRetriever().get_completion,
+        SearchType.CODE: CodeRetriever(top_k=top_k).get_completion,
         SearchType.CYPHER: CypherSearchRetriever().get_completion,
         SearchType.NATURAL_LANGUAGE: NaturalLanguageRetriever().get_completion,
     }
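
The table above is a plain dispatch map from SearchType to a bound retriever method, with one shared top_k threaded into every entry. Below is a simplified, self-contained illustration of that pattern; the SearchType members and DummyRetriever are dummy names for this sketch, not cognee's actual code.

```python
# Standalone illustration: one shared top_k is bound into every retriever entry,
# so whichever SearchType is requested honors the caller's top_k.
from enum import Enum, auto

class SearchType(Enum):
    SUMMARIES = auto()
    CHUNKS = auto()
    CODE = auto()

class DummyRetriever:
    def __init__(self, name: str, top_k: int = 10):
        self.name, self.top_k = name, top_k

    async def get_completion(self, query: str) -> str:
        return f"{self.name}: top {self.top_k} results for {query!r}"

async def specific_search(search_type: SearchType, query: str, top_k: int = 10):
    search_tasks = {
        SearchType.SUMMARIES: DummyRetriever("summaries", top_k=top_k).get_completion,
        SearchType.CHUNKS: DummyRetriever("chunks", top_k=top_k).get_completion,
        SearchType.CODE: DummyRetriever("code", top_k=top_k).get_completion,
    }
    return await search_tasks[search_type](query)

# import asyncio; print(asyncio.run(specific_search(SearchType.CODE, "parse config", top_k=3)))
```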


@@ -57,7 +57,7 @@ async def main():
     graph_engine = await get_graph_engine()
     nodes, edges = await graph_engine.get_graph_data()
-    assert len(nodes) > 15 and len(edges) > 15, "Graph database is not loaded."
+    assert len(nodes) > 10 and len(edges) > 10, "Graph database is not loaded."
     await cognee.delete([text_1, text_2], mode="hard")
     nodes, edges = await graph_engine.get_graph_data()