fix: Resolve issue with MCP server communication (#674)
## Description

- Fixes the MCP server communication issue by switching console logging to `sys.stderr` (the default stream for Python logging).
- Adds the `api` optional dependency needed by fastapi users.
- Removes the lock file, since a new one will need to be generated after the next Cognee release that includes the `api` optional dependency.
- Adds the log file location to the MCP tool call answer.

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
Commit f6747128a2 (parent 2611d89094)

6 changed files with 306 additions and 4963 deletions
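Background on the logging change: MCP servers that use the stdio transport exchange JSON-RPC messages over stdout, so any log line written there corrupts the protocol stream; stderr is safe, and it is also the default stream for Python's `logging` handlers. A minimal standalone sketch of the idea (not the project's actual setup, which goes through structlog):

```python
import logging
import sys

# stdout must stay clean: the MCP client parses it as JSON-RPC.
# Route all diagnostics to stderr instead.
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))

logger = logging.getLogger("mcp-server")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("Visible in the terminal via stderr; the JSON-RPC stream is untouched.")
```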
`cognee-mcp/pyproject.toml`:

```diff
@@ -1,13 +1,13 @@
 [project]
 name = "cognee-mcp"
-version = "0.2.0"
+version = "0.2.1"
 description = "A MCP server project"
 readme = "README.md"
 requires-python = ">=3.10"
 
 dependencies = [
     "cognee[postgres,codegraph,gemini,huggingface]",
-    "mcp==1.3.0",
+    "mcp==1.5.0",
     "uv>=0.6.3",
 ]
```
The MCP server module (file path suppressed in this diff view):

```diff
@@ -2,7 +2,7 @@ import asyncio
 import json
 import os
 import cognee
-from cognee.shared.logging_utils import get_logger
+from cognee.shared.logging_utils import get_logger, get_log_file_location
 import importlib.util
 from contextlib import redirect_stderr, redirect_stdout
 
```
```diff
@@ -92,6 +92,8 @@ async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
     try:
         with open(os.devnull, "w") as fnull:
             with redirect_stdout(fnull), redirect_stderr(fnull):
+                log_file = get_log_file_location()
+
                 if name == "cognify":
                     asyncio.create_task(
                         cognify(
```
```diff
@@ -101,19 +103,31 @@ async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
                         )
                     )
 
+                    text = (
+                        f"Background process launched due to MCP timeout limitations.\n"
+                        f"Average completion time is around 4 minutes.\n"
+                        f"For current cognify status you can check the log file at: {log_file}"
+                    )
+
                     return [
                         types.TextContent(
                             type="text",
-                            text="Background process launched due to MCP timeout limitations. Estimated completion time up to 4 minutes.",
+                            text=text,
                         )
                     ]
                 if name == "codify":
                     asyncio.create_task(codify(arguments.get("repo_path")))
 
+                    text = (
+                        f"Background process launched due to MCP timeout limitations.\n"
+                        f"Average completion time is around 4 minutes.\n"
+                        f"For current codify status you can check the log file at: {log_file}"
+                    )
+
                     return [
                         types.TextContent(
                             type="text",
-                            text="Background process launched due to MCP timeout limitations. Estimated completion time up to 4 minutes.",
+                            text=text,
                         )
                     ]
                 elif name == "search":
```
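The hunk above is the fire-and-forget pattern: the pipeline is started with `asyncio.create_task` and the tool call returns immediately, pointing the user at the log file instead of blocking until the MCP client times out. A condensed sketch of the pattern, with a hypothetical `slow_pipeline` standing in for `cognify`/`codify`:

```python
import asyncio

import mcp.types as types

LOG_FILE = "/tmp/cognee.log"  # placeholder; the server resolves this via get_log_file_location()


async def slow_pipeline() -> None:
    """Hypothetical stand-in for a long-running cognify/codify run."""
    await asyncio.sleep(240)


async def handle_tool_call() -> list[types.TextContent]:
    # Start the work but do not await it: the response goes out right away,
    # well inside the MCP client's request timeout.
    asyncio.create_task(slow_pipeline())
    return [
        types.TextContent(
            type="text",
            text=f"Background process launched. Check progress in: {LOG_FILE}",
        )
    ]
```

One caveat worth noting: the event loop holds only a weak reference to tasks created this way, so production code should keep a reference (or attach a done callback) to prevent the task from being garbage-collected mid-run.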
```diff
@@ -133,6 +147,7 @@ async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
 
 async def cognify(text: str, graph_model_file: str = None, graph_model_name: str = None) -> str:
     """Build knowledge graph from the input text"""
+    logger.info("Cognify process starting.")
     if graph_model_file and graph_model_name:
         graph_model = load_class(graph_model_file, graph_model_name)
     else:
```
```diff
@@ -141,14 +156,23 @@ async def cognify(text: str, graph_model_file: str = None, graph_model_name: str
     await cognee.add(text)
 
     try:
-        asyncio.create_task(cognee.cognify(graph_model=graph_model))
+        await cognee.cognify(graph_model=graph_model)
+        logger.info("Cognify process finished.")
     except Exception as e:
+        logger.error("Cognify process failed.")
         raise ValueError(f"Failed to cognify: {str(e)}")
 
 
 async def codify(repo_path: str):
+    logger.info("Codify process starting.")
+    results = []
     async for result in run_code_graph_pipeline(repo_path, False):
+        results.append(result)
         logger.info(result)
+    if all(results):
+        logger.info("Codify process finished successfully.")
+    else:
+        logger.info("Codify process failed.")
 
 
 async def search(search_query: str, search_type: str) -> str:
```
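The important change inside `cognify` is swapping the nested `asyncio.create_task` for a direct `await`: the coroutine already runs inside the background task spawned by `call_tools`, and a second detached task meant the surrounding `try/except` could never observe a failure. Roughly, with hypothetical names:

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


async def pipeline() -> None:
    """Hypothetical stand-in for cognee.cognify()."""
    raise RuntimeError("boom")


async def run_in_background() -> None:
    try:
        # Awaiting here (rather than create_task) lets this except block
        # observe the failure and record it in the log file.
        await pipeline()
        logger.info("Pipeline finished.")
    except Exception as e:
        logger.error("Pipeline failed.")
        raise ValueError(f"Failed to run pipeline: {e}")
```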
`cognee-mcp/uv.lock` (generated, 4460 lines deleted): file diff suppressed because it is too large.
`cognee/shared/logging_utils.py`:

```diff
@@ -256,7 +256,7 @@ def setup_logging(log_level=INFO, name=None):
             self.handleError(record)
 
     # Use our custom handler for console output
-    stream_handler = NewlineStreamHandler(sys.stdout)
+    stream_handler = NewlineStreamHandler(sys.stderr)
     stream_handler.setFormatter(console_formatter)
     stream_handler.setLevel(log_level)
 
```
```diff
@@ -279,3 +279,13 @@ def setup_logging(log_level=INFO, name=None):
 
     # Return a configured logger
     return structlog.get_logger(name if name else __name__)
+
+
+def get_log_file_location():
+    # Get the root logger
+    root_logger = logging.getLogger()
+
+    # Loop through handlers to find the FileHandler
+    for handler in root_logger.handlers:
+        if isinstance(handler, logging.FileHandler):
+            return handler.baseFilename
```
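`get_log_file_location` returns the `baseFilename` of the first `FileHandler` on the root logger, and falls through to an implicit `None` when no file handler is configured, so callers should handle that case. A small usage sketch (`tail_log` is hypothetical):

```python
from cognee.shared.logging_utils import get_log_file_location


def tail_log(num_lines: int = 20) -> str:
    """Hypothetical helper: show the last few log lines to the user."""
    log_file = get_log_file_location()
    if log_file is None:  # no FileHandler registered on the root logger
        return "No log file configured."
    with open(log_file) as f:
        return "".join(f.readlines()[-num_lines:])
```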
`poetry.lock` (generated, 753 lines): file diff suppressed because it is too large.
`pyproject.toml`:

```diff
@@ -25,7 +25,7 @@ pydantic = "2.10.5"
 pydantic-settings = "^2.2.1"
 typing_extensions = "4.12.2"
 nltk = "3.9.1"
-numpy = "1.26.4"
+numpy = ">=1.26.4, <=2.1"
 pandas = "2.2.3"
 boto3 = "^1.26.125"
 botocore="^1.35.54"
```
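Relaxing the exact numpy pin matters for dependency resolution: `numpy = "1.26.4"` conflicts with any sibling package that requires numpy 2.x, while the bounded range admits both series up to 2.1. A quick way to check what a range accepts, using the `packaging` library (the candidate versions are illustrative):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# The new constraint from pyproject.toml, in PEP 440 form.
spec = SpecifierSet(">=1.26.4,<=2.1")

for candidate in ["1.26.3", "1.26.4", "2.0.2", "2.1", "2.2.0"]:
    print(candidate, Version(candidate) in spec)
# Only 1.26.4, 2.0.2, and 2.1 fall inside the range.
```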
```diff
@@ -48,7 +48,7 @@ lancedb = "0.16.0"
 alembic = "^1.13.3"
 pre-commit = "^4.0.1"
 scikit-learn = "^1.6.1"
-fastapi = {version = "0.115.7", optional = true}
+fastapi = {version = "0.115.7"}
 fastapi-users = {version = "14.0.0", extras = ["sqlalchemy"]}
 uvicorn = {version = "0.34.0", optional = true}
 gunicorn = {version = "^20.1.0", optional = true}
```
```diff
@@ -74,7 +74,7 @@ transformers = {version = "^4.46.3", optional = true}
 pymilvus = {version = "^2.5.0", optional = true}
 unstructured = { extras = ["csv", "doc", "docx", "epub", "md", "odt", "org", "ppt", "pptx", "rst", "rtf", "tsv", "xlsx"], version = "^0.16.13", optional = true }
 mistral-common = {version = "^1.5.2", optional = true}
-fastembed = {version = "^0.6.0", optional = true, markers = "python_version < '3.13'"}
+fastembed = {version = "<=0.6.0", optional = true, markers = "python_version < '3.13'"}
 tree-sitter = {version = "^0.24.0", optional = true}
 tree-sitter-python = {version = "^0.23.6", optional = true}
 plotly = {version = "^6.0.0", optional = true}
```
```diff
@@ -85,7 +85,7 @@ structlog = "^25.2.0"
 
 
 [tool.poetry.extras]
-api = ["fastapi", "fastapi-users", "uvicorn", "gunicorn"]
+api = ["uvicorn", "gunicorn"]
 filesystem = ["s3fs", "botocore"]
 weaviate = ["weaviate-client"]
 qdrant = ["qdrant-client"]
```
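With `fastapi` promoted out of the extras, the `api` extra is left supplying the ASGI servers. After an install such as `pip install "cognee[api]"` (exact spelling of the spec may vary by tool), a quick smoke test like the following hypothetical snippet confirms the optional pieces resolved:

```python
import importlib.util

# Hypothetical post-install check: the api extra should make the
# ASGI servers importable alongside the now-core fastapi.
for module in ("fastapi", "uvicorn", "gunicorn"):
    found = importlib.util.find_spec(module) is not None
    print(f"{module}: {'available' if found else 'missing'}")
```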