Add mcp to cognee
This commit is contained in:
parent
fb1c223982
commit
3dc2d1e647
4 changed files with 237 additions and 0 deletions
14
cognee-mcp/mcpcognee/__init__.py
Normal file
14
cognee-mcp/mcpcognee/__init__.py
Normal file
|
|
@@ -0,0 +1,14 @@
|
|||
import asyncio
|
||||
|
||||
from . import server
|
||||
|
||||
|
||||
def main():
    """Main entry point for the package: run the MCP server's async loop."""
    server_coroutine = server.main()
    asyncio.run(server_coroutine)
|
||||
|
||||
# Public names re-exported at package level.
__all__ = ["main", "server"]

# Allow `python -m` style direct execution of this module.
if __name__ == "__main__":
    main()
|
||||
3
cognee-mcp/mcpcognee/__main__.py
Normal file
3
cognee-mcp/mcpcognee/__main__.py
Normal file
|
|
@@ -0,0 +1,3 @@
|
|||
"""Executable entry point: supports ``python -m mcpcognee``."""
from mcpcognee import main

main()
|
||||
126
cognee-mcp/mcpcognee/server.py
Normal file
126
cognee-mcp/mcpcognee/server.py
Normal file
|
|
@@ -0,0 +1,126 @@
|
|||
import importlib.util
|
||||
import os
|
||||
from contextlib import redirect_stderr, redirect_stdout
|
||||
|
||||
import cognee
|
||||
import mcp.server.stdio
|
||||
import mcp.types as types
|
||||
from cognee.api.v1.search import SearchType
|
||||
from cognee.shared.data_models import KnowledgeGraph
|
||||
from mcp.server import NotificationOptions, Server
|
||||
from mcp.server.models import InitializationOptions
|
||||
from pydantic import AnyUrl, BaseModel
|
||||
|
||||
# Module-level MCP server instance; the decorated handlers below register
# their tools against it.
server = Server("mcpcognee")
|
||||
|
||||
|
||||
def node_to_string(node):
    """Render a graph node as a compact single-line string.

    Only a fixed whitelist of informative attributes is included; keys the
    node does not have are skipped.

    Args:
        node: A mapping-like graph node (supports ``in`` and ``[]``).

    Returns:
        A string of the form ``Node(key: value, key2: value2)``.
    """
    keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
    # Iterate the whitelist directly: the previous set intersection made the
    # output order nondeterministic, and the per-item trailing comma joined
    # with spaces left a stray "," before the closing paren.
    parts = [f"{key}: {node[key]}" for key in keys_to_keep if key in node]
    return "Node(" + ", ".join(parts) + ")"
|
||||
|
||||
|
||||
def retrieved_edges_to_string(search_results):
    """Format (node, edge, node) triplets as newline-separated text.

    Each triplet is rendered as ``<node1> <relationship_name> <node2>``,
    using ``node_to_string`` for both endpoints.
    """
    rendered = [
        f"{node_to_string(source)} {edge['relationship_name']} {node_to_string(target)}"
        for source, edge, target in search_results
    ]
    return "\n".join(rendered)
|
||||
|
||||
|
||||
def load_class(model_file, model_name):
    """Dynamically load a class from a Python source file.

    Args:
        model_file: Path to a ``.py`` file containing the class definition.
        model_name: Name of the class to fetch from the loaded module.

    Returns:
        The class object named ``model_name``.

    Raises:
        ImportError: If ``model_file`` cannot be loaded as a module.
        AttributeError: If the module has no attribute ``model_name``.

    NOTE(security): this executes arbitrary code from ``model_file``;
    only use trusted paths.
    """
    model_file = os.path.abspath(model_file)
    spec = importlib.util.spec_from_file_location("graph_model", model_file)
    if spec is None or spec.loader is None:
        # Previously a bad path crashed with an opaque AttributeError on None.
        raise ImportError(f"Cannot load graph model module from {model_file}")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    return getattr(module, model_name)
|
||||
|
||||
|
||||
@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
    """
    List available tools.

    Each tool specifies its arguments using JSON Schema validation.
    """
    # Schema: text + search_query are mandatory; a custom graph model
    # (file path + class name) is optional.
    cognify_schema = {
        "type": "object",
        "properties": {
            "text": {"type": "string"},
            "search_query": {"type": "string"},
            "graph_model_file": {"type": "string"},
            "graph_model_name": {"type": "string"},
        },
        "required": ["text", "search_query"],
    }
    cognify_tool = types.Tool(
        name="Cognify_and_search",
        description="Build knowledge graph from the input text and search in it.",
        inputSchema=cognify_schema,
    )
    return [cognify_tool]
|
||||
|
||||
|
||||
@server.call_tool()
async def handle_call_tool(
    name: str, arguments: dict | None
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    """
    Handle tool execution requests.

    Tools can modify server state and notify clients of changes.

    Raises:
        ValueError: If the tool name is unknown or required arguments
            ('text', 'search_query') are missing.
    """
    if name != "Cognify_and_search":
        raise ValueError(f"Unknown tool: {name}")

    # Validate arguments BEFORE pruning: the previous implementation wiped
    # all existing cognee data first and only then discovered the request
    # was unusable.
    if not arguments:
        raise ValueError("Missing arguments")

    text = arguments.get("text")
    search_query = arguments.get("search_query")
    if text is None or search_query is None:
        raise ValueError("Missing arguments")

    # Optional custom graph model; fall back to the default KnowledgeGraph.
    if ("graph_model_file" in arguments) and ("graph_model_name" in arguments):
        model_file = arguments.get("graph_model_file")
        model_name = arguments.get("graph_model_name")
        graph_model = load_class(model_file, model_name)
    else:
        graph_model = KnowledgeGraph

    # The prune step is noisy on stdout/stderr, which would corrupt the MCP
    # stdio transport -- silence it while it runs.
    with open(os.devnull, "w") as fnull:
        with redirect_stdout(fnull), redirect_stderr(fnull):
            await cognee.prune.prune_data()
            await cognee.prune.prune_system(metadata=True)

    await cognee.add(text)
    await cognee.cognify(graph_model=graph_model)
    search_results = await cognee.search(
        SearchType.INSIGHTS, query_text=search_query
    )

    results = retrieved_edges_to_string(search_results)

    return [
        types.TextContent(
            type="text",
            text=results,
        )
    ]
|
||||
|
||||
|
||||
async def main():
    """Run the MCP server over the stdio transport until the client disconnects."""
    init_options = InitializationOptions(
        server_name="mcpcognee",
        server_version="0.1.0",
        capabilities=server.get_capabilities(
            notification_options=NotificationOptions(),
            experimental_capabilities={},
        ),
    )
    # stdin/stdout carry the MCP protocol frames.
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, init_options)
||||
94
cognee-mcp/pyproject.toml
Normal file
94
cognee-mcp/pyproject.toml
Normal file
|
|
@@ -0,0 +1,94 @@
|
|||
[project]
|
||||
name = "mcpcognee"
|
||||
version = "0.1.0"
|
||||
description = "An MCP server project"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"mcp>=1.1.1",
|
||||
"openai==1.52.0",
|
||||
"pydantic==2.8.2",
|
||||
"python-dotenv==1.0.1",
|
||||
"fastapi>=0.109.2,<0.110.0",
|
||||
"uvicorn==0.22.0",
|
||||
"requests==2.32.3",
|
||||
"aiohttp==3.10.10",
|
||||
"typing_extensions==4.12.2",
|
||||
"nest_asyncio==1.6.0",
|
||||
"numpy==1.26.4",
|
||||
"datasets==3.1.0",
|
||||
"falkordb==1.0.9", # Optional
|
||||
"boto3>=1.26.125,<2.0.0",
|
||||
"botocore>=1.35.54,<2.0.0",
|
||||
"gunicorn>=20.1.0,<21.0.0",
|
||||
"sqlalchemy==2.0.35",
|
||||
"instructor==1.5.2",
|
||||
"networkx>=3.2.1,<4.0.0",
|
||||
"aiosqlite>=0.20.0,<0.21.0",
|
||||
"pandas==2.0.3",
|
||||
"filetype>=1.2.0,<2.0.0",
|
||||
"nltk>=3.8.1,<4.0.0",
|
||||
"dlt[sqlalchemy]>=1.4.1,<2.0.0",
|
||||
"aiofiles>=23.2.1,<24.0.0",
|
||||
"qdrant-client>=1.9.0,<2.0.0", # Optional
|
||||
"graphistry>=0.33.5,<0.34.0",
|
||||
"tenacity>=8.4.1,<9.0.0",
|
||||
"weaviate-client==4.6.7", # Optional
|
||||
"scikit-learn>=1.5.0,<2.0.0",
|
||||
"pypdf>=4.1.0,<5.0.0",
|
||||
"neo4j>=5.20.0,<6.0.0", # Optional
|
||||
"jinja2>=3.1.3,<4.0.0",
|
||||
"matplotlib>=3.8.3,<4.0.0",
|
||||
"tiktoken==0.7.0",
|
||||
"langchain_text_splitters==0.3.2", # Optional
|
||||
"langsmith==0.1.139", # Optional
|
||||
"langdetect==1.0.9",
|
||||
"posthog>=3.5.0,<4.0.0", # Optional
|
||||
"lancedb==0.15.0",
|
||||
"litellm==1.49.1",
|
||||
"groq==0.8.0", # Optional
|
||||
"langfuse>=2.32.0,<3.0.0", # Optional
|
||||
"pydantic-settings>=2.2.1,<3.0.0",
|
||||
"anthropic>=0.26.1,<1.0.0",
|
||||
"sentry-sdk[fastapi]>=2.9.0,<3.0.0",
|
||||
"fastapi-users[sqlalchemy]", # Optional
|
||||
"alembic>=1.13.3,<2.0.0",
|
||||
"asyncpg==0.30.0", # Optional
|
||||
"pgvector>=0.3.5,<0.4.0", # Optional
|
||||
"psycopg2>=2.9.10,<3.0.0", # Optional
|
||||
"llama-index-core>=0.11.22,<0.12.0", # Optional
|
||||
"deepeval>=2.0.1,<3.0.0", # Optional
|
||||
"transformers>=4.46.3,<5.0.0",
|
||||
"pymilvus>=2.5.0,<3.0.0", # Optional
|
||||
"unstructured[csv,doc,docx,epub,md,odt,org,ppt,pptx,rst,rtf,tsv,xlsx]>=0.16.10,<1.0.0", # Optional
|
||||
"pytest>=7.4.0,<8.0.0",
|
||||
"pytest-asyncio>=0.21.1,<0.22.0",
|
||||
"coverage>=7.3.2,<8.0.0",
|
||||
"mypy>=1.7.1,<2.0.0",
|
||||
"deptry>=0.20.0,<0.21.0",
|
||||
"debugpy==1.8.2",
|
||||
"pylint>=3.0.3,<4.0.0",
|
||||
"ruff>=0.2.2,<0.3.0",
|
||||
"tweepy==4.14.0",
|
||||
"gitpython>=3.1.43,<4.0.0",
|
||||
"cognee",
|
||||
]
|
||||
|
||||
[[project.authors]]
|
||||
name = "Rita Aleksziev"
|
||||
email = "rita@topoteretes.com"
|
||||
|
||||
[build-system]
|
||||
requires = [ "hatchling",]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.uv.sources]
|
||||
cognee = { path = "../../cognee" }
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"cognee",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
mcpcognee = "mcpcognee:main"
|
||||
Loading…
Add table
Reference in a new issue