Merge pull request #370 from topoteretes/COG-MCP

Add mcp to cognee
Vasilije 2024-12-14 12:37:36 +01:00 committed by GitHub
commit 2f2aa81194
5 changed files with 295 additions and 0 deletions

cognee-mcp/README.md
@@ -0,0 +1,57 @@
# cognee MCP server
An MCP server project
Create a boilerplate server:
```bash
uvx create-mcp-server
```
1. The command will ask you to name your server, e.g. mcp_cognee
2. Answer “Y” to connect with Claude
Then run
```bash
cd mcp_cognee
uv sync --dev --all-extras
```
Activate the venv with
```bash
source .venv/bin/activate
```
This should already add the new server to your Claude config, but if not, add these lines manually under the `mcpServers` key in `claude_desktop_config.json`:
```
"mcpcognee": {
"command": "uv",
"args": [
"--directory",
"/Users/your_username/mcp/mcp_cognee",
"run",
"mcpcognee"
],
"env": {
"ENV": "local",
"TOKENIZERS_PARALLELISM": "false",
"LLM_API_KEY": "add_your_api_key_here",
"GRAPH_DATABASE_PROVIDER": "neo4j",
"GRAPH_DATABASE_URL": "bolt://localhost:7687",
"GRAPH_DATABASE_USERNAME": "add_username_here",
"GRAPH_DATABASE_PASSWORD": "add_pwd_here",
"VECTOR_DB_PROVIDER": "lancedb",
"DB_PROVIDER": "sqlite",
"DB_NAME": "postgres"
}
}
```
Then edit the pyproject.toml in your new folder so that it includes the packages from the cognee requirements. Use the pyproject.toml in your cognee library as a reference, but match the syntax of the automatically generated pyproject.toml so that it stays compatible with uv.
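A sketch of the shape this should take (it mirrors the pyproject.toml included in this commit; copy the actual pin list from cognee's own pyproject.toml):
```toml
[project]
name = "mcpcognee"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
    "mcp>=1.1.1",
    # ...the remaining pins, copied from cognee's pyproject.toml...
    "cognee",
]

[tool.uv.sources]
# Resolve cognee from a local checkout rather than PyPI
cognee = { path = "../../cognee" }

[project.scripts]
mcpcognee = "mcpcognee:main"
```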
Define the cognify tool in server.py (the full server.py from this commit is shown below).
Restart the Claude desktop app.

cognee-mcp/mcpcognee/__init__.py
@@ -0,0 +1,14 @@
import asyncio

from . import server


def main():
    """Main entry point for the package."""
    asyncio.run(server.main())


# Optionally expose other important items at package level
__all__ = ['main', 'server']

if __name__ == "__main__":
    main()

cognee-mcp/mcpcognee/__main__.py
@@ -0,0 +1,4 @@
from mcpcognee import main

# main() already wraps server.main() in asyncio.run(), so call it directly
# rather than passing its return value (None) to another asyncio.run().
main()
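
Because the package ships a `__main__.py`, the server can also be started directly for a quick manual check (assuming the venv is active and dependencies are synced):
```bash
python -m mcpcognee
```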

cognee-mcp/mcpcognee/server.py
@@ -0,0 +1,126 @@
import importlib.util
import os
from contextlib import redirect_stderr, redirect_stdout
import cognee
import mcp.server.stdio
import mcp.types as types
from cognee.api.v1.search import SearchType
from cognee.shared.data_models import KnowledgeGraph
from mcp.server import NotificationOptions, Server
from mcp.server.models import InitializationOptions
server = Server("mcpcognee")


def node_to_string(node):
    """Render a node as a compact string, keeping only a few informative keys."""
    keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
    keyset = set(keys_to_keep) & node.keys()
    return "Node(" + " ".join([key + ": " + str(node[key]) + "," for key in keyset]) + ")"


def retrieved_edges_to_string(search_results):
    """Format the (node, edge, node) triplets returned by cognee.search as readable lines."""
    edge_strings = []
    for triplet in search_results:
        node1, edge, node2 = triplet
        relationship_type = edge["relationship_name"]
        edge_str = f"{node_to_string(node1)} {relationship_type} {node_to_string(node2)}"
        edge_strings.append(edge_str)
    return "\n".join(edge_strings)


def load_class(model_file, model_name):
    """Dynamically import and return the class named model_name from the Python file at model_file."""
    model_file = os.path.abspath(model_file)
    spec = importlib.util.spec_from_file_location("graph_model", model_file)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    model_class = getattr(module, model_name)
    return model_class


@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
"""
List available tools.
Each tool specifies its arguments using JSON Schema validation.
"""
return [
types.Tool(
name="Cognify_and_search",
description="Build knowledge graph from the input text and search in it.",
inputSchema={
"type": "object",
"properties": {
"text": {"type": "string"},
"search_query": {"type": "string"},
"graph_model_file": {"type": "string"},
"graph_model_name": {"type": "string"},
},
"required": ["text", "search_query"],
},
)
]


@server.call_tool()
async def handle_call_tool(
name: str, arguments: dict | None
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
"""
Handle tool execution requests.
Tools can modify server state and notify clients of changes.
"""
if name == "Cognify_and_search":
with open(os.devnull, "w") as fnull:
with redirect_stdout(fnull), redirect_stderr(fnull):
await cognee.prune.prune_data()
await cognee.prune.prune_system(metadata=True)
if not arguments:
raise ValueError("Missing arguments")
text = arguments.get("text")
search_query = arguments.get("search_query")
if ("graph_model_file" in arguments) and ("graph_model_name" in arguments):
model_file = arguments.get("graph_model_file")
model_name = arguments.get("graph_model_name")
graph_model = load_class(model_file, model_name)
else:
graph_model = KnowledgeGraph
await cognee.add(text)
await cognee.cognify(graph_model=graph_model)
search_results = await cognee.search(
SearchType.INSIGHTS, query_text=search_query
)
results = retrieved_edges_to_string(search_results)
return [
types.TextContent(
type="text",
text=results,
)
]
else:
raise ValueError(f"Unknown tool: {name}")


async def main():
# Run the server using stdin/stdout streams
async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
await server.run(
read_stream,
write_stream,
InitializationOptions(
server_name="mcpcognee",
server_version="0.1.0",
capabilities=server.get_capabilities(
notification_options=NotificationOptions(),
experimental_capabilities={},
),
),
)
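
The `graph_model_file` / `graph_model_name` arguments let a caller swap cognee's default `KnowledgeGraph` for a custom pydantic model loaded via `load_class`. A hypothetical `my_model.py` (the class and field names are illustrative assumptions that mirror the shape of cognee's default `KnowledgeGraph`; they are not part of this commit):
```python
# my_model.py -- hypothetical custom graph model for the Cognify_and_search tool
from pydantic import BaseModel


class SoftwareNode(BaseModel):
    id: str
    name: str
    type: str


class SoftwareEdge(BaseModel):
    source_node_id: str
    target_node_id: str
    relationship_name: str


class SoftwareGraph(BaseModel):
    nodes: list[SoftwareNode] = []
    edges: list[SoftwareEdge] = []
```
The tool would then be invoked with arguments like `{"text": "...", "search_query": "...", "graph_model_file": "/path/to/my_model.py", "graph_model_name": "SoftwareGraph"}`.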

cognee-mcp/pyproject.toml
@@ -0,0 +1,94 @@
[project]
name = "mcpcognee"
version = "0.1.0"
description = "An MCP server project"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"mcp>=1.1.1",
"openai==1.52.0",
"pydantic==2.8.2",
"python-dotenv==1.0.1",
"fastapi>=0.109.2,<0.110.0",
"uvicorn==0.22.0",
"requests==2.32.3",
"aiohttp==3.10.10",
"typing_extensions==4.12.2",
"nest_asyncio==1.6.0",
"numpy==1.26.4",
"datasets==3.1.0",
"falkordb==1.0.9", # Optional
"boto3>=1.26.125,<2.0.0",
"botocore>=1.35.54,<2.0.0",
"gunicorn>=20.1.0,<21.0.0",
"sqlalchemy==2.0.35",
"instructor==1.5.2",
"networkx>=3.2.1,<4.0.0",
"aiosqlite>=0.20.0,<0.21.0",
"pandas==2.0.3",
"filetype>=1.2.0,<2.0.0",
"nltk>=3.8.1,<4.0.0",
"dlt[sqlalchemy]>=1.4.1,<2.0.0",
"aiofiles>=23.2.1,<24.0.0",
"qdrant-client>=1.9.0,<2.0.0", # Optional
"graphistry>=0.33.5,<0.34.0",
"tenacity>=8.4.1,<9.0.0",
"weaviate-client==4.6.7", # Optional
"scikit-learn>=1.5.0,<2.0.0",
"pypdf>=4.1.0,<5.0.0",
"neo4j>=5.20.0,<6.0.0", # Optional
"jinja2>=3.1.3,<4.0.0",
"matplotlib>=3.8.3,<4.0.0",
"tiktoken==0.7.0",
"langchain_text_splitters==0.3.2", # Optional
"langsmith==0.1.139", # Optional
"langdetect==1.0.9",
"posthog>=3.5.0,<4.0.0", # Optional
"lancedb==0.15.0",
"litellm==1.49.1",
"groq==0.8.0", # Optional
"langfuse>=2.32.0,<3.0.0", # Optional
"pydantic-settings>=2.2.1,<3.0.0",
"anthropic>=0.26.1,<1.0.0",
"sentry-sdk[fastapi]>=2.9.0,<3.0.0",
"fastapi-users[sqlalchemy]", # Optional
"alembic>=1.13.3,<2.0.0",
"asyncpg==0.30.0", # Optional
"pgvector>=0.3.5,<0.4.0", # Optional
"psycopg2>=2.9.10,<3.0.0", # Optional
"llama-index-core>=0.11.22,<0.12.0", # Optional
"deepeval>=2.0.1,<3.0.0", # Optional
"transformers>=4.46.3,<5.0.0",
"pymilvus>=2.5.0,<3.0.0", # Optional
"unstructured[csv,doc,docx,epub,md,odt,org,ppt,pptx,rst,rtf,tsv,xlsx]>=0.16.10,<1.0.0", # Optional
"pytest>=7.4.0,<8.0.0",
"pytest-asyncio>=0.21.1,<0.22.0",
"coverage>=7.3.2,<8.0.0",
"mypy>=1.7.1,<2.0.0",
"deptry>=0.20.0,<0.21.0",
"debugpy==1.8.2",
"pylint>=3.0.3,<4.0.0",
"ruff>=0.2.2,<0.3.0",
"tweepy==4.14.0",
"gitpython>=3.1.43,<4.0.0",
"cognee",
]
[[project.authors]]
name = "Rita Aleksziev"
email = "rita@topoteretes.com"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.uv.sources]
cognee = { path = "../../cognee" }
[dependency-groups]
dev = [
"cognee",
]
[project.scripts]
mcpcognee = "mcpcognee:main"
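
The `[project.scripts]` entry is what the `"run", "mcpcognee"` args in the Claude config above resolve to. A quick smoke test from the project directory (assuming `uv sync` has been run):
```bash
uv run mcpcognee
```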