Merge pull request #400 from topoteretes/feat/cognee-mcp

Feat/cognee mcp
Vasilije 2024-12-23 16:10:01 +01:00 committed by GitHub
commit e770eda4f8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 4870 additions and 72 deletions


README.md:

````diff
@@ -1,52 +1,36 @@
 # cognee MCP server
-A MCP server project
-Create a boilerplate server:
+1. Clone the [cognee](www.github.com/topoteretes/cognee) repo
+2. Install dependencies
 ```jsx
-uvx create-mcp-server
-```
-1. The command will ask you to name your server, e.g. mcp_cognee
-2. Answer “Y” to connect with Claude
-Then run
-```jsx
-cd mcp_cognee
+cd cognee-mcp
 uv sync --dev --all-extras
 ```
-Activate the venv with
+3. Activate the venv with
 ```jsx
 source .venv/bin/activate
 ```
-This should already add the new server to your Claude config, but if not, add these lines manually:
-```
-"mcpcognee": {
+4. Add the new server to your Claude config:
+```json
+"cognee": {
   "command": "uv",
   "args": [
     "--directory",
-    "/Users/your_username/mcp/mcp_cognee",
+    "/{Absolute path to cognee directory}/cognee-mcp",
     "run",
-    "mcpcognee"
+    "cognee"
   ],
   "env": {
     "ENV": "local",
     "TOKENIZERS_PARALLELISM": "false",
-    "LLM_API_KEY": "add_your_api_key_here",
-    "GRAPH_DATABASE_PROVIDER": "neo4j",
-    "GRAPH_DATABASE_URL": "bolt://localhost:7687",
-    "GRAPH_DATABASE_USERNAME": "add_username_here",
-    "GRAPH_DATABASE_PASSWORD": "add_pwd_here",
-    "VECTOR_DB_PROVIDER": "lancedb",
-    "DB_PROVIDER": "sqlite",
-    "DB_NAME": "postgres"
+    "LLM_API_KEY": "add_your_api_key_here",
   }
 }
 ```
````
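For reference, the config snippet in the new README is only the per-server entry; Claude Desktop expects it nested under the top-level `mcpServers` key of its `claude_desktop_config.json`. A minimal sketch of registering it programmatically, assuming the default macOS config location and a hypothetical checkout path (both are assumptions to adjust):

```python
import json
from pathlib import Path

# Assumption: default Claude Desktop config location on macOS.
CONFIG_PATH = Path.home() / "Library/Application Support/Claude/claude_desktop_config.json"

# Hypothetical absolute path to your cognee checkout; replace with your own.
COGNEE_MCP_DIR = "/Users/you/code/cognee/cognee-mcp"

server_entry = {
    "command": "uv",
    "args": ["--directory", COGNEE_MCP_DIR, "run", "cognee"],
    "env": {
        "ENV": "local",
        "TOKENIZERS_PARALLELISM": "false",
        "LLM_API_KEY": "add_your_api_key_here",
    },
}

# Merge the entry under "mcpServers", creating the file if it does not exist yet.
config = json.loads(CONFIG_PATH.read_text()) if CONFIG_PATH.exists() else {}
config.setdefault("mcpServers", {})["cognee"] = server_entry
CONFIG_PATH.write_text(json.dumps(config, indent=2))
print(f"Registered 'cognee' MCP server in {CONFIG_PATH}")
```

Restart Claude Desktop after editing the config so the new server is picked up.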


Package `__init__.py`:

```diff
@@ -8,7 +8,7 @@ def main():
     asyncio.run(server.main())
 
 # Optionally expose other important items at package level
-__all__ = ['main', 'server']
+__all__ = ["main", "server"]
 
 if __name__ == "__main__":
     main()
```


`server.py`:

```diff
@@ -1,5 +1,6 @@
 import importlib.util
 import os
+import asyncio
 from contextlib import redirect_stderr, redirect_stdout
 
 import cognee
@@ -9,15 +10,17 @@ from cognee.api.v1.search import SearchType
 from cognee.shared.data_models import KnowledgeGraph
 from mcp.server import NotificationOptions, Server
 from mcp.server.models import InitializationOptions
-from pydantic import AnyUrl, BaseModel
 
-server = Server("mcpcognee")
+server = Server("cognee-mcp")
 
 
 def node_to_string(node):
-    keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
-    keyset = set(keys_to_keep) & node.keys()
-    return "Node(" + " ".join([key + ": " + str(node[key]) + "," for key in keyset]) + ")"
+    # keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
+    # keyset = set(keys_to_keep) & node.keys()
+    # return "Node(" + " ".join([key + ": " + str(node[key]) + "," for key in keyset]) + ")"
+    node_data = ", ".join([f"{key}: \"{value}\"" for key, value in node.items() if key in ["id", "name"]])
+    return f"Node({node_data})"
 
 
 def retrieved_edges_to_string(search_results):
```
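For reference, the rewritten `node_to_string` drops the old chunk-oriented keys and keeps only a node's `id` and `name`. A small sketch of the new behaviour on a hypothetical node mapping (real nodes come from cognee search results):

```python
def node_to_string(node):
    # Same logic as the new implementation above: keep only "id" and "name".
    node_data = ", ".join([f"{key}: \"{value}\"" for key, value in node.items() if key in ["id", "name"]])
    return f"Node({node_data})"

# Hypothetical node payload for illustration only.
example_node = {"id": "42", "name": "Alan Turing", "chunk_index": 0, "text": "..."}
print(node_to_string(example_node))
# -> Node(id: "42", name: "Alan Turing")
```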
```diff
@@ -49,50 +52,85 @@ async def handle_list_tools() -> list[types.Tool]:
     """
     return [
         types.Tool(
-            name="Cognify_and_search",
-            description="Build knowledge graph from the input text and search in it.",
+            name = "cognify",
+            description = "Build knowledge graph from the input text.",
             inputSchema = {
                 "type": "object",
                 "properties": {
                     "text": {"type": "string"},
-                    "search_query": {"type": "string"},
                     "graph_model_file": {"type": "string"},
                     "graph_model_name": {"type": "string"},
                 },
-                "required": ["text", "search_query"],
+                "required": ["text"],
             },
-        )
+        ),
+        types.Tool(
+            name = "search",
+            description = "Search the knowledge graph.",
+            inputSchema = {
+                "type": "object",
+                "properties": {
+                    "query": {"type": "string"},
+                },
+                "required": ["query"],
+            },
+        ),
+        types.Tool(
+            name = "prune",
+            description = "Reset the knowledge graph.",
+            inputSchema = {
+                "type": "object",
+                "properties": {
+                    "query": {"type": "string"},
+                },
+            },
+        ),
     ]
 
 
 @server.call_tool()
 async def handle_call_tool(
-    name: str, arguments: dict | None
+    name: str,
+    arguments: dict | None
 ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
     """
     Handle tool execution requests.
     Tools can modify server state and notify clients of changes.
     """
-    if name == "Cognify_and_search":
+    if name == "cognify":
         with open(os.devnull, "w") as fnull:
             with redirect_stdout(fnull), redirect_stderr(fnull):
-                await cognee.prune.prune_data()
-                await cognee.prune.prune_system(metadata=True)
-
                 if not arguments:
                     raise ValueError("Missing arguments")
 
                 text = arguments.get("text")
-                search_query = arguments.get("search_query")
 
                 if ("graph_model_file" in arguments) and ("graph_model_name" in arguments):
                     model_file = arguments.get("graph_model_file")
                     model_name = arguments.get("graph_model_name")
                     graph_model = load_class(model_file, model_name)
                 else:
                     graph_model = KnowledgeGraph
 
                 await cognee.add(text)
                 await cognee.cognify(graph_model = graph_model)
 
+                return [
+                    types.TextContent(
+                        type = "text",
+                        text = "Ingested",
+                    )
+                ]
+    elif name == "search":
+        with open(os.devnull, "w") as fnull:
+            with redirect_stdout(fnull), redirect_stderr(fnull):
+                if not arguments:
+                    raise ValueError("Missing arguments")
+
+                search_query = arguments.get("query")
+
                 search_results = await cognee.search(
                     SearchType.INSIGHTS, query_text = search_query
                 )
```
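Taken together, the handlers above give the three tools the following argument shapes, matching their `inputSchema` definitions; `graph_model_file` and `graph_model_name` are optional and, when both are present, are loaded via `load_class` in place of the default `KnowledgeGraph` model. Example payloads with placeholder values:

```python
# Example tool-call arguments matching the schemas above (all values are placeholders).
cognify_args = {
    "text": "Alan Turing proposed the Turing test in 1950.",
    # Optional custom graph model, loaded from a Python file by load_class:
    # "graph_model_file": "/path/to/my_graph_model.py",
    # "graph_model_name": "MyGraphModel",
}

search_args = {"query": "Who proposed the Turing test?"}

prune_args = {}  # "prune" has no required arguments; it resets the stored graph.
```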
```diff
@@ -105,6 +143,18 @@ async def handle_call_tool(
                         text = results,
                     )
                 ]
+    elif name == "prune":
+        with open(os.devnull, "w") as fnull:
+            with redirect_stdout(fnull), redirect_stderr(fnull):
+                await cognee.prune.prune_data()
+                await cognee.prune.prune_system(metadata=True)
+
+                return [
+                    types.TextContent(
+                        type = "text",
+                        text = "Pruned",
+                    )
+                ]
     else:
         raise ValueError(f"Unknown tool: {name}")
 
@@ -116,7 +166,7 @@ async def main():
             read_stream,
             write_stream,
             InitializationOptions(
-                server_name="mcpcognee",
+                server_name = "cognee-mcp",
                 server_version = "0.1.0",
                 capabilities = server.get_capabilities(
                     notification_options = NotificationOptions(),
@@ -124,3 +174,7 @@ async def main():
                 ),
             ),
         )
+
+# This is needed if you'd like to connect to a custom client
+if __name__ == "__main__":
+    asyncio.run(main())
```
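The new `__main__` guard makes the server module directly runnable over stdio, which is what the "custom client" comment refers to. A minimal client sketch, assuming the `mcp` Python SDK's stdio client helpers and a placeholder checkout path:

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Placeholder path; point --directory at your cognee-mcp checkout.
server_params = StdioServerParameters(
    command="uv",
    args=["--directory", "/path/to/cognee/cognee-mcp", "run", "cognee"],
    env={"ENV": "local", "LLM_API_KEY": "add_your_api_key_here"},
)

async def main():
    async with stdio_client(server_params) as (read_stream, write_stream):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()

            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])  # expected: ['cognify', 'search', 'prune']

            await session.call_tool("cognify", arguments={"text": "Cognee builds knowledge graphs."})
            result = await session.call_tool("search", arguments={"query": "What does cognee build?"})
            print(result.content)

asyncio.run(main())
```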


Deleted `__main__.py`:

```diff
@@ -1,4 +0,0 @@
-from mcpcognee import main
-import asyncio
-
-asyncio.run(main())
```


`pyproject.toml`:

```diff
@@ -1,5 +1,5 @@
 [project]
-name = "mcpcognee"
+name = "cognee-mcp"
 version = "0.1.0"
 description = "A MCP server project"
 readme = "README.md"
@@ -91,4 +91,4 @@ dev = [
 ]
 
 [project.scripts]
-mcpcognee = "mcpcognee:main"
+cognee = "cognee_mcp:main"
```

cognee-mcp/uv.lock (generated, new file, 4764 additions): file diff suppressed because it is too large.