Merge pull request #400 from topoteretes/feat/cognee-mcp
Feat/cognee mcp
commit e770eda4f8
6 changed files with 4870 additions and 72 deletions
cognee-mcp/README.md
@@ -1,57 +1,41 @@
 # cognee MCP server
 
 A MCP server project
 
-Create a boilerplate server:
-
-```bash
-uvx create-mcp-server
-```
-
-1. The command will ask you to name your server, e.g. mcp_cognee
-
-2. Answer "Y" to connect with Claude
-Then run
+1. Clone the [cognee](https://github.com/topoteretes/cognee) repo
+
+2. Install dependencies
 
 ```bash
-cd mcp_cognee
+cd cognee-mcp
 uv sync --dev --all-extras
 ```
 
-Activate the venv with
+3. Activate the venv with
 
 ```bash
 source .venv/bin/activate
 ```
 
-This should already add the new server to your Claude config, but if not, add these lines manually:
+4. Add the new server to your Claude config:
 
-```
-"mcpcognee": {
-  "command": "uv",
-  "args": [
-    "--directory",
-    "/Users/your_username/mcp/mcp_cognee",
-    "run",
-    "mcpcognee"
-  ],
-  "env": {
-    "ENV": "local",
-    "TOKENIZERS_PARALLELISM": "false",
-    "LLM_API_KEY": "add_your_api_key_here",
-    "GRAPH_DATABASE_PROVIDER": "neo4j",
-    "GRAPH_DATABASE_URL": "bolt://localhost:7687",
-    "GRAPH_DATABASE_USERNAME": "add_username_here",
-    "GRAPH_DATABASE_PASSWORD": "add_pwd_here",
-    "VECTOR_DB_PROVIDER": "lancedb",
-    "DB_PROVIDER": "sqlite",
-    "DB_NAME": "postgres"
-  }
-}
+```json
+"cognee": {
+  "command": "uv",
+  "args": [
+    "--directory",
+    "/{Absolute path to cognee directory}/cognee-mcp",
+    "run",
+    "cognee"
+  ],
+  "env": {
+    "ENV": "local",
+    "TOKENIZERS_PARALLELISM": "false",
+    "LLM_API_KEY": "add_your_api_key_here"
+  }
+}
 ```
 
-Then, edit the pyproject.toml in your new folder so that it includes packages from the cognee requirements. Use the pyproject.toml in your cognee library for this, but match the syntax of the automatically generated pyproject.toml so that it is compatible with uv.
-
-Define cognify tool in server.py
-
 Restart your Claude desktop.
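The `"cognee"` fragment above belongs under the `"mcpServers"` key of Claude Desktop's `claude_desktop_config.json`. To sanity-check the setup outside Claude, a minimal smoke test with the MCP Python client can spawn the same command and list the server's tools. This is a sketch, not part of the PR; it assumes the `mcp` package is installed, and the directory path is the same placeholder used in the config above:

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Mirror the Claude config entry above; the path is a placeholder.
params = StdioServerParameters(
    command="uv",
    args=["--directory", "/{Absolute path to cognee directory}/cognee-mcp", "run", "cognee"],
    env={"ENV": "local", "TOKENIZERS_PARALLELISM": "false", "LLM_API_KEY": "add_your_api_key_here"},
)


async def smoke_test():
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            # With this PR the server should report: cognify, search, prune
            print([tool.name for tool in tools.tools])


asyncio.run(smoke_test())
```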
cognee_mcp/__init__.py
@@ -8,7 +8,7 @@ def main():
     asyncio.run(server.main())
 
 # Optionally expose other important items at package level
-__all__ = ['main', 'server']
+__all__ = ["main", "server"]
 
 if __name__ == "__main__":
     main()
cognee_mcp/server.py
@@ -1,5 +1,6 @@
 import importlib.util
 import os
+import asyncio
 from contextlib import redirect_stderr, redirect_stdout
 
 import cognee
@@ -9,15 +10,17 @@ from cognee.api.v1.search import SearchType
 from cognee.shared.data_models import KnowledgeGraph
 from mcp.server import NotificationOptions, Server
 from mcp.server.models import InitializationOptions
 from pydantic import AnyUrl, BaseModel
 
-server = Server("mcpcognee")
+server = Server("cognee-mcp")
 
 
 def node_to_string(node):
-    keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
-    keyset = set(keys_to_keep) & node.keys()
-    return "Node(" + " ".join([key + ": " + str(node[key]) + "," for key in keyset]) + ")"
+    # keys_to_keep = ["chunk_index", "topological_rank", "cut_type", "id", "text"]
+    # keyset = set(keys_to_keep) & node.keys()
+    # return "Node(" + " ".join([key + ": " + str(node[key]) + "," for key in keyset]) + ")"
+    node_data = ", ".join([f"{key}: \"{value}\"" for key, value in node.items() if key in ["id", "name"]])
+
+    return f"Node({node_data})"
 
 
 def retrieved_edges_to_string(search_results):
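For reference, the rewritten `node_to_string` keeps only a node's `id` and `name` keys. A quick illustration (the node dict here is made up):

```python
def node_to_string(node: dict) -> str:
    # Keep only the human-readable identifiers.
    node_data = ", ".join([f"{key}: \"{value}\"" for key, value in node.items() if key in ["id", "name"]])
    return f"Node({node_data})"


print(node_to_string({"id": "42", "name": "Alice", "chunk_index": 3}))
# -> Node(id: "42", name: "Alice")
```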
@@ -49,60 +52,107 @@ async def handle_list_tools() -> list[types.Tool]:
     """
     return [
         types.Tool(
-            name="Cognify_and_search",
-            description="Build knowledge graph from the input text and search in it.",
-            inputSchema={
+            name = "cognify",
+            description = "Build knowledge graph from the input text.",
+            inputSchema = {
                 "type": "object",
                 "properties": {
                     "text": {"type": "string"},
-                    "search_query": {"type": "string"},
                     "graph_model_file": {"type": "string"},
                     "graph_model_name": {"type": "string"},
                 },
-                "required": ["text", "search_query"],
+                "required": ["text"],
             },
-        )
+        ),
+        types.Tool(
+            name = "search",
+            description = "Search the knowledge graph.",
+            inputSchema = {
+                "type": "object",
+                "properties": {
+                    "query": {"type": "string"},
+                },
+                "required": ["query"],
+            },
+        ),
+        types.Tool(
+            name = "prune",
+            description = "Reset the knowledge graph.",
+            inputSchema = {
+                "type": "object",
+                "properties": {
+                    "query": {"type": "string"},
+                },
+            },
+        ),
     ]
 
 
 @server.call_tool()
 async def handle_call_tool(
-    name: str, arguments: dict | None
+    name: str,
+    arguments: dict | None
 ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
     """
     Handle tool execution requests.
     Tools can modify server state and notify clients of changes.
     """
-    if name == "Cognify_and_search":
+    if name == "cognify":
         with open(os.devnull, "w") as fnull:
             with redirect_stdout(fnull), redirect_stderr(fnull):
                 await cognee.prune.prune_data()
                 await cognee.prune.prune_system(metadata=True)
 
                 if not arguments:
                     raise ValueError("Missing arguments")
 
                 text = arguments.get("text")
-                search_query = arguments.get("search_query")
 
                 if ("graph_model_file" in arguments) and ("graph_model_name" in arguments):
                     model_file = arguments.get("graph_model_file")
                     model_name = arguments.get("graph_model_name")
 
                     graph_model = load_class(model_file, model_name)
                 else:
                     graph_model = KnowledgeGraph
 
                 await cognee.add(text)
-                await cognee.cognify(graph_model=graph_model)
 
+                await cognee.cognify(graph_model = graph_model)
+
+                return [
+                    types.TextContent(
+                        type = "text",
+                        text = "Ingested",
+                    )
+                ]
+    elif name == "search":
+        with open(os.devnull, "w") as fnull:
+            with redirect_stdout(fnull), redirect_stderr(fnull):
+                if not arguments:
+                    raise ValueError("Missing arguments")
+
+                search_query = arguments.get("query")
+
                 search_results = await cognee.search(
-                    SearchType.INSIGHTS, query_text=search_query
+                    SearchType.INSIGHTS, query_text = search_query
                 )
 
                 results = retrieved_edges_to_string(search_results)
 
                 return [
                     types.TextContent(
-                        type="text",
-                        text=results,
+                        type = "text",
+                        text = results,
                     )
                 ]
+    elif name == "prune":
+        with open(os.devnull, "w") as fnull:
+            with redirect_stdout(fnull), redirect_stderr(fnull):
+                await cognee.prune.prune_data()
+                await cognee.prune.prune_system(metadata=True)
+
+                return [
+                    types.TextContent(
+                        type = "text",
+                        text = "Pruned",
+                    )
+                ]
     else:
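`handle_call_tool` delegates optional graph models to a `load_class` helper that this diff does not show. Given the `importlib.util` import at the top of server.py, a plausible minimal implementation looks like this (an illustrative sketch, not necessarily the PR's exact code):

```python
import importlib.util


def load_class(model_file: str, model_name: str):
    # Import the module that lives at model_file under a throwaway name...
    spec = importlib.util.spec_from_file_location("graph_model_module", model_file)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    # ...then pull the named graph-model class out of it.
    return getattr(module, model_name)
```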
@@ -116,11 +166,15 @@ async def main():
             read_stream,
             write_stream,
             InitializationOptions(
-                server_name="mcpcognee",
-                server_version="0.1.0",
-                capabilities=server.get_capabilities(
-                    notification_options=NotificationOptions(),
-                    experimental_capabilities={},
+                server_name = "cognee-mcp",
+                server_version = "0.1.0",
+                capabilities = server.get_capabilities(
+                    notification_options = NotificationOptions(),
+                    experimental_capabilities = {},
                 ),
             ),
         )
+
+# This is needed if you'd like to connect to a custom client
+if __name__ == "__main__":
+    asyncio.run(main())
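The closing comment mentions connecting from a custom client. A sketch of what that could look like with the MCP Python SDK, with tool names and argument shapes taken from the schemas above (the text and query strings are made up):

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Assumes this is launched from the cognee-mcp directory.
params = StdioServerParameters(command="uv", args=["run", "cognee"])


async def run_client():
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # "cognify" requires only "text"; graph_model_file/_name are optional.
            await session.call_tool("cognify", {"text": "Cognee builds knowledge graphs."})
            # "search" requires "query" and returns the matching edges as text.
            result = await session.call_tool("search", {"query": "What does cognee build?"})
            for item in result.content:
                print(item.text)


asyncio.run(run_client())
```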
mcpcognee/__main__.py (deleted)
@@ -1,4 +0,0 @@
-from mcpcognee import main
-import asyncio
-
-asyncio.run(main())
cognee-mcp/pyproject.toml
@@ -1,5 +1,5 @@
 [project]
-name = "mcpcognee"
+name = "cognee-mcp"
 version = "0.1.0"
 description = "A MCP server project"
 readme = "README.md"
@@ -91,4 +91,4 @@ dev = [
 ]
 
 [project.scripts]
-mcpcognee = "mcpcognee:main"
+cognee = "cognee_mcp:main"
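With the renamed entry point, `uv run cognee` from the cognee-mcp directory starts the server. Per the mapping above, the console script resolves to roughly the following (the module name `cognee_mcp` is taken from the script definition):

```python
# What the `cognee` console script executes:
from cognee_mcp import main

main()  # per the package __init__, this wraps asyncio.run(server.main())
```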
cognee-mcp/uv.lock (generated, new file, 4764 lines)
File diff suppressed because it is too large