From 36dbda46b25405bb71088ff9280fce9572a1259e Mon Sep 17 00:00:00 2001
From: Hande <159312713+hande-k@users.noreply.github.com>
Date: Thu, 12 Jun 2025 10:52:44 +0200
Subject: [PATCH] chore: mcp readme rewrite (#974)
## Description
## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
---
cognee-mcp/README.md | 256 ++++++++++++++++++++++++--------------
cognee-mcp/pyproject.toml | 4 +-
2 files changed, 166 insertions(+), 94 deletions(-)
diff --git a/cognee-mcp/README.md b/cognee-mcp/README.md
index ba7ba2a4d..a5dee68d8 100644
--- a/cognee-mcp/README.md
+++ b/cognee-mcp/README.md
@@ -1,98 +1,159 @@
-# cognee MCP server
+
+
+
+
+
+
+
+ cognee‑mcp - Run cognee’s memory engine as a Model Context Protocol server
+
+
+ Demo
+ .
+ Learn more
+ ·
+ Join Discord
+ ·
+ Join r/AIMemory
+
+
+
+ [](https://GitHub.com/topoteretes/cognee/network/)
+ [](https://GitHub.com/topoteretes/cognee/stargazers/)
+ [](https://GitHub.com/topoteretes/cognee/commit/)
+ [](https://github.com/topoteretes/cognee/tags/)
+ [](https://pepy.tech/project/cognee)
+ [](https://github.com/topoteretes/cognee/blob/main/LICENSE)
+ [](https://github.com/topoteretes/cognee/graphs/contributors)
+
+

+
+

+
+
+Build memory for Agents and query from any client that speaks MCP – in your terminal or IDE.
+
+
+
+## ✨ Features
+
+- SSE & stdio transports – choose real‑time streaming --transport sse or the classic stdio pipe
+- Integrated logging – all actions written to a rotating file (see get_log_file_location()) and mirrored to console in dev
+- Local file ingestion – feed .md, source files, Cursor rule‑sets, etc. straight from disk
+- Background pipelines – long‑running cognify & codify jobs spawn off‑thread; check progress with status tools
+- Developer rules bootstrap – one call indexes .cursorrules, .cursor/rules, AGENT.md, and friends into the developer_rules nodeset
+- Prune & reset – wipe memory clean with a single prune call when you want to start fresh
Please refer to our documentation [here](https://docs.cognee.ai/how-to-guides/deployment/mcp) for further information.
-### Installing Manually
-A MCP server project
-=======
-1. Clone the [cognee](https://github.com/topoteretes/cognee) repo
+## 🚀 Quick Start
-2. Install dependencies
+1. Clone cognee repo
+ ```
+ git clone https://github.com/topoteretes/cognee.git
+ ```
+2. Navigate to cognee-mcp subdirectory
+ ```
+ cd cognee/cognee-mcp
+ ```
+3. Install uv if you don't have it
+ ```
+ brew install uv
+ ```
+4. Install all the dependencies you need for cognee mcp server with uv
+ ```
+ uv sync --dev --all-extras --reinstall
+ ```
+5. Activate the virtual environment in cognee mcp directory
+ ```
+ source .venv/bin/activate
+ ```
+6. Set up your OpenAI API key in .env for a quick setup with the default cognee configurations
+ ```
+ LLM_API_KEY="YOUR_OPENAI_API_KEY"
+ ```
+7. Run cognee mcp server with stdio (default)
+ ```
+ python src/server.py
+ ```
+ or stream responses over SSE
+ ```
+ python src/server.py --transport sse
+ ```
-```
-brew install uv
-```
+You can do more advanced configurations by creating .env file using our template.
+To use different LLM providers / database configurations, and for more info check out our documentation.
-```jsx
-cd cognee-mcp
-uv sync --dev --all-extras --reinstall
-```
+## 💻 Basic Usage
-3. Activate the venv with
+The MCP server exposes its functionality through tools. Call them from any MCP client (Cursor, Claude Desktop, Cline, Roo and more).
-```jsx
-source .venv/bin/activate
-```
-4. Add the new server to your Claude config:
+### Available Tools
-The file should be located here: ~/Library/Application\ Support/Claude/
-```
-cd ~/Library/Application\ Support/Claude/
-```
-You need to create claude_desktop_config.json in this folder if it doesn't exist
-Make sure to add your paths and LLM API key to the file bellow
-Use your editor of choice, for example Nano:
-```
-nano claude_desktop_config.json
-```
+- cognify: Turns your data into a structured knowledge graph and stores it in memory
-```
-{
- "mcpServers": {
- "cognee": {
- "command": "/Users/{user}/cognee/.venv/bin/uv",
- "args": [
- "--directory",
- "/Users/{user}/cognee/cognee-mcp",
- "run",
- "cognee"
- ],
- "env": {
- "ENV": "local",
- "TOKENIZERS_PARALLELISM": "false",
- "LLM_API_KEY": "sk-"
+- codify: Analyses a code repository, builds a code graph, and stores it in memory
+
+- search: Query memory – supports GRAPH_COMPLETION, RAG_COMPLETION, CODE, CHUNKS, INSIGHTS
+
+- prune: Reset cognee for a fresh start
+
+- cognify_status / codify_status: Track pipeline progress
+
+Remember – use the CODE search type to query your code graph. For huge repos, run codify on modules incrementally and cache results.
+
+### IDE Example: Cursor
+
+1. After you run the server as described in the [Quick Start](#-quick-start), create a run script for cognee. Here is a simple example:
+ ```
+ #!/bin/bash
+ export ENV=local
+ export TOKENIZERS_PARALLELISM=false
+ export EMBEDDING_PROVIDER="fastembed"
+ export EMBEDDING_MODEL="sentence-transformers/all-MiniLM-L6-v2"
+ export EMBEDDING_DIMENSIONS=384
+ export EMBEDDING_MAX_TOKENS=256
+ export LLM_API_KEY=your-OpenAI-API-key
+ uv --directory /{cognee_root_path}/cognee-mcp run cognee
+ ```
+ Remember to replace *your-OpenAI-API-key* and *{cognee_root_path}* with correct values.
+
+2. Install Cursor and navigate to Settings → MCP Tools → New MCP Server
+
+3. Cursor will open *mcp.json* file in a new tab. Configure your cognee MCP server by copy-pasting the following:
+ ```
+ {
+ "mcpServers": {
+ "cognee": {
+ "command": "sh",
+ "args": [
+ "/{path-to-your-script}/run-cognee.sh"
+ ]
+ }
}
- }
- }
-}
-```
+ }
+ ```
+ Remember to replace *{path-to-your-script}* with the correct value of the path of the script you created in the first step.
-Restart your Claude desktop.
+ That's it! You can refresh the server from the toggle next to your new cognee server. Check the green dot and the available tools to verify your server is running.
-### Installing via Smithery
+ Now you can open your Cursor Agent and start using cognee tools from it via prompting.
-To install Cognee for Claude Desktop automatically via [Smithery](https://smithery.ai/server/cognee):
-
-```bash
-npx -y @smithery/cli install cognee --client claude
-```
-
-Define cognify tool in server.py
-Restart your Claude desktop.
-
-## Running the Server
-
-### Standard stdio transport:
-```bash
-python src/server.py
-```
-
-### SSE transport:
-```bash
-python src/server.py --transport sse
-```
## Development and Debugging
+### Debugging
+
To use debugger, run:
-```bash
-mcp dev src/server.py
-```
+ ```bash
+ mcp dev src/server.py
+ ```
+
Open inspector with timeout passed:
-```
-http://localhost:5173?timeout=120000
-```
+ ```
+ http://localhost:5173?timeout=120000
+ ```
To apply new changes while developing cognee you need to do:
@@ -101,20 +162,31 @@ To apply new changes while developing cognee you need to do:
3. `mcp dev src/server.py`
### Development
-In order to use local cognee build, run in root of the cognee repo:
-```bash
-poetry build -o ./cognee-mcp/sources
-```
-After the build process is done, change the cognee library dependency inside the `cognee-mcp/pyproject.toml` from
-```toml
-cognee[postgres,codegraph,gemini,huggingface]==0.1.38
-```
-to
-```toml
-cognee[postgres,codegraph,gemini,huggingface]
-```
-After that add the following snippet to the same file (`cognee-mcp/pyproject.toml`).
-```toml
-[tool.uv.sources]
-cognee = { path = "sources/cognee-0.1.38-py3-none-any.whl" }
-```
+
+In order to use local cognee:
+
+1. Uncomment the following line in the cognee-mcp [`pyproject.toml`](pyproject.toml) file and set the cognee root path.
+ ```
+ #"cognee[postgres,codegraph,gemini,huggingface,docs,neo4j] @ file:/Users//Desktop/cognee"
+ ```
+ Remember to replace `file:/Users//Desktop/cognee` with your actual cognee root path.
+
+2. Install dependencies with uv in the mcp folder
+ ```
+ uv sync --reinstall
+ ```
+
+## Code of Conduct
+
+We are committed to making open source an enjoyable and respectful experience for our community. See CODE_OF_CONDUCT for more information.
+
+## 💫 Contributors
+
+
+
+
+
+
+## Star History
+
+[](https://star-history.com/#topoteretes/cognee&Date)
diff --git a/cognee-mcp/pyproject.toml b/cognee-mcp/pyproject.toml
index 0f8c8684d..328dcc1d8 100644
--- a/cognee-mcp/pyproject.toml
+++ b/cognee-mcp/pyproject.toml
@@ -6,8 +6,8 @@ readme = "README.md"
requires-python = ">=3.10"
dependencies = [
- # For local cognee repo usage remove comment bellow and add absolute path to cognee
- #"cognee[postgres,codegraph,gemini,huggingface] @ file:/Users//Desktop/cognee",
+ # For local cognee repo usage remove comment below and add absolute path to cognee. Then run `uv sync --reinstall` in the mcp folder on local cognee changes.
+ #"cognee[postgres,codegraph,gemini,huggingface,docs,neo4j] @ file:/Users//Desktop/cognee",
"cognee[postgres,codegraph,gemini,huggingface,docs,neo4j]==0.1.40",
"fastmcp>=1.0",
"mcp==1.5.0",