Compare commits
17 commits
| Author | SHA1 | Date |
|---|---|---|
| | 032c2a20c6 | |
| | 323bd57c9b | |
| | 8bbf068eae | |
| | f3132dc763 | |
| | efd21d6c7e | |
| | 8f4c1ea8b9 | |
| | 59802cfae5 | |
| | b5a8227035 | |
| | becd9107f9 | |
| | 6a5e4a356e | |
| | c5d7597319 | |
| | d6ff7bb78c | |
| | 5e593dd096 | |
| | 1de752646a | |
| | 24105b9556 | |
| | 2d50fa565d | |
| | 6dc7b8800b | |
13 changed files with 136 additions and 28 deletions
@@ -18,7 +18,9 @@ import logging
 from collections.abc import Coroutine
 from typing import Any
 
+import neo4j.exceptions
 from neo4j import AsyncGraphDatabase, EagerResult
+from neo4j.exceptions import ClientError
 from typing_extensions import LiteralString
 
 from graphiti_core.driver.driver import GraphDriver, GraphDriverSession, GraphProvider
@@ -70,6 +72,15 @@ class Neo4jDriver(GraphDriver):
 
         try:
             result = await self.client.execute_query(cypher_query_, parameters_=params, **kwargs)
+        except neo4j.exceptions.ClientError as e:
+            # Handle race condition when creating indices/constraints in parallel
+            # Neo4j 5.26+ may throw EquivalentSchemaRuleAlreadyExists even with IF NOT EXISTS
+            if 'EquivalentSchemaRuleAlreadyExists' in str(e):
+                logger.info(f'Index or constraint already exists, continuing: {cypher_query_}')
+                # Return empty result to indicate success (index exists)
+                return EagerResult([], None, None)  # type: ignore
+            logger.error(f'Error executing Neo4j query: {e}\n{cypher_query_}\n{params}')
+            raise
         except Exception as e:
             logger.error(f'Error executing Neo4j query: {e}\n{cypher_query_}\n{params}')
             raise
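Reviewer note: the race this hunk guards against is two concurrent `CREATE INDEX ... IF NOT EXISTS` statements both passing the existence check, with the loser surfacing `EquivalentSchemaRuleAlreadyExists` even though the index exists. A minimal standalone sketch of the same tolerance pattern follows; the connection URI, credentials, `create_index_tolerant` helper, and `INDEX_QUERY` are illustrative assumptions, not part of this PR.

```python
import asyncio

from neo4j import AsyncGraphDatabase
from neo4j.exceptions import ClientError

# Hypothetical index query; any 'CREATE INDEX ... IF NOT EXISTS' behaves the same way.
INDEX_QUERY = 'CREATE INDEX entity_uuid IF NOT EXISTS FOR (n:Entity) ON (n.uuid)'


async def create_index_tolerant(driver) -> None:
    try:
        await driver.execute_query(INDEX_QUERY)
    except ClientError as e:
        # Same check the hunk above adds: another worker won the race, the index exists.
        if 'EquivalentSchemaRuleAlreadyExists' in str(e):
            return
        raise


async def main() -> None:
    driver = AsyncGraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'password'))
    try:
        # Firing the same statement concurrently is what can trigger the race.
        await asyncio.gather(*(create_index_tolerant(driver) for _ in range(5)))
    finally:
        await driver.close()


if __name__ == '__main__':
    asyncio.run(main())
```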
@@ -88,6 +99,21 @@ class Neo4jDriver(GraphDriver):
             'CALL db.indexes() YIELD name DROP INDEX name',
         )
 
+    async def _execute_index_query(self, query: LiteralString) -> EagerResult | None:
+        """Execute an index creation query, ignoring 'index already exists' errors.
+
+        Neo4j can raise EquivalentSchemaRuleAlreadyExists when concurrent CREATE INDEX
+        IF NOT EXISTS queries race, even though the index exists. This is safe to ignore.
+        """
+        try:
+            return await self.execute_query(query)
+        except ClientError as e:
+            # Ignore "equivalent index already exists" error (race condition with IF NOT EXISTS)
+            if 'EquivalentSchemaRuleAlreadyExists' in str(e):
+                logger.debug(f'Index already exists (concurrent creation): {query[:50]}...')
+                return None
+            raise
+
     async def build_indices_and_constraints(self, delete_existing: bool = False):
         if delete_existing:
             await self.delete_all_indexes()
@@ -98,14 +124,7 @@ class Neo4jDriver(GraphDriver):
 
         index_queries: list[LiteralString] = range_indices + fulltext_indices
 
-        await semaphore_gather(
-            *[
-                self.execute_query(
-                    query,
-                )
-                for query in index_queries
-            ]
-        )
+        await semaphore_gather(*[self._execute_index_query(query) for query in index_queries])
 
     async def health_check(self) -> None:
         """Check Neo4j connectivity by running the driver's verify_connectivity method."""
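Reviewer note: the one-liner above routes every index query through the new tolerant helper via graphiti's `semaphore_gather`. A rough stand-in for that helper, assuming it is a semaphore-bounded `asyncio.gather` (the `max_coroutines` default is an assumption, not taken from this PR), looks like this:

```python
import asyncio
from collections.abc import Coroutine
from typing import Any


async def semaphore_gather(
    *coroutines: Coroutine[Any, Any, Any], max_coroutines: int = 10
) -> list[Any]:
    # Bound how many of the passed coroutines run at the same time.
    semaphore = asyncio.Semaphore(max_coroutines)

    async def _bounded(coroutine: Coroutine[Any, Any, Any]) -> Any:
        async with semaphore:
            return await coroutine

    return await asyncio.gather(*(_bounded(c) for c in coroutines))
```

Because `_execute_index_query` swallows only the "equivalent schema rule" error, index creation failures for any other reason still propagate out of the gather.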
@@ -31,8 +31,8 @@ from .errors import RateLimitError, RefusalError
 
 logger = logging.getLogger(__name__)
 
-DEFAULT_MODEL = 'gpt-5-mini'
-DEFAULT_SMALL_MODEL = 'gpt-5-nano'
+DEFAULT_MODEL = 'gpt-4o-mini'
+DEFAULT_SMALL_MODEL = 'gpt-4o-mini'
 DEFAULT_REASONING = 'minimal'
 DEFAULT_VERBOSITY = 'low'
 
@@ -41,6 +41,16 @@ class DateFilter(BaseModel):
     )
 
 
+class PropertyFilter(BaseModel):
+    property_name: str = Field(description='Property name')
+    property_value: str | int | float | None = Field(
+        description='Value you want to match on for the property'
+    )
+    comparison_operator: ComparisonOperator = Field(
+        description='Comparison operator for the property'
+    )
+
+
 class SearchFilters(BaseModel):
     node_labels: list[str] | None = Field(
         default=None, description='List of node labels to filter on'
@@ -53,6 +63,7 @@ class SearchFilters(BaseModel):
     created_at: list[list[DateFilter]] | None = Field(default=None)
     expired_at: list[list[DateFilter]] | None = Field(default=None)
     edge_uuids: list[str] | None = Field(default=None)
+    property_filters: list[PropertyFilter] | None = Field(default=None)
 
 
 def cypher_to_opensearch_operator(op: ComparisonOperator) -> str:
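Reviewer note: a self-contained sketch of how the new models compose when building a filter. The `ComparisonOperator` enum below is a stand-in with made-up members (the real one already exists in this module), and the property name/value are illustrative:

```python
from enum import Enum

from pydantic import BaseModel, Field


class ComparisonOperator(str, Enum):
    # Stand-in members; the module's real enum defines the actual operators.
    equals = '='
    greater_than = '>'


class PropertyFilter(BaseModel):
    property_name: str = Field(description='Property name')
    property_value: str | int | float | None = Field(
        description='Value you want to match on for the property'
    )
    comparison_operator: ComparisonOperator = Field(
        description='Comparison operator for the property'
    )


class SearchFilters(BaseModel):
    node_labels: list[str] | None = Field(default=None)
    property_filters: list[PropertyFilter] | None = Field(default=None)


filters = SearchFilters(
    property_filters=[
        PropertyFilter(
            property_name='status',
            property_value='active',
            comparison_operator=ComparisonOperator.equals,
        )
    ]
)
print(filters.model_dump())
```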
mcp_server/.env — new file, 49 additions
@@ -0,0 +1,49 @@
+# Graphiti MCP Server Environment Configuration
+MCP_SERVER_HOST=gmakai.online
+# Neo4j Database Configuration
+# These settings are used to connect to your Neo4j database
+NEO4J_URI=bolt://neo4j:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=kg3Jsdb2
+
+# OpenAI API Configuration
+# Required for LLM operations
+OPENAI_API_KEY=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
+MODEL_NAME=gpt5.1-nano
+
+# Optional: Only needed for non-standard OpenAI endpoints
+OPENAI_BASE_URL=https://openrouter.ai/api/v1
+
+# Optional: Group ID for namespacing graph data
+# GROUP_ID=my_project
+
+# Concurrency Control
+# Controls how many episodes can be processed simultaneously
+# Default: 10 (suitable for OpenAI Tier 3, mid-tier Anthropic)
+# Adjust based on your LLM provider's rate limits:
+# - OpenAI Tier 1 (free): 1-2
+# - OpenAI Tier 2: 5-8
+# - OpenAI Tier 3: 10-15
+# - OpenAI Tier 4: 20-50
+# - Anthropic default: 5-8
+# - Anthropic high tier: 15-30
+# - Ollama (local): 1-5
+# See README.md "Concurrency and LLM Provider 429 Rate Limit Errors" for details
+SEMAPHORE_LIMIT=10
+
+# Optional: Path configuration for Docker
+# PATH=/root/.local/bin:${PATH}
+
+# Optional: Memory settings for Neo4j (used in Docker Compose)
+# NEO4J_server_memory_heap_initial__size=512m
+# NEO4J_server_memory_heap_max__size=1G
+# NEO4J_server_memory_pagecache_size=512m
+
+# Azure OpenAI configuration
+# Optional: Only needed for Azure OpenAI endpoints
+# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here
+# AZURE_OPENAI_API_VERSION=2025-01-01-preview
+# AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-gpt-4o-mini-deployment
+# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15
+# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment
+# AZURE_OPENAI_USE_MANAGED_IDENTITY=false
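Reviewer note: for context on how variables like these reach the server, a minimal sketch of reading them with `os.getenv` and falling back to defaults follows. This is illustrative only, not the MCP server's actual configuration loader; the defaults mirror the fallbacks used in this PR's docker-compose changes.

```python
import os

# Read connection settings from the environment, with fallbacks.
neo4j_uri = os.getenv('NEO4J_URI', 'bolt://neo4j:7687')
neo4j_user = os.getenv('NEO4J_USER', 'neo4j')
neo4j_password = os.getenv('NEO4J_PASSWORD', '')  # no safe default for a secret
semaphore_limit = int(os.getenv('SEMAPHORE_LIMIT', '10'))

print(f'Connecting to {neo4j_uri} as {neo4j_user} (concurrency {semaphore_limit})')
```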
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-5-mini"
+  model: "gpt-4o-mini"
   max_tokens: 4096
 
 providers:
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-5-mini"
+  model: "gpt-4o-mini"
   max_tokens: 4096
 
 providers:
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-5-mini"
+  model: "gpt-4o-mini"
   max_tokens: 4096
 
 providers:
@@ -12,7 +12,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-5-mini"
+  model: "gpt-4o-mini"
   max_tokens: 4096
 
 providers:
@@ -1,23 +1,23 @@
 services:
   neo4j:
-    image: neo4j:5.26.0
+    image: neo4j:latest
     ports:
       - "7474:7474" # HTTP
       - "7687:7687" # Bolt
     environment:
-      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-demodemo}
-      - NEO4J_server_memory_heap_initial__size=512m
-      - NEO4J_server_memory_heap_max__size=1G
-      - NEO4J_server_memory_pagecache_size=512m
+      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-kg3Jsdb2}
     volumes:
-      - neo4j_data:/data
-      - neo4j_logs:/logs
+      - /data/neo4j/data:/data
+      - /data/neo4j/logs:/logs
+      - /data/neo4j/plugins:/plugins
+      - /data/neo4j/config:/config
     healthcheck:
       test: ["CMD", "wget", "-O", "/dev/null", "http://localhost:7474"]
       interval: 10s
       timeout: 5s
       retries: 5
       start_period: 30s
+    restart: always
 
   graphiti-mcp:
     image: zepai/knowledge-graph-mcp:standalone
@@ -27,9 +27,9 @@ services:
     build:
       context: ..
       dockerfile: docker/Dockerfile.standalone
-    env_file:
-      - path: ../.env
-        required: false
+    #env_file:
+    #  - path: ../.env
+    #    required: true
     depends_on:
       neo4j:
         condition: service_healthy
@@ -37,13 +37,18 @@ services:
       # Database configuration
       - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687}
       - NEO4J_USER=${NEO4J_USER:-neo4j}
-      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo}
+      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-kg3Jsdb2}
       - NEO4J_DATABASE=${NEO4J_DATABASE:-neo4j}
       # Application configuration
       - GRAPHITI_GROUP_ID=${GRAPHITI_GROUP_ID:-main}
       - SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10}
       - CONFIG_PATH=/app/mcp/config/config.yaml
       - PATH=/root/.local/bin:${PATH}
+      - MCP_SERVER_HOST=gmakai.online
+      - OPENAI_API_KEY=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
+      - MODEL_NAME=gpt5.1-nano
+      - OPENAI_BASE_URL=https://openrouter.ai/api/v1
+
     volumes:
       - ../config/config-docker-neo4j.yaml:/app/mcp/config/config.yaml:ro
     ports:
@@ -147,7 +147,7 @@ class LLMConfig(BaseModel):
     """LLM configuration."""
 
     provider: str = Field(default='openai', description='LLM provider')
-    model: str = Field(default='gpt-4.1', description='Model name')
+    model: str = Field(default='gpt-4o-mini', description='Model name')
     temperature: float | None = Field(
         default=None, description='Temperature (optional, defaults to None for reasoning models)'
     )
@@ -1,7 +1,7 @@
 [project]
 name = "graphiti-core"
 description = "A temporal graph building library"
-version = "0.24.1"
+version = "0.24.3"
 authors = [
     { name = "Paul Paliychuk", email = "paul@getzep.com" },
     { name = "Preston Rasmussen", email = "preston@getzep.com" },
@@ -487,6 +487,30 @@
       "created_at": "2025-11-27T02:45:53Z",
       "repoId": 840056306,
       "pullRequestNo": 1085
+    },
+    {
+      "name": "ronaldmego",
+      "id": 17481958,
+      "comment_id": 3617267429,
+      "created_at": "2025-12-05T14:59:42Z",
+      "repoId": 840056306,
+      "pullRequestNo": 1094
+    },
+    {
+      "name": "NShumway",
+      "id": 29358113,
+      "comment_id": 3634967978,
+      "created_at": "2025-12-10T01:26:49Z",
+      "repoId": 840056306,
+      "pullRequestNo": 1102
+    },
+    {
+      "name": "husniadil",
+      "id": 10581130,
+      "comment_id": 3650156180,
+      "created_at": "2025-12-14T03:37:59Z",
+      "repoId": 840056306,
+      "pullRequestNo": 1105
     }
   ]
 }
uv.lock — 2 changes (generated file)
@@ -808,7 +808,7 @@ wheels = [
 
 [[package]]
 name = "graphiti-core"
-version = "0.24.1"
+version = "0.24.3"
 source = { editable = "." }
 dependencies = [
     { name = "diskcache" },