Major improvements to the Graphiti MCP server configuration:
Configuration System:
- Add YAML-based configuration with config.yaml
- Support environment variable expansion in YAML (${VAR_NAME} syntax)
- Implement hierarchical configuration precedence: CLI > env > YAML > defaults (see the loading sketch after this list)
- Add pydantic-settings for robust configuration management
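A minimal sketch of how these pieces can fit together, assuming a flat config.yaml and illustrative names (`ServerSettings`, `load_settings`) rather than the server's actual API: YAML values are loaded first, `${VAR_NAME}` references are expanded from the environment, and explicit environment variables and CLI flags override them in that order.

```python
# Illustrative sketch only: shows ${VAR_NAME} expansion plus the
# CLI > env > YAML > defaults precedence; all names here are hypothetical.
import os
import re

import yaml  # PyYAML
from pydantic_settings import BaseSettings

_VAR = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")


def _expand(node):
    """Recursively replace ${VAR_NAME} in strings with the environment value."""
    if isinstance(node, dict):
        return {k: _expand(v) for k, v in node.items()}
    if isinstance(node, list):
        return [_expand(v) for v in node]
    if isinstance(node, str):
        return _VAR.sub(lambda m: os.environ.get(m.group(1), m.group(0)), node)
    return node


class ServerSettings(BaseSettings):
    # Field defaults are the lowest-precedence layer.
    llm_provider: str = "openai"
    group_id: str = "main"


def load_settings(config_path: str, cli_overrides: dict) -> ServerSettings:
    with open(config_path) as f:
        yaml_values = _expand(yaml.safe_load(f) or {})
    env_values = {
        k: v
        for k, v in {
            "llm_provider": os.environ.get("LLM_PROVIDER"),
            "group_id": os.environ.get("GRAPHITI_GROUP_ID"),
        }.items()
        if v is not None
    }
    # YAML < environment < CLI: later dicts win on key collisions.
    merged = {**yaml_values, **env_values, **cli_overrides}
    known = {k: v for k, v in merged.items() if k in ServerSettings.model_fields}
    return ServerSettings(**known)
```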
Multi-Provider Support:
- Add factory pattern for LLM clients (OpenAI, Anthropic, Gemini, Groq, Azure)
- Add factory pattern for embedder clients (OpenAI, Azure, Gemini, Voyage)
- Add factory pattern for database drivers (Neo4j, FalkorDB)
- Graceful handling of unavailable providers (illustrated in the factory sketch below)
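The factory idea with graceful fallback, condensed to the LLM side. This is a sketch, not the server's real wrappers: the vendor SDK clients (openai, anthropic, groq) stand in for whatever client classes the factories actually return, and each SDK is imported lazily so a missing optional dependency only surfaces when that provider is selected.

```python
# Sketch of a provider factory with graceful fallback. Builders import their
# SDKs lazily; the vendor clients are stand-ins for the server's own wrappers.
from typing import Any, Callable, Dict


def _build_openai(api_key: str) -> Any:
    from openai import OpenAI  # optional dependency, imported lazily
    return OpenAI(api_key=api_key)


def _build_anthropic(api_key: str) -> Any:
    from anthropic import Anthropic
    return Anthropic(api_key=api_key)


def _build_groq(api_key: str) -> Any:
    from groq import Groq
    return Groq(api_key=api_key)


_LLM_FACTORIES: Dict[str, Callable[[str], Any]] = {
    "openai": _build_openai,
    "anthropic": _build_anthropic,
    "groq": _build_groq,
}


def create_llm_client(provider: str, api_key: str) -> Any:
    try:
        factory = _LLM_FACTORIES[provider]
    except KeyError:
        raise ValueError(
            f"Unknown LLM provider: {provider!r}; expected one of {sorted(_LLM_FACTORIES)}"
        )
    try:
        return factory(api_key)
    except ImportError as exc:
        # Graceful handling: report the missing SDK for the selected provider
        # instead of failing at server import time.
        raise RuntimeError(
            f"Provider {provider!r} is configured but its SDK is not installed"
        ) from exc
```

Embedder and database-driver factories can follow the same registry-plus-lazy-import shape.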
Code Improvements:
- Refactor main server to use unified configuration system
- Remove obsolete graphiti_service.py with hardcoded Neo4j configs
- Clean up deprecated type hints and fix all lint issues
- Add comprehensive test suite for configuration loading (example test below)
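One example of the kind of test such a suite can contain, written against the hypothetical `load_settings` helper sketched earlier (the real suite's helpers and module layout will differ); `tmp_path` and `monkeypatch` are standard pytest fixtures.

```python
# Sketch of a configuration-loading test; config_loader / load_settings are the
# hypothetical names from the earlier sketch, not the server's actual module.
from config_loader import load_settings


def test_cli_beats_env_beats_yaml(tmp_path, monkeypatch):
    config = tmp_path / "config.yaml"
    config.write_text("llm_provider: openai\ngroup_id: ${GRAPHITI_GROUP_ID}\n")

    monkeypatch.setenv("GRAPHITI_GROUP_ID", "from-env")  # expanded inside the YAML
    monkeypatch.setenv("LLM_PROVIDER", "anthropic")      # env overrides the YAML value

    settings = load_settings(str(config), cli_overrides={"llm_provider": "groq"})

    assert settings.group_id == "from-env"
    assert settings.llm_provider == "groq"  # CLI flag wins over env and YAML
```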
Documentation:
- Update README with concise configuration instructions
- Add VS Code integration example
- Remove overly verbose separate documentation
Docker Updates:
- Update Dockerfile to include config.yaml
- Enhance docker-compose.yml with provider environment variables
- Support configuration volume mounting
Breaking Changes:
- None - full backward compatibility maintained
- All existing CLI arguments and environment variables still work
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-Authored-By: Claude <noreply@anthropic.com>
docker-compose.yml:
```yaml
services:
  neo4j:
    image: neo4j:5.26.0
    ports:
      - "7474:7474" # HTTP
      - "7687:7687" # Bolt
    environment:
      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-demodemo}
      - NEO4J_server_memory_heap_initial__size=512m
      - NEO4J_server_memory_heap_max__size=1G
      - NEO4J_server_memory_pagecache_size=512m
    volumes:
      - neo4j_data:/data
      - neo4j_logs:/logs
    healthcheck:
      test: ["CMD", "wget", "-O", "/dev/null", "http://localhost:7474"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s

  graphiti-mcp:
    image: zepai/knowledge-graph-mcp:latest
    build:
      context: .
      dockerfile: Dockerfile
    env_file:
      - path: .env
        required: false # Makes the file optional. Default value is 'true'
    depends_on:
      neo4j:
        condition: service_healthy
    environment:
      # Database configuration
      - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687}
      - NEO4J_USER=${NEO4J_USER:-neo4j}
      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo}
      - NEO4J_DATABASE=${NEO4J_DATABASE:-neo4j}
      # LLM provider configurations
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
      - GOOGLE_API_KEY=${GOOGLE_API_KEY}
      - GROQ_API_KEY=${GROQ_API_KEY}
      - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY}
      - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT}
      - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT}
      # Embedder provider configurations
      - VOYAGE_API_KEY=${VOYAGE_API_KEY}
      - AZURE_OPENAI_EMBEDDINGS_ENDPOINT=${AZURE_OPENAI_EMBEDDINGS_ENDPOINT}
      - AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT=${AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT}
      # Application configuration
      - GRAPHITI_GROUP_ID=${GRAPHITI_GROUP_ID:-main}
      - SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10}
      - CONFIG_PATH=/app/config/config.yaml
      - PATH=/root/.local/bin:${PATH}
    volumes:
      - ./config.yaml:/app/config/config.yaml:ro
    ports:
      - "8000:8000" # Expose the MCP server via HTTP for SSE transport
    command: ["uv", "run", "graphiti_mcp_server.py", "--transport", "sse", "--config", "/app/config/config.yaml"]

volumes:
  neo4j_data:
  neo4j_logs:
```