This is a major refactoring of the MCP Server to support multiple providers through a YAML-based configuration system with a factory-pattern implementation.

## Key Changes

### Architecture Improvements
- Modular configuration system with YAML-based settings
- Factory pattern for LLM, Embedder, and Database providers
- Support for multiple database backends (Neo4j, FalkorDB, KuzuDB)
- Clean separation of concerns with dedicated service modules

### Provider Support
- **LLM**: OpenAI, Anthropic, Gemini, Groq
- **Embedders**: OpenAI, Voyage, Gemini, Anthropic, Sentence Transformers
- **Databases**: Neo4j, FalkorDB, KuzuDB (new default)
- Azure OpenAI support with AD authentication

### Configuration
- YAML configuration with environment variable expansion
- CLI argument overrides for runtime configuration
- Multiple pre-configured Docker Compose setups
- Proper boolean handling in environment variables

### Testing & CI
- Comprehensive test suite with unit and integration tests
- GitHub Actions workflows for linting and testing
- Multi-database testing support

### Docker Support
- Updated Docker images with multi-stage builds
- Database-specific docker-compose configurations
- Persistent volume support for all databases

### Bug Fixes
- Fixed KuzuDB connectivity checks
- Corrected Docker command paths
- Improved error handling and logging
- Fixed boolean environment variable expansion

Co-authored-by: Claude <noreply@anthropic.com>
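For a sense of what the new configuration looks like in practice, here is a minimal sketch of a `config.yaml` that the provider factories might consume. The key names and structure are illustrative assumptions, not the shipped schema; the point is the provider selection and the `${VAR:-default}` environment-variable expansion (including booleans) described above.

```yaml
# Illustrative sketch only -- key names are assumptions, not the actual schema.
llm:
  provider: openai                        # e.g. openai, anthropic, gemini, groq
  model: ${LLM_MODEL:-gpt-4o-mini}        # expanded from the environment, with a default
  api_key: ${OPENAI_API_KEY}

embedder:
  provider: voyage                        # e.g. openai, voyage, gemini, anthropic, sentence_transformers
  api_key: ${VOYAGE_API_KEY}

database:
  provider: neo4j                         # e.g. neo4j, falkordb, kuzu
  uri: ${NEO4J_URI:-bolt://neo4j:7687}
  user: ${NEO4J_USER:-neo4j}
  password: ${NEO4J_PASSWORD:-demodemo}

use_custom_entities: ${USE_CUSTOM_ENTITIES:-false}  # booleans must expand to true/false, not a literal string
```

At runtime, values like these can still be overridden by CLI arguments. The Docker Compose setup below (the Neo4j variant, judging by the mounted `config-docker-neo4j.yaml`) wires the same environment variables into the container and mounts the config file read-only.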
```yaml
services:
  neo4j:
    image: neo4j:5.26.0
    ports:
      - "7474:7474"  # HTTP
      - "7687:7687"  # Bolt
    environment:
      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-demodemo}
      - NEO4J_server_memory_heap_initial__size=512m
      - NEO4J_server_memory_heap_max__size=1G
      - NEO4J_server_memory_pagecache_size=512m
    volumes:
      - neo4j_data:/data
      - neo4j_logs:/logs
    healthcheck:
      test: ["CMD", "wget", "-O", "/dev/null", "http://localhost:7474"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s

  graphiti-mcp:
    image: zepai/knowledge-graph-mcp:latest
    build:
      context: .
      dockerfile: Dockerfile
    env_file:
      - path: .env
        required: false  # Makes the file optional. Default value is 'true'
    depends_on:
      neo4j:
        condition: service_healthy
    environment:
      # Database configuration
      - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687}
      - NEO4J_USER=${NEO4J_USER:-neo4j}
      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo}
      - NEO4J_DATABASE=${NEO4J_DATABASE:-neo4j}
      # LLM provider configurations
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
      - GOOGLE_API_KEY=${GOOGLE_API_KEY}
      - GROQ_API_KEY=${GROQ_API_KEY}
      - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY}
      - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT}
      - AZURE_OPENAI_DEPLOYMENT=${AZURE_OPENAI_DEPLOYMENT}
      # Embedder provider configurations
      - VOYAGE_API_KEY=${VOYAGE_API_KEY}
      - AZURE_OPENAI_EMBEDDINGS_ENDPOINT=${AZURE_OPENAI_EMBEDDINGS_ENDPOINT}
      - AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT=${AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT}
      # Application configuration
      - GRAPHITI_GROUP_ID=${GRAPHITI_GROUP_ID:-main}
      - SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10}
      - CONFIG_PATH=/app/config/config.yaml
      - PATH=/root/.local/bin:${PATH}
    volumes:
      - ../config/config-docker-neo4j.yaml:/app/config/config.yaml:ro
    ports:
      - "8000:8000"  # Expose the MCP server via HTTP for SSE transport
    command: ["uv", "run", "src/graphiti_mcp_server.py", "--transport", "sse", "--config", "/app/config/config.yaml"]

volumes:
  neo4j_data:
  neo4j_logs:
```
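Because the Compose file declares the `.env` file as optional (`required: false`), only the variables you actually need have to be provided. A minimal `.env` might look like the following; every variable name is taken from the Compose file above, and the values are placeholders rather than real credentials.

```
# .env (optional) -- placeholder values
NEO4J_USER=neo4j
NEO4J_PASSWORD=demodemo
OPENAI_API_KEY=your-openai-key
GRAPHITI_GROUP_ID=main
SEMAPHORE_LIMIT=10
```

Variables left unset fall back to the `${VAR:-default}` defaults baked into the Compose file; keys for providers you don't use simply expand to empty strings, which Compose reports as a warning rather than an error.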