graphiti/mcp_server/docker-compose.yml
0fa7865cee feat(mcp-server): add EMBEDDER_BASE_URL support for custom embedding endpoints
- Add base_url parameter to GraphitiEmbedderConfig
- Support EMBEDDER_BASE_URL, EMBEDDER_API_KEY, EMBEDDER_MODEL_NAME env vars
- Update docker-compose.yml to pass embedder environment variables
- Add documentation in README.md and .env.example
- Enables use of Ollama, Voyage, and other OpenAI-compatible embedding services (see the sketch below)

Partially resolves #912, #517
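
A minimal sketch of the new variables in use, assuming a local Ollama instance reachable from the container; the override file name, host URL, and model name below are illustrative, not shipped by this repo:

```yaml
# docker-compose.override.yml (hypothetical): point the embedder at Ollama's
# OpenAI-compatible endpoint. Ollama ignores the API key, but OpenAI-style
# clients expect a non-empty value, so a placeholder is supplied.
services:
  graphiti-mcp:
    environment:
      - EMBEDDER_BASE_URL=http://host.docker.internal:11434/v1
      - EMBEDDER_MODEL_NAME=nomic-embed-text
      - EMBEDDER_API_KEY=ollama
```

Compose merges the override's `environment` entries with the base file's, so the rest of the stack below is unchanged.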

services:
  neo4j:
    image: neo4j:5.26.0
    ports:
      - "7474:7474"  # HTTP
      - "7687:7687"  # Bolt
    environment:
      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-demodemo}
      - NEO4J_server_memory_heap_initial__size=512m
      - NEO4J_server_memory_heap_max__size=1G
      - NEO4J_server_memory_pagecache_size=512m
    volumes:
      - neo4j_data:/data
      - neo4j_logs:/logs
    healthcheck:
      # Gate dependent services on the HTTP interface answering
      test: ["CMD", "wget", "-O", "/dev/null", "http://localhost:7474"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s

  graphiti-mcp:
    image: zepai/knowledge-graph-mcp:latest
    build:
      context: .
      dockerfile: Dockerfile
    env_file:
      - path: .env
        required: false  # Makes the file optional. Default value is 'true'
    depends_on:
      neo4j:
        condition: service_healthy
    environment:
      - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687}
      - NEO4J_USER=${NEO4J_USER:-neo4j}
      - NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo}
      # LLM settings
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - OPENAI_BASE_URL=${OPENAI_BASE_URL}
      - MODEL_NAME=${MODEL_NAME}
      # Embedder settings (see README.md and .env.example)
      - EMBEDDER_API_KEY=${EMBEDDER_API_KEY}
      - EMBEDDER_BASE_URL=${EMBEDDER_BASE_URL}
      - EMBEDDER_MODEL_NAME=${EMBEDDER_MODEL_NAME}
      # Note: ${PATH} is interpolated by Compose from the invoking shell, not inside the container
      - PATH=/root/.local/bin:${PATH}
      # Episode-processing concurrency; lower it if you hit provider rate limits
      - SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10}
    ports:
      - "8000:8000"  # Expose the MCP server via HTTP for SSE transport
    command: ["uv", "run", "graphiti_mcp_server.py", "--transport", "sse"]

volumes:
  neo4j_data:
  neo4j_logs:
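
Worth noting: every credential above interpolates with a development fallback (`demodemo`), so the stack boots even with no `.env` present. For anything beyond local experimentation, a hypothetical override can make Compose fail fast instead of silently defaulting; the file name and error message are illustrative:

```yaml
# docker-compose.override.yml (hypothetical): refuse to start without a real password.
# ${VAR:?message} aborts interpolation with the message when VAR is unset or empty.
services:
  neo4j:
    environment:
      - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:?NEO4J_PASSWORD must be set}
  graphiti-mcp:
    environment:
      - NEO4J_PASSWORD=${NEO4J_PASSWORD:?NEO4J_PASSWORD must be set}
```

Once the stack is up (`docker compose up`), SSE clients reach the MCP server through the published port 8000.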