From 0fa7865ceecec5de52ad1e2ba0f5e33c3241f0f3 Mon Sep 17 00:00:00 2001 From: 0fism Date: Thu, 16 Oct 2025 01:05:22 +0800 Subject: [PATCH] feat(mcp-server): add EMBEDDER_BASE_URL support for custom embedding endpoints - Add base_url parameter to GraphitiEmbedderConfig - Support EMBEDDER_BASE_URL, EMBEDDER_API_KEY, EMBEDDER_MODEL_NAME env vars - Update docker-compose.yml to pass embedder environment variables - Add documentation in README.md and .env.example - Enables use of Ollama, Voyage, and other OpenAI-compatible embedding services Partially resolves #912, #517 --- mcp_server/.env.example | 6 ++++++ mcp_server/README.md | 3 +++ mcp_server/docker-compose.yml | 4 ++++ mcp_server/graphiti_mcp_server.py | 13 +++++++++++-- 4 files changed, 24 insertions(+), 2 deletions(-) diff --git a/mcp_server/.env.example b/mcp_server/.env.example index 1e70ee56..d79a8be5 100644 --- a/mcp_server/.env.example +++ b/mcp_server/.env.example @@ -11,6 +11,12 @@ NEO4J_PASSWORD=demodemo OPENAI_API_KEY=your_openai_api_key_here MODEL_NAME=gpt-4.1-mini +# Embedding Service Configuration (Optional) +# Use these to configure a separate embedding service (e.g., Ollama, Voyage, or custom OpenAI-compatible service) +# EMBEDDER_API_KEY=your_embedder_api_key_here # Defaults to OPENAI_API_KEY if not set +# EMBEDDER_BASE_URL=http://localhost:11434/v1 # For Ollama or other OpenAI-compatible endpoints +# EMBEDDER_MODEL_NAME=nomic-embed-text # Model name for embedding service + # Optional: Only needed for non-standard OpenAI endpoints # OPENAI_BASE_URL=https://api.openai.com/v1 diff --git a/mcp_server/README.md b/mcp_server/README.md index d957feb8..d748cd86 100644 --- a/mcp_server/README.md +++ b/mcp_server/README.md @@ -92,6 +92,9 @@ The server uses the following environment variables: - `MODEL_NAME`: OpenAI model name to use for LLM operations. - `SMALL_MODEL_NAME`: OpenAI model name to use for smaller LLM operations. - `LLM_TEMPERATURE`: Temperature for LLM responses (0.0-2.0). 
+- `EMBEDDER_API_KEY`: Optional API key for embedding service (defaults to OPENAI_API_KEY if not set) +- `EMBEDDER_BASE_URL`: Optional base URL for embedding service (e.g., http://localhost:11434/v1 for Ollama) +- `EMBEDDER_MODEL_NAME`: Embedding model name (default: text-embedding-3-small) - `AZURE_OPENAI_ENDPOINT`: Optional Azure OpenAI LLM endpoint URL - `AZURE_OPENAI_DEPLOYMENT_NAME`: Optional Azure OpenAI LLM deployment name - `AZURE_OPENAI_API_VERSION`: Optional Azure OpenAI LLM API version diff --git a/mcp_server/docker-compose.yml b/mcp_server/docker-compose.yml index 1a0a548a..0d1d2b4a 100644 --- a/mcp_server/docker-compose.yml +++ b/mcp_server/docker-compose.yml @@ -35,7 +35,11 @@ services: - NEO4J_USER=${NEO4J_USER:-neo4j} - NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo} - OPENAI_API_KEY=${OPENAI_API_KEY} + - OPENAI_BASE_URL=${OPENAI_BASE_URL} - MODEL_NAME=${MODEL_NAME} + - EMBEDDER_API_KEY=${EMBEDDER_API_KEY} + - EMBEDDER_BASE_URL=${EMBEDDER_BASE_URL} + - EMBEDDER_MODEL_NAME=${EMBEDDER_MODEL_NAME} - PATH=/root/.local/bin:${PATH} - SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10} ports: diff --git a/mcp_server/graphiti_mcp_server.py b/mcp_server/graphiti_mcp_server.py index 9b382074..43118c69 100644 --- a/mcp_server/graphiti_mcp_server.py +++ b/mcp_server/graphiti_mcp_server.py @@ -354,6 +354,7 @@ class GraphitiEmbedderConfig(BaseModel): model: str = DEFAULT_EMBEDDER_MODEL api_key: str | None = None + base_url: str | None = None azure_openai_endpoint: str | None = None azure_openai_deployment_name: str | None = None azure_openai_api_version: str | None = None @@ -367,6 +368,11 @@ class GraphitiEmbedderConfig(BaseModel): model_env = os.environ.get('EMBEDDER_MODEL_NAME', '') model = model_env if model_env.strip() else DEFAULT_EMBEDDER_MODEL + # Treat empty EMBEDDER_BASE_URL (docker-compose passes '' when the var is unset) as not configured + base_url = os.environ.get('EMBEDDER_BASE_URL') or None + logger.info(f'EMBEDDER_BASE_URL from env: {base_url}') + logger.info(f'EMBEDDER_MODEL_NAME from env: {model}') + azure_openai_endpoint = 
os.environ.get('AZURE_OPENAI_EMBEDDING_ENDPOINT', None) azure_openai_api_version = os.environ.get('AZURE_OPENAI_EMBEDDING_API_VERSION', None) azure_openai_deployment_name = os.environ.get( @@ -405,9 +411,12 @@ class GraphitiEmbedderConfig(BaseModel): azure_openai_deployment_name=azure_openai_deployment_name, ) else: + api_key = os.environ.get('EMBEDDER_API_KEY') or os.environ.get('OPENAI_API_KEY') + return cls( model=model, - api_key=os.environ.get('OPENAI_API_KEY'), + api_key=api_key, + base_url=base_url, ) def create_client(self) -> EmbedderClient | None: @@ -444,7 +453,7 @@ class GraphitiEmbedderConfig(BaseModel): if not self.api_key: return None - embedder_config = OpenAIEmbedderConfig(api_key=self.api_key, embedding_model=self.model) + embedder_config = OpenAIEmbedderConfig(api_key=self.api_key, embedding_model=self.model, base_url=self.base_url) return OpenAIEmbedder(config=embedder_config)