This commit is contained in:
0fism 2025-10-30 10:35:00 +08:00 committed by GitHub
commit 6b65a058be
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 24 additions and 2 deletions

View file

@@ -20,6 +20,12 @@ FALKORDB_PASSWORD=
OPENAI_API_KEY=your_openai_api_key_here
MODEL_NAME=gpt-4.1-mini
# Embedding Service Configuration (Optional)
# Use these to configure a separate embedding service (e.g., Ollama, Voyage, or custom OpenAI-compatible service)
# EMBEDDER_API_KEY=your_embedder_api_key_here # Defaults to OPENAI_API_KEY if not set
# EMBEDDER_BASE_URL=http://localhost:11434/v1 # For Ollama or other OpenAI-compatible endpoints
# EMBEDDER_MODEL_NAME=nomic-embed-text # Model name for embedding service
# Optional: Only needed for non-standard OpenAI endpoints
# OPENAI_BASE_URL=https://api.openai.com/v1

View file

@@ -102,6 +102,9 @@ The server supports both Neo4j and FalkorDB as database backends. Use the `DATAB
- `MODEL_NAME`: OpenAI model name to use for LLM operations.
- `SMALL_MODEL_NAME`: OpenAI model name to use for smaller LLM operations.
- `LLM_TEMPERATURE`: Temperature for LLM responses (0.0-2.0).
- `EMBEDDER_API_KEY`: Optional API key for the embedding service (defaults to `OPENAI_API_KEY` if not set).
- `EMBEDDER_BASE_URL`: Optional base URL for the embedding service (e.g., `http://localhost:11434/v1` for Ollama).
- `EMBEDDER_MODEL_NAME`: Embedding model name (default: `text-embedding-3-small`).
- `AZURE_OPENAI_ENDPOINT`: Optional Azure OpenAI LLM endpoint URL
- `AZURE_OPENAI_DEPLOYMENT_NAME`: Optional Azure OpenAI LLM deployment name
- `AZURE_OPENAI_API_VERSION`: Optional Azure OpenAI LLM API version

View file

@@ -39,7 +39,11 @@ services:
- NEO4J_USER=${NEO4J_USER:-neo4j}
- NEO4J_PASSWORD=${NEO4J_PASSWORD:-demodemo}
- OPENAI_API_KEY=${OPENAI_API_KEY}
- OPENAI_BASE_URL=${OPENAI_BASE_URL}
- MODEL_NAME=${MODEL_NAME}
- EMBEDDER_API_KEY=${EMBEDDER_API_KEY}
- EMBEDDER_BASE_URL=${EMBEDDER_BASE_URL}
- EMBEDDER_MODEL_NAME=${EMBEDDER_MODEL_NAME}
- PATH=/root/.local/bin:${PATH}
- SEMAPHORE_LIMIT=${SEMAPHORE_LIMIT:-10}
ports:

View file

@@ -356,6 +356,7 @@ class GraphitiEmbedderConfig(BaseModel):
model: str = DEFAULT_EMBEDDER_MODEL
api_key: str | None = None
base_url: str | None = None
azure_openai_endpoint: str | None = None
azure_openai_deployment_name: str | None = None
azure_openai_api_version: str | None = None
@@ -369,6 +370,11 @@ class GraphitiEmbedderConfig(BaseModel):
model_env = os.environ.get('EMBEDDER_MODEL_NAME', '')
model = model_env if model_env.strip() else DEFAULT_EMBEDDER_MODEL
# Get base_url from environment
base_url = os.environ.get('EMBEDDER_BASE_URL', None)
logger.info(f'EMBEDDER_BASE_URL from env: {base_url}')
logger.info(f'EMBEDDER_MODEL_NAME from env: {model}')
azure_openai_endpoint = os.environ.get('AZURE_OPENAI_EMBEDDING_ENDPOINT', None)
azure_openai_api_version = os.environ.get('AZURE_OPENAI_EMBEDDING_API_VERSION', None)
azure_openai_deployment_name = os.environ.get(
@@ -407,9 +413,12 @@ class GraphitiEmbedderConfig(BaseModel):
azure_openai_deployment_name=azure_openai_deployment_name,
)
else:
api_key = os.environ.get('EMBEDDER_API_KEY') or os.environ.get('OPENAI_API_KEY')
return cls(
model=model,
api_key=os.environ.get('OPENAI_API_KEY'),
api_key=api_key,
base_url=base_url,
)
def create_client(self) -> EmbedderClient | None:
@@ -446,7 +455,7 @@ class GraphitiEmbedderConfig(BaseModel):
if not self.api_key:
return None
embedder_config = OpenAIEmbedderConfig(api_key=self.api_key, embedding_model=self.model)
embedder_config = OpenAIEmbedderConfig(api_key=self.api_key, embedding_model=self.model, base_url=self.base_url)
return OpenAIEmbedder(config=embedder_config)