# graphiti/mcp_server/.env.example
# Graphiti MCP Server Environment Configuration
# Database Configuration
# Choose between 'neo4j' or 'falkordb'
DATABASE_TYPE=falkordb
# These settings are used to connect to your Neo4j database
NEO4J_URI=bolt://localhost:7687
NEO4J_USER=neo4j
NEO4J_PASSWORD=demodemo
# These settings are used to connect to your FalkorDB database
FALKORDB_PORT=6379
FALKORDB_HOST=localhost
FALKORDB_USER=
FALKORDB_PASSWORD=
# OpenAI API Configuration
# Required for LLM operations
OPENAI_API_KEY=your_openai_api_key_here
MODEL_NAME=gpt-4.1-mini
# Embedding Service Configuration (Optional)
# Use these to configure a separate embedding service (e.g., Ollama, Voyage, or custom OpenAI-compatible service)
# EMBEDDER_API_KEY=your_embedder_api_key_here # Defaults to OPENAI_API_KEY if not set
# EMBEDDER_BASE_URL=http://localhost:11434/v1 # For Ollama or other OpenAI-compatible endpoints
# EMBEDDER_MODEL_NAME=nomic-embed-text # Model name for embedding service
# Optional: Only needed for non-standard OpenAI endpoints
# OPENAI_BASE_URL=https://api.openai.com/v1
# Optional: Group ID for namespacing graph data
# GROUP_ID=my_project
# Optional: Path configuration for Docker
# PATH=/root/.local/bin:${PATH}
# Optional: Memory settings for Neo4j (used in Docker Compose)
# NEO4J_server_memory_heap_initial__size=512m
# NEO4J_server_memory_heap_max__size=1G
# NEO4J_server_memory_pagecache_size=512m
# Azure OpenAI configuration
# Optional: Only needed for Azure OpenAI endpoints
# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here
# AZURE_OPENAI_API_VERSION=2025-01-01-preview
# AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-mini-deployment
# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15
# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment
# AZURE_OPENAI_USE_MANAGED_IDENTITY=false