# Graphiti MCP Server Environment Configuration

# Database Configuration
# Choose between 'neo4j' or 'falkordb'
DATABASE_TYPE=falkordb

# These settings are used to connect to your Neo4j database
NEO4J_URI=bolt://localhost:7687
NEO4J_USER=neo4j
NEO4J_PASSWORD=demodemo

# These settings are used to connect to your FalkorDB database
FALKORDB_PORT=6379
FALKORDB_HOST=localhost
FALKORDB_USER=
FALKORDB_PASSWORD=

# OpenAI API Configuration
# Required for LLM operations
OPENAI_API_KEY=your_openai_api_key_here
MODEL_NAME=gpt-4.1-mini

# Optional: Only needed for non-standard OpenAI endpoints
# OPENAI_BASE_URL=https://api.openai.com/v1

# Embedder Configuration
# Provider is auto-detected based on configuration:
# - Azure: if AZURE_OPENAI_EMBEDDING_ENDPOINT is set
# - Ollama: if USE_OLLAMA_FOR_EMBEDDER is set to true
# - OpenAI: default (no additional config needed)
# USE_OLLAMA_FOR_EMBEDDER=true # Set this to true to use Ollama
# OLLAMA_EMBEDDER_API_KEY=ollama # Ollama API key (optional, defaults to 'ollama')
# OLLAMA_EMBEDDER_BASE_URL=http://localhost:11434 # Ollama base URL (when using Ollama)
# OLLAMA_EMBEDDER_MODEL_NAME=nomic-embed-text # Ollama embedding model to use
# OLLAMA_EMBEDDER_DIMENSION=768 # Ollama embedding dimension (model-specific)

# Optional: Group ID for namespacing graph data
# GROUP_ID=my_project

# Optional: Path configuration for Docker
# PATH=/root/.local/bin:${PATH}

# Optional: Memory settings for Neo4j (used in Docker Compose)
# NEO4J_server_memory_heap_initial__size=512m
# NEO4J_server_memory_heap_max__size=1G
# NEO4J_server_memory_pagecache_size=512m

# Azure OpenAI configuration
# Optional: Only needed for Azure OpenAI endpoints
# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here
# AZURE_OPENAI_API_VERSION=2025-01-01-preview
# AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-gpt-4o-mini-deployment
# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15
# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment
# AZURE_OPENAI_USE_MANAGED_IDENTITY=false