# Graphiti MCP Server Environment Configuration

# Neo4j Database Configuration
# These settings are used to connect to your Neo4j database
NEO4J_URI=bolt://localhost:7687
NEO4J_USER=neo4j
# Demo credential only — change this for any non-local deployment
NEO4J_PASSWORD=demodemo

# LLM API Configuration (OpenAI-compatible)
# Required for LLM operations. This template targets Google Gemini through
# its OpenAI-compatible endpoint, so the key below is a Gemini API key and
# OPENAI_BASE_URL must point at the Gemini endpoint (see below).
OPENAI_API_KEY=your_gemini_api_key_here
MODEL_NAME=gemini-2.5-flash
SMALL_MODEL_NAME=gemini-2.5-flash

# Optional: Only needed for non-standard OpenAI endpoints
# NOTE: Gemini's OpenAI-compatible API is served under the /openai/ suffix;
# without it, OpenAI-SDK clients hit the native Gemini API and fail.
OPENAI_BASE_URL=https://generativelanguage.googleapis.com/v1beta/openai/

# Embedder Configuration
# Optional: Separate API key and URL for embedder (falls back to OPENAI_API_KEY and OPENAI_BASE_URL if not set)
# Note: OpenRouter does not support embeddings API, using Ollama as free alternative
# "ollama" is a placeholder — a local Ollama server does not validate API keys
EMBEDDER_API_KEY=ollama
EMBEDDER_BASE_URL=http://localhost:11434
EMBEDDER_MODEL_NAME=nomic-embed-text
# Must match the output dimension of EMBEDDER_MODEL_NAME above
EMBEDDER_DIMENSION=768

# Optional: Group ID for namespacing graph data
# GROUP_ID=my_project

# Optional: Path configuration for Docker
# PATH=/root/.local/bin:${PATH}

# Optional: Memory settings for Neo4j (used in Docker Compose)
# NEO4J_server_memory_heap_initial__size=512m
# NEO4J_server_memory_heap_max__size=1G
# NEO4J_server_memory_pagecache_size=512m

# Azure OpenAI configuration
# Optional: Only needed for Azure OpenAI endpoints
# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here
# AZURE_OPENAI_API_VERSION=2025-01-01-preview
# AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-gpt-4o-mini-deployment
# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15
# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment
# AZURE_OPENAI_USE_MANAGED_IDENTITY=false