# Graphiti MCP Server Environment Configuration
# Neo4j Database Configuration
# These settings are used to connect to your Neo4j database
NEO4J_URI=bolt://localhost:7687
NEO4J_USER=neo4j
NEO4J_PASSWORD=demodemo
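# Example (hypothetical hosted instance): a Neo4j Aura-style connection string,
# shown only to illustrate the URI format for a non-local database.
# NEO4J_URI=neo4j+s://<your-instance-id>.databases.neo4j.io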
# OpenAI API Configuration
# Required for LLM operations
OPENAI_API_KEY=your_openai_api_key_here
MODEL_NAME=gpt-4.1-mini
# Optional: Only needed for non-standard OpenAI endpoints
# OPENAI_BASE_URL=https://api.openai.com/v1
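# Example (hypothetical local proxy): any OpenAI-compatible endpoint can be used here.
# OPENAI_BASE_URL=http://localhost:8080/v1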
# Optional: Group ID for namespacing graph data
# GROUP_ID=my_project
# Optional: Path configuration for Docker
# PATH=/root/.local/bin:${PATH}
# Optional: Memory settings for Neo4j (used in Docker Compose)
# NEO4J_server_memory_heap_initial__size=512m
# NEO4J_server_memory_heap_max__size=1G
# NEO4J_server_memory_pagecache_size=512m
# Azure OpenAI configuration
# Optional: Only needed for Azure OpenAI endpoints
# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here
# AZURE_OPENAI_API_VERSION=2025-01-01-preview
# AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-mini-deployment
# AZURE_OPENAI_USE_MANAGED_IDENTITY=false
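# Example (placeholder resource name): Azure OpenAI endpoints typically take the form
# https://<your-resource-name>.openai.azure.com/
# AZURE_OPENAI_ENDPOINT=https://my-resource.openai.azure.com/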
# Optional: Azure OpenAI Embedding configuration
# Note: You must set this endpoint even if AZURE_OPENAI_ENDPOINT is set;
# otherwise the embedding client will default to api.openai.com
# AZURE_OPENAI_EMBEDDING_ENDPOINT=your_azure_openai_embedding_endpoint_here
# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15
# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment
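# Example (placeholder resource name; this may be the same resource as AZURE_OPENAI_ENDPOINT,
# but it must still be set explicitly here for the embedding client):
# AZURE_OPENAI_EMBEDDING_ENDPOINT=https://my-resource.openai.azure.com/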