From c5d7597319149e9ea11f5069ac1e20d127ac00d6 Mon Sep 17 00:00:00 2001 From: gmakstutis Date: Sun, 14 Dec 2025 19:10:44 +0000 Subject: [PATCH] Add mcp_server/.env --- mcp_server/.env | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 mcp_server/.env diff --git a/mcp_server/.env b/mcp_server/.env new file mode 100644 index 00000000..dd4677b2 --- /dev/null +++ b/mcp_server/.env @@ -0,0 +1,49 @@ +# Graphiti MCP Server Environment Configuration + +# Neo4j Database Configuration +# These settings are used to connect to your Neo4j database +NEO4J_URI=bolt://localhost:7687 +NEO4J_USER=neo4j +NEO4J_PASSWORD=demodemo + +# OpenAI API Configuration +# Required for LLM operations +OPENAI_API_KEY=your_openai_api_key_here +MODEL_NAME=gpt-4.1-mini + +# Optional: Only needed for non-standard OpenAI endpoints +# OPENAI_BASE_URL=https://api.openai.com/v1 + +# Optional: Group ID for namespacing graph data +# GROUP_ID=my_project + +# Concurrency Control +# Controls how many episodes can be processed simultaneously +# Default: 10 (suitable for OpenAI Tier 3, mid-tier Anthropic) +# Adjust based on your LLM provider's rate limits: +# - OpenAI Tier 1 (free): 1-2 +# - OpenAI Tier 2: 5-8 +# - OpenAI Tier 3: 10-15 +# - OpenAI Tier 4: 20-50 +# - Anthropic default: 5-8 +# - Anthropic high tier: 15-30 +# - Ollama (local): 1-5 +# See README.md "Concurrency and LLM Provider 429 Rate Limit Errors" for details +SEMAPHORE_LIMIT=10 + +# Optional: Path configuration for Docker +# PATH=/root/.local/bin:${PATH} + +# Optional: Memory settings for Neo4j (used in Docker Compose) +# NEO4J_server_memory_heap_initial__size=512m +# NEO4J_server_memory_heap_max__size=1G +# NEO4J_server_memory_pagecache_size=512m + +# Azure OpenAI configuration +# Optional: Only needed for Azure OpenAI endpoints +# AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint_here +# AZURE_OPENAI_API_VERSION=2025-01-01-preview +# 
AZURE_OPENAI_DEPLOYMENT_NAME=gpt-4o-mini-deployment +# AZURE_OPENAI_EMBEDDING_API_VERSION=2023-05-15 +# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=text-embedding-3-large-deployment +# AZURE_OPENAI_USE_MANAGED_IDENTITY=false