fix: Improve MCP server configuration and initialization
- Fix API key detection: remove hardcoded OpenAI checks, let factories handle provider-specific validation
- Fix .env file loading: search for .env in the mcp_server directory first
- Change default transport to SSE for broader compatibility (was stdio)
- Add proper error handling with warnings for failed client initialization
- Model already defaults to gpt-4o as requested

These changes ensure the MCP server properly loads API keys from .env files and creates the appropriate LLM/embedder clients based on configuration.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
parent
fa2b5696bc
commit
0f85119443
2 changed files with 17 additions and 9 deletions
|
|
@@ -2,7 +2,7 @@
|
|||
# This file supports environment variable expansion using ${VAR_NAME} or ${VAR_NAME:default_value}
|
||||
|
||||
server:
|
||||
transport: "stdio" # Options: stdio, sse
|
||||
transport: "sse" # Options: stdio, sse, http
|
||||
host: "0.0.0.0"
|
||||
port: 8000
|
||||
|
||||
|
|
|
|||
|
|
@@ -35,7 +35,14 @@ from services.factories import DatabaseDriverFactory, EmbedderFactory, LLMClientFactory
|
|||
from services.queue_service import QueueService
|
||||
from utils.formatting import format_fact_result
|
||||
|
||||
load_dotenv()
|
||||
# Load .env file from mcp_server directory
|
||||
mcp_server_dir = Path(__file__).parent.parent
|
||||
env_file = mcp_server_dir / '.env'
|
||||
if env_file.exists():
|
||||
load_dotenv(env_file)
|
||||
else:
|
||||
# Try current working directory as fallback
|
||||
load_dotenv()
|
||||
|
||||
|
||||
# Semaphore limit for concurrent Graphiti operations.
|
||||
|
|
@@ -118,16 +125,17 @@ class GraphitiService:
|
|||
llm_client = None
|
||||
embedder_client = None
|
||||
|
||||
# Only create LLM client if API key is available
|
||||
if self.config.llm.providers.openai and self.config.llm.providers.openai.api_key:
|
||||
# Create LLM client based on configured provider
|
||||
try:
|
||||
llm_client = LLMClientFactory.create(self.config.llm)
|
||||
except Exception as e:
|
||||
logger.warning(f'Failed to create LLM client: {e}')
|
||||
|
||||
# Only create embedder client if API key is available
|
||||
if (
|
||||
self.config.embedder.providers.openai
|
||||
and self.config.embedder.providers.openai.api_key
|
||||
):
|
||||
# Create embedder client based on configured provider
|
||||
try:
|
||||
embedder_client = EmbedderFactory.create(self.config.embedder)
|
||||
except Exception as e:
|
||||
logger.warning(f'Failed to create embedder client: {e}')
|
||||
|
||||
# Get database configuration
|
||||
db_config = DatabaseDriverFactory.create_config(self.config.database)
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue