diff --git a/mcp_server/config/config-docker-falkordb.yaml b/mcp_server/config/config-docker-falkordb.yaml
index bad8f212..8a1e6a15 100644
--- a/mcp_server/config/config-docker-falkordb.yaml
+++ b/mcp_server/config/config-docker-falkordb.yaml
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4.1"
+  model: "gpt-5-mini"
   temperature: 0.0
   max_tokens: 4096
 
@@ -41,7 +41,7 @@ llm:
 
 embedder:
   provider: "openai" # Options: openai, azure_openai, gemini, voyage
-  model: "text-embedding-ada-002"
+  model: "text-embedding-3-small"
   dimensions: 1536
 
 providers:
diff --git a/mcp_server/config/config-docker-kuzu.yaml b/mcp_server/config/config-docker-kuzu.yaml
index a1eacdf8..e8884a66 100644
--- a/mcp_server/config/config-docker-kuzu.yaml
+++ b/mcp_server/config/config-docker-kuzu.yaml
@@ -9,7 +9,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4.1"
+  model: "gpt-5-mini"
   temperature: 0.0
   max_tokens: 4096
 
@@ -42,7 +42,7 @@ llm:
 
 embedder:
   provider: "openai" # Options: openai, azure_openai, gemini, voyage
-  model: "text-embedding-ada-002"
+  model: "text-embedding-3-small"
   dimensions: 1536
 
 providers:
diff --git a/mcp_server/config/config-docker-neo4j.yaml b/mcp_server/config/config-docker-neo4j.yaml
index 572ff025..0e1650fa 100644
--- a/mcp_server/config/config-docker-neo4j.yaml
+++ b/mcp_server/config/config-docker-neo4j.yaml
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4.1"
+  model: "gpt-5-mini"
   temperature: 0.0
   max_tokens: 4096
 
@@ -41,7 +41,7 @@ llm:
 
 embedder:
   provider: "openai" # Options: openai, azure_openai, gemini, voyage
-  model: "text-embedding-ada-002"
+  model: "text-embedding-3-small"
   dimensions: 1536
 
 providers: