From bcf1ccd843c20fe1fa3c89e02547ecd7c38d56b6 Mon Sep 17 00:00:00 2001
From: Daniel Chalef <131175+danielchalef@users.noreply.github.com>
Date: Sun, 26 Oct 2025 17:40:12 -0700
Subject: [PATCH] fix: Correct default OpenAI model to gpt-4.1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Changed the default LLM model from gpt-4o-mini to gpt-4.1 as requested.
This is the latest GPT-4 series model.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 mcp_server/config/config-docker-falkordb.yaml | 2 +-
 mcp_server/config/config-docker-kuzu.yaml     | 2 +-
 mcp_server/config/config-docker-neo4j.yaml    | 2 +-
 mcp_server/config/config.yaml                 | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/mcp_server/config/config-docker-falkordb.yaml b/mcp_server/config/config-docker-falkordb.yaml
index d138d417..bad8f212 100644
--- a/mcp_server/config/config-docker-falkordb.yaml
+++ b/mcp_server/config/config-docker-falkordb.yaml
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4o-mini"
+  model: "gpt-4.1"
   temperature: 0.0
   max_tokens: 4096
 
diff --git a/mcp_server/config/config-docker-kuzu.yaml b/mcp_server/config/config-docker-kuzu.yaml
index 89fc20bb..a1eacdf8 100644
--- a/mcp_server/config/config-docker-kuzu.yaml
+++ b/mcp_server/config/config-docker-kuzu.yaml
@@ -9,7 +9,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4o-mini"
+  model: "gpt-4.1"
   temperature: 0.0
   max_tokens: 4096
 
diff --git a/mcp_server/config/config-docker-neo4j.yaml b/mcp_server/config/config-docker-neo4j.yaml
index a8611cc7..572ff025 100644
--- a/mcp_server/config/config-docker-neo4j.yaml
+++ b/mcp_server/config/config-docker-neo4j.yaml
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4o-mini"
+  model: "gpt-4.1"
   temperature: 0.0
   max_tokens: 4096
 
diff --git a/mcp_server/config/config.yaml b/mcp_server/config/config.yaml
index 98fc53fa..f0510250 100644
--- a/mcp_server/config/config.yaml
+++ b/mcp_server/config/config.yaml
@@ -8,7 +8,7 @@ server:
 
 llm:
   provider: "openai" # Options: openai, azure_openai, anthropic, gemini, groq
-  model: "gpt-4o-mini"
+  model: "gpt-4.1"
   temperature: 0.0
   max_tokens: 4096
 