From 0ef2623ac66ba567348173abf926ebe1da77e886 Mon Sep 17 00:00:00 2001
From: Igor Ilic <30923996+dexters1@users.noreply.github.com>
Date: Tue, 7 Oct 2025 20:17:10 +0200
Subject: [PATCH] refactor: use gpt-5-mini by default (#1505)

## Description

Set gpt-5-mini back as the default LLM, as the issues with it have been resolved.

## Type of Change

- [ ] Bug fix (non-breaking change that fixes an issue)
- [ ] New feature (non-breaking change that adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
- [ ] Documentation update
- [ ] Code refactoring
- [ ] Performance improvement
- [ ] Other (please specify):

## Pre-submission Checklist

- [ ] **I have tested my changes thoroughly before submitting this PR**
- [ ] **This PR contains minimal changes necessary to address the issue/feature**
- [ ] My code follows the project's coding standards and style guidelines
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] I have added necessary documentation (if applicable)
- [ ] All new and existing tests pass
- [ ] I have searched existing PRs to ensure this change hasn't been submitted already
- [ ] I have linked any relevant issues in the description
- [ ] My commits have clear and descriptive messages

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
---
 .env.template                       | 4 ++--
 cognee/infrastructure/llm/config.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.env.template b/.env.template
index b1e7057e2..7fd3ba9e8 100644
--- a/.env.template
+++ b/.env.template
@@ -16,7 +16,7 @@ STRUCTURED_OUTPUT_FRAMEWORK="instructor"
 
 LLM_API_KEY="your_api_key"
 
-LLM_MODEL="openai/gpt-4o-mini"
+LLM_MODEL="openai/gpt-5-mini"
 LLM_PROVIDER="openai"
 LLM_ENDPOINT=""
 LLM_API_VERSION=""
@@ -36,7 +36,7 @@ EMBEDDING_MAX_TOKENS=8191
 
 # If using BAML structured output these env variables will be used
 BAML_LLM_PROVIDER=openai
-BAML_LLM_MODEL="gpt-4o-mini"
+BAML_LLM_MODEL="gpt-5-mini"
 BAML_LLM_ENDPOINT=""
 BAML_LLM_API_KEY="your_api_key"
 BAML_LLM_API_VERSION=""
diff --git a/cognee/infrastructure/llm/config.py b/cognee/infrastructure/llm/config.py
index 6658a6251..8fd196eaf 100644
--- a/cognee/infrastructure/llm/config.py
+++ b/cognee/infrastructure/llm/config.py
@@ -39,7 +39,7 @@ class LLMConfig(BaseSettings):
     structured_output_framework: str = "instructor"
 
     llm_provider: str = "openai"
-    llm_model: str = "openai/gpt-4o-mini"
+    llm_model: str = "openai/gpt-5-mini"
     llm_endpoint: str = ""
     llm_api_key: Optional[str] = None
     llm_api_version: Optional[str] = None
@@ -48,7 +48,7 @@ class LLMConfig(BaseSettings):
     llm_max_completion_tokens: int = 16384
 
     baml_llm_provider: str = "openai"
-    baml_llm_model: str = "gpt-4o-mini"
+    baml_llm_model: str = "gpt-5-mini"
     baml_llm_endpoint: str = ""
     baml_llm_api_key: Optional[str] = None
     baml_llm_temperature: float = 0.0
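
A minimal sketch of why this default change is non-breaking for users who set `LLM_MODEL` themselves: the stripped-down `LLMConfig` below is not the real class from `cognee/infrastructure/llm/config.py` (which has many more fields), it only illustrates the standard pydantic-settings behavior that the patched class relies on, where the new `openai/gpt-5-mini` default applies only when no environment or `.env` override is present.

```python
# Illustrative sketch, not the actual cognee implementation.
import os
from typing import Optional

from pydantic_settings import BaseSettings


class LLMConfig(BaseSettings):
    llm_provider: str = "openai"
    llm_model: str = "openai/gpt-5-mini"  # default changed by this PR
    llm_api_key: Optional[str] = None


# Without an override, the new default applies.
print(LLMConfig().llm_model)  # -> openai/gpt-5-mini

# An explicit LLM_MODEL value (e.g. from a user's .env) still wins over the
# class default, so setups pinned to gpt-4o-mini keep using it.
os.environ["LLM_MODEL"] = "openai/gpt-4o-mini"
print(LLMConfig().llm_model)  # -> openai/gpt-4o-mini
```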