refactor: Set Cognee to use gpt4o-mini by default (#1349)

## Description
Changes Cognee's default LLM model from `gpt-5-mini` to `gpt-4o-mini` for both the instructor and BAML code paths, updating the environment variable template and the `LLMConfig` defaults.

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
Commit 859f2e2fc6 by Vasilije, 2025-09-08 13:16:00 -07:00 (committed via GitHub)
2 changed files with 4 additions and 4 deletions

Changed file 1 of 2 (environment variable template):

@@ -16,7 +16,7 @@
 STRUCTURED_OUTPUT_FRAMEWORK="instructor"
 LLM_API_KEY="your_api_key"
-LLM_MODEL="openai/gpt-5-mini"
+LLM_MODEL="openai/gpt-4o-mini"
 LLM_PROVIDER="openai"
 LLM_ENDPOINT=""
 LLM_API_VERSION=""
@@ -33,7 +33,7 @@ EMBEDDING_MAX_TOKENS=8191
 # If using BAML structured output these env variables will be used
 BAML_LLM_PROVIDER=openai
-BAML_LLM_MODEL="gpt-5-mini"
+BAML_LLM_MODEL="gpt-4o-mini"
 BAML_LLM_ENDPOINT=""
 BAML_LLM_API_KEY="your_api_key"
 BAML_LLM_API_VERSION=""
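For reference, a minimal sketch of how these template variables could be read at runtime. This assumes `python-dotenv` is installed; the variable names come from the template above, and everything else here is illustrative rather than Cognee's actual loading code:

```python
import os

from dotenv import load_dotenv  # assumption: python-dotenv is available

# Load variables from a local .env file into the process environment.
load_dotenv()

# Fall back to the new default model when the variable is unset.
llm_model = os.getenv("LLM_MODEL", "openai/gpt-4o-mini")
llm_provider = os.getenv("LLM_PROVIDER", "openai")
llm_api_key = os.getenv("LLM_API_KEY")

print(f"Using {llm_model} via {llm_provider} (API key set: {llm_api_key is not None})")
```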

Changed file 2 of 2 (LLM configuration defaults, `LLMConfig`):

@@ -35,7 +35,7 @@ class LLMConfig(BaseSettings):
     structured_output_framework: str = "instructor"
     llm_provider: str = "openai"
-    llm_model: str = "gpt-5-mini"
+    llm_model: str = "openai/gpt-4o-mini"
     llm_endpoint: str = ""
     llm_api_key: Optional[str] = None
     llm_api_version: Optional[str] = None
@@ -44,7 +44,7 @@ class LLMConfig(BaseSettings):
     llm_max_completion_tokens: int = 16384
     baml_llm_provider: str = "openai"
-    baml_llm_model: str = "gpt-5-mini"
+    baml_llm_model: str = "gpt-4o-mini"
     baml_llm_endpoint: str = ""
     baml_llm_api_key: Optional[str] = None
     baml_llm_temperature: float = 0.0
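To see how these class defaults interact with environment overrides, here is a standalone sketch of a `BaseSettings` class mirroring a subset of the fields touched above. It assumes pydantic v2 with the `pydantic-settings` package; the real `LLMConfig` has more fields and may configure its settings sources differently:

```python
from typing import Optional

from pydantic_settings import BaseSettings  # assumption: pydantic v2 + pydantic-settings


class LLMSettingsSketch(BaseSettings):
    """Illustrative subset of the LLMConfig fields changed in this diff."""

    llm_provider: str = "openai"
    llm_model: str = "openai/gpt-4o-mini"  # new instructor default
    llm_api_key: Optional[str] = None
    baml_llm_model: str = "gpt-4o-mini"  # new BAML default


settings = LLMSettingsSketch()
# With no environment overrides set, the new defaults apply:
assert settings.llm_model == "openai/gpt-4o-mini"

# Setting LLM_MODEL in the environment (matched case-insensitively by
# pydantic-settings) overrides the class default, e.g.:
#   LLM_MODEL="openai/gpt-5-mini" python sketch.py
```

Because these defaults only apply when the corresponding environment variables are unset, deployments that pin `LLM_MODEL` or `BAML_LLM_MODEL` explicitly should be unaffected by this change.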