diff --git a/.env.template b/.env.template
index b1e7057e2..7fd3ba9e8 100644
--- a/.env.template
+++ b/.env.template
@@ -16,7 +16,7 @@
 STRUCTURED_OUTPUT_FRAMEWORK="instructor"
 LLM_API_KEY="your_api_key"
-LLM_MODEL="openai/gpt-4o-mini"
+LLM_MODEL="openai/gpt-5-mini"
 LLM_PROVIDER="openai"
 LLM_ENDPOINT=""
 LLM_API_VERSION=""
@@ -36,7 +36,7 @@
 EMBEDDING_MAX_TOKENS=8191
 # If using BAML structured output these env variables will be used
 BAML_LLM_PROVIDER=openai
-BAML_LLM_MODEL="gpt-4o-mini"
+BAML_LLM_MODEL="gpt-5-mini"
 BAML_LLM_ENDPOINT=""
 BAML_LLM_API_KEY="your_api_key"
 BAML_LLM_API_VERSION=""
diff --git a/cognee/infrastructure/llm/config.py b/cognee/infrastructure/llm/config.py
index 6658a6251..8fd196eaf 100644
--- a/cognee/infrastructure/llm/config.py
+++ b/cognee/infrastructure/llm/config.py
@@ -39,7 +39,7 @@ class LLMConfig(BaseSettings):
     structured_output_framework: str = "instructor"

     llm_provider: str = "openai"
-    llm_model: str = "openai/gpt-4o-mini"
+    llm_model: str = "openai/gpt-5-mini"
     llm_endpoint: str = ""
     llm_api_key: Optional[str] = None
     llm_api_version: Optional[str] = None
@@ -48,7 +48,7 @@ class LLMConfig(BaseSettings):
     llm_max_completion_tokens: int = 16384

     baml_llm_provider: str = "openai"
-    baml_llm_model: str = "gpt-4o-mini"
+    baml_llm_model: str = "gpt-5-mini"
     baml_llm_endpoint: str = ""
     baml_llm_api_key: Optional[str] = None
     baml_llm_temperature: float = 0.0