refactor: Use gpt-4o-mini by default

Igor Ilic 2025-09-08 21:27:38 +02:00
parent 73c9777128
commit 47b62d50e4
2 changed files with 3 additions and 3 deletions


@@ -16,7 +16,7 @@
 STRUCTURED_OUTPUT_FRAMEWORK="instructor"
 LLM_API_KEY="your_api_key"
-LLM_MODEL="openai/gpt-5-mini"
+LLM_MODEL="openai/gpt-4o-mini"
 LLM_PROVIDER="openai"
 LLM_ENDPOINT=""
 LLM_API_VERSION=""
@@ -33,7 +33,7 @@ EMBEDDING_MAX_TOKENS=8191
 # If using BAML structured output these env variables will be used
 BAML_LLM_PROVIDER=openai
-BAML_LLM_MODEL="gpt-5-mini"
+BAML_LLM_MODEL="gpt-4o-mini"
 BAML_LLM_ENDPOINT=""
 BAML_LLM_API_KEY="your_api_key"
 BAML_LLM_API_VERSION=""


@@ -44,7 +44,7 @@ class LLMConfig(BaseSettings):
     llm_max_completion_tokens: int = 16384
     baml_llm_provider: str = "openai"
-    baml_llm_model: str = "openai/gpt-4o-mini"
+    baml_llm_model: str = "gpt-4o-mini"
     baml_llm_endpoint: str = ""
     baml_llm_api_key: Optional[str] = None
     baml_llm_temperature: float = 0.0
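
For context, a minimal sketch (not the project's actual class) of how a BaseSettings default like the one changed here interacts with the env values above, assuming the config uses pydantic-settings v2; the class name, field subset, and override value are illustrative only:

# Demo of default vs. environment override for a BaseSettings field.
import os
from pydantic_settings import BaseSettings

class DemoLLMConfig(BaseSettings):
    # Mirrors the new default from this commit; other fields omitted.
    baml_llm_provider: str = "openai"
    baml_llm_model: str = "gpt-4o-mini"

print(DemoLLMConfig().baml_llm_model)          # -> gpt-4o-mini (in-code default)
os.environ["BAML_LLM_MODEL"] = "gpt-4o-mini-2024-07-18"  # hypothetical override
print(DemoLLMConfig().baml_llm_model)          # -> gpt-4o-mini-2024-07-18

Env var names are matched case-insensitively to field names by default, which is why changing both the .env example and the in-code default keeps the two in sync.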