feat: Add baml specific llm config options

This commit is contained in:
Igor Ilic 2025-08-05 21:23:40 +02:00
parent a9ec51691e
commit 9ca050a70d
3 changed files with 25 additions and 12 deletions

View file

@@ -31,6 +31,12 @@ EMBEDDING_MAX_TOKENS=8191
# If embedding key is not provided same key set for LLM_API_KEY will be used
#EMBEDDING_API_KEY="your_api_key"
# If using BAML structured output these env variables will be used
BAML_LLM_PROVIDER=openai
BAML_LLM_MODEL="gpt-4o-mini"
BAML_LLM_ENDPOINT=""
BAML_LLM_API_KEY="your_api_key"
################################################################################
# 🗄️ Relational database settings
################################################################################

View file

@@ -150,18 +150,17 @@ jobs:
name: Run Simple Examples BAML
runs-on: ubuntu-22.04
env:
LLM_PROVIDER: openai-generic
LLM_MODEL: ${{ secrets.LLM_MODEL }}
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
STRUCTURED_OUTPUT_FRAMEWORK: "BAML"
BAML_LLM_PROVIDER: openai-generic
BAML_LLM_MODEL: ${{ secrets.LLM_MODEL }}
BAML_LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
BAML_LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
EMBEDDING_PROVIDER: openai-generic
EMBEDDING_PROVIDER: openai
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
STRUCTURED_OUTPUT_FRAMEWORK: "BAML"
steps:
- name: Check out repository
uses: actions/checkout@v4

View file

@@ -42,6 +42,13 @@ class LLMConfig(BaseSettings):
llm_temperature: float = 0.0
llm_streaming: bool = False
llm_max_tokens: int = 16384
baml_llm_provider: str = "openai"
baml_llm_model: str = "gpt-4o-mini"
baml_llm_endpoint: str = ""
baml_llm_api_key: Optional[str] = None
baml_llm_temperature: float = 0.0
transcription_model: str = "whisper-1"
graph_prompt_path: str = "generate_graph_prompt.txt"
llm_rate_limit_enabled: bool = False
@@ -62,12 +69,13 @@ class LLMConfig(BaseSettings):
def model_post_init(self, __context) -> None:
"""Initialize the BAML registry after the model is created."""
self.baml_registry.add_llm_client(
name=self.llm_provider,
provider=self.llm_provider,
name=self.baml_llm_provider,
provider=self.baml_llm_provider,
options={
"model": self.llm_model,
"temperature": self.llm_temperature,
"api_key": self.llm_api_key,
"model": self.baml_llm_model,
"temperature": self.baml_llm_temperature,
"api_key": self.baml_llm_api_key,
"endpoint": self.baml_llm_endpoint,
},
)
# Sets the primary client