feat: Add baml specific llm config options
This commit is contained in:
parent
a9ec51691e
commit
9ca050a70d
3 changed files with 25 additions and 12 deletions
|
|
@@ -31,6 +31,12 @@ EMBEDDING_MAX_TOKENS=8191
|
||||||
# If embedding key is not provided same key set for LLM_API_KEY will be used
|
# If embedding key is not provided same key set for LLM_API_KEY will be used
|
||||||
#EMBEDDING_API_KEY="your_api_key"
|
#EMBEDDING_API_KEY="your_api_key"
|
||||||
|
|
||||||
|
# If using BAML structured output these env variables will be used
|
||||||
|
BAML_LLM_PROVIDER=openai
|
||||||
|
BAML_LLM_MODEL="gpt-4o-mini"
|
||||||
|
BAML_LLM_ENDPOINT=""
|
||||||
|
BAML_LLM_API_KEY="your_api_key"
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# 🗄️ Relational database settings
|
# 🗄️ Relational database settings
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
|
||||||
13
.github/workflows/basic_tests.yml
vendored
13
.github/workflows/basic_tests.yml
vendored
|
|
@@ -150,18 +150,17 @@ jobs:
|
||||||
name: Run Simple Examples BAML
|
name: Run Simple Examples BAML
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
LLM_PROVIDER: openai-generic
|
STRUCTURED_OUTPUT_FRAMEWORK: "BAML"
|
||||||
LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
BAML_LLM_PROVIDER: openai-generic
|
||||||
LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
BAML_LLM_MODEL: ${{ secrets.LLM_MODEL }}
|
||||||
LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
BAML_LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
|
||||||
LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
|
BAML_LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
|
||||||
|
|
||||||
EMBEDDING_PROVIDER: openai-generic
|
EMBEDDING_PROVIDER: openai
|
||||||
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
|
||||||
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
|
||||||
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
|
||||||
EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
|
EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
|
||||||
STRUCTURED_OUTPUT_FRAMEWORK: "BAML"
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
|
||||||
|
|
@@ -42,6 +42,13 @@ class LLMConfig(BaseSettings):
|
||||||
llm_temperature: float = 0.0
|
llm_temperature: float = 0.0
|
||||||
llm_streaming: bool = False
|
llm_streaming: bool = False
|
||||||
llm_max_tokens: int = 16384
|
llm_max_tokens: int = 16384
|
||||||
|
|
||||||
|
baml_llm_provider: str = "openai"
|
||||||
|
baml_llm_model: str = "gpt-4o-mini"
|
||||||
|
baml_llm_endpoint: str = ""
|
||||||
|
baml_llm_api_key: Optional[str] = None
|
||||||
|
baml_llm_temperature: float = 0.0
|
||||||
|
|
||||||
transcription_model: str = "whisper-1"
|
transcription_model: str = "whisper-1"
|
||||||
graph_prompt_path: str = "generate_graph_prompt.txt"
|
graph_prompt_path: str = "generate_graph_prompt.txt"
|
||||||
llm_rate_limit_enabled: bool = False
|
llm_rate_limit_enabled: bool = False
|
||||||
|
|
@@ -62,12 +69,13 @@ class LLMConfig(BaseSettings):
|
||||||
def model_post_init(self, __context) -> None:
|
def model_post_init(self, __context) -> None:
|
||||||
"""Initialize the BAML registry after the model is created."""
|
"""Initialize the BAML registry after the model is created."""
|
||||||
self.baml_registry.add_llm_client(
|
self.baml_registry.add_llm_client(
|
||||||
name=self.llm_provider,
|
name=self.baml_llm_provider,
|
||||||
provider=self.llm_provider,
|
provider=self.baml_llm_provider,
|
||||||
options={
|
options={
|
||||||
"model": self.llm_model,
|
"model": self.baml_llm_model,
|
||||||
"temperature": self.llm_temperature,
|
"temperature": self.baml_llm_temperature,
|
||||||
"api_key": self.llm_api_key,
|
"api_key": self.baml_llm_api_key,
|
||||||
|
"endpoint": self.baml_llm_endpoint,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
# Sets the primary client
|
# Sets the primary client
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue