Add temperature fallback for Ollama LLM binding
- Implement OLLAMA_LLM_TEMPERATURE env var
- Fall back to global TEMPERATURE if unset
- Remove redundant OllamaLLMOptions logic
- Update env.example with the new setting
parent 936809a084
commit 3099748668
3 changed files with 15 additions and 38 deletions
@@ -127,6 +127,8 @@ TIMEOUT=240
 OLLAMA_LLM_NUM_CTX=32768
 ### Stop sequences for Ollama LLM
 # OLLAMA_LLM_STOP='["</s>", "Assistant:", "\n\n"]'
+### If OLLAMA_LLM_TEMPERATURE is not specified, the system will default to the value defined by TEMPERATURE
+# OLLAMA_LLM_TEMPERATURE=0.85
 ### see also env.ollama-binding-options.example for fine tuning ollama

 ### Optional for Azure
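The two added lines document a two-level fallback: when OLLAMA_LLM_TEMPERATURE is left commented out, the Ollama LLM binding inherits the global TEMPERATURE. A minimal standalone sketch of that resolution order (the default constant and helper name are assumptions for illustration, not code from this commit):

import os

# Assumed default for the sketch; the real value lives in lightrag.constants.DEFAULT_TEMPERATURE.
ASSUMED_DEFAULT_TEMPERATURE = 1.0


def resolve_ollama_llm_temperature() -> float:
    """Resolve temperature with the precedence documented above: binding-specific var first, then global."""
    raw = os.getenv("OLLAMA_LLM_TEMPERATURE")  # highest priority: Ollama-specific override
    if raw is None:
        raw = os.getenv("TEMPERATURE")  # fall back to the global temperature
    return float(raw) if raw is not None else ASSUMED_DEFAULT_TEMPERATURE
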
@@ -300,6 +300,17 @@ def parse_args() -> argparse.Namespace:
     # Inject LLM temperature configuration
     args.temperature = get_env_value("TEMPERATURE", DEFAULT_TEMPERATURE, float)

+    # Handle Ollama LLM temperature fallback when llm-binding is ollama
+    if args.llm_binding == "ollama":
+        # Check whether OLLAMA_LLM_TEMPERATURE is set; if not, fall back to the global TEMPERATURE
+        ollama_llm_temp = get_env_value("OLLAMA_LLM_TEMPERATURE", None)
+        if ollama_llm_temp is None:
+            # Fall back to the global TEMPERATURE value
+            args.ollama_llm_temperature = args.temperature
+        else:
+            # Use the explicitly set OLLAMA_LLM_TEMPERATURE
+            args.ollama_llm_temperature = float(ollama_llm_temp)
+
     # Select Document loading tool (DOCLING, DEFAULT)
     args.document_loading_engine = get_env_value("DOCUMENT_LOADING_ENGINE", "DEFAULT")
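Design note: because args.temperature is resolved just above, the added branch could arguably collapse into a single call, assuming get_env_value applies its default whenever the variable is unset (as the TEMPERATURE line suggests). An untested simplification sketch, not part of this commit:

    # Hypothetical equivalent: let get_env_value fall back to the already-resolved global value.
    if args.llm_binding == "ollama":
        args.ollama_llm_temperature = get_env_value(
            "OLLAMA_LLM_TEMPERATURE", args.temperature, float
        )
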
@@ -8,11 +8,10 @@ bindings and integrations.
 from argparse import ArgumentParser, Namespace
 import argparse
 import json
 import os
-from dataclasses import asdict, dataclass, field, MISSING
+from dataclasses import asdict, dataclass, field
 from typing import Any, ClassVar, List

-from lightrag.utils import get_env_value, logger
+from lightrag.utils import get_env_value
-from lightrag.constants import DEFAULT_TEMPERATURE

@@ -437,44 +436,9 @@ class OllamaEmbeddingOptions(_OllamaOptionsMixin, BindingOptions):
 class OllamaLLMOptions(_OllamaOptionsMixin, BindingOptions):
     """Options for Ollama LLM with specialized configuration for LLM tasks."""

-    # Override temperature field to track if it was explicitly set
-    temperature: float = field(default_factory=lambda: MISSING)
-
     # mandatory name of binding
     _binding_name: ClassVar[str] = "ollama_llm"

-    def __post_init__(self):
-        """Handle temperature parameter with correct priority logic"""
-        # If temperature was not explicitly set, apply priority logic
-        if self.temperature is MISSING:
-            # Check OLLAMA_LLM_TEMPERATURE first (highest priority for env vars)
-            ollama_temp = os.getenv("OLLAMA_LLM_TEMPERATURE")
-            if ollama_temp is not None:
-                try:
-                    self.temperature = float(ollama_temp)
-                    logger.debug(f"Using OLLAMA_LLM_TEMPERATURE: {self.temperature}")
-                    return
-                except (ValueError, TypeError):
-                    logger.warning(
-                        f"Invalid OLLAMA_LLM_TEMPERATURE value: {ollama_temp}"
-                    )
-
-            # Check TEMPERATURE as fallback
-            general_temp = os.getenv("TEMPERATURE")
-            if general_temp is not None:
-                try:
-                    self.temperature = float(general_temp)
-                    logger.debug(
-                        f"Using TEMPERATURE environment variable: {self.temperature}"
-                    )
-                    return
-                except (ValueError, TypeError):
-                    logger.warning(f"Invalid TEMPERATURE value: {general_temp}")
-
-            # Use default value
-            self.temperature = DEFAULT_TEMPERATURE
-            logger.debug(f"Using default temperature: {self.temperature}")
-

 # =============================================================================
 # Additional LLM Provider Bindings
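With the temperature override field and __post_init__ removed, the fallback now has a single home in parse_args, and OllamaLLMOptions no longer reads environment variables on construction. A rough sketch of how the resolved value might now be handed to the options object (the keyword argument is an assumption based on the temperature field inherited from the mixin, not code from this commit):

# Hypothetical wiring: parse_args() (see the earlier hunk) resolves the fallback,
# and the result is passed explicitly instead of being re-derived in __post_init__.
args = parse_args()
llm_options = OllamaLLMOptions(temperature=args.ollama_llm_temperature)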