Update QueryParam
parent 3da9f8aab4
commit 91d0f65476
3 changed files with 7 additions and 6 deletions
@@ -61,9 +61,10 @@ OLLAMA_EMULATING_MODEL_TAG=latest
### Number of text chunks to retrieve initially from vector search and keep after reranking
# CHUNK_TOP_K=5

### Enable reranking for retrieved text chunks (default: true)
# ENABLE_RERANK=true

### Rerank Configuration
### Note: Reranking is now controlled per query via the 'enable_rerank' parameter (default: true)
### The following configuration is only needed when you want to use reranking
### Rerank model configuration (required when enable_rerank=true in query parameters)
# RERANK_MODEL=BAAI/bge-reranker-v2-m3
# RERANK_BINDING_HOST=https://api.your-rerank-provider.com/v1/rerank
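The commented-out entries above only document defaults; as a rough illustration, values like these are typically picked up with os.getenv at runtime. A minimal sketch (the helper functions are illustrative, not LightRAG's actual config loader; the variable names come from the snippet above):

import os

# Illustrative only: shows how the .env entries above map to runtime defaults.
def env_bool(name: str, default: str = "true") -> bool:
    return os.getenv(name, default).lower() == "true"

def env_int(name: str, default: str) -> int:
    return int(os.getenv(name, default))

chunk_top_k = env_int("CHUNK_TOP_K", "5")           # chunks kept after reranking
enable_rerank = env_bool("ENABLE_RERANK", "true")   # process-wide default, overridable per query
rerank_model = os.getenv("RERANK_MODEL", "BAAI/bge-reranker-v2-m3")
print(chunk_top_k, enable_rerank, rerank_model)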
@@ -36,7 +36,7 @@ T = TypeVar("T")
 class QueryParam:
     """Configuration parameters for query execution in LightRAG."""

-    mode: Literal["local", "global", "hybrid", "naive", "mix", "bypass"] = "global"
+    mode: Literal["local", "global", "hybrid", "naive", "mix", "bypass"] = "mix"
     """Specifies the retrieval mode:
     - "local": Focuses on context-dependent information.
     - "global": Utilizes global knowledge.
@@ -85,7 +85,7 @@ class QueryParam:
     Format: [{"role": "user/assistant", "content": "message"}].
     """

-    history_turns: int = 3
+    history_turns: int = int(os.getenv("HISTORY_TURNS", "3"))
     """Number of complete conversation turns (user-assistant pairs) to consider in the response context."""

     ids: list[str] | None = None
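Because the new default is evaluated in the class body, int(os.getenv("HISTORY_TURNS", "3")) runs once, when QueryParam is first imported. A small sketch of the resulting behaviour (the value 5 is just an example):

import os

# Must be set before lightrag is imported, since the default is read at class-definition time.
os.environ["HISTORY_TURNS"] = "5"

from lightrag import QueryParam

print(QueryParam().history_turns)                   # 5, taken from the environment
print(QueryParam(history_turns=1).history_turns)    # 1, explicit values still win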
@@ -102,7 +102,7 @@ class QueryParam:
     If provided, this will be used instead of the default value from the prompt template.
     """

-    enable_rerank: bool = True
+    enable_rerank: bool = os.getenv("ENABLE_RERANK", "true").lower() == "true"
     """Enable reranking for retrieved text chunks. If True but no rerank model is configured, a warning will be issued.
     Default is True to enable reranking when rerank model is available.
     """
@@ -22,7 +22,7 @@ def direct_log(message, enable_output: bool = False, level: str = "DEBUG"):
    """
    if not enable_output:
        return

    # Get the current logger level from the lightrag logger
    try:
        from lightrag.utils import logger
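Only context lines are visible for this hunk, but they show the shape of direct_log: return immediately unless enable_output is set, then look up the lightrag logger's level before emitting the message. A self-contained sketch of that pattern (not the real implementation; a stdlib logger stands in for lightrag.utils.logger):

import logging

def direct_log_sketch(message: str, enable_output: bool = False, level: str = "DEBUG") -> None:
    # Opt-in guard, as in the hunk above: silent unless explicitly enabled.
    if not enable_output:
        return
    try:
        # The real code imports lightrag.utils.logger here.
        current_level = logging.getLogger("lightrag").getEffectiveLevel()
    except Exception:
        current_level = logging.INFO
    # Emit only if the requested level clears the logger's effective level.
    if logging.getLevelName(level) >= current_level:
        print(f"{level}: {message}", flush=True)

direct_log_sketch("shared storage initialized", enable_output=True, level="ERROR")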