diff --git a/env.example b/env.example
index 7f431fec..fd2cc43f 100644
--- a/env.example
+++ b/env.example
@@ -61,9 +61,10 @@ OLLAMA_EMULATING_MODEL_TAG=latest
 ### Number of text chunks to retrieve initially from vector search and keep after reranking
 # CHUNK_TOP_K=5
 
+### Enable reranking for retrieved text chunks (default: true)
+# ENABLE_RERANK=true
+
 ### Rerank Configuration
-### Note: Reranking is now controlled per query via the 'enable_rerank' parameter (default: true)
-### The following configuration is only needed when you want to use reranking
 ### Rerank model configuration (required when enable_rerank=true in query parameters)
 # RERANK_MODEL=BAAI/bge-reranker-v2-m3
 # RERANK_BINDING_HOST=https://api.your-rerank-provider.com/v1/rerank
diff --git a/lightrag/base.py b/lightrag/base.py
index 4a2d1b40..ae13d92d 100644
--- a/lightrag/base.py
+++ b/lightrag/base.py
@@ -36,7 +36,7 @@ T = TypeVar("T")
 class QueryParam:
     """Configuration parameters for query execution in LightRAG."""
 
-    mode: Literal["local", "global", "hybrid", "naive", "mix", "bypass"] = "global"
+    mode: Literal["local", "global", "hybrid", "naive", "mix", "bypass"] = "mix"
     """Specifies the retrieval mode:
     - "local": Focuses on context-dependent information.
     - "global": Utilizes global knowledge.
@@ -85,7 +85,7 @@ class QueryParam:
     Format: [{"role": "user/assistant", "content": "message"}].
     """
 
-    history_turns: int = 3
+    history_turns: int = int(os.getenv("HISTORY_TURNS", "3"))
     """Number of complete conversation turns (user-assistant pairs) to consider in the response context."""
 
     ids: list[str] | None = None
@@ -102,7 +102,7 @@ class QueryParam:
     If proivded, this will be use instead of the default vaulue from prompt template.
     """
 
-    enable_rerank: bool = True
+    enable_rerank: bool = os.getenv("ENABLE_RERANK", "true").lower() == "true"
     """Enable reranking for retrieved text chunks. If True but no rerank model is configured,
     a warning will be issued. Default is True to enable reranking when rerank model is available.
     """
diff --git a/lightrag/kg/shared_storage.py b/lightrag/kg/shared_storage.py
index 3c9d7ae2..228bf272 100644
--- a/lightrag/kg/shared_storage.py
+++ b/lightrag/kg/shared_storage.py
@@ -22,7 +22,7 @@ def direct_log(message, enable_output: bool = False, level: str = "DEBUG"):
     """
     if not enable_output:
         return
-    
+
     # Get the current logger level from the lightrag logger
     try:
         from lightrag.utils import logger
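
Behavior sketch (not part of the diff): the new dataclass defaults above read HISTORY_TURNS and ENABLE_RERANK via os.getenv, so they are picked up once, when the QueryParam class body executes; the variables must therefore be set before lightrag.base is first imported, and explicit QueryParam arguments still override them per query. A minimal illustration, assuming lightrag is importable and that base.py already imports os:

import os

# Assumption: set these before lightrag.base is imported, because the dataclass
# field defaults are evaluated only once, at class-definition time.
os.environ["ENABLE_RERANK"] = "false"
os.environ["HISTORY_TURNS"] = "5"

from lightrag.base import QueryParam

param = QueryParam()
print(param.mode)           # "mix"  (new default retrieval mode)
print(param.history_turns)  # 5      (from HISTORY_TURNS)
print(param.enable_rerank)  # False  (from ENABLE_RERANK)

# Explicit arguments still take precedence over the environment-derived defaults:
param = QueryParam(enable_rerank=True, history_turns=3)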