Fix mode parameter serialization error in Ollama chat API

• Use mode.value for API requests
• Add debug logging in aquery_llm
This commit is contained in:
yangdx 2025-09-27 15:11:51 +08:00
parent 81caee3498
commit 1766cddd6c
2 changed files with 3 additions and 1 deletion

View file

@@ -499,7 +499,7 @@ class OllamaAPI:
prompt_tokens = estimate_tokens(cleaned_query)
param_dict = {
"mode": mode,
"mode": mode.value,
"stream": request.stream,
"only_need_context": only_need_context,
"conversation_history": conversation_history,

View file

@@ -2322,6 +2322,8 @@ class LightRAG:
Returns:
dict[str, Any]: Complete response with structured data and LLM response.
"""
+        logger.debug(f"[aquery_llm] Query param: {param}")
global_config = asdict(self)
try: