Fix mode parameter serialization error in Ollama chat API
• Use mode.value for API requests • Add debug logging in aquery_llm
This commit is contained in:
parent
81caee3498
commit
1766cddd6c
2 changed files with 3 additions and 1 deletion
|
|
@@ -499,7 +499,7 @@ class OllamaAPI:
|
|||
prompt_tokens = estimate_tokens(cleaned_query)
|
||||
|
||||
param_dict = {
|
||||
"mode": mode,
|
||||
"mode": mode.value,
|
||||
"stream": request.stream,
|
||||
"only_need_context": only_need_context,
|
||||
"conversation_history": conversation_history,
|
||||
|
|
|
|||
|
|
@@ -2322,6 +2322,8 @@ class LightRAG:
|
|||
Returns:
|
||||
dict[str, Any]: Complete response with structured data and LLM response.
|
||||
"""
|
||||
logger.debug(f"[aquery_llm] Query param: {param}")
|
||||
|
||||
global_config = asdict(self)
|
||||
|
||||
try:
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue