Fix prompt response cache failure when is_embedding_cache_enabled is true
This commit is contained in:
parent
5d14ab03eb
commit
bed5a97ae2
1 changed file with 1 addition and 1 deletion
|
|
@ -523,7 +523,7 @@ async def handle_cache(
|
|||
mode=mode,
|
||||
use_llm_check=use_llm_check,
|
||||
llm_func=llm_model_func if use_llm_check else None,
|
||||
original_prompt=prompt if use_llm_check else None,
|
||||
original_prompt=prompt,
|
||||
cache_type=cache_type,
|
||||
)
|
||||
if best_cached_response is not None:
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue