Merge pull request #1719 from danielaskdd/fix-redis-doc-delete

Fix LLM cache handling for Redis to address document deletion scenarios
This commit is contained in:
Daniel.y 2025-06-29 15:17:54 +08:00 committed by GitHub
commit 1800a169b6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -79,13 +79,59 @@ class RedisKVStorage(BaseKVStorage):
await self.close()
async def get_by_id(self, id: str) -> dict[str, Any] | None:
    """Fetch a stored value by id from Redis.

    The special id ``"default"`` is handled differently: instead of a
    direct key lookup, it scans the whole namespace and returns a dict
    of every cached entry whose ``cache_type`` is ``"extract"`` (keyed
    by the cache key with the namespace prefix stripped). This supports
    document-deletion scenarios where all extraction-cache entries must
    be located at once.

    Args:
        id: Key to look up, or the sentinel string ``"default"``.

    Returns:
        For a normal id: the JSON-decoded value, or ``None`` if the key
        is absent or its value is not valid JSON.
        For ``"default"``: a dict mapping cache keys to their decoded
        entries, or ``None`` if no matching entries exist or the scan
        fails.
    """
    if id == "default":
        # Find all cache entries with cache_type == "extract".
        async with self._get_redis_connection() as redis:
            try:
                result: dict[str, Any] = {}
                pattern = f"{self.namespace}:*"
                cursor = 0
                while True:
                    # SCAN iterates incrementally; cursor == 0 signals completion.
                    cursor, keys = await redis.scan(
                        cursor, match=pattern, count=100
                    )
                    if keys:
                        # Batch-get values for this page of keys in one round trip.
                        pipe = redis.pipeline()
                        for key in keys:
                            pipe.get(key)
                        values = await pipe.execute()
                        # Keep only entries tagged cache_type == "extract".
                        # NOTE(review): assumes the client uses decode_responses=True
                        # so keys are str, not bytes — confirm in connection setup.
                        for key, value in zip(keys, values):
                            if value:
                                try:
                                    data = json.loads(value)
                                    if (
                                        isinstance(data, dict)
                                        and data.get("cache_type") == "extract"
                                    ):
                                        # Strip the namespace prefix to recover the cache key.
                                        cache_key = key.replace(
                                            f"{self.namespace}:", ""
                                        )
                                        result[cache_key] = data
                                except json.JSONDecodeError:
                                    # Skip malformed entries rather than abort the scan.
                                    continue
                    if cursor == 0:
                        break
                return result if result else None
            except Exception as e:
                logger.error(f"Error scanning Redis for extract cache entries: {e}")
                return None
    else:
        # Original behavior for non-"default" ids: direct key lookup.
        async with self._get_redis_connection() as redis:
            try:
                data = await redis.get(f"{self.namespace}:{id}")
                return json.loads(data) if data else None
            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error for id {id}: {e}")
                return None
async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
async with self._get_redis_connection() as redis: