Revert "Fix LLM cache handling for Redis to address document deletion scenarios."

This reverts commit 14cda93988.
This commit is contained in:
yangdx 2025-06-29 22:35:40 +08:00
parent 10cd9c90e7
commit 4c2b4b4b6b

View file

@@ -79,52 +79,13 @@ class RedisKVStorage(BaseKVStorage):
         await self.close()

     async def get_by_id(self, id: str) -> dict[str, Any] | None:
-        if id == "default":
-            # Find all cache entries with cache_type == "extract"
-            async with self._get_redis_connection() as redis:
-                try:
-                    result = {}
-                    pattern = f"{self.namespace}:*"
-                    cursor = 0
-                    while True:
-                        cursor, keys = await redis.scan(cursor, match=pattern, count=100)
-                        if keys:
-                            # Batch get values for these keys
-                            pipe = redis.pipeline()
-                            for key in keys:
-                                pipe.get(key)
-                            values = await pipe.execute()
-
-                            # Check each value for cache_type == "extract"
-                            for key, value in zip(keys, values):
-                                if value:
-                                    try:
-                                        data = json.loads(value)
-                                        if isinstance(data, dict) and data.get("cache_type") == "extract":
-                                            # Extract cache key (remove namespace prefix)
-                                            cache_key = key.replace(f"{self.namespace}:", "")
-                                            result[cache_key] = data
-                                    except json.JSONDecodeError:
-                                        continue
-                        if cursor == 0:
-                            break
-                    return result if result else None
-                except Exception as e:
-                    logger.error(f"Error scanning Redis for extract cache entries: {e}")
-                    return None
-        else:
-            # Original behavior for non-"default" ids
-            async with self._get_redis_connection() as redis:
-                try:
-                    data = await redis.get(f"{self.namespace}:{id}")
-                    return json.loads(data) if data else None
-                except json.JSONDecodeError as e:
-                    logger.error(f"JSON decode error for id {id}: {e}")
-                    return None
+        async with self._get_redis_connection() as redis:
+            try:
+                data = await redis.get(f"{self.namespace}:{id}")
+                return json.loads(data) if data else None
+            except json.JSONDecodeError as e:
+                logger.error(f"JSON decode error for id {id}: {e}")
+                return None

     async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
         async with self._get_redis_connection() as redis: