From 17faeb2fb8260c06450af08c0e2e176f4fe70653 Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 14 Aug 2025 11:38:36 +0800 Subject: [PATCH 1/5] refactor: integrate document consistency validation into pipeline processing This ensures data consistency validation is part of the main processing pipeline and provides better monitoring of inconsistent document cleanup operations. --- lightrag/lightrag.py | 101 ++++++++++++++++++++++++++++--------------- 1 file changed, 66 insertions(+), 35 deletions(-) diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py index ca7d6e63..d4d7fa47 100644 --- a/lightrag/lightrag.py +++ b/lightrag/lightrag.py @@ -1111,44 +1111,70 @@ class LightRAG: return track_id async def _validate_and_fix_document_consistency( - self, to_process_docs: dict[str, DocProcessingStatus] + self, + to_process_docs: dict[str, DocProcessingStatus], + pipeline_status: dict, + pipeline_status_lock: asyncio.Lock, ) -> dict[str, DocProcessingStatus]: - """Validate and fix document data consistency""" + """Validate and fix document data consistency by deleting inconsistent entries""" inconsistent_docs = [] + # Check each document's data consistency for doc_id, status_doc in to_process_docs.items(): # Check if corresponding content exists in full_docs content_data = await self.full_docs.get_by_id(doc_id) if not content_data: inconsistent_docs.append(doc_id) - logger.warning( - f"Document {doc_id} has status record but missing content in full_docs" - ) + async with pipeline_status_lock: + log_message = f"Data inconsistency detected: Document {doc_id} ({status_doc.file_path}) missing content data" + logger.warning(log_message) + pipeline_status["latest_message"] = log_message + pipeline_status["history_messages"].append(log_message) - # Mark inconsistent documents as FAILED + # Delete inconsistent document entries one by one if inconsistent_docs: - failed_updates = {} - for doc_id in inconsistent_docs: - status_doc = to_process_docs[doc_id] - failed_updates[doc_id] = { - "status": DocStatus.FAILED, - "error_msg": "Document content not found in full_docs storage - data inconsistency detected", - "content_summary": status_doc.content_summary, - "content_length": status_doc.content_length, - "created_at": status_doc.created_at, - "updated_at": datetime.now(timezone.utc).isoformat(), - "file_path": status_doc.file_path, - "track_id": status_doc.track_id, - } + async with pipeline_status_lock: + summary_message = f"Starting cleanup of {len(inconsistent_docs)} inconsistent document entries" + logger.info(summary_message) + pipeline_status["latest_message"] = summary_message + pipeline_status["history_messages"].append(summary_message) - await self.doc_status.upsert(failed_updates) - logger.info( - f"Marked {len(inconsistent_docs)} inconsistent documents as FAILED" - ) - - # Remove these documents from the processing list + successful_deletions = 0 for doc_id in inconsistent_docs: - to_process_docs.pop(doc_id, None) + try: + status_doc = to_process_docs[doc_id] + file_path = getattr(status_doc, "file_path", "unknown_source") + + # Delete doc_status entry + await self.doc_status.delete([doc_id]) + successful_deletions += 1 + + # Log successful deletion + async with pipeline_status_lock: + log_message = f"Deleted inconsistent document entry: {doc_id} ({file_path})" + logger.info(log_message) + pipeline_status["latest_message"] = log_message + pipeline_status["history_messages"].append(log_message) + + # Remove from processing list + to_process_docs.pop(doc_id, None) + + except Exception as e: + # Log 
deletion failure + async with pipeline_status_lock: + error_message = ( + f"Failed to delete document entry: {doc_id} - {str(e)}" + ) + logger.error(error_message) + pipeline_status["latest_message"] = error_message + pipeline_status["history_messages"].append(error_message) + + # Final summary log + async with pipeline_status_lock: + final_message = f"Data consistency cleanup completed: successfully deleted {successful_deletions} entries" + logger.info(final_message) + pipeline_status["latest_message"] = final_message + pipeline_status["history_messages"].append(final_message) return to_process_docs @@ -1192,15 +1218,6 @@ class LightRAG: logger.info("No documents to process") return - # Validate document data consistency and fix any issues - to_process_docs = await self._validate_and_fix_document_consistency( - to_process_docs - ) - - if not to_process_docs: - logger.info("No valid documents to process after consistency check") - return - pipeline_status.update( { "busy": True, @@ -1233,6 +1250,20 @@ class LightRAG: pipeline_status["history_messages"].append(log_message) break + # Validate document data consistency and fix any issues as part of the pipeline + to_process_docs = await self._validate_and_fix_document_consistency( + to_process_docs, pipeline_status, pipeline_status_lock + ) + + if not to_process_docs: + log_message = ( + "No valid documents to process after consistency check" + ) + logger.info(log_message) + pipeline_status["latest_message"] = log_message + pipeline_status["history_messages"].append(log_message) + break + log_message = f"Processing {len(to_process_docs)} document(s)" logger.info(log_message) From 28fc075c59b4c6e93a2ed40417cc240f9f7ec28b Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 14 Aug 2025 11:49:58 +0800 Subject: [PATCH 2/5] Simplify inconsistency logging and cleanup messages --- lightrag/lightrag.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py index d4d7fa47..c81b583c 100644 --- a/lightrag/lightrag.py +++ b/lightrag/lightrag.py @@ -1125,16 +1125,11 @@ class LightRAG: content_data = await self.full_docs.get_by_id(doc_id) if not content_data: inconsistent_docs.append(doc_id) - async with pipeline_status_lock: - log_message = f"Data inconsistency detected: Document {doc_id} ({status_doc.file_path}) missing content data" - logger.warning(log_message) - pipeline_status["latest_message"] = log_message - pipeline_status["history_messages"].append(log_message) # Delete inconsistent document entries one by one if inconsistent_docs: async with pipeline_status_lock: - summary_message = f"Starting cleanup of {len(inconsistent_docs)} inconsistent document entries" + summary_message = f"Inconsistent document entries found: {len(inconsistent_docs)}" logger.info(summary_message) pipeline_status["latest_message"] = summary_message pipeline_status["history_messages"].append(summary_message) @@ -1151,7 +1146,7 @@ class LightRAG: # Log successful deletion async with pipeline_status_lock: - log_message = f"Deleted inconsistent document entry: {doc_id} ({file_path})" + log_message = f"Deleted entry: {doc_id} ({file_path})" logger.info(log_message) pipeline_status["latest_message"] = log_message pipeline_status["history_messages"].append(log_message) @@ -1163,7 +1158,7 @@ class LightRAG: # Log deletion failure async with pipeline_status_lock: error_message = ( - f"Failed to delete document entry: {doc_id} - {str(e)}" + f"Failed to delete entry: {doc_id} - {str(e)}" ) logger.error(error_message) 
pipeline_status["latest_message"] = error_message From 65a4437f788faafaf9c4882d74222810513a0608 Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 14 Aug 2025 12:33:36 +0800 Subject: [PATCH 3/5] Fix: Persist document data immediately after index update --- lightrag/lightrag.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py index c81b583c..ef263128 100644 --- a/lightrag/lightrag.py +++ b/lightrag/lightrag.py @@ -1103,6 +1103,8 @@ class LightRAG: for doc_id in new_docs.keys() } await self.full_docs.upsert(full_docs_data) + # Persist data to disk immediately + await self.full_docs.index_done_callback() # Store document status (without content) await self.doc_status.upsert(new_docs) From 772f981e7e70af5d0ba49489eaee6e795d82cbd6 Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 14 Aug 2025 12:35:39 +0800 Subject: [PATCH 4/5] fix: check and process queued docs even when upload directory is empty --- lightrag/api/routers/document_routes.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lightrag/api/routers/document_routes.py b/lightrag/api/routers/document_routes.py index 924b642a..ffb085a3 100644 --- a/lightrag/api/routers/document_routes.py +++ b/lightrag/api/routers/document_routes.py @@ -1052,12 +1052,14 @@ async def run_scanning_process( total_files = len(new_files) logger.info(f"Found {total_files} files to index.") - if not new_files: - return - - # Process all files at once with track_id - await pipeline_index_files(rag, new_files, track_id) - logger.info(f"Scanning process completed: {total_files} files Processed.") + if new_files: + # Process all files at once with track_id + await pipeline_index_files(rag, new_files, track_id) + logger.info(f"Scanning process completed: {total_files} files Processed.") + else: + # No new files to index, check if there are any documents in the queue + logger.info("No upload file found, check if there are any documents in the queue...") + await rag.apipeline_process_enqueue_documents() except Exception as e: logger.error(f"Error during scanning process: {str(e)}") From 3bba5fc50627256c1d93d283328f5deb926586ab Mon Sep 17 00:00:00 2001 From: yangdx Date: Thu, 14 Aug 2025 13:03:23 +0800 Subject: [PATCH 5/5] Fix linting --- lightrag/api/routers/document_routes.py | 4 +++- lightrag/lightrag.py | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lightrag/api/routers/document_routes.py b/lightrag/api/routers/document_routes.py index ffb085a3..eaec6fbf 100644 --- a/lightrag/api/routers/document_routes.py +++ b/lightrag/api/routers/document_routes.py @@ -1058,7 +1058,9 @@ async def run_scanning_process( logger.info(f"Scanning process completed: {total_files} files Processed.") else: # No new files to index, check if there are any documents in the queue - logger.info("No upload file found, check if there are any documents in the queue...") + logger.info( + "No upload file found, check if there are any documents in the queue..." 
+ ) await rag.apipeline_process_enqueue_documents() except Exception as e: diff --git a/lightrag/lightrag.py b/lightrag/lightrag.py index ef263128..942cc55a 100644 --- a/lightrag/lightrag.py +++ b/lightrag/lightrag.py @@ -1131,7 +1131,9 @@ class LightRAG: # Delete inconsistent document entries one by one if inconsistent_docs: async with pipeline_status_lock: - summary_message = f"Inconsistent document entries found: {len(inconsistent_docs)}" + summary_message = ( + f"Inconsistent document entries found: {len(inconsistent_docs)}" + ) logger.info(summary_message) pipeline_status["latest_message"] = summary_message pipeline_status["history_messages"].append(summary_message) @@ -1159,9 +1161,7 @@ class LightRAG: except Exception as e: # Log deletion failure async with pipeline_status_lock: - error_message = ( - f"Failed to delete entry: {doc_id} - {str(e)}" - ) + error_message = f"Failed to delete entry: {doc_id} - {str(e)}" logger.error(error_message) pipeline_status["latest_message"] = error_message pipeline_status["history_messages"].append(error_message)
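
Reviewer note: below is a minimal, self-contained sketch of the cleanup pattern introduced in PATCH 1/5 and simplified in PATCH 2/5. The InMemoryKV class, the validate_and_fix signature, and the sample document IDs are illustrative assumptions standing in for LightRAG's storage backends and the _validate_and_fix_document_consistency method; they are not the project's actual API.

# Sketch of the consistency-cleanup pattern (illustrative stand-ins, not LightRAG classes).
import asyncio
import logging

logger = logging.getLogger("consistency-sketch")
logging.basicConfig(level=logging.INFO)


class InMemoryKV:
    """Stand-in for a KV storage backend (assumption, not the real LightRAG API)."""

    def __init__(self, data=None):
        self._data = dict(data or {})

    async def get_by_id(self, key):
        return self._data.get(key)

    async def delete(self, keys):
        for key in keys:
            self._data.pop(key, None)


async def validate_and_fix(to_process_docs, full_docs, doc_status,
                           pipeline_status, pipeline_status_lock):
    """Drop queued docs whose content is missing from full_docs, mirroring PATCH 1/5."""
    # Collect doc ids that have a status record but no stored content.
    inconsistent = [doc_id for doc_id in to_process_docs
                    if not await full_docs.get_by_id(doc_id)]
    if not inconsistent:
        return to_process_docs

    async with pipeline_status_lock:
        msg = f"Inconsistent document entries found: {len(inconsistent)}"
        logger.info(msg)
        pipeline_status["latest_message"] = msg
        pipeline_status["history_messages"].append(msg)

    deleted = 0
    for doc_id in inconsistent:
        try:
            await doc_status.delete([doc_id])  # remove the stale status record
            to_process_docs.pop(doc_id, None)  # and skip it in this pipeline run
            deleted += 1
        except Exception as exc:  # per-document failures do not abort the cleanup
            async with pipeline_status_lock:
                err = f"Failed to delete entry: {doc_id} - {exc}"
                logger.error(err)
                pipeline_status["latest_message"] = err
                pipeline_status["history_messages"].append(err)

    async with pipeline_status_lock:
        msg = f"Data consistency cleanup completed: successfully deleted {deleted} entries"
        logger.info(msg)
        pipeline_status["latest_message"] = msg
        pipeline_status["history_messages"].append(msg)
    return to_process_docs


async def main():
    full_docs = InMemoryKV({"doc-1": {"content": "hello"}})  # doc-2 content is missing
    doc_status = InMemoryKV({"doc-1": {}, "doc-2": {}})
    pipeline_status = {"latest_message": "", "history_messages": []}
    queue = {"doc-1": object(), "doc-2": object()}
    remaining = await validate_and_fix(queue, full_docs, doc_status,
                                       pipeline_status, asyncio.Lock())
    print(sorted(remaining))  # -> ['doc-1']


if __name__ == "__main__":
    asyncio.run(main())

Design note: deleting the stale status record (rather than marking it FAILED, as the code removed by PATCH 1/5 did) keeps the processing queue and the content store in sync, and the immediate index_done_callback() added in PATCH 3/5 should shrink the window in which such status/content mismatches can arise in the first place.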