fix potential deadlock

This commit is contained in:
estevez.sebastian@gmail.com 2025-07-31 13:41:22 -04:00
parent 155f7edba9
commit 75afc242c7
2 changed files with 8 additions and 13 deletions

View file

@ -163,7 +163,7 @@ class ConnectorService:
  # Create custom processor for connector files
  from models.processors import ConnectorFileProcessor
- processor = ConnectorFileProcessor(self, connection_id, files_to_process)
+ processor = ConnectorFileProcessor(self, connection_id, files_to_process, user_id)
  # Use file IDs as items (no more fake file paths!)
  file_ids = [file_info['id'] for file_info in files_to_process]
@ -191,7 +191,7 @@ class ConnectorService:
  # Create custom processor for specific connector files
  from models.processors import ConnectorFileProcessor
  # We'll pass file_ids as the files_info, the processor will handle ID-only files
- processor = ConnectorFileProcessor(self, connection_id, file_ids)
+ processor = ConnectorFileProcessor(self, connection_id, file_ids, user_id)
  # Create custom task using TaskService
  task_id = await self.task_service.create_custom_task(user_id, file_ids, processor)

View file

@ -34,10 +34,11 @@ class DocumentFileProcessor(TaskProcessor):
  class ConnectorFileProcessor(TaskProcessor):
      """Processor for connector file uploads"""
-     def __init__(self, connector_service, connection_id: str, files_to_process: list):
+     def __init__(self, connector_service, connection_id: str, files_to_process: list, user_id: str = None):
          self.connector_service = connector_service
          self.connection_id = connection_id
          self.files_to_process = files_to_process
+         self.user_id = user_id
          # Create lookup map for file info - handle both file objects and file IDs
          self.file_info_map = {}
          for f in files_to_process:
@ -64,18 +65,12 @@ class ConnectorFileProcessor(TaskProcessor):
  # Get file content from connector (the connector will fetch metadata if needed)
  document = await connector.get_file_content(file_id)
- # Get user_id from task store lookup
- user_id = None
- for uid, tasks in self.connector_service.task_service.task_store.items():
-     if upload_task.task_id in tasks:
-         user_id = uid
-         break
- if not user_id:
-     raise ValueError("Could not determine user_id for task")
+ # Use the user_id passed during initialization
+ if not self.user_id:
+     raise ValueError("user_id not provided to ConnectorFileProcessor")
  # Process using existing pipeline
- result = await self.connector_service.process_connector_document(document, user_id)
+ result = await self.connector_service.process_connector_document(document, self.user_id)
  file_task.status = TaskStatus.COMPLETED
  file_task.result = result