refactor: Add maximum document batch size for document processing

This commit is contained in:
Igor Ilic 2025-09-25 18:03:17 +02:00
parent 997b85e1ce
commit 6636fe8afd

View file

@ -37,6 +37,8 @@ from ..tasks.task import Task
logger = get_logger("run_tasks(tasks: [Task], data)")
# TODO: See if this parameter should be configurable as input for run_tasks itself
DOCUMENT_BATCH_SIZE = 10
def override_run_tasks(new_gen):
@ -266,7 +268,11 @@ async def run_tasks(
if incremental_loading:
    data = await resolve_data_directories(data)
-   # Create async tasks per data item that will run the pipeline for the data item
+   # Create and gather batches of async tasks of data items that will run the pipeline for the data item
results = []
for start in range(0, len(data), DOCUMENT_BATCH_SIZE):
document_batch = data[start : start + DOCUMENT_BATCH_SIZE]
    data_item_tasks = [
        asyncio.create_task(
            _run_tasks_data_item(
@ -281,9 +287,10 @@ async def run_tasks(
                incremental_loading,
            )
        )
-       for data_item in data
+       for data_item in document_batch
    ]
-   results = await asyncio.gather(*data_item_tasks)
+   results.extend(await asyncio.gather(*data_item_tasks))
# Remove skipped data items from results
results = [result for result in results if result]