fix: Resolve issue with data element incremental loading for multiple datasets
This commit is contained in:
parent
c73e8964a1
commit
c9a3f48398
1 changed file with 2 additions and 3 deletions
|
|
@ -115,9 +115,8 @@ async def run_tasks_data_item_incremental(
|
||||||
data_point = (
|
data_point = (
|
||||||
await session.execute(select(Data).filter(Data.id == data_id))
|
await session.execute(select(Data).filter(Data.id == data_id))
|
||||||
).scalar_one_or_none()
|
).scalar_one_or_none()
|
||||||
data_point.pipeline_status[pipeline_name] = {
|
status_for_pipeline = data_point.pipeline_status.setdefault(pipeline_name, {})
|
||||||
str(dataset.id): DataItemStatus.DATA_ITEM_PROCESSING_COMPLETED
|
status_for_pipeline[str(dataset.id)] = DataItemStatus.DATA_ITEM_PROCESSING_COMPLETED
|
||||||
}
|
|
||||||
await session.merge(data_point)
|
await session.merge(data_point)
|
||||||
await session.commit()
|
await session.commit()
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue