From 950d29a678f6140322aa2f7e436112865ff134f9 Mon Sep 17 00:00:00 2001
From: Igor Ilic
Date: Tue, 26 Aug 2025 14:04:33 +0200
Subject: [PATCH] refactor: Update typing

---
 .../modules/pipelines/layers/authorized_user_datasets.py  | 2 +-
 cognee/modules/pipelines/layers/process_pipeline_check.py | 7 +++----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/cognee/modules/pipelines/layers/authorized_user_datasets.py b/cognee/modules/pipelines/layers/authorized_user_datasets.py
index 2b4117208..fd9c400ca 100644
--- a/cognee/modules/pipelines/layers/authorized_user_datasets.py
+++ b/cognee/modules/pipelines/layers/authorized_user_datasets.py
@@ -13,7 +13,7 @@ from cognee.modules.data.methods import (
 
 
 async def authorized_user_datasets(
-    datasets: Union[str, list[str], list[UUID]], user: User = None
+    datasets: Union[str, UUID, list[str], list[UUID]], user: User = None
 ) -> Tuple[User, List[Dataset]]:
     """
     Function handles creation and dataset authorization if datasets already exist for Cognee.
diff --git a/cognee/modules/pipelines/layers/process_pipeline_check.py b/cognee/modules/pipelines/layers/process_pipeline_check.py
index 12cbe07aa..8318bb50e 100644
--- a/cognee/modules/pipelines/layers/process_pipeline_check.py
+++ b/cognee/modules/pipelines/layers/process_pipeline_check.py
@@ -1,3 +1,4 @@
+from typing import Union, Optional
 from cognee.modules.data.models import Dataset
 from cognee.modules.data.models import Data
 from cognee.modules.pipelines.models import PipelineRunStatus
@@ -15,7 +16,7 @@ logger = get_logger(__name__)
 
 async def process_pipeline_check(
     dataset: Dataset, data: list[Data], pipeline_name: str
-) -> [None, PipelineRunStarted, PipelineRunCompleted]:
+) -> Optional[Union[PipelineRunStarted, PipelineRunCompleted]]:
     """
     Function used to determine if pipeline is currently being processed or was already processed.
     In case pipeline was or is being processed return value is returned and current pipline execution should be stopped.
@@ -34,9 +35,7 @@
     if isinstance(dataset, Dataset):
         task_status = await get_pipeline_status([dataset.id], pipeline_name)
     else:
-        task_status = [
-            PipelineRunStatus.DATASET_PROCESSING_COMPLETED
-        ]  # TODO: this is a random assignment, find permanent solution
+        task_status = {}
 
     if str(dataset.id) in task_status:
        if task_status[str(dataset.id)] == PipelineRunStatus.DATASET_PROCESSING_STARTED:
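
Below is a minimal, hypothetical call-site sketch (not part of the patch) showing how the updated annotations are consumed: authorized_user_datasets now also accepts a bare UUID, and process_pipeline_check advertises Optional[Union[PipelineRunStarted, PipelineRunCompleted]], so a None result signals that execution may proceed. The import paths come from the file paths in the diff; the wrapper function, its arguments, and the empty data list are illustrative assumptions.

# Hypothetical usage sketch, not part of the patch. Import paths are taken
# from the file paths in the diff; everything else here is assumed.
from uuid import UUID

from cognee.modules.pipelines.layers.authorized_user_datasets import authorized_user_datasets
from cognee.modules.pipelines.layers.process_pipeline_check import process_pipeline_check


async def run_pipeline_once(dataset_id: UUID, pipeline_name: str) -> None:
    # With the new annotation, a bare UUID is a valid argument,
    # in addition to str, list[str], and list[UUID].
    user, datasets = await authorized_user_datasets(dataset_id)

    for dataset in datasets:
        # Return type is Optional[Union[PipelineRunStarted, PipelineRunCompleted]]:
        # a non-None value means the pipeline is running or already finished.
        run_info = await process_pipeline_check(dataset, data=[], pipeline_name=pipeline_name)
        if run_info is not None:
            continue  # already started or completed; skip this dataset
        ...  # proceed with the actual pipeline execution (out of scope for this sketch)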