fix: Resolve pipeline id issue (#1035)
<!-- .github/pull_request_template.md --> ## Description <!-- Provide a clear description of the changes in this PR --> ## DCO Affirmation I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
This commit is contained in:
commit
cb905bba45
1 changed files with 6 additions and 5 deletions
|
|
@@ -7,6 +7,7 @@ from cognee.modules.data.methods.get_dataset_data import get_dataset_data
|
||||||
from cognee.modules.data.models import Data, Dataset
|
from cognee.modules.data.models import Data, Dataset
|
||||||
from cognee.modules.pipelines.operations.run_tasks import run_tasks
|
from cognee.modules.pipelines.operations.run_tasks import run_tasks
|
||||||
from cognee.modules.pipelines.models import PipelineRunStatus
|
from cognee.modules.pipelines.models import PipelineRunStatus
|
||||||
|
from cognee.modules.pipelines.utils import generate_pipeline_id
|
||||||
from cognee.modules.pipelines.operations.get_pipeline_status import get_pipeline_status
|
from cognee.modules.pipelines.operations.get_pipeline_status import get_pipeline_status
|
||||||
from cognee.modules.pipelines.methods import get_pipeline_run_by_dataset
|
from cognee.modules.pipelines.methods import get_pipeline_run_by_dataset
|
||||||
|
|
||||||
|
|
@@ -124,22 +125,22 @@ async def run_pipeline(
|
||||||
|
|
||||||
# Ugly hack, but no easier way to do this.
|
# Ugly hack, but no easier way to do this.
|
||||||
if pipeline_name == "add_pipeline":
|
if pipeline_name == "add_pipeline":
|
||||||
|
pipeline_id = generate_pipeline_id(user.id, dataset.id, pipeline_name)
|
||||||
# Refresh the add pipeline status so data is added to a dataset.
|
# Refresh the add pipeline status so data is added to a dataset.
|
||||||
# Without this the add_pipeline status will be DATASET_PROCESSING_COMPLETED and will skip the execution.
|
# Without this the add_pipeline status will be DATASET_PROCESSING_COMPLETED and will skip the execution.
|
||||||
dataset_id = uuid5(NAMESPACE_OID, f"{dataset.name}{str(user.id)}")
|
|
||||||
|
|
||||||
await log_pipeline_run_initiated(
|
await log_pipeline_run_initiated(
|
||||||
pipeline_id=uuid5(NAMESPACE_OID, "add_pipeline"),
|
pipeline_id=pipeline_id,
|
||||||
pipeline_name="add_pipeline",
|
pipeline_name="add_pipeline",
|
||||||
dataset_id=dataset_id,
|
dataset_id=dataset.id,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Refresh the cognify pipeline status after we add new files.
|
# Refresh the cognify pipeline status after we add new files.
|
||||||
# Without this the cognify_pipeline status will be DATASET_PROCESSING_COMPLETED and will skip the execution.
|
# Without this the cognify_pipeline status will be DATASET_PROCESSING_COMPLETED and will skip the execution.
|
||||||
await log_pipeline_run_initiated(
|
await log_pipeline_run_initiated(
|
||||||
pipeline_id=uuid5(NAMESPACE_OID, "cognify_pipeline"),
|
pipeline_id=pipeline_id,
|
||||||
pipeline_name="cognify_pipeline",
|
pipeline_name="cognify_pipeline",
|
||||||
dataset_id=dataset_id,
|
dataset_id=dataset.id,
|
||||||
)
|
)
|
||||||
|
|
||||||
dataset_id = dataset.id
|
dataset_id = dataset.id
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue