fix: ingest_data error

This commit is contained in:
Boris Arzentar 2025-06-04 13:16:00 +02:00
parent 1d099a28a6
commit fa8006546b
No known key found for this signature in database
GPG key ID: D5CC274C784807B7
4 changed files with 17 additions and 12 deletions

View file

@@ -39,14 +39,16 @@ export default function ActivityLog({ ref }: ActivityLogProps) {
}));
return (
<div className="flex flex-col gap-2 overflow-y-auto max-h-96" id="activityLogContainer">
{activityLog.map((activity) => (
<div key={activity.id} className="flex gap-2 items-top">
<span className="flex-1/3 text-xs text-gray-300 whitespace-nowrap mt-1.5">{formatter.format(activity.timestamp)}: </span>
<span className="flex-2/3 text-white whitespace-normal">{activity.activity}</span>
</div>
))}
{!activityLog.length && <span className="text-white">No activity logged.</span>}
<div className="overflow-y-auto max-h-96" id="activityLogContainer">
<div className="flex flex-col-reverse gap-2">
{activityLog.map((activity) => (
<div key={activity.id} className="flex gap-2 items-top">
<span className="flex-1/3 text-xs text-gray-300 whitespace-nowrap mt-1.5">{formatter.format(activity.timestamp)}: </span>
<span className="flex-2/3 text-white whitespace-normal">{activity.activity}</span>
</div>
))}
{!activityLog.length && <span className="text-white">No activity logged.</span>}
</div>
</div>
);
}

View file

@@ -5,8 +5,8 @@ const NODE_COLORS = {
Document: formatHex(colors.blue[500]),
Chunk: formatHex(colors.green[500]),
Entity: formatHex(colors.yellow[500]),
EntityType: formatHex(colors.purple[500]),
NodeSet: formatHex(colors.indigo[800]),
EntityType: formatHex(colors.purple[800]),
NodeSet: formatHex(colors.indigo[300]),
GitHubUser: formatHex(colors.gray[300]),
Comment: formatHex(colors.amber[500]),
Issue: formatHex(colors.red[500]),

View file

@@ -4,7 +4,7 @@ from cognee.modules.pipelines.utils import generate_pipeline_id, generate_pipeli
def get_crewai_pipeline_run_id(user_id: UUID):
dataset_id = uuid5(NAMESPACE_OID, "GitHub")
dataset_id = uuid5(NAMESPACE_OID, "Github")
pipeline_id = generate_pipeline_id(user_id, "github_pipeline")
pipeline_run_id = generate_pipeline_run_id(pipeline_id, dataset_id)

View file

@@ -128,11 +128,14 @@ async def ingest_data(
existing_datasets = await get_authorized_existing_datasets(
user=user, permission_type="write", datasets=[dataset_name]
)
dataset = await load_or_create_datasets(
datasets = await load_or_create_datasets(
dataset_names=[dataset_name],
existing_datasets=existing_datasets,
user=user,
)
dataset = next(
(dataset for dataset in datasets if dataset.name == "Github")
)
# Check to see if data should be updated
data_point = (