fix: add default params to run_tasks (#563)
<!-- .github/pull_request_template.md -->

## Description
<!-- Provide a clear description of the changes in this PR -->

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.

<!-- This is an auto-generated comment: release notes by coderabbit.ai -->
## Summary by CodeRabbit

- **New Features**
  - Added default values for `run_tasks` parameters so task processing can be triggered without supplying every input explicitly.
- **Bug Fixes**
  - Adjusted asynchronous handling of the `retrieved_edges_to_string` function so that callers in the evaluation scripts and the demo notebook await it correctly.
- **Documentation**
  - Updated markdown cell formatting in the Graphiti demo notebook for improved readability and structure.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: hajdul88 <52442977+hajdul88@users.noreply.github.com>
This commit is contained in:
parent e56d86b410
commit ada466879e

5 changed files with 27 additions and 12 deletions
@@ -1,7 +1,7 @@
 import inspect
 import json
 import logging
-from uuid import UUID
+from uuid import UUID, uuid4

 from typing import Any
 from cognee.modules.pipelines.operations import (
@@ -269,7 +269,12 @@ async def run_tasks_with_telemetry(tasks: list[Task], data, pipeline_name: str):
         raise error


-async def run_tasks(tasks: list[Task], dataset_id: UUID, data: Any, pipeline_name: str):
+async def run_tasks(
+    tasks: list[Task],
+    dataset_id: UUID = uuid4(),
+    data: Any = None,
+    pipeline_name: str = "unknown_pipeline",
+):
     pipeline_id = uuid5(NAMESPACE_OID, pipeline_name)

     pipeline_run = await log_pipeline_run_start(pipeline_id, dataset_id, data)
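With the new defaults, callers only have to pass the task list; `dataset_id` falls back to a generated UUID, `data` to `None`, and `pipeline_name` to `"unknown_pipeline"`. Below is a minimal standalone sketch of the calling pattern, not the real cognee function: `run_tasks_demo` only mirrors the new signature, and it also illustrates that a `uuid4()` default is evaluated once at definition time, so calls that omit `dataset_id` share the same UUID.

```python
import asyncio
from typing import Any
from uuid import UUID, uuid4, uuid5, NAMESPACE_OID


# Standalone mirror of the new signature, for illustration only; the real
# run_tasks lives in cognee.modules.pipelines and does far more work.
async def run_tasks_demo(
    tasks: list,
    dataset_id: UUID = uuid4(),
    data: Any = None,
    pipeline_name: str = "unknown_pipeline",
) -> UUID:
    # Same derivation as in the diff above: a stable id per pipeline name.
    pipeline_id = uuid5(NAMESPACE_OID, pipeline_name)
    print(f"pipeline={pipeline_name} ({pipeline_id}) dataset={dataset_id} tasks={len(tasks)}")
    return pipeline_id


async def main():
    # Callers can now omit everything except the task list.
    await run_tasks_demo([])

    # Note: dataset_id=uuid4() is evaluated once, when the function is
    # defined, so two calls that omit dataset_id share the same UUID.
    await run_tasks_demo([], data={"text": "hello"}, pipeline_name="demo_pipeline")


asyncio.run(main())
```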
@@ -13,7 +13,7 @@ from cognee.infrastructure.llm.prompts import read_query_prompt
 from cognee.modules.retrieval.description_to_codepart_search import (
     code_description_to_code_part_search,
 )
-from evals.eval_utils import download_github_repo, retrieved_edges_to_string
+from evals.eval_utils import download_github_repo


 def check_install_package(package_name):
@@ -122,7 +122,7 @@ async def get_context_with_brute_force_triplet_search(instance: dict) -> str:

     found_triplets = await brute_force_triplet_search(instance["question"], top_k=5)

-    search_results_str = retrieved_edges_to_string(found_triplets)
+    search_results_str = await retrieved_edges_to_string(found_triplets)

     return search_results_str
@@ -51,7 +51,7 @@ async def main():

     args = {
         "question": query,
-        "context": retrieved_edges_to_string(triplets),
+        "context": await retrieved_edges_to_string(triplets),
     }

     user_prompt = render_prompt("graph_context_for_question.txt", args)
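Because `retrieved_edges_to_string` is now a coroutine, each call site has to `await` it; without the `await`, the expression yields a coroutine object rather than the rendered context string. A minimal sketch of the before/after pattern follows, using a stand-in implementation since the real helper's body is not part of this diff and its triplet format is assumed here.

```python
import asyncio


# Stand-in for the real helper; the actual formatting logic lives in the
# evals utilities, and the (source, relation, target) triplet shape is an
# assumption made for this sketch.
async def retrieved_edges_to_string(triplets: list[tuple[str, str, str]]) -> str:
    return "\n".join(f"{src} --[{rel}]--> {dst}" for src, rel, dst in triplets)


async def main():
    triplets = [("cognee", "integrates_with", "graphiti")]

    # Correct: await the coroutine to get the rendered context string.
    context = await retrieved_edges_to_string(triplets)
    print(context)

    # Incorrect (the pre-fix call pattern once the helper became async):
    # context = retrieved_edges_to_string(triplets)  # -> coroutine object, not str


asyncio.run(main())
```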
@@ -3,7 +3,9 @@
   {
    "cell_type": "markdown",
    "metadata": {},
-   "source": "# Cognee Graphiti integration demo"
+   "source": [
+    "# Cognee Graphiti integration demo"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -12,7 +14,9 @@
      "languageId": "plaintext"
     }
    },
-   "source": "First we import the necessary libaries"
+   "source": [
+    "First we import the necessary libaries"
+   ]
   },
   {
    "cell_type": "code",
@@ -90,7 +94,9 @@
   {
    "cell_type": "markdown",
    "metadata": {},
-   "source": "## Input texts with temporal information"
+   "source": [
+    "## Input texts with temporal information"
+   ]
   },
   {
    "cell_type": "code",
@@ -113,7 +119,9 @@
   {
    "cell_type": "markdown",
    "metadata": {},
-   "source": "## Running graphiti + transforming its graph into cognee's core system (graph transformation + vector embeddings)"
+   "source": [
+    "## Running graphiti + transforming its graph into cognee's core system (graph transformation + vector embeddings)"
+   ]
   },
   {
    "cell_type": "code",
@@ -181,11 +189,13 @@
   {
    "cell_type": "markdown",
    "metadata": {},
-   "source": "## Retrieving and generating answer from graphiti graph with cognee retriever"
+   "source": [
+    "## Retrieving and generating answer from graphiti graph with cognee retriever"
+   ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": null,
    "metadata": {
     "ExecuteTime": {
      "end_time": "2025-01-15T10:44:27.844438Z",
@@ -213,7 +223,7 @@
    ")\n",
    "\n",
    "# Step 3: Preparing the Context for the LLM\n",
-   "context = retrieved_edges_to_string(triplets)\n",
+   "context = await retrieved_edges_to_string(triplets)\n",
    "\n",
    "args = {\"question\": query, \"context\": context}\n",
    "\n",
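The notebook edits above are mechanical: single-string `source` fields become one-element lists and the remaining `execution_count` is reset to `null`. A small sketch of how the same normalization could be applied in one pass; the notebook path is a placeholder and the snippet assumes a plain nbformat-style JSON layout.

```python
import json
from pathlib import Path

# Placeholder path -- point this at the notebook to normalize.
NOTEBOOK_PATH = Path("notebooks/graphiti_demo.ipynb")


def normalize(notebook: dict) -> dict:
    """Wrap string cell sources in lists and clear code-cell execution counts."""
    for cell in notebook.get("cells", []):
        # nbformat accepts either a string or a list of strings for "source";
        # wrapping strings in a one-element list matches the form used above.
        if isinstance(cell.get("source"), str):
            cell["source"] = [cell["source"]]
        # Stale execution counters are noise in version control.
        if cell.get("cell_type") == "code":
            cell["execution_count"] = None
    return notebook


if __name__ == "__main__":
    nb = json.loads(NOTEBOOK_PATH.read_text(encoding="utf-8"))
    NOTEBOOK_PATH.write_text(json.dumps(normalize(nb), indent=1) + "\n", encoding="utf-8")
```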