From e815a3fc140d79fb0f6987b7ef730b2a4cd437b6 Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Wed, 17 Sep 2025 14:12:47 +0200
Subject: [PATCH] chore: changes ontology file path parameter to the new
 config structure

---
 cognee/api/v1/cognify/cognify.py              | 14 +++++++++-----
 cognee/tasks/graph/extract_graph_from_data.py | 10 +++++++++-
 examples/python/ontology_demo_example.py      |  9 ++++++++-
 examples/python/ontology_demo_example_2.py    |  8 +++++++-
 4 files changed, 33 insertions(+), 8 deletions(-)

diff --git a/cognee/api/v1/cognify/cognify.py b/cognee/api/v1/cognify/cognify.py
index e933bafd8..f90e487e0 100644
--- a/cognee/api/v1/cognify/cognify.py
+++ b/cognee/api/v1/cognify/cognify.py
@@ -10,7 +10,8 @@ from cognee.infrastructure.llm import get_max_chunk_tokens
 from cognee.modules.pipelines import run_pipeline
 from cognee.modules.pipelines.tasks.task import Task
 from cognee.modules.chunking.TextChunker import TextChunker
-from cognee.modules.ontology.rdf_xml.RDFLibOntologyResolver import RDFLibOntologyResolver
+from cognee.modules.ontology.ontology_config import OntologyConfig
+from cognee.modules.ontology.get_ontology_resolver import get_ontology_resolver
 from cognee.modules.users.models import User
 
 from cognee.tasks.documents import (
@@ -39,7 +40,7 @@ async def cognify(
     graph_model: BaseModel = KnowledgeGraph,
     chunker=TextChunker,
     chunk_size: int = None,
-    ontology_file_path: Optional[str] = None,
+    ontology_config: OntologyConfig = None,
     vector_db_config: dict = None,
     graph_db_config: dict = None,
     run_in_background: bool = False,
@@ -188,11 +189,14 @@ async def cognify(
         - LLM_RATE_LIMIT_ENABLED: Enable rate limiting (default: False)
         - LLM_RATE_LIMIT_REQUESTS: Max requests per interval (default: 60)
     """
+    if ontology_config is None:
+        ontology_config = get_ontology_resolver()
+
     if temporal_cognify:
         tasks = await get_temporal_tasks(user, chunker, chunk_size)
     else:
         tasks = await get_default_tasks(
-            user, graph_model, chunker, chunk_size, ontology_file_path, custom_prompt
+            user, graph_model, chunker, chunk_size, ontology_config, custom_prompt
         )
 
     # By calling get pipeline executor we get a function that will have the run_pipeline run in the background or a function that we will need to wait for
@@ -216,7 +220,7 @@ async def get_default_tasks( # TODO: Find out a better way to do this (Boris's
     graph_model: BaseModel = KnowledgeGraph,
     chunker=TextChunker,
     chunk_size: int = None,
-    ontology_file_path: Optional[str] = None,
+    ontology_config: OntologyConfig = get_ontology_resolver(),
     custom_prompt: Optional[str] = None,
 ) -> list[Task]:
     default_tasks = [
@@ -230,7 +234,7 @@ async def get_default_tasks( # TODO: Find out a better way to do this (Boris's
         Task(
             extract_graph_from_data,
             graph_model=graph_model,
-            ontology_adapter=RDFLibOntologyResolver(ontology_file=ontology_file_path),
+            ontology_config=ontology_config,
             custom_prompt=custom_prompt,
             task_config={"batch_size": 10},
         ),  # Generate knowledge graphs from the document chunks.
diff --git a/cognee/tasks/graph/extract_graph_from_data.py b/cognee/tasks/graph/extract_graph_from_data.py
index 6681dd975..f0ef9c7f9 100644
--- a/cognee/tasks/graph/extract_graph_from_data.py
+++ b/cognee/tasks/graph/extract_graph_from_data.py
@@ -4,6 +4,8 @@ from pydantic import BaseModel
 
 from cognee.infrastructure.databases.graph import get_graph_engine
 from cognee.tasks.storage.add_data_points import add_data_points
+from cognee.modules.ontology.ontology_config import OntologyConfig
+from cognee.modules.ontology.get_ontology_resolver import get_ontology_resolver
 from cognee.modules.ontology.rdf_xml.RDFLibOntologyResolver import RDFLibOntologyResolver
 from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
 from cognee.modules.graph.utils import (
@@ -70,7 +72,7 @@ async def integrate_chunk_graphs(
 async def extract_graph_from_data(
     data_chunks: List[DocumentChunk],
     graph_model: Type[BaseModel],
-    ontology_adapter: RDFLibOntologyResolver = None,
+    ontology_config: OntologyConfig = None,
     custom_prompt: Optional[str] = None,
 ) -> List[DocumentChunk]:
     """
@@ -101,4 +103,10 @@ async def extract_graph_from_data(
         if edge.source_node_id in valid_node_ids and edge.target_node_id in valid_node_ids
     ]
 
+    # Extract resolver from config if provided, otherwise get default
+    if ontology_config is None:
+        ontology_config = get_ontology_resolver()
+
+    ontology_adapter = ontology_config["resolver"]
+
     return await integrate_chunk_graphs(data_chunks, chunk_graphs, graph_model, ontology_adapter)
diff --git a/examples/python/ontology_demo_example.py b/examples/python/ontology_demo_example.py
index 8243faef5..ea1ab8b72 100644
--- a/examples/python/ontology_demo_example.py
+++ b/examples/python/ontology_demo_example.py
@@ -5,6 +5,8 @@ import cognee
 from cognee.api.v1.search import SearchType
 from cognee.api.v1.visualize.visualize import visualize_graph
 from cognee.shared.logging_utils import setup_logging
+from cognee.modules.ontology.get_ontology_resolver import get_ontology_resolver
+from cognee.modules.ontology.rdf_xml.RDFLibOntologyResolver import RDFLibOntologyResolver
 
 text_1 = """
 1. Audi
@@ -60,7 +62,12 @@ async def main():
         os.path.dirname(os.path.abspath(__file__)), "ontology_input_example/basic_ontology.owl"
     )
 
-    await cognee.cognify(ontology_file_path=ontology_path)
+    # Create ontology config with custom ontology file
+    ontology_config = get_ontology_resolver(
+        resolver=RDFLibOntologyResolver(ontology_file=ontology_path)
+    )
+
+    await cognee.cognify(ontology_config=ontology_config)
     print("Knowledge with ontology created.")
 
     # Step 4: Query insights
diff --git a/examples/python/ontology_demo_example_2.py b/examples/python/ontology_demo_example_2.py
index 22fb19862..e897da2e5 100644
--- a/examples/python/ontology_demo_example_2.py
+++ b/examples/python/ontology_demo_example_2.py
@@ -5,6 +5,8 @@ import os
 import textwrap
 from cognee.api.v1.search import SearchType
 from cognee.api.v1.visualize.visualize import visualize_graph
+from cognee.modules.ontology.get_ontology_resolver import get_ontology_resolver
+from cognee.modules.ontology.rdf_xml.RDFLibOntologyResolver import RDFLibOntologyResolver
 
 
 async def run_pipeline(ontology_path=None):
@@ -17,7 +19,11 @@ async def run_pipeline(ontology_path=None):
 
     await cognee.add(scientific_papers_dir)
 
-    pipeline_run = await cognee.cognify(ontology_file_path=ontology_path)
+    ontology_config = get_ontology_resolver(
+        resolver=RDFLibOntologyResolver(ontology_file=ontology_path)
+    )
+
+    pipeline_run = await cognee.cognify(ontology_config=ontology_config)
 
     return pipeline_run
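
Usage, in brief: callers now pass an ontology config instead of a file path. Below is a minimal caller-side sketch (not part of the patch), mirroring the updated examples above; it assumes, as the diff suggests, that get_ontology_resolver(resolver=...) returns the OntologyConfig consumed by cognify and that its "resolver" entry holds the ontology resolver instance. The ontology path is purely illustrative.

    import asyncio

    import cognee
    from cognee.modules.ontology.get_ontology_resolver import get_ontology_resolver
    from cognee.modules.ontology.rdf_xml.RDFLibOntologyResolver import RDFLibOntologyResolver


    async def main():
        # Assumes data has already been added, e.g. via `await cognee.add(...)`.
        # Before this patch: await cognee.cognify(ontology_file_path="ontology.owl")
        # After this patch: wrap the resolver in an ontology config instead.
        ontology_config = get_ontology_resolver(
            resolver=RDFLibOntologyResolver(ontology_file="ontology.owl")  # illustrative path
        )
        await cognee.cognify(ontology_config=ontology_config)


    asyncio.run(main())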