fix: graph prompt path (#769)

## Description
Fix graph prompt path resolution in `extract_content_graph`: when `llm_config.graph_prompt_path` is an absolute path, it is now split into a base directory and filename before being passed to `render_prompt`, so custom prompt files outside the default prompts directory resolve correctly.

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.

---------

Co-authored-by: Evain Arthur <arthur.evain35@gmail.com>
Co-authored-by: Vasilije <8619304+Vasilije1990@users.noreply.github.com>
Co-authored-by: Boris <boris@topoteretes.com>

Author: Igor Ilic (committed via GitHub, 2025-04-23 12:03:51 +02:00)
Commit: 60da1c899e (parent: c5cba01d3c)

```diff
@@ -1,4 +1,5 @@
-from typing import Type, Optional
+import os
+from typing import Type
 from pydantic import BaseModel
 from cognee.infrastructure.llm.get_llm_client import get_llm_client
 from cognee.infrastructure.llm.prompts import render_prompt
@@ -10,7 +11,18 @@ async def extract_content_graph(content: str, response_model: Type[BaseModel]):
     llm_config = get_llm_config()
     prompt_path = llm_config.graph_prompt_path
-    system_prompt = render_prompt(prompt_path, {})
+
+    # Check if the prompt path is an absolute path or just a filename
+    if os.path.isabs(prompt_path):
+        # directory containing the file
+        base_directory = os.path.dirname(prompt_path)
+
+        # just the filename itself
+        prompt_path = os.path.basename(prompt_path)
+    else:
+        base_directory = None
+
+    system_prompt = render_prompt(prompt_path, {}, base_directory=base_directory)
 
     content_graph = await llm_client.acreate_structured_output(
         content, system_prompt, response_model
     )
```
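
For context on the change: an absolute `graph_prompt_path` is now split into the base directory and bare filename that `render_prompt` expects, while a plain filename keeps the old behavior of resolving against the default prompts directory. A minimal standalone sketch of that branching (the `split_prompt_path` helper name is hypothetical, not part of the codebase):

```python
import os
from typing import Optional, Tuple


def split_prompt_path(prompt_path: str) -> Tuple[str, Optional[str]]:
    """Mirror the branching added in this commit: an absolute path becomes
    (filename, base_directory); a bare filename passes through with no base
    directory, so render_prompt falls back to its default prompts folder."""
    if os.path.isabs(prompt_path):
        return os.path.basename(prompt_path), os.path.dirname(prompt_path)
    return prompt_path, None


# An absolute path, e.g. a custom prompt stored outside the package:
assert split_prompt_path("/opt/prompts/my_graph_prompt.txt") == (
    "my_graph_prompt.txt",
    "/opt/prompts",
)

# The default case: a bare filename, resolved by render_prompt itself.
assert split_prompt_path("my_graph_prompt.txt") == ("my_graph_prompt.txt", None)
```

With this split in place, pointing `llm_config.graph_prompt_path` at an absolute path loads a custom prompt file from anywhere on disk, and the previous filename-only configuration continues to work unchanged.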