feat: Implements generation and retrieval and adjusts imports
parent 124a26335e
commit d0646a1694

1 changed file with 36 additions and 7 deletions
@@ -1,16 +1,20 @@
 import asyncio
+
 import cognee
-from cognee.api.v1.search import SearchType
+import logging
 from cognee.modules.pipelines import Task, run_tasks
-from cognee.tasks.temporal_awareness import (
-    build_graph_with_temporal_awareness,
-    search_graph_with_temporal_awareness,
-)
 from cognee.shared.utils import setup_logging
+from cognee.tasks.temporal_awareness import build_graph_with_temporal_awareness
 from cognee.infrastructure.databases.relational import (
     create_db_and_tables as create_relational_db_and_tables,
 )
-from cognee.tasks.storage.index_graph_edges import index_graphiti_nodes_and_edges
+from cognee.tasks.temporal_awareness.index_graphiti_objects import (
+    index_and_transform_graphiti_nodes_and_edges,
+)
+from cognee.modules.retrieval.brute_force_triplet_search import brute_force_triplet_search
+from cognee.tasks.completion.graph_query_completion import retrieved_edges_to_string
+from cognee.infrastructure.llm.prompts import read_query_prompt, render_prompt
+from cognee.infrastructure.llm.get_llm_client import get_llm_client
 
 text_list = [
     "Kamala Harris is the Attorney General of California. She was previously "
@@ -36,8 +40,33 @@ async def main():
     async for result in pipeline:
         print(result)
 
-    await index_graphiti_nodes_and_edges()
+    await index_and_transform_graphiti_nodes_and_edges()
 
+    query = "When was Kamala Harris in office?"
+    triplets = await brute_force_triplet_search(
+        query=query,
+        top_k=3,
+        collections=["graphitinode_content", "graphitinode_name", "graphitinode_summary"],
+    )
+
+    args = {
+        "question": query,
+        "context": retrieved_edges_to_string(triplets),
+    }
+
+    user_prompt = render_prompt("graph_context_for_question.txt", args)
+    system_prompt = read_query_prompt("answer_simple_question_restricted.txt")
+
+    llm_client = get_llm_client()
+    computed_answer = await llm_client.acreate_structured_output(
+        text_input=user_prompt,
+        system_prompt=system_prompt,
+        response_model=str,
+    )
+
+    print(computed_answer)
+
+
 if __name__ == "__main__":
     setup_logging(logging.INFO)
     asyncio.run(main())
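A note on the retrieval path added above: `brute_force_triplet_search` returns scored graph edges, and `retrieved_edges_to_string` flattens them into the plain-text `context` value that `render_prompt` injects into the question template. The sketch below shows the kind of flattening involved; the edge fields (`node1`, `node2`, `attributes`, `relationship_type`) are assumptions for illustration, not necessarily the exact shape cognee uses.

```python
# Illustrative sketch only: mimics what a triplet-to-context helper does.
# Field names here are assumed; cognee's actual edge objects may differ.
from dataclasses import dataclass, field


@dataclass
class Node:
    attributes: dict = field(default_factory=dict)


@dataclass
class Edge:
    node1: Node
    node2: Node
    attributes: dict = field(default_factory=dict)


def edges_to_context(edges: list[Edge]) -> str:
    """Render each edge as 'source -- relationship -- target', one per line."""
    lines = []
    for edge in edges:
        source = edge.node1.attributes.get("name", "?")
        target = edge.node2.attributes.get("name", "?")
        relationship = edge.attributes.get("relationship_type", "related_to")
        lines.append(f"{source} -- {relationship} -- {target}")
    return "\n".join(lines)


# One triplet recoverable from the example text:
edge = Edge(
    node1=Node({"name": "Kamala Harris"}),
    node2=Node({"name": "Attorney General of California"}),
    attributes={"relationship_type": "holds_office"},
)
print(edges_to_context([edge]))
# Kamala Harris -- holds_office -- Attorney General of California
```

A context string in this shape is what `args["context"]` carries into `render_prompt("graph_context_for_question.txt", args)`.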
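The final step pins `response_model=str`, so the answer comes back as a bare string. Because `acreate_structured_output` accepts a `response_model`, a typed model is a natural variant when the caller wants named fields; the `Answer` model below is hypothetical and not part of this commit.

```python
# Hypothetical variant of the answer step: swap the bare `str` response
# model for a Pydantic model. Only the call shape (text_input,
# system_prompt, response_model) comes from the commit above; the
# Answer model itself is an assumption for illustration.
from pydantic import BaseModel

from cognee.infrastructure.llm.get_llm_client import get_llm_client


class Answer(BaseModel):
    answer: str  # direct answer to the question
    time_span: str  # e.g. "January 3, 2011 - January 3, 2017"


async def answer_with_fields(user_prompt: str, system_prompt: str) -> Answer:
    llm_client = get_llm_client()
    return await llm_client.acreate_structured_output(
        text_input=user_prompt,
        system_prompt=system_prompt,
        response_model=Answer,  # typed fields instead of a bare string
    )
```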