Prepare for the presentation, add info
This commit is contained in:
parent
9ad22edb15
commit
4ea9a2c134
6 changed files with 645 additions and 278 deletions
308
Demo_graph.ipynb
308
Demo_graph.ipynb
File diff suppressed because one or more lines are too long
115
cognitive_architecture/api/v1/cognify/cognify.py
Normal file
115
cognitive_architecture/api/v1/cognify/cognify.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import asyncio
|
||||
import logging
|
||||
|
||||
from langchain.prompts import ChatPromptTemplate
|
||||
import json
|
||||
from langchain.document_loaders import TextLoader
|
||||
from langchain.document_loaders import DirectoryLoader
|
||||
from langchain.chains import create_extraction_chain
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
import re
|
||||
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
import instructor
|
||||
from openai import OpenAI
|
||||
|
||||
|
||||
aclient = instructor.patch(OpenAI())
|
||||
|
||||
from typing import Optional, List, Type
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from cognitive_architecture.modules.cognify.llm.classify_content import classify_into_categories
|
||||
from cognitive_architecture.modules.cognify.llm.content_to_cog_layers import content_to_cog_layers
|
||||
from cognitive_architecture.modules.cognify.llm.content_to_propositions import generate_graph
|
||||
from cognitive_architecture.shared.data_models import DefaultContentPrediction, KnowledgeGraph, DefaultCognitiveLayer
|
||||
|
||||
|
||||
|
||||
async def cognify(content: str = None):
    """Run the cognitive processing pipeline over a piece of text.

    The pipeline has three LLM-backed stages:
      1. classify the content into a category (``classify_into_categories``),
      2. derive cognitive layers for that category (``content_to_cog_layers``),
      3. generate one knowledge graph per cognitive layer (``generate_graph``).

    Args:
        content: Text to process. When ``None`` the bundled demo article is
            used, which preserves the previous hard-coded behaviour.

    Returns:
        The list of per-layer knowledge-graph results (previously this was
        computed but silently dropped).
    """
    # Demo article used when no content is supplied (the former hard-coded input).
    input_article_one = content if content is not None else """ In the nicest possible way, Britons have always been a bit silly about animals. “Keeping pets, for the English, is not so much a leisure activity as it is an entire way of life,” wrote the anthropologist Kate Fox in Watching the English, nearly 20 years ago. Our dogs, in particular, have been an acceptable outlet for emotions and impulses we otherwise keep strictly controlled – our latent desire to be demonstratively affectionate, to be silly and chat to strangers. If this seems like an exaggeration, consider the different reactions you’d get if you struck up a conversation with someone in a park with a dog, versus someone on the train.

Indeed, British society has been set up to accommodate these four-legged ambassadors. In the UK – unlike Australia, say, or New Zealand – dogs are not just permitted on public transport but often openly encouraged. Many pubs and shops display waggish signs, reading, “Dogs welcome, people tolerated”, and have treat jars on their counters. The other day, as I was waiting outside a cafe with a friend’s dog, the barista urged me to bring her inside.

For years, Britons’ non-partisan passion for animals has been consistent amid dwindling common ground. But lately, rather than bringing out the best in us, our relationship with dogs is increasingly revealing us at our worst – and our supposed “best friends” are paying the price.

As with so many latent traits in the national psyche, it all came unleashed with the pandemic, when many people thought they might as well make the most of all that time at home and in local parks with a dog. Between 2019 and 2022, the number of pet dogs in the UK rose from about nine million to 13 million. But there’s long been a seasonal surge around this time of year, substantial enough for the Dogs Trust charity to coin its famous slogan back in 1978: “A dog is for life, not just for Christmas.”

Green spaces, meanwhile, have been steadily declining, and now many of us have returned to the office, just as those “pandemic dogs” are entering their troublesome teens. It’s a combustible combination and we are already seeing the results: the number of dog attacks recorded by police in England and Wales rose by more than a third between 2018 and 2022.

At the same time, sites such as Pets4Homes.co.uk are replete with listings for dogs that, their owners accept “with deep regret”, are no longer suited to their lifestyles now that lockdown is over. It may have felt as if it would go on for ever, but was there ever any suggestion it was going to last the average dog’s lifespan of a decade?

Living beings are being downgraded to mere commodities. You can see it reflected the “designer” breeds currently in fashion, the French bulldogs and pugs that look cute but spend their entire lives in discomfort. American XL bully dogs, now so controversial, are often sought after as a signifier of masculinity: roping an entire other life in service of our egos. Historically, many of Britain’s most popular breeds evolved to hunt vermin, retrieve game, herd, or otherwise do a specific job alongside humans; these days we are breeding and buying them for their aesthetic appeal.

Underpinning this is a shift to what was long disdained as the “American” approach: treating pets as substitutes for children. In the past in Britain, dogs were treasured on their own terms, for the qualities that made them dogs, and as such, sometimes better than people: their friendliness and trustingness and how they opened up the world for us. They were indulged, certainly – by allowing them on to the sofa or in our beds, for instance, when we’d sworn we never would – but in ways that did not negate or deny their essential otherness.

Now we have more dogs of such ludicrous proportions, they struggle to function as dogs at all – and we treat them accordingly, indulging them as we would ourselves: by buying unnecessary things. The total spend on pets in the UK has more than doubled in the past decade, reaching nearly £10bn last year. That huge rise has not just come from essentials: figures from the marketing agency Mintel suggest that one in five UK owners like their pet to “keep up with the latest trends” in grooming or, heaven forbid, outfits.

These days pet “boutiques” – like the one that recently opened on my street in Norwich, selling “cold-pressed” dog treats, “paw and nose balms” and spa services – are a widespread sign of gentrification. But it’s not just wealthier areas: this summer in Great Yarmouth, one of the most deprived towns in the country, I noticed seaside stalls selling not one but two brands of ice-cream for dogs.

It suggests dog-lovers have become untethered from their companions’ desires, let alone their needs. Let’s be honest: most dogs would be thrilled to bits to be eating a paper bag, or even their own faeces. And although they are certainly delighted by ice-cream, they don’t need it. But the ways we ourselves find solace – in consumption, by indulging our simian “treat brain” with things that we don’t need and/or aren’t good for us – we have simply extended to our pets.

It’s hard not to see the rise in dog-friendly restaurants, cinema screenings and even churches as similar to the ludicrous expenditure: a way to placate the two-legged being on the end of the lead (regardless of the experience of others in the vicinity).

Meanwhile, many dogs suffer daily deprivation, their worlds made small and monotonous by our busy modern schedules. These are social animals: it’s not natural for them to live without other dogs, let alone in an empty house for eight hours a day, Monday to Friday. If we are besieged by badly behaved dogs, the cause isn’t hard to pinpoint. Many behavioural problems can be alleviated and even addressed by sufficient exercise, supervision and consistent routines, but instead of organising our lives so that our pets may thrive, we show our love with a Halloween-themed cookie, or a new outfit for Instagram likes.

It’s easy to forget that we are sharing our homes with a descendant of the wolf when it is dressed in sheep’s clothing; but the more we learn about animals, the clearer it becomes that our treatment of them, simultaneously adoring and alienated, means they are leading strange, unsatisfying simulacra of the lives they ought to lead.

But for as long as we choose to share our lives with pets, the bar should be the same as for any relationship we value: being prepared to make sacrifices for their wellbeing, prioritising quality time and care, and loving them as they are – not for how they reflect on us, or how we’d like them to be.

    """

    # Stage 1: classify the content into categories.
    required_layers_one = await classify_into_categories(
        input_article_one, "classify_content.txt", DefaultContentPrediction
    )

    def transform_dict(original):
        """Flatten the classifier prediction into the dict shape that the
        layer-generation prompt expects."""
        # Extract the first subclass from the list (assuming there could be more).
        subclass_enum = original['label']['subclass'][0]

        # The data type is derived from 'type' and converted to lowercase.
        data_type = original['label']['type'].lower()

        # The layer name is the value of the Enum member
        # (e.g., 'News stories and blog posts').
        layer_name = subclass_enum.value

        return {
            'data_type': data_type,
            'context_name': data_type.upper(),  # llm context classification
            'layer_name': layer_name,           # llm layer classification
        }

    # Transform the original prediction dictionary.
    transformed_dict_1 = transform_dict(required_layers_one.dict())

    # Stage 2: derive the cognitive layers for the classified content.
    cognitive_layers_one = await content_to_cog_layers(
        "generate_cog_layers.txt", transformed_dict_1,
        response_model=DefaultCognitiveLayer
    )
    cognitive_layers_one = [
        layer_subgroup.name for layer_subgroup in cognitive_layers_one.cognitive_layers
    ]

    async def generate_graphs_for_all_layers(text_input: str, layers: List[str],
                                             response_model: Type[BaseModel]):
        """Kick off one graph-generation task per layer and run them concurrently."""
        tasks = [
            generate_graph(text_input, "generate_graph_prompt.txt",
                           {'layer': layer}, response_model)
            for layer in layers
        ]
        return await asyncio.gather(*tasks)

    async def async_graph_per_layer(text_input: str, cognitive_layers: List[str]):
        """Stage 3: build a KnowledgeGraph for each cognitive layer."""
        return await generate_graphs_for_all_layers(text_input, cognitive_layers,
                                                    KnowledgeGraph)

    layer_1_graph = await async_graph_per_layer(input_article_one, cognitive_layers_one)
    # Previously the result was computed and dropped; return it so callers can
    # actually consume the generated graphs.
    return layer_1_graph
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
""" Here we update semantic graph with content that classifier produced"""
|
||||
from datetime import datetime
|
||||
from enum import Enum, auto
|
||||
from typing import Type, Optional, Any
|
||||
from pydantic import BaseModel
|
||||
from cognitive_architecture.infrastructure.databases.graph.get_graph_client import get_graph_client
|
||||
from cognitive_architecture.shared.data_models import GraphDBType, DefaultGraphModel, Document, DocumentType, Category, Relationship, UserProperties, UserLocation
|
||||
|
||||
|
||||
def add_classification_nodes(G, id, classification_data):
    """Attach LLM classification results to the document node *id*.

    Two nodes carrying *classification_data* are added: a layer-classification
    node (keyed by ``context_name``) linked from the document, and a detailed
    classification node (keyed by ``layer_name``) linked from the layer node.
    Returns the mutated graph G.
    """
    layer_node = f'LLM_LAYER_CLASSIFICATION:{classification_data["context_name"]}:{id}'
    detail_node = f'LLM_CLASSIFICATION:LAYER:{classification_data["layer_name"]}:{id}'

    # Layer classification node, hanging off the document node.
    G.add_node(layer_node, **classification_data)
    G.add_edge(id, layer_node, relationship='classified_as')

    # Detailed classification node, hanging off the layer node.
    G.add_node(detail_node, **classification_data)
    G.add_edge(layer_node, detail_node, relationship='contains_analysis')

    return G
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Initialize the graph client (NetworkX-backed implementation).
    graph_client = get_graph_client(GraphDBType.NETWORKX)

    # BUG FIX: add_classification_nodes is a plain synchronous function, so
    # wrapping it in asyncio.run() raised "ValueError: a coroutine was
    # expected". Call it directly instead.
    # NOTE(review): the first argument must expose add_node/add_edge; confirm
    # whether graph_client or graph_client.graph is the right object to pass.
    G = add_classification_nodes(graph_client, 'document_id',
                                 {'data_type': 'text',
                                  'context_name': 'TEXT',
                                  'layer_name': 'Articles, essays, and reports'})
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
""" Here we update semantic graph with content that classifier produced"""
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from enum import Enum, auto
|
||||
from typing import Type, Optional, Any
|
||||
from pydantic import BaseModel
|
||||
from cognitive_architecture.infrastructure.databases.graph.get_graph_client import get_graph_client
|
||||
from cognitive_architecture.shared.data_models import GraphDBType, DefaultGraphModel, Document, DocumentType, Category, Relationship, UserProperties, UserLocation
|
||||
|
||||
|
||||
def add_propositions(G, category_name, subclass_content, layer_description, new_data, layer_uuid,
                     layer_decomposition_uuid):
    """Add an LLM-generated knowledge graph under the matching subclass node.

    Args:
        G: Graph client exposing ``load_graph_from_file()`` and a ``graph``
           attribute (NetworkX-style graph).
        category_name: Category the subclass belongs to (used for messages only).
        subclass_content: Substring identifying the subclass node to attach to.
        layer_description: Human-readable description stored on every new node.
        new_data: Pydantic-style object with ``nodes`` and ``edges`` collections.
        layer_uuid / layer_decomposition_uuid: Identifiers stamped on new nodes.

    Returns:
        The underlying graph object, with the proposition nodes/edges added.
    """
    # BUG FIX: the original read the module-global `graph_client`, which only
    # exists when this file runs as a script and raises NameError on import.
    # Use the client that was passed in instead.
    G.load_graph_from_file()
    G = G.graph

    # Find the node whose ID contains the subclass content. The last match
    # wins, preserving the original behaviour.
    subclass_node_id = None
    for node, data in G.nodes(data=True):
        if subclass_content in node:
            subclass_node_id = node

    if not subclass_node_id:
        print(f"Subclass '{subclass_content}' under category '{category_name}' not found in the graph.")
        return G

    # Mapping from the LLM's node IDs to the IDs stored in the graph.
    node_id_mapping = {}
    # One timestamp for the whole batch so created_at/updated_at agree.
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    # Add nodes from the Pydantic object.
    for node in new_data.nodes:
        unique_node_id = uuid.uuid4()
        new_node_id = f"{node.description} - {str(layer_uuid)} - {str(layer_decomposition_uuid)} - {str(unique_node_id)}"
        G.add_node(new_node_id,
                   created_at=timestamp,
                   updated_at=timestamp,
                   description=node.description,
                   category=node.category,
                   memory_type=node.memory_type,
                   layer_uuid=str(layer_uuid),
                   layer_description=str(layer_description),
                   layer_decomposition_uuid=str(layer_decomposition_uuid),
                   id=str(unique_node_id),
                   type='detail')
        G.add_edge(subclass_node_id, new_node_id, relationship='detail')

        # Remember the mapping from the LLM node ID to the stored node ID.
        node_id_mapping[node.id] = new_node_id

    # Add edges from the Pydantic object using the remapped node IDs.
    for edge in new_data.edges:
        source_node_id = node_id_mapping.get(edge.source)
        target_node_id = node_id_mapping.get(edge.target)

        if source_node_id and target_node_id:
            G.add_edge(source_node_id, target_node_id, description=edge.description, relationship='relation')
        else:
            print(f"Could not find mapping for edge from {edge.source} to {edge.target}")

    return G
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Initialize the graph client
    graph_client = get_graph_client(GraphDBType.NETWORKX)

    # BUG FIX: add_propositions is synchronous, so asyncio.run() on its return
    # value raised ValueError; call it directly.
    # NOTE(review): the string placeholders below are not usable inputs —
    # `new_data` must be an object exposing .nodes/.edges — so this demo only
    # works once real data is supplied.
    G = add_propositions(graph_client, 'category_name', 'subclass_content',
                         'layer_description', 'new_data', 'layer_uuid',
                         'layer_decomposition_uuid')
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import asyncio

    # Demo scaffolding: only sets up the NetworkX-backed graph client.
    # (Project types such as GraphDBType are imported at the top of the file.)
    graph_client = get_graph_client(GraphDBType.NETWORKX)
|
||||
209
cognitive_architecture/modules/cognify/graph/create.py
Normal file
209
cognitive_architecture/modules/cognify/graph/create.py
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
""" This module is responsible for creating a semantic graph """
|
||||
from datetime import datetime
|
||||
from enum import Enum, auto
|
||||
from typing import Type, Optional, Any
|
||||
from pydantic import BaseModel
|
||||
from cognitive_architecture.infrastructure.databases.graph.get_graph_client import get_graph_client
|
||||
from cognitive_architecture.shared.data_models import GraphDBType, DefaultGraphModel, Document, DocumentType, Category, Relationship, UserProperties, UserLocation
|
||||
|
||||
|
||||
|
||||
|
||||
async def generate_node_id(instance: BaseModel) -> str:
    """Build a node ID of the form 'ClassName:identifier'.

    The first identifier attribute present on *instance* (checked in priority
    order: id, doc_id, location_id, type_id) is used; when none is present the
    ID falls back to 'ClassName:default'.
    """
    class_name = instance.__class__.__name__
    for candidate in ('id', 'doc_id', 'location_id', 'type_id'):
        if hasattr(instance, candidate):
            return f"{class_name}:{getattr(instance, candidate)}"
    return f"{class_name}:default"
|
||||
async def add_node_and_edge(client, parent_id: Optional[str], node_id: str, node_data: dict, relationship_data: dict):
    """Insert *node_id* with *node_data* into the graph and, when a parent is
    given, connect parent -> node with *relationship_data* on the edge."""
    # The node itself is always added.
    await client.add_node(node_id, **node_data)
    # Root nodes have no parent, so the edge is optional.
    if parent_id:
        await client.add_edge(parent_id, node_id, **relationship_data)
|
||||
|
||||
|
||||
async def process_attribute(G, parent_id: Optional[str], attribute: str, value: Any):
    """Recursively materialise *value* into the graph under *parent_id*.

    BaseModel values become nodes (linked via their declared default
    relationship, when present); lists of BaseModels are processed item by
    item against the same parent; any other value is ignored.
    """
    if isinstance(value, BaseModel):
        node_id = await generate_node_id(value)
        payload = value.dict(exclude={'default_relationship'})
        # Edge metadata comes from the model's declared default relationship,
        # when it defines one.
        edge_payload = (value.default_relationship.dict()
                        if hasattr(value, 'default_relationship') else {})
        await add_node_and_edge(G, parent_id, node_id, payload, edge_payload)

        # Walk nested attributes so the whole object graph ends up in G.
        for child_name, child_value in value.__dict__.items():
            await process_attribute(G, node_id, child_name, child_value)

    elif isinstance(value, list) and all(isinstance(item, BaseModel) for item in value):
        # Lists of models: each item hangs off the same parent node.
        for item in value:
            await process_attribute(G, parent_id, attribute, item)
|
||||
|
||||
async def create_dynamic(graph_model, client):
    """Materialise *graph_model* (a pydantic model) into the graph held by
    *client* and return the client.

    The root node is added first, then every field of the model is processed
    recursively via process_attribute().
    """
    await client.load_graph_from_file()

    root_id = await generate_node_id(graph_model)
    root_data = graph_model.dict(exclude={'default_relationship', 'id'})
    print(root_data)
    await client.add_node(root_id, **root_data)

    # Pydantic models iterate as (field_name, field_value) pairs.
    for field_name, field_value in graph_model:
        await process_attribute(client, root_id, field_name, field_value)

    return client
|
||||
|
||||
|
||||
async def create_semantic_graph(graph_model_instance, graph_client):
    """Create a semantic graph from *graph_model_instance* using *graph_client*.

    Args:
        graph_model_instance: Pydantic graph model describing nodes/relations.
        graph_client: Graph client that persists and holds the graph.

    Returns:
        The graph client with all nodes and edges added.
    """
    # FIX: the graph was loaded here AND again inside create_dynamic(); the
    # redundant first load has been removed.
    return await create_dynamic(graph_model_instance, graph_client)
|
||||
|
||||
if __name__ == "__main__":
    import asyncio

    # Demo entry point: build an example graph model for one user with two
    # categorised documents and persist it through the NetworkX-backed client.

    # Initialize the graph client
    graph_client = get_graph_client(GraphDBType.NETWORKX)

    demo_documents = [
        Document(
            doc_id="doc1",
            title="Document 1",
            summary="Summary of Document 1",
            content_id="content_id_for_doc1",
            doc_type=DocumentType(type_id="PDF", description="Portable Document Format"),
            categories=[
                Category(category_id="finance", name="Finance", default_relationship=Relationship(type="belongs_to")),
                Category(category_id="tech", name="Technology", default_relationship=Relationship(type="belongs_to")),
            ],
            default_relationship=Relationship(type='has_document'),
        ),
        Document(
            doc_id="doc2",
            title="Document 2",
            summary="Summary of Document 2",
            content_id="content_id_for_doc2",
            doc_type=DocumentType(type_id="TXT", description="Text File"),
            categories=[
                Category(category_id="health", name="Health", default_relationship=Relationship(type="belongs_to")),
                Category(category_id="wellness", name="Wellness", default_relationship=Relationship(type="belongs_to")),
            ],
            default_relationship=Relationship(type='has_document'),
        ),
    ]

    graph_model_instance = DefaultGraphModel(
        id="user123",
        documents=demo_documents,
        user_properties=UserProperties(
            custom_properties={"age": "30"},
            location=UserLocation(location_id="ny", description="New York",
                                  default_relationship=Relationship(type='located_in')),
        ),
        default_fields={
            'created_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            'updated_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        },
    )

    # Run the graph creation asynchronously.
    G = asyncio.run(create_semantic_graph(graph_model_instance, graph_client))

    # Further nodes/edges could be added to G here if needed.
|
||||
|
||||
# Optionally, here you can add more nodes, edges, or perform other operations on the graph G
|
||||
|
||||
# async def create_semantic_graph(
|
||||
# ):
|
||||
# graph_type = GraphDBType.NETWORKX
|
||||
#
|
||||
# # Call the get_graph_client function with the selected graph type
|
||||
# graph_client = get_graph_client(graph_type)
|
||||
#
|
||||
# print(graph_client)
|
||||
#
|
||||
# await graph_client.load_graph_from_file()
|
||||
# #
|
||||
# #
|
||||
# #
|
||||
# # b = await graph_client.add_node("23ds", {
|
||||
# # 'username': 'exampleUser',
|
||||
# # 'email': 'user@example.com'
|
||||
# # })
|
||||
# #
|
||||
# # await graph_client.save_graph_to_file(b)
|
||||
# graph_model_instance = DefaultGraphModel(
|
||||
# id="user123",
|
||||
# documents=[
|
||||
# Document(
|
||||
# doc_id="doc1",
|
||||
# title="Document 1",
|
||||
# summary="Summary of Document 1",
|
||||
# content_id="content_id_for_doc1", # Assuming external content storage ID
|
||||
# doc_type=DocumentType(type_id="PDF", description="Portable Document Format"),
|
||||
# categories=[
|
||||
# Category(category_id="finance", name="Finance",
|
||||
# default_relationship=Relationship(type="belongs_to")),
|
||||
# Category(category_id="tech", name="Technology",
|
||||
# default_relationship=Relationship(type="belongs_to"))
|
||||
# ],
|
||||
# default_relationship=Relationship(type='has_document')
|
||||
# ),
|
||||
# Document(
|
||||
# doc_id="doc2",
|
||||
# title="Document 2",
|
||||
# summary="Summary of Document 2",
|
||||
# content_id="content_id_for_doc2",
|
||||
# doc_type=DocumentType(type_id="TXT", description="Text File"),
|
||||
# categories=[
|
||||
# Category(category_id="health", name="Health", default_relationship=Relationship(type="belongs_to")),
|
||||
# Category(category_id="wellness", name="Wellness",
|
||||
# default_relationship=Relationship(type="belongs_to"))
|
||||
# ],
|
||||
# default_relationship=Relationship(type='has_document')
|
||||
# )
|
||||
# ],
|
||||
# user_properties=UserProperties(
|
||||
# custom_properties={"age": "30"},
|
||||
# location=UserLocation(location_id="ny", description="New York",
|
||||
# default_relationship=Relationship(type='located_in'))
|
||||
# ),
|
||||
# default_fields={
|
||||
# 'created_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
# 'updated_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
# }
|
||||
# )
|
||||
#
|
||||
# G = await create_dynamic(graph_model_instance, graph_client)
|
||||
#
|
||||
# # print("Nodes and their data:")
|
||||
# # for node, data in G.graph.nodes(data=True):
|
||||
# # print(node, data)
|
||||
# #
|
||||
# # # Print edges with their data
|
||||
# # print("\nEdges and their data:")
|
||||
# # for source, target, data in G.graph.edges(data=True):
|
||||
# # print(f"{source} -> {target} {data}")
|
||||
# # print(G)
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
#
|
||||
# # return await graph_client.create( user_id = user_id, custom_user_properties=custom_user_properties, required_layers=required_layers, default_fields=default_fields, existing_graph=existing_graph)
|
||||
#
|
||||
#
|
||||
# if __name__ == "__main__":
|
||||
# import asyncio
|
||||
#
|
||||
# user_id = 'user123'
|
||||
# custom_user_properties = {
|
||||
# 'username': 'exampleUser',
|
||||
# 'email': 'user@example.com'
|
||||
# }
|
||||
# asyncio.run(create_semantic_graph())
|
||||
|
|
@ -1,146 +0,0 @@
|
|||
""" This module is responsible for creating a semantic graph """
|
||||
from datetime import datetime
|
||||
from enum import Enum, auto
|
||||
from typing import Type, Optional, Any
|
||||
from pydantic import BaseModel
|
||||
from cognitive_architecture.infrastructure.databases.graph.get_graph_client import get_graph_client
|
||||
from cognitive_architecture.shared.data_models import GraphDBType, DefaultGraphModel, Document, DocumentType, Category, Relationship, UserProperties, UserLocation
|
||||
|
||||
|
||||
|
||||
|
||||
async def generate_node_id(instance: BaseModel) -> str:
|
||||
for field in ['id', 'doc_id', 'location_id', 'type_id']:
|
||||
if hasattr(instance, field):
|
||||
return f"{instance.__class__.__name__}:{getattr(instance, field)}"
|
||||
return f"{instance.__class__.__name__}:default"
|
||||
async def add_node_and_edge(client, parent_id: Optional[str], node_id: str, node_data: dict, relationship_data: dict):
|
||||
await client.add_node(node_id, **node_data) # Add the current node with its data
|
||||
if parent_id:
|
||||
# Add an edge between the parent node and the current node with the correct relationship data
|
||||
await client.add_edge(parent_id, node_id, **relationship_data)
|
||||
|
||||
|
||||
async def process_attribute(G, parent_id: Optional[str], attribute: str, value: Any):
|
||||
if isinstance(value, BaseModel):
|
||||
node_id = await generate_node_id(value)
|
||||
node_data = value.dict(exclude={'default_relationship'})
|
||||
# Use the specified default relationship for the edge between the parent node and the current node
|
||||
relationship_data = value.default_relationship.dict() if hasattr(value, 'default_relationship') else {}
|
||||
await add_node_and_edge(G, parent_id, node_id, node_data, relationship_data)
|
||||
|
||||
# Recursively process nested attributes to ensure all nodes and relationships are added to the graph
|
||||
for sub_attr, sub_val in value.__dict__.items(): # Access attributes and their values directly
|
||||
await process_attribute(G, node_id, sub_attr, sub_val)
|
||||
|
||||
elif isinstance(value, list) and all(isinstance(item, BaseModel) for item in value):
|
||||
# For lists of BaseModel instances, process each item in the list
|
||||
for item in value:
|
||||
await process_attribute(G, parent_id, attribute, item)
|
||||
|
||||
async def create_dynamic(graph_model, client) :
|
||||
await client.load_graph_from_file()
|
||||
root_id = await generate_node_id(graph_model)
|
||||
node_data = graph_model.dict(exclude={'default_relationship', 'id'})
|
||||
print(node_data)
|
||||
await client.add_node(root_id, **node_data)
|
||||
|
||||
for attribute_name, attribute_value in graph_model:
|
||||
await process_attribute(client, root_id, attribute_name, attribute_value)
|
||||
|
||||
return client
|
||||
|
||||
async def create_semantic_graph(
|
||||
):
|
||||
graph_type = GraphDBType.NETWORKX
|
||||
|
||||
# Call the get_graph_client function with the selected graph type
|
||||
graph_client = get_graph_client(graph_type)
|
||||
|
||||
print(graph_client)
|
||||
|
||||
await graph_client.load_graph_from_file()
|
||||
#
|
||||
#
|
||||
#
|
||||
# b = await graph_client.add_node("23ds", {
|
||||
# 'username': 'exampleUser',
|
||||
# 'email': 'user@example.com'
|
||||
# })
|
||||
#
|
||||
# await graph_client.save_graph_to_file(b)
|
||||
graph_model_instance = DefaultGraphModel(
|
||||
id="user123",
|
||||
documents=[
|
||||
Document(
|
||||
doc_id="doc1",
|
||||
title="Document 1",
|
||||
summary="Summary of Document 1",
|
||||
content_id="content_id_for_doc1", # Assuming external content storage ID
|
||||
doc_type=DocumentType(type_id="PDF", description="Portable Document Format"),
|
||||
categories=[
|
||||
Category(category_id="finance", name="Finance",
|
||||
default_relationship=Relationship(type="belongs_to")),
|
||||
Category(category_id="tech", name="Technology",
|
||||
default_relationship=Relationship(type="belongs_to"))
|
||||
],
|
||||
default_relationship=Relationship(type='has_document')
|
||||
),
|
||||
Document(
|
||||
doc_id="doc2",
|
||||
title="Document 2",
|
||||
summary="Summary of Document 2",
|
||||
content_id="content_id_for_doc2",
|
||||
doc_type=DocumentType(type_id="TXT", description="Text File"),
|
||||
categories=[
|
||||
Category(category_id="health", name="Health", default_relationship=Relationship(type="belongs_to")),
|
||||
Category(category_id="wellness", name="Wellness",
|
||||
default_relationship=Relationship(type="belongs_to"))
|
||||
],
|
||||
default_relationship=Relationship(type='has_document')
|
||||
)
|
||||
],
|
||||
user_properties=UserProperties(
|
||||
custom_properties={"age": "30"},
|
||||
location=UserLocation(location_id="ny", description="New York",
|
||||
default_relationship=Relationship(type='located_in'))
|
||||
),
|
||||
default_fields={
|
||||
'created_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
'updated_at': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
}
|
||||
)
|
||||
|
||||
G = await create_dynamic(graph_model_instance, graph_client)
|
||||
|
||||
# print("Nodes and their data:")
|
||||
# for node, data in G.graph.nodes(data=True):
|
||||
# print(node, data)
|
||||
#
|
||||
# # Print edges with their data
|
||||
# print("\nEdges and their data:")
|
||||
# for source, target, data in G.graph.edges(data=True):
|
||||
# print(f"{source} -> {target} {data}")
|
||||
# print(G)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# return await graph_client.create( user_id = user_id, custom_user_properties=custom_user_properties, required_layers=required_layers, default_fields=default_fields, existing_graph=existing_graph)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import asyncio
|
||||
|
||||
user_id = 'user123'
|
||||
custom_user_properties = {
|
||||
'username': 'exampleUser',
|
||||
'email': 'user@example.com'
|
||||
}
|
||||
asyncio.run(create_semantic_graph())
|
||||
Loading…
Add table
Reference in a new issue