""" Copyright 2024, Zep Software, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging from contextlib import suppress from time import time from typing import Any from uuid import uuid4 import pydantic from pydantic import BaseModel, Field from graphiti_core.graphiti_types import GraphitiClients from graphiti_core.helpers import MAX_REFLEXION_ITERATIONS, semaphore_gather from graphiti_core.llm_client import LLMClient from graphiti_core.llm_client.config import ModelSize from graphiti_core.nodes import EntityNode, EpisodeType, EpisodicNode, create_entity_node_embeddings from graphiti_core.prompts import prompt_library from graphiti_core.prompts.dedupe_nodes import NodeResolutions from graphiti_core.prompts.extract_nodes import ( ExtractedEntities, ExtractedEntity, MissedEntities, ) from graphiti_core.search.search import search from graphiti_core.search.search_config import SearchResults from graphiti_core.search.search_config_recipes import NODE_HYBRID_SEARCH_RRF from graphiti_core.search.search_filters import SearchFilters from graphiti_core.utils.datetime_utils import utc_now from graphiti_core.utils.maintenance.edge_operations import filter_existing_duplicate_of_edges logger = logging.getLogger(__name__) async def extract_nodes_reflexion( llm_client: LLMClient, episode: EpisodicNode, previous_episodes: list[EpisodicNode], node_names: list[str], ) -> list[str]: # Prepare context for LLM context = { 'episode_content': episode.content, 'previous_episodes': [ep.content for ep in previous_episodes], 'extracted_entities': node_names, } llm_response = await llm_client.generate_response( prompt_library.extract_nodes.reflexion(context), MissedEntities ) missed_entities = llm_response.get('missed_entities', []) return missed_entities async def extract_nodes( clients: GraphitiClients, episode: EpisodicNode, previous_episodes: list[EpisodicNode], entity_types: dict[str, BaseModel] | None = None, ) -> list[EntityNode]: start = time() llm_client = clients.llm_client llm_response = {} custom_prompt = '' entities_missed = True reflexion_iterations = 0 entity_types_context = [ { 'entity_type_id': 0, 'entity_type_name': 'Entity', 'entity_type_description': 'Default entity classification. 


async def extract_nodes(
    clients: GraphitiClients,
    episode: EpisodicNode,
    previous_episodes: list[EpisodicNode],
    entity_types: dict[str, BaseModel] | None = None,
) -> list[EntityNode]:
    start = time()
    llm_client = clients.llm_client
    llm_response = {}
    custom_prompt = ''
    entities_missed = True
    reflexion_iterations = 0

    # Entity-type menu shown to the LLM; id 0 is always the default 'Entity'.
    entity_types_context = [
        {
            'entity_type_id': 0,
            'entity_type_name': 'Entity',
            'entity_type_description': 'Default entity classification. Use this entity type if the entity is not one of the other listed types.',
        }
    ]

    entity_types_context += (
        [
            {
                'entity_type_id': i + 1,
                'entity_type_name': type_name,
                'entity_type_description': type_model.__doc__,
            }
            for i, (type_name, type_model) in enumerate(entity_types.items())
        ]
        if entity_types is not None
        else []
    )

    context = {
        'episode_content': episode.content,
        'episode_timestamp': episode.valid_at.isoformat(),
        'previous_episodes': [ep.content for ep in previous_episodes],
        'custom_prompt': custom_prompt,
        'entity_types': entity_types_context,
        'source_description': episode.source_description,
    }

    # Extract, then re-extract until the reflexion check finds nothing missing
    # or the iteration cap is reached.
    while entities_missed and reflexion_iterations <= MAX_REFLEXION_ITERATIONS:
        if episode.source == EpisodeType.message:
            llm_response = await llm_client.generate_response(
                prompt_library.extract_nodes.extract_message(context),
                response_model=ExtractedEntities,
            )
        elif episode.source == EpisodeType.text:
            llm_response = await llm_client.generate_response(
                prompt_library.extract_nodes.extract_text(context),
                response_model=ExtractedEntities,
            )
        elif episode.source == EpisodeType.json:
            llm_response = await llm_client.generate_response(
                prompt_library.extract_nodes.extract_json(context),
                response_model=ExtractedEntities,
            )

        extracted_entities: list[ExtractedEntity] = [
            ExtractedEntity(**entity_data)
            for entity_data in llm_response.get('extracted_entities', [])
        ]

        reflexion_iterations += 1
        if reflexion_iterations < MAX_REFLEXION_ITERATIONS:
            missing_entities = await extract_nodes_reflexion(
                llm_client,
                episode,
                previous_episodes,
                [entity.name for entity in extracted_entities],
            )

            entities_missed = len(missing_entities) != 0

            custom_prompt = 'Make sure that the following entities are extracted: '
            for entity in missing_entities:
                custom_prompt += f'\n{entity},'
            # Feed the missed entities back into the next extraction pass;
            # without this the loop would re-issue an identical prompt.
            context['custom_prompt'] = custom_prompt

    filtered_extracted_entities = [entity for entity in extracted_entities if entity.name.strip()]
    end = time()
    logger.debug(f'Extracted new nodes: {filtered_extracted_entities} in {(end - start) * 1000} ms')

    # Convert the extracted data into EntityNode objects
    extracted_nodes = []
    for extracted_entity in filtered_extracted_entities:
        type_id = extracted_entity.entity_type_id
        # Guard against an out-of-range type id from the LLM; fall back to 'Entity'
        entity_type_name = (
            entity_types_context[type_id].get('entity_type_name')
            if 0 <= type_id < len(entity_types_context)
            else 'Entity'
        )
        labels: list[str] = list({'Entity', str(entity_type_name)})

        new_node = EntityNode(
            name=extracted_entity.name,
            group_id=episode.group_id,
            labels=labels,
            summary='',
            created_at=utc_now(),
        )
        extracted_nodes.append(new_node)
        logger.debug(f'Created new node: {new_node.name} (UUID: {new_node.uuid})')

    logger.debug(f'Extracted nodes: {[(n.name, n.uuid) for n in extracted_nodes]}')
    return extracted_nodes
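

# Example usage (a minimal sketch, assuming an already-configured
# `GraphitiClients` and persisted `EpisodicNode` objects; the names are
# hypothetical and the calls must run inside an event loop):
#
#     nodes = await extract_nodes(clients, episode, previous_episodes)
#     resolved, uuid_map, duplicate_pairs = await resolve_extracted_nodes(
#         clients, nodes, episode=episode, previous_episodes=previous_episodes
#     )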


async def dedupe_extracted_nodes(
    llm_client: LLMClient,
    extracted_nodes: list[EntityNode],
    existing_nodes: list[EntityNode],
) -> tuple[list[EntityNode], dict[str, str]]:
    start = time()

    # build existing node map
    node_map: dict[str, EntityNode] = {}
    for node in existing_nodes:
        node_map[node.uuid] = node

    # Prepare context for LLM
    existing_nodes_context = [
        {'uuid': node.uuid, 'name': node.name, 'summary': node.summary} for node in existing_nodes
    ]

    extracted_nodes_context = [
        {'uuid': node.uuid, 'name': node.name, 'summary': node.summary} for node in extracted_nodes
    ]

    context = {
        'existing_nodes': existing_nodes_context,
        'extracted_nodes': extracted_nodes_context,
    }

    llm_response = await llm_client.generate_response(prompt_library.dedupe_nodes.node(context))

    duplicate_data = llm_response.get('duplicates', [])

    end = time()
    logger.debug(f'Deduplicated nodes: {duplicate_data} in {(end - start) * 1000} ms')

    uuid_map: dict[str, str] = {}
    for duplicate in duplicate_data:
        uuid_map[duplicate['uuid']] = duplicate['duplicate_of']

    # Replace each extracted node with its existing counterpart when one exists
    nodes: list[EntityNode] = []
    for node in extracted_nodes:
        if node.uuid in uuid_map:
            existing_uuid = uuid_map[node.uuid]
            nodes.append(node_map[existing_uuid])
        else:
            nodes.append(node)

    return nodes, uuid_map


async def resolve_extracted_nodes(
    clients: GraphitiClients,
    extracted_nodes: list[EntityNode],
    episode: EpisodicNode | None = None,
    previous_episodes: list[EpisodicNode] | None = None,
    entity_types: dict[str, BaseModel] | None = None,
) -> tuple[list[EntityNode], dict[str, str], list[tuple[EntityNode, EntityNode]]]:
    llm_client = clients.llm_client
    driver = clients.driver

    # Retrieve candidate duplicates for each extracted node concurrently
    search_results: list[SearchResults] = await semaphore_gather(
        *[
            search(
                clients=clients,
                query=node.name,
                group_ids=[node.group_id],
                search_filter=SearchFilters(),
                config=NODE_HYBRID_SEARCH_RRF,
            )
            for node in extracted_nodes
        ]
    )

    # Key by uuid to drop repeated candidates across the per-node searches
    existing_nodes_dict: dict[str, EntityNode] = {
        node.uuid: node for result in search_results for node in result.nodes
    }
    existing_nodes: list[EntityNode] = list(existing_nodes_dict.values())

    existing_nodes_context = [
        {
            'idx': i,
            'name': candidate.name,
            'entity_types': candidate.labels,
            **candidate.attributes,
        }
        for i, candidate in enumerate(existing_nodes)
    ]

    entity_types_dict: dict[str, BaseModel] = entity_types if entity_types is not None else {}

    # Prepare context for LLM; use the fallback description when a node's
    # label has no registered entity type (a bare `.get(...).__doc__` would
    # otherwise surface NoneType's docstring instead of the fallback).
    extracted_nodes_context = []
    for i, node in enumerate(extracted_nodes):
        entity_type = entity_types_dict.get(
            next((item for item in node.labels if item != 'Entity'), '')
        )
        extracted_nodes_context.append(
            {
                'id': i,
                'name': node.name,
                'entity_type': node.labels,
                'entity_type_description': entity_type.__doc__
                if entity_type is not None
                else 'Default Entity Type',
            }
        )

    context = {
        'extracted_nodes': extracted_nodes_context,
        'existing_nodes': existing_nodes_context,
        'episode_content': episode.content if episode is not None else '',
        'previous_episodes': [ep.content for ep in previous_episodes]
        if previous_episodes is not None
        else [],
    }

    llm_response = await llm_client.generate_response(
        prompt_library.dedupe_nodes.nodes(context),
        response_model=NodeResolutions,
    )

    node_resolutions: list = llm_response.get('entity_resolutions', [])

    resolved_nodes: list[EntityNode] = []
    uuid_map: dict[str, str] = {}
    node_duplicates: list[tuple[EntityNode, EntityNode]] = []
    for resolution in node_resolutions:
        resolution_id: int = resolution.get('id', -1)
        duplicate_idx: int = resolution.get('duplicate_idx', -1)

        if not 0 <= resolution_id < len(extracted_nodes):
            logger.warning(f'Resolution id {resolution_id} out of range; skipping')
            continue

        extracted_node = extracted_nodes[resolution_id]

        # An out-of-range duplicate_idx means the LLM found no match,
        # so the extracted node survives as-is.
        resolved_node = (
            existing_nodes[duplicate_idx]
            if 0 <= duplicate_idx < len(existing_nodes)
            else extracted_node
        )

        # resolved_node.name = resolution.get('name')

        resolved_nodes.append(resolved_node)
        uuid_map[extracted_node.uuid] = resolved_node.uuid

        additional_duplicates: list[int] = resolution.get('additional_duplicates', [])
        for idx in additional_duplicates:
            if not 0 <= idx < len(existing_nodes):
                continue
            existing_node = existing_nodes[idx]
            if existing_node == resolved_node:
                continue

            node_duplicates.append((resolved_node, existing_node))

    logger.debug(f'Resolved nodes: {[(n.name, n.uuid) for n in resolved_nodes]}')

    new_node_duplicates: list[
        tuple[EntityNode, EntityNode]
    ] = await filter_existing_duplicate_of_edges(driver, node_duplicates)

    return resolved_nodes, uuid_map, new_node_duplicates
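

# A minimal sketch of a custom entity type usable as a value in the
# `entity_types` dicts accepted above and below (the class is hypothetical;
# its docstring feeds 'entity_type_description' and each field becomes an
# attribute the LLM is asked to fill in):
#
#     class Person(BaseModel):
#         """A human individual mentioned in an episode."""
#
#         occupation: str | None = Field(None, description='Stated occupation')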


async def extract_attributes_from_nodes(
    clients: GraphitiClients,
    nodes: list[EntityNode],
    episode: EpisodicNode | None = None,
    previous_episodes: list[EpisodicNode] | None = None,
    entity_types: dict[str, BaseModel] | None = None,
) -> list[EntityNode]:
    llm_client = clients.llm_client
    embedder = clients.embedder

    updated_nodes: list[EntityNode] = await semaphore_gather(
        *[
            extract_attributes_from_node(
                llm_client,
                node,
                episode,
                previous_episodes,
                entity_types.get(next((item for item in node.labels if item != 'Entity'), ''))
                if entity_types is not None
                else None,
            )
            for node in nodes
        ]
    )

    await create_entity_node_embeddings(embedder, updated_nodes)

    return updated_nodes


async def extract_attributes_from_node(
    llm_client: LLMClient,
    node: EntityNode,
    episode: EpisodicNode | None = None,
    previous_episodes: list[EpisodicNode] | None = None,
    entity_type: BaseModel | None = None,
) -> EntityNode:
    node_context: dict[str, Any] = {
        'name': node.name,
        'summary': node.summary,
        'entity_types': node.labels,
        'attributes': node.attributes,
    }

    # Build a one-off pydantic model so the LLM response is validated against
    # the entity type's declared fields plus the summary.
    attributes_definitions: dict[str, Any] = {
        'summary': (
            str,
            Field(
                description='Summary containing the important information about the entity. Under 250 words',
            ),
        )
    }

    if entity_type is not None:
        for field_name, field_info in entity_type.model_fields.items():
            attributes_definitions[field_name] = (
                field_info.annotation,
                Field(description=field_info.description),
            )

    unique_model_name = f'EntityAttributes_{uuid4().hex}'
    entity_attributes_model = pydantic.create_model(unique_model_name, **attributes_definitions)

    summary_context: dict[str, Any] = {
        'node': node_context,
        'episode_content': episode.content if episode is not None else '',
        'previous_episodes': [ep.content for ep in previous_episodes]
        if previous_episodes is not None
        else [],
    }

    llm_response = await llm_client.generate_response(
        prompt_library.extract_nodes.extract_attributes(summary_context),
        response_model=entity_attributes_model,
        model_size=ModelSize.small,
    )

    node.summary = llm_response.get('summary', node.summary)
    node_attributes = dict(llm_response)

    # The summary lives on the node itself, not in its attributes
    with suppress(KeyError):
        del node_attributes['summary']

    node.attributes.update(node_attributes)

    return node


async def dedupe_node_list(
    llm_client: LLMClient,
    nodes: list[EntityNode],
) -> tuple[list[EntityNode], dict[str, str]]:
    start = time()

    # build node map
    node_map = {}
    for node in nodes:
        node_map[node.uuid] = node

    # Prepare context for LLM
    nodes_context = [{'uuid': node.uuid, 'name': node.name, **node.attributes} for node in nodes]

    context = {
        'nodes': nodes_context,
    }

    llm_response = await llm_client.generate_response(
        prompt_library.dedupe_nodes.node_list(context)
    )

    nodes_data = llm_response.get('nodes', [])

    end = time()
    logger.debug(f'Deduplicated nodes: {nodes_data} in {(end - start) * 1000} ms')

    # Get full node data: the first uuid in each group is the node that is
    # kept; every other uuid in the group is mapped onto it.
    unique_nodes = []
    uuid_map: dict[str, str] = {}
    for node_data in nodes_data:
        node_instance: EntityNode | None = node_map.get(node_data['uuids'][0])
        if node_instance is None:
            logger.warning(f'Node {node_data["uuids"][0]} not found in node map')
            continue
        node_instance.summary = node_data['summary']
        unique_nodes.append(node_instance)

        for uuid in node_data['uuids'][1:]:
            uuid_map[uuid] = node_instance.uuid

    return unique_nodes, uuid_map
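

# The response shape `dedupe_node_list` parses, shown for reference with
# illustrative values (the actual payload comes from the LLM):
#
#     {
#         'nodes': [
#             {'uuids': ['uuid-a', 'uuid-b'], 'summary': 'Merged summary ...'},
#         ]
#     }
#
# Each group keeps the node for its first uuid; the remaining uuids are
# recorded in uuid_map as pointing at it.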