fix memory leak (#478)

This commit is contained in:
Preston Rasmussen 2025-05-12 16:32:27 -04:00 committed by GitHub
parent cd097cd4c7
commit 4198483993
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 7 additions and 11 deletions

View file

@@ -458,14 +458,10 @@ async def community_search(
                 config.mmr_lambda,
             )
         elif config.reranker == CommunityReranker.cross_encoder:
-            summary_to_uuid_map = {
-                node.summary: node.uuid for result in search_results for node in result
-            }
-            reranked_summaries = await cross_encoder.rank(query, list(summary_to_uuid_map.keys()))
+            name_to_uuid_map = {node.name: node.uuid for result in search_results for node in result}
+            reranked_nodes = await cross_encoder.rank(query, list(name_to_uuid_map.keys()))
             reranked_uuids = [
-                summary_to_uuid_map[fact]
-                for fact, score in reranked_summaries
-                if score >= reranker_min_score
+                name_to_uuid_map[name] for name, score in reranked_nodes if score >= reranker_min_score
             ]

     reranked_communities = [community_uuid_map[uuid] for uuid in reranked_uuids]

View file

@@ -18,6 +18,7 @@ import logging
 from contextlib import suppress
 from time import time
 from typing import Any
+from uuid import uuid4

 import pydantic
 from pydantic import BaseModel, Field
@@ -395,7 +396,8 @@ async def extract_attributes_from_node(
             Field(description=field_info.description),
         )

-    entity_attributes_model = pydantic.create_model('EntityAttributes', **attributes_definitions)
+    unique_model_name = f'EntityAttributes_{uuid4().hex}'
+    entity_attributes_model = pydantic.create_model(unique_model_name, **attributes_definitions)

     summary_context: dict[str, Any] = {
         'node': node_context,
@@ -411,12 +413,10 @@ async def extract_attributes_from_node(
     )

     node.summary = llm_response.get('summary', node.summary)
-    node.name = llm_response.get('name', node.name)
     node_attributes = {key: value for key, value in llm_response.items()}

     with suppress(KeyError):
         del node_attributes['summary']
-        del node_attributes['name']

     node.attributes.update(node_attributes)

View file

@@ -1,7 +1,7 @@
 [project]
 name = "graphiti-core"
 description = "A temporal graph building library"
-version = "0.11.6pre6"
+version = "0.11.6pre7"
 authors = [
     { "name" = "Paul Paliychuk", "email" = "paul@getzep.com" },
     { "name" = "Preston Rasmussen", "email" = "preston@getzep.com" },