refactor: Use include_payload where necessary

Pass include_payload=True at the vector_engine.search call sites that read
result payloads, and accept the parameter (currently a no-op) in
NeptuneAnalyticsAdapter.search.

Igor Ilic 2026-01-15 11:32:04 +01:00
parent 51a9ff0613
commit 4f7ab87683
10 changed files with 22 additions and 9 deletions
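
The call-site pattern this commit applies, as a minimal sketch (the asyncio
wrapper and the printed field are illustrative, not part of the diff):
passing include_payload=True asks the vector engine to return each hit's
stored payload, which the retrievers and scripts below read via
result.payload["text"].

    import asyncio

    from cognee.infrastructure.databases.vector import get_vector_engine


    async def main():
        vector_engine = get_vector_engine()
        # include_payload=True populates result.payload on each hit; without
        # it, the payload["text"] lookups below would have nothing to read.
        results = await vector_engine.search("Entity_name", "AI", include_payload=True)
        print(results[0].payload["text"])


    asyncio.run(main())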

@@ -236,6 +236,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
         query_vector: Optional[List[float]] = None,
         limit: Optional[int] = None,
         with_vector: bool = False,
+        include_payload: bool = False,  # TODO: Add support for this parameter
     ):
         """
         Perform a search in the specified collection using either a text query or a vector
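
Note: NeptuneAnalyticsAdapter only accepts the new parameter for interface
compatibility (hence the TODO above). A minimal sketch of what honoring it
could look like (ScoredResult and self._run_vector_query are assumed names
for illustration, not the adapter's actual internals):

    # Hypothetical sketch, not the adapter's real implementation.
    async def search(
        self,
        collection_name: str,
        query_text: Optional[str] = None,
        query_vector: Optional[List[float]] = None,
        limit: Optional[int] = None,
        with_vector: bool = False,
        include_payload: bool = False,
    ):
        rows = await self._run_vector_query(collection_name, query_text, query_vector, limit)
        return [
            ScoredResult(
                id=row["id"],
                score=row["score"],
                # Materialize the stored properties only when the caller asks.
                payload=row["properties"] if include_payload else None,
            )
            for row in rows
        ]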

@@ -47,7 +47,9 @@ class ChunksRetriever(BaseRetriever):
         vector_engine = get_vector_engine()
         try:
-            found_chunks = await vector_engine.search("DocumentChunk_text", query, limit=self.top_k)
+            found_chunks = await vector_engine.search(
+                "DocumentChunk_text", query, limit=self.top_k, include_payload=True
+            )
             logger.info(f"Found {len(found_chunks)} chunks from vector search")
             await update_node_access_timestamps(found_chunks)

@@ -67,7 +67,9 @@ class TripletRetriever(BaseRetriever):
                 "In order to use TRIPLET_COMPLETION first use the create_triplet_embeddings memify pipeline. "
             )
-        found_triplets = await vector_engine.search("Triplet_text", query, limit=self.top_k)
+        found_triplets = await vector_engine.search(
+            "Triplet_text", query, limit=self.top_k, include_payload=True
+        )
         if len(found_triplets) == 0:
             return ""

@@ -97,7 +97,7 @@ async def test_vector_engine_search_none_limit():
     query_vector = (await vector_engine.embedding_engine.embed_text([query_text]))[0]
     result = await vector_engine.search(
-        collection_name=collection_name, query_vector=query_vector, limit=None
+        collection_name=collection_name, query_vector=query_vector, limit=None, include_payload=True
     )
     # Check that we did not accidentally use any default value for limit

@@ -48,7 +48,7 @@ async def main():
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "AI"))[0]
+    random_node = (await vector_engine.search("Entity_name", "AI", include_payload=True))[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(

@@ -63,7 +63,9 @@ async def main():
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (
+        await vector_engine.search("Entity_name", "Quantum computer", include_payload=True)
+    )[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(

@@ -52,7 +52,9 @@ async def main():
     await cognee.cognify([dataset_name])
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (
+        await vector_engine.search("Entity_name", "Quantum computer", include_payload=True)
+    )[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(

@@ -163,7 +163,9 @@ async def main():
     await test_getting_of_documents(dataset_name_1)
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (
+        await vector_engine.search("Entity_name", "Quantum computer", include_payload=True)
+    )[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(

@@ -58,7 +58,9 @@ async def main():
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "Quantum computer"))[0]
+    random_node = (
+        await vector_engine.search("Entity_name", "Quantum computer", include_payload=True)
+    )[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(

@@ -43,7 +43,7 @@ async def main():
     from cognee.infrastructure.databases.vector import get_vector_engine
     vector_engine = get_vector_engine()
-    random_node = (await vector_engine.search("Entity_name", "AI"))[0]
+    random_node = (await vector_engine.search("Entity_name", "AI", include_payload=True))[0]
     random_node_name = random_node.payload["text"]
     search_results = await cognee.search(