test: add integration test for brute_force_triplet_search
This commit is contained in:
parent
e5d4100070
commit
872795f0cc
1 changed files with 67 additions and 0 deletions
|
|
@ -0,0 +1,67 @@
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
import cognee
|
||||||
|
|
||||||
|
from cognee.modules.graph.cognee_graph.CogneeGraphElements import Edge
|
||||||
|
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
|
||||||
|
|
||||||
|
|
||||||
|
# An embedding/vector provider is required for the end-to-end flow; skip the
# test entirely when neither OpenAI nor Azure OpenAI credentials are present.
_has_provider_credentials = bool(
    os.getenv("OPENAI_API_KEY") or os.getenv("AZURE_OPENAI_API_KEY")
)
skip_without_provider = pytest.mark.skipif(
    not _has_provider_credentials,
    reason="requires embedding/vector provider credentials",
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
async def clean_environment():
    """Configure isolated storage directories and ensure cleanup before/after.

    Points cognee's system and data roots at test-local directories, prunes
    any pre-existing data/system state, yields to the test, then performs a
    best-effort prune on teardown.
    """
    # Resolve the repository-relative base directory for test-local storage.
    base_dir = pathlib.Path(__file__).parent.parent.parent.parent
    system_directory_path = str(base_dir / ".cognee_system/test_brute_force_triplet_search_e2e")
    data_directory_path = str(base_dir / ".data_storage/test_brute_force_triplet_search_e2e")

    cognee.config.system_root_directory(system_directory_path)
    cognee.config.data_root_directory(data_directory_path)

    # Start from a clean slate so earlier runs cannot leak into this test.
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    yield

    # Best-effort teardown: a cleanup failure must not mask the test outcome.
    # contextlib.suppress makes the intentional swallow explicit, unlike a
    # bare try/except-pass.
    with contextlib.suppress(Exception):
        await cognee.prune.prune_data()
        await cognee.prune.prune_system(metadata=True)
|
||||||
|
|
||||||
|
|
||||||
|
@skip_without_provider
@pytest.mark.asyncio
async def test_brute_force_triplet_search_end_to_end(clean_environment):
    """Minimal end-to-end exercise of single and batch triplet search."""

    text = """
    Cognee is an open-source AI memory engine that structures data into searchable formats for use with AI agents.
    The company focuses on persistent memory systems using knowledge graphs and vector search.
    It is a Berlin-based startup building infrastructure for context-aware AI applications.
    """

    # Ingest and build the graph once; both search modes run against it.
    await cognee.add(text)
    await cognee.cognify()

    def _only_edges(results):
        # Every element of a (non-empty) result list must be a graph Edge.
        return all(isinstance(item, Edge) for item in results)

    # Single-query mode returns one flat list of edges.
    single_result = await brute_force_triplet_search(query="What is NLP?", top_k=1)
    assert isinstance(single_result, list)
    if single_result:
        assert _only_edges(single_result)

    # Batch mode returns one list of edges per query, in query order.
    batch_queries = ["What is Cognee?", "What is the company's focus?"]
    batch_result = await brute_force_triplet_search(query_batch=batch_queries, top_k=1)

    assert isinstance(batch_result, list)
    assert len(batch_result) == len(batch_queries)
    assert all(isinstance(per_query, list) for per_query in batch_result)
    for per_query in batch_result:
        if per_query:
            assert _only_edges(per_query)
|
||||||
Loading…
Add table
Reference in a new issue