Test for update function (#1487)
<!-- .github/pull_request_template.md -->

## Description

Add updating of documents to the test that runs across different operating systems.

## Type of Change

<!-- Please check the relevant option -->

- [ ] Bug fix (non-breaking change that fixes an issue)
- [ ] New feature (non-breaking change that adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
- [ ] Documentation update
- [ ] Code refactoring
- [ ] Performance improvement
- [ ] Other (please specify):

## Pre-submission Checklist

<!-- Please check all boxes that apply before submitting your PR -->

- [ ] **I have tested my changes thoroughly before submitting this PR**
- [ ] **This PR contains minimal changes necessary to address the issue/feature**
- [ ] My code follows the project's coding standards and style guidelines
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] I have added necessary documentation (if applicable)
- [ ] All new and existing tests pass
- [ ] I have searched existing PRs to ensure this change hasn't been submitted already
- [ ] I have linked any relevant issues in the description
- [ ] My commits have clear and descriptive messages

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
Commit 8d9fbc0c0e — 5 changed files with 39 additions and 4 deletions
```diff
@@ -19,6 +19,7 @@ from .api.v1.add import add
 from .api.v1.delete import delete
 from .api.v1.cognify import cognify
 from .modules.memify import memify
+from .api.v1.update import update
 from .api.v1.config.config import config
 from .api.v1.datasets.datasets import datasets
 from .api.v1.prune import prune
```
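This hunk re-exports the new `update` coroutine at the package root, which is what lets the test later in this diff import it directly. A one-line confirmation sketch:

```python
# After this change, update is importable from the package root:
from cognee import update  # same object as cognee.api.v1.update.update
```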
```diff
@@ -73,7 +73,11 @@ def get_add_router() -> APIRouter:
     try:
         add_run = await cognee_add(
-            data, datasetName, user=user, dataset_id=datasetId, node_set=node_set
+            data,
+            datasetName,
+            user=user,
+            dataset_id=datasetId,
+            node_set=node_set if node_set else None,
         )

         if isinstance(add_run, PipelineRunErrored):
```
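This router (and the update router in the next hunk) now coerces a falsy `node_set` to `None` before forwarding it, so an empty list in the request body behaves the same as omitting the field. A minimal sketch of the normalization, outside any cognee context (`payload` stands in for a parsed request body):

```python
# Minimal sketch of the node_set normalization used in both routers.
payload = {"node_set": []}

node_set = payload.get("node_set")
normalized = node_set if node_set else None  # [] and None both become None

assert normalized is None
```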
```diff
@@ -75,7 +75,7 @@ def get_update_router() -> APIRouter:
             data=data,
             dataset_id=dataset_id,
             user=user,
-            node_set=node_set,
+            node_set=node_set if node_set else None,
         )

         # If any cognify run errored return JSONResponse with proper error status code
```
```diff
@@ -10,9 +10,9 @@ from cognee.api.v1.cognify import cognify
 async def update(
     data_id: UUID,
     data: Union[BinaryIO, list[BinaryIO], str, list[str]],
-    dataset_id: UUID,
     user: User = None,
     node_set: Optional[List[str]] = None,
+    dataset_id: Optional[UUID] = None,
     vector_db_config: dict = None,
     graph_db_config: dict = None,
     preferred_loaders: List[str] = None,
```
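With `dataset_id` moved behind `node_set` and defaulted to `None`, callers that supply it must pass it by keyword. A hedged usage sketch — the UUIDs are placeholders, and the behavior when `dataset_id` is omitted is an assumption based on the new default:

```python
import asyncio
from uuid import UUID

from cognee import update

async def replace_document() -> None:
    # Placeholder UUIDs; in practice these come from a prior add/cognify run.
    data_id = UUID("00000000-0000-0000-0000-000000000001")
    dataset_id = UUID("00000000-0000-0000-0000-000000000002")

    # dataset_id is now optional and passed by keyword.
    await update(
        data_id=data_id,
        data="Replacement text for the stored document.",
        dataset_id=dataset_id,
    )

asyncio.run(replace_document())
```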
```diff
@@ -6,6 +6,7 @@ from cognee.modules.search.operations import get_history
 from cognee.modules.users.methods import get_default_user
 from cognee.shared.logging_utils import get_logger
 from cognee.modules.search.types import SearchType
+from cognee import update

 logger = get_logger()
```
```diff
@@ -42,7 +43,7 @@ async def main():
     await cognee.add([text], dataset_name)

-    await cognee.cognify([dataset_name])
+    cognify_run_info = await cognee.cognify([dataset_name])

     from cognee.infrastructure.databases.vector import get_vector_engine
```
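The test now keeps the return value of `cognee.cognify`. Judging from how it is consumed in the next hunk, that value maps each processed dataset to a pipeline-run object carrying a `dataset_id` and per-document `data_ingestion_info`. A hedged helper sketching that shape (inferred from the test code, not from documented API):

```python
from uuid import UUID

def list_ingested_data_ids(cognify_run_info: dict) -> list[UUID]:
    """Collect the data_id of every ingested document from a cognify run.

    Assumed shape, inferred from the test below: a dict mapping each
    dataset to a pipeline-run object with `dataset_id` and
    `data_ingestion_info` attributes.
    """
    data_ids = []
    for run in cognify_run_info.values():
        for item in run.data_ingestion_info:
            data_ids.append(item["data_id"])
    return data_ids
```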
```diff
@@ -77,6 +78,35 @@ async def main():
     assert len(history) == 6, "Search history is not correct."

+    # Test updating of documents
+    # Get Pipeline Run object
+    pipeline_run_obj = list(cognify_run_info.values())[0]
+    for data_item in pipeline_run_obj.data_ingestion_info:
+        # Update all documents in dataset to only contain Mark and Cindy information
+        await update(
+            dataset_id=pipeline_run_obj.dataset_id,
+            data_id=data_item["data_id"],
+            data="Mark met with Cindy at a cafe.",
+        )
+
+    search_results = await cognee.search(
+        query_type=SearchType.GRAPH_COMPLETION, query_text="What information do you contain?"
+    )
+    assert "Mark" in search_results[0], (
+        "Failed to update document, no mention of Mark in search results"
+    )
+    assert "Cindy" in search_results[0], (
+        "Failed to update document, no mention of Cindy in search results"
+    )
+    assert "Artificial intelligence" not in search_results[0], (
+        "Failed to update document, Artificial intelligence still mentioned in search results"
+    )
+
+    # Test visualization
+    from cognee import visualize_graph
+
+    await visualize_graph()
+
     # Assert local data files are cleaned properly
     await cognee.prune.prune_data()
     data_root_directory = get_storage_config()["data_root_directory"]
```
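Taken together, the new block exercises the full update path: ingest, cognify, rewrite every document in the dataset, then confirm via graph-completion search that only the new content survives. A condensed, hedged sketch of the same flow (the dataset name and input text are placeholders, and a configured cognee environment is assumed):

```python
import asyncio

import cognee
from cognee import update
from cognee.modules.search.types import SearchType

async def main() -> None:
    dataset_name = "update_demo"  # placeholder dataset name
    await cognee.add(["Artificial intelligence is a broad field."], dataset_name)
    run_info = await cognee.cognify([dataset_name])

    # Rewrite every ingested document, mirroring the test above.
    run = list(run_info.values())[0]
    for item in run.data_ingestion_info:
        await update(
            dataset_id=run.dataset_id,
            data_id=item["data_id"],
            data="Mark met with Cindy at a cafe.",
        )

    results = await cognee.search(
        query_type=SearchType.GRAPH_COMPLETION,
        query_text="What information do you contain?",
    )
    print(results[0])  # should mention Mark and Cindy, not the original text

asyncio.run(main())
```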