feat: Add test for permissions, change Cognee search return value (#1058)

## Description

Add tests for permissions in Cognee, and change the Cognee search return value so each result carries its dataset id and name.

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
parent cb45897d7d
commit e51de46163

6 changed files with 249 additions and 10 deletions
.github/workflows/e2e_tests.yml (vendored): 29 additions

```diff
@@ -248,3 +248,32 @@ jobs:
           EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
           EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
         run: poetry run python ./cognee/tests/test_parallel_databases.py
+
+  test-permissions:
+    name: Test permissions with different situations in Cognee
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Cognee Setup
+        uses: ./.github/actions/cognee_setup
+        with:
+          python-version: '3.11.x'
+
+      - name: Install specific graph db dependency
+        run: |
+          poetry install
+
+      - name: Run permissions test
+        env:
+          ENV: 'dev'
+          LLM_MODEL: ${{ secrets.LLM_MODEL }}
+          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
+          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
+          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
+          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
+          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
+          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
+          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
+        run: poetry run python ./cognee/tests/test_permissions.py
```
```diff
@@ -1,6 +1,7 @@
 import asyncio
 from pydantic import BaseModel
 from typing import Union, Optional
+from uuid import UUID

 from cognee.shared.logging_utils import get_logger
 from cognee.shared.data_models import KnowledgeGraph
@@ -29,7 +30,7 @@ update_status_lock = asyncio.Lock()


 async def cognify(
-    datasets: Union[str, list[str]] = None,
+    datasets: Union[str, list[str], list[UUID]] = None,
     user: User = None,
     graph_model: BaseModel = KnowledgeGraph,
     chunker=TextChunker,
```
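The widened signature lets callers address datasets by UUID as well as by name, which the new permissions test relies on when cognifying another user's dataset by id. As a rough illustration (the helper below is hypothetical, not Cognee code), a Union-typed argument like this is typically normalized to a flat list before any permission checks:

```python
from typing import Optional, Union
from uuid import UUID


def normalize_datasets(
    datasets: Optional[Union[str, list[str], list[UUID]]],
) -> list[Union[str, UUID]]:
    # Accept a single dataset name, a list of names, or a list of dataset ids,
    # and always hand back a flat list for downstream processing.
    if datasets is None:
        return []
    if isinstance(datasets, str):
        return [datasets]
    return list(datasets)


assert normalize_datasets("NLP") == ["NLP"]
assert normalize_datasets([UUID(int=7)]) == [UUID(int=7)]
assert normalize_datasets(None) == []
```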
```diff
@@ -55,7 +55,7 @@ async def delete(

     # Handle different input types
     if isinstance(data, str):
-        if data.startswith("file://"):  # It's a file path
+        if data.startswith("file://") or data.startswith("/"):  # It's a file path
             with open(data.replace("file://", ""), mode="rb") as file:
                 classified_data = classify(file)
                 content_hash = classified_data.get_metadata()["content_hash"]
@@ -77,7 +77,7 @@ async def delete(
         # Handle list of inputs sequentially
         results = []
         for item in data:
-            result = await delete(item, dataset_name, dataset[0].id, mode)
+            result = await delete(item, dataset_name, dataset[0].id, mode, user=user)
             results.append(result)
         return {"status": "success", "message": "Multiple documents deleted", "results": results}
     else:  # It's already a BinaryIO
```
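The first hunk broadens file-path detection so plain absolute paths work alongside file:// URIs; the second forwards user= on the recursive delete call so per-item deletes run with the caller's permissions. A minimal sketch of the broadened check in isolation (hypothetical helper name, POSIX-style paths assumed):

```python
from typing import Optional


def to_local_path(data: str) -> Optional[str]:
    # Mirror the widened check: both "file://" URIs and absolute paths
    # are treated as file references; anything else is raw text content.
    if data.startswith("file://") or data.startswith("/"):
        return data.replace("file://", "")
    return None


assert to_local_path("file:///tmp/doc.txt") == "/tmp/doc.txt"
assert to_local_path("/tmp/doc.txt") == "/tmp/doc.txt"
assert to_local_path("plain text input") is None
```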
```diff
@@ -57,7 +57,7 @@ async def search(
     """
     # Use search function filtered by permissions if access control is enabled
     if os.getenv("ENABLE_BACKEND_ACCESS_CONTROL", "false").lower() == "true":
-        return await permissions_search(
+        return await authorized_search(
             query_text, query_type, user, dataset_ids, system_prompt_path, top_k
         )

```
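The access-control gate is a plain string environment flag; only the literal "true" (case-insensitive) enables it. A tiny runnable sketch of the same parse, with the wrapper name being an assumption for illustration:

```python
import os


def access_control_enabled() -> bool:
    # Only the string "true" (any casing) turns the feature on;
    # an unset variable or any other value leaves it off.
    return os.getenv("ENABLE_BACKEND_ACCESS_CONTROL", "false").lower() == "true"


os.environ["ENABLE_BACKEND_ACCESS_CONTROL"] = "True"
assert access_control_enabled()
os.environ["ENABLE_BACKEND_ACCESS_CONTROL"] = "0"
assert not access_control_enabled()
```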
```diff
@@ -143,7 +143,7 @@ async def specific_search(
     return results


-async def permissions_search(
+async def authorized_search(
     query_text: str,
     query_type: SearchType,
     user: User = None,
@@ -190,7 +190,11 @@ async def specific_search_by_context(
         search_results = await specific_search(
             query_type, query_text, user, system_prompt_path=system_prompt_path, top_k=top_k
         )
-        return {dataset.name: search_results}
+        return {
+            "search_result": search_results,
+            "dataset_id": dataset.id,
+            "dataset_name": dataset.name,
+        }

     # Search every dataset async based on query and appropriate database configuration
     tasks = []
```
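The second hunk is the return-value change named in the PR title: each per-dataset search result is now a dict carrying the results together with the dataset's id and name, rather than a bare {dataset.name: results} mapping. A hedged sketch of consuming that shape (sample data only, not real search output):

```python
from uuid import uuid4

# Shape per the hunk above: one dict per searched dataset.
results = [
    {
        "search_result": ["Quantum computers use qubits."],
        "dataset_id": uuid4(),
        "dataset_name": "QUANTUM",
    }
]

for entry in results:
    # The id makes follow-up calls (e.g. permission grants) unambiguous
    # even when two users own datasets with the same name.
    print(f"{entry['dataset_name']} ({entry['dataset_id']}): {entry['search_result']}")
```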
```diff
@@ -180,10 +180,12 @@ async def ingest_data(

             await session.commit()

-            await give_permission_on_dataset(user, dataset.id, "read")
-            await give_permission_on_dataset(user, dataset.id, "write")
-            await give_permission_on_dataset(user, dataset.id, "delete")
-            await give_permission_on_dataset(user, dataset.id, "share")
+            # Only give permissions if the dataset owner is the same as the user (to avoid giving delete and share permission to non-owner users)
+            if dataset.owner_id == user.id:
+                await give_permission_on_dataset(user, dataset.id, "read")
+                await give_permission_on_dataset(user, dataset.id, "write")
+                await give_permission_on_dataset(user, dataset.id, "delete")
+                await give_permission_on_dataset(user, dataset.id, "share")

         return file_paths
```
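The new guard grants the full read/write/delete/share set only when the ingesting user owns the dataset; shared users must be granted rights explicitly via authorized_give_permission_on_datasets. A self-contained sketch of the pattern with in-memory stand-ins (all types and names here are illustrative, not Cognee's models):

```python
from dataclasses import dataclass, field
from uuid import UUID, uuid4


@dataclass
class Dataset:
    id: UUID
    owner_id: UUID
    permissions: dict[UUID, set[str]] = field(default_factory=dict)


def grant_owner_permissions(dataset: Dataset, user_id: UUID) -> None:
    # Mirror the diff: read/write/delete/share only for the owner,
    # so non-owner users never receive delete or share implicitly.
    if dataset.owner_id == user_id:
        dataset.permissions.setdefault(user_id, set()).update(
            {"read", "write", "delete", "share"}
        )


owner, guest = uuid4(), uuid4()
ds = Dataset(id=uuid4(), owner_id=owner)
grant_owner_permissions(ds, guest)  # no-op: guest is not the owner
grant_owner_permissions(ds, owner)
assert ds.permissions == {owner: {"read", "write", "delete", "share"}}
assert guest not in ds.permissions
```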
cognee/tests/test_permissions.py (new file): 203 additions

```python
import os
import cognee
import pathlib

from cognee.modules.users.exceptions import PermissionDeniedError
from cognee.shared.logging_utils import get_logger
from cognee.modules.search.types import SearchType
from cognee.modules.users.methods import get_default_user, create_user
from cognee.modules.users.permissions.methods import authorized_give_permission_on_datasets

logger = get_logger()


async def main():
    # Enable permissions feature
    os.environ["ENABLE_BACKEND_ACCESS_CONTROL"] = "True"

    # Clean up test directories before starting
    data_directory_path = str(
        pathlib.Path(
            os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_permissions")
        ).resolve()
    )
    cognee_directory_path = str(
        pathlib.Path(
            os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_permissions")
        ).resolve()
    )

    cognee.config.data_root_directory(data_directory_path)
    cognee.config.system_root_directory(cognee_directory_path)

    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    explanation_file_path = os.path.join(
        pathlib.Path(__file__).parent, "test_data/Natural_language_processing.txt"
    )

    # Add document for default user
    await cognee.add([explanation_file_path], dataset_name="NLP")
    default_user = await get_default_user()

    text = """A quantum computer is a computer that takes advantage of quantum mechanical phenomena.
    At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states.
    Classical physics cannot explain the operation of these quantum devices, and a scalable quantum computer could perform some calculations exponentially faster (with respect to input size scaling) than any modern "classical" computer. In particular, a large-scale quantum computer could break widely used encryption schemes and aid physicists in performing physical simulations; however, the current state of the technology is largely experimental and impractical, with several obstacles to useful applications. Moreover, scalable quantum computers do not hold promise for many practical tasks, and for many important tasks quantum speedups are proven impossible.
    The basic unit of information in quantum computing is the qubit, similar to the bit in traditional digital electronics. Unlike a classical bit, a qubit can exist in a superposition of its two "basis" states. When measuring a qubit, the result is a probabilistic output of a classical bit, therefore making quantum computers nondeterministic in general. If a quantum computer manipulates the qubit in a particular way, wave interference effects can amplify the desired measurement results. The design of quantum algorithms involves creating procedures that allow a quantum computer to perform calculations efficiently and quickly.
    Physically engineering high-quality qubits has proven challenging. If a physical qubit is not sufficiently isolated from its environment, it suffers from quantum decoherence, introducing noise into calculations. Paradoxically, perfectly isolating qubits is also undesirable because quantum computations typically need to initialize qubits, perform controlled qubit interactions, and measure the resulting quantum states. Each of those operations introduces errors and suffers from noise, and such inaccuracies accumulate.
    In principle, a non-quantum (classical) computer can solve the same computational problems as a quantum computer, given enough time. Quantum advantage comes in the form of time complexity rather than computability, and quantum complexity theory shows that some quantum algorithms for carefully selected tasks require exponentially fewer computational steps than the best known non-quantum algorithms. Such tasks can in theory be solved on a large-scale quantum computer whereas classical computers would not finish computations in any reasonable amount of time. However, quantum speedup is not universal or even typical across computational tasks, since basic tasks such as sorting are proven to not allow any asymptotic quantum speedup. Claims of quantum supremacy have drawn significant attention to the discipline, but are demonstrated on contrived tasks, while near-term practical use cases remain limited.
    """

    # Add document for test user
    test_user = await create_user("user@example.com", "example")
    await cognee.add([text], dataset_name="QUANTUM", user=test_user)

    await cognee.cognify(["NLP"], user=default_user)
    await cognee.cognify(["QUANTUM"], user=test_user)

    # Check if default_user can only see information from the NLP dataset
    search_results = await cognee.search(
        query_type=SearchType.GRAPH_COMPLETION,
        query_text="What is in the document?",
        user=default_user,
    )
    assert len(search_results) == 1, "The search results list length is not one."
    print("\n\nExtracted sentences are:\n")
    for result in search_results:
        print(f"{result}\n")
    assert search_results[0]["dataset_name"] == "NLP", (
        f"Dict must contain dataset name 'NLP': {search_results[0]}"
    )

    # Check if test_user can only see information from the QUANTUM dataset
    search_results = await cognee.search(
        query_type=SearchType.GRAPH_COMPLETION,
        query_text="What is in the document?",
        user=test_user,
    )
    assert len(search_results) == 1, "The search results list length is not one."
    print("\n\nExtracted sentences are:\n")
    for result in search_results:
        print(f"{result}\n")
    assert search_results[0]["dataset_name"] == "QUANTUM", (
        f"Dict must contain dataset name 'QUANTUM': {search_results[0]}"
    )

    # Try to add a document with default_user to test_user's dataset (test write permission enforcement)
    test_user_dataset_id = search_results[0]["dataset_id"]
    add_error = False
    try:
        await cognee.add(
            [explanation_file_path],
            dataset_name="QUANTUM",
            dataset_id=test_user_dataset_id,
            user=default_user,
        )
    except PermissionDeniedError:
        add_error = True
    assert add_error, "PermissionDeniedError was not raised during add as expected"

    # Try to cognify test_user's dataset with default_user (test write permission enforcement)
    cognify_error = False
    try:
        await cognee.cognify(datasets=[test_user_dataset_id], user=default_user)
    except PermissionDeniedError:
        cognify_error = True
    assert cognify_error, "PermissionDeniedError was not raised during cognify as expected"

    # Try to add permission for a dataset default_user does not have share permission for
    give_permission_error = False
    try:
        await authorized_give_permission_on_datasets(
            default_user.id,
            [test_user_dataset_id],
            "write",
            default_user.id,
        )
    except PermissionDeniedError:
        give_permission_error = True
    assert give_permission_error, (
        "PermissionDeniedError was not raised during assignment of permission as expected"
    )

    # Actually give default_user permission to write on test_user's dataset
    await authorized_give_permission_on_datasets(
        default_user.id,
        [test_user_dataset_id],
        "write",
        test_user.id,
    )

    # Add new data to test_user's dataset from default_user
    await cognee.add(
        [explanation_file_path],
        dataset_name="QUANTUM",
        dataset_id=test_user_dataset_id,
        user=default_user,
    )
    await cognee.cognify(datasets=[test_user_dataset_id], user=default_user)

    # Actually give default_user permission to read on test_user's dataset
    await authorized_give_permission_on_datasets(
        default_user.id,
        [test_user_dataset_id],
        "read",
        test_user.id,
    )

    # Check if default_user can see test_user's dataset now
    search_results = await cognee.search(
        query_type=SearchType.GRAPH_COMPLETION,
        query_text="What is in the document?",
        user=default_user,
        dataset_ids=[test_user_dataset_id],
    )
    assert len(search_results) == 1, "The search results list length is not one."
    print("\n\nExtracted sentences are:\n")
    for result in search_results:
        print(f"{result}\n")

    assert search_results[0]["dataset_name"] == "QUANTUM", (
        f"Dict must contain dataset name 'QUANTUM': {search_results[0]}"
    )

    # Check if default_user can see information from both datasets now
    search_results = await cognee.search(
        query_type=SearchType.GRAPH_COMPLETION,
        query_text="What is in the document?",
        user=default_user,
    )
    assert len(search_results) == 2, "The search results list length is not two."
    print("\n\nExtracted sentences are:\n")
    for result in search_results:
        print(f"{result}\n")

    # Try deleting data from test_user's dataset with default_user without delete permission
    delete_error = False
    try:
        await cognee.delete([text], dataset_id=test_user_dataset_id, user=default_user)
    except PermissionDeniedError:
        delete_error = True

    assert delete_error, "PermissionDeniedError was not raised during delete operation as expected"

    # Try deleting data from test_user's dataset with test_user
    await cognee.delete([text], dataset_id=test_user_dataset_id, user=test_user)

    # Actually give default_user permission to delete data from test_user's dataset
    await authorized_give_permission_on_datasets(
        default_user.id,
        [test_user_dataset_id],
        "delete",
        test_user.id,
    )

    # Try deleting data from test_user's dataset with default_user after getting delete permission
    await cognee.delete([explanation_file_path], dataset_id=test_user_dataset_id, user=default_user)


if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
```