<!-- .github/pull_request_template.md --> ## Description This PR introduces a shared lock mechanism in KuzuAdapter to avoid the case where multiple subprocesses from different environments try to use the same Kuzu database. ## Type of Change <!-- Please check the relevant option --> - [ ] Bug fix (non-breaking change that fixes an issue) - [x] New feature (non-breaking change that adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to change) - [ ] Documentation update - [ ] Code refactoring - [x] Performance improvement - [ ] Other (please specify): ## Screenshots/Videos (if applicable) None ## Pre-submission Checklist <!-- Please check all boxes that apply before submitting your PR --> - [x] **I have tested my changes thoroughly before submitting this PR** - [x] **This PR contains minimal changes necessary to address the issue/feature** - [x] My code follows the project's coding standards and style guidelines - [x] I have added tests that prove my fix is effective or that my feature works - [x] I have added necessary documentation (if applicable) - [x] All new and existing tests pass - [x] I have searched existing PRs to ensure this change hasn't been submitted already - [x] I have linked any relevant issues in the description - [x] My commits have clear and descriptive messages ## DCO Affirmation I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
84 lines
2.6 KiB
Python
84 lines
2.6 KiB
Python
import asyncio
import os
import pathlib
import subprocess
import sys

import cognee
from cognee.shared.logging_utils import get_logger
|
|
|
|
# Module-level logger shared by the test routines below.
logger = get_logger()


# NOTE(review): this string appears after imports and the logger assignment,
# so it is a bare expression statement rather than the module's __doc__.
"""
Test: Redis-based Kùzu Locking Across Subprocesses

This test ensures the Redis shared lock correctly serializes access to the Kùzu
database when multiple subprocesses (writer/reader and cognify tasks) run in parallel.

If this test fails, it indicates the locking mechanism is not properly handling
concurrent subprocess access.
"""
|
async def concurrent_subprocess_access():
    """Exercise the shared Kùzu database lock with concurrent subprocesses.

    Two scenarios are run back to back:

    1. A writer subprocess and a reader subprocess operating on the same
       Kùzu database at the same time.
    2. Two cognify subprocesses, each processing its own dataset, running in
       parallel against the shared database.

    Raises:
        RuntimeError: If any subprocess exits with a non-zero return code,
            which indicates the lock failed to serialize concurrent access.
            (The original version discarded the ``wait()`` return codes, so
            subprocess failures went unnoticed.)
    """
    base_dir = pathlib.Path(__file__).parent

    data_directory_path = str((base_dir / ".data_storage/concurrent_tasks").resolve())
    cognee_directory_path = str((base_dir / ".cognee_system/concurrent_tasks").resolve())

    subprocess_directory = (base_dir / "subprocesses").resolve()
    writer_path = subprocess_directory / "writer.py"
    reader_path = subprocess_directory / "reader.py"

    cognee.config.data_root_directory(data_directory_path)
    cognee.config.system_root_directory(cognee_directory_path)

    # Start from a clean slate so state from previous runs cannot interfere.
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    # Scenario 1: launch writer and reader concurrently against the same DB.
    # sys.executable (not the non-public os.sys) guarantees the subprocesses
    # run under the same interpreter/environment as this test.
    writer_process = subprocess.Popen([sys.executable, str(writer_path)])
    reader_process = subprocess.Popen([sys.executable, str(reader_path)])

    # Wait for both processes to complete and verify they succeeded.
    writer_rc = writer_process.wait()
    reader_rc = reader_process.wait()
    if writer_rc != 0 or reader_rc != 0:
        raise RuntimeError(
            f"Writer/reader subprocesses failed (writer={writer_rc}, reader={reader_rc})"
        )

    logger.info("Basic write read subprocess example finished")

    # Reset all state before the second scenario.
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    text = """
    This is the text of the first cognify subprocess
    """
    await cognee.add(text, dataset_name="first_cognify_dataset")

    text = """
    This is the text of the second cognify subprocess
    """
    await cognee.add(text, dataset_name="second_cognify_dataset")

    first_cognify_path = subprocess_directory / "simple_cognify_1.py"
    second_cognify_path = subprocess_directory / "simple_cognify_2.py"

    # Scenario 2: two cognify pipelines in parallel on separate datasets.
    first_cognify_process = subprocess.Popen([sys.executable, str(first_cognify_path)])
    second_cognify_process = subprocess.Popen([sys.executable, str(second_cognify_path)])

    # Wait for both processes to complete and verify they succeeded.
    first_rc = first_cognify_process.wait()
    second_rc = second_cognify_process.wait()
    if first_rc != 0 or second_rc != 0:
        raise RuntimeError(
            f"Cognify subprocesses failed (first={first_rc}, second={second_rc})"
        )

    logger.info("Database concurrent subprocess example finished")
|
|
if __name__ == "__main__":
    # Entry point: run the full concurrent-subprocess locking test.
    asyncio.run(concurrent_subprocess_access())
|