Fix linting

yangdx 2025-11-18 08:07:54 +08:00
parent 21ad990e36
commit fc9f7c705e
2 changed files with 36 additions and 24 deletions

View file

@@ -14,7 +14,7 @@ Comprehensive test coverage for LightRAG's workspace isolation feature, ensuring
## Test Categories

### 1. Data Isolation Tests

**Tests:** 1, 4, 8, 9, 10
**Purpose:** Verify that data in one workspace doesn't leak into another

- **Test 1: Pipeline Status Isolation** - Core shared data structures remain separate
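The isolation property these tests exercise can be illustrated with a toy per-workspace registry. The sketch below uses a plain dict and a hypothetical `get_workspace_data` helper, not LightRAG's actual shared-storage API:

```python
from typing import Any, Dict

# Hypothetical stand-in for LightRAG's shared data structures, keyed by workspace.
_workspace_data: Dict[str, Dict[str, Any]] = {}


def get_workspace_data(workspace: str) -> Dict[str, Any]:
    """Return the mutable dict owned by one workspace, creating it on first use."""
    return _workspace_data.setdefault(workspace, {})


# Writes in one workspace must never be visible from another.
get_workspace_data("ws_a")["doc-1"] = {"status": "processed"}
assert "doc-1" not in get_workspace_data("ws_b")
```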
@@ -24,7 +24,7 @@ Comprehensive test coverage for LightRAG's workspace isolation feature, ensuring
- **Test 10: JsonKVStorage Integration** - Storage layer properly isolates data

### 2. Lock Mechanism Tests

**Tests:** 2, 5, 6
**Purpose:** Validate that locking mechanisms allow parallelism across workspaces while enforcing serialization within workspaces

- **Test 2: Lock Mechanism** - Different workspaces run in parallel, same workspace serializes
@@ -32,7 +32,7 @@ Comprehensive test coverage for LightRAG's workspace isolation feature, ensuring
- **Test 6: Namespace Lock Isolation** - Different namespaces within same workspace are independent

### 3. Backward Compatibility Tests

**Test:** 3
**Purpose:** Ensure legacy code without workspace parameters still functions correctly

- Default workspace fallback behavior
@@ -40,7 +40,7 @@ Comprehensive test coverage for LightRAG's workspace isolation feature, ensuring
- None vs empty string normalization

### 4. Error Handling Tests

**Test:** 7
**Purpose:** Validate guardrails for invalid configurations

- Missing workspace validation
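The normalization bullet above boils down to treating `None` and `""` identically. A hypothetical helper illustrating that behavior (the real function name and default value in LightRAG may differ):

```python
from typing import Optional

DEFAULT_WORKSPACE = "default"  # illustrative default, not necessarily LightRAG's


def normalize_workspace(workspace: Optional[str]) -> str:
    # None and "" both fall back to the default workspace, so legacy callers
    # that never pass a workspace keep working unchanged.
    return workspace if workspace else DEFAULT_WORKSPACE


assert normalize_workspace(None) == normalize_workspace("") == DEFAULT_WORKSPACE
```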
@@ -48,7 +48,7 @@ Comprehensive test coverage for LightRAG's workspace isolation feature, ensuring
- Edge case handling

### 5. End-to-End Integration Tests

**Test:** 11
**Purpose:** Validate complete LightRAG workflows maintain isolation

- Full document insertion pipeline
@@ -233,10 +233,10 @@ async def test_new_feature():
    print("\n" + "=" * 60)
    print("TEST N: Feature Name")
    print("=" * 60)

    # Test implementation
    # ...

    print("✅ PASSED: Feature Name")
    print(f" Validation details")
```
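For reference, here is a self-contained version of the template above that actually runs; the isolation check in the middle is a placeholder, not a real LightRAG call.

```python
import asyncio


async def test_new_feature():
    print("\n" + "=" * 60)
    print("TEST N: Feature Name")
    print("=" * 60)

    # Placeholder implementation: two per-workspace dicts must stay disjoint.
    store = {"ws_a": {"doc-1": "text"}, "ws_b": {}}
    assert "doc-1" not in store["ws_b"], "Data leaked across workspaces"

    print("✅ PASSED: Feature Name")
    print("   Validation details: ws_b saw none of ws_a's data")


if __name__ == "__main__":
    asyncio.run(test_new_feature())
```

Note the plain string in the final print: an f-string without any placeholders would itself trip the linter (Ruff/flake8 rule F541), which is the kind of issue this commit cleans up.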

View file

@@ -70,11 +70,11 @@ async def _measure_lock_parallelism(
    workload: List[Tuple[str, str, str]], hold_time: float = 0.05
) -> Tuple[int, List[Tuple[str, str]], Dict[str, float]]:
    """Run lock acquisition workload and capture peak concurrency and timeline.

    Args:
        workload: List of (name, workspace, namespace) tuples
        hold_time: How long each worker holds the lock (seconds)

    Returns:
        Tuple of (max_parallel, timeline, metrics) where:
        - max_parallel: Peak number of concurrent lock holders
@@ -99,28 +99,28 @@ async def _measure_lock_parallelism(
            running -= 1

    await asyncio.gather(*(worker(*args) for args in workload))

    metrics = {
        "total_duration": time.time() - start_time,
        "max_concurrency": max_parallel,
        "avg_hold_time": hold_time,
        "num_workers": len(workload),
    }

    return max_parallel, timeline, metrics
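The worker body that updates the `running` and `max_parallel` counters falls outside the hunks shown here. A minimal self-contained sketch of the same measurement idea, using a plain per-(workspace, namespace) `asyncio.Lock` table instead of LightRAG's real lock helper, and a non-underscored name to mark it as a stand-in:

```python
import asyncio
import time
from collections import defaultdict
from typing import Dict, List, Tuple

# Stand-in lock table: one asyncio.Lock per (workspace, namespace) pair.
_locks: Dict[Tuple[str, str], asyncio.Lock] = defaultdict(asyncio.Lock)


async def measure_lock_parallelism(
    workload: List[Tuple[str, str, str]], hold_time: float = 0.05
) -> Tuple[int, List[Tuple[str, str]], Dict[str, float]]:
    running = 0
    max_parallel = 0
    timeline: List[Tuple[str, str]] = []
    start_time = time.time()

    async def worker(name: str, workspace: str, namespace: str) -> None:
        nonlocal running, max_parallel
        async with _locks[(workspace, namespace)]:
            running += 1
            max_parallel = max(max_parallel, running)
            timeline.append((name, "start"))
            await asyncio.sleep(hold_time)  # simulate work while holding the lock
            timeline.append((name, "end"))
            running -= 1

    await asyncio.gather(*(worker(*args) for args in workload))
    metrics = {
        "total_duration": time.time() - start_time,
        "max_concurrency": max_parallel,
        "avg_hold_time": hold_time,
        "num_workers": len(workload),
    }
    return max_parallel, timeline, metrics


if __name__ == "__main__":
    peak, _, _ = asyncio.run(
        measure_lock_parallelism([("a", "ws_a", "ns"), ("b", "ws_b", "ns")])
    )
    print(f"Peak concurrency across two workspaces: {peak}")
```

Because the locks are keyed by both workspace and namespace, two workspaces (or two namespaces in one workspace) can overlap, while repeated use of the same key serializes.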
def _assert_no_timeline_overlap(timeline: List[Tuple[str, str]]) -> None:
    """Ensure that timeline events never overlap for sequential execution.

    This function implements a finite state machine that validates:
    - No overlapping lock acquisitions (only one task active at a time)
    - Proper lock release order (task releases its own lock)
    - All locks are properly released

    Args:
        timeline: List of (name, event) tuples where event is "start" or "end"

    Raises:
        AssertionError: If timeline shows overlapping execution or improper locking
    """
@@ -219,14 +219,14 @@ async def test_lock_mechanism():
    # Test 2.1: Different workspaces should run in parallel
    print("\nTest 2.1: Different workspaces locks should be parallel")

    # Support stress testing with configurable number of workers
    num_workers = PARALLEL_WORKERS if STRESS_TEST_MODE else 3
    parallel_workload = [
        (f"ws_{chr(97+i)}", f"ws_{chr(97+i)}", "test_namespace")
        for i in range(num_workers)
    ]

    max_parallel, timeline_parallel, metrics = await _measure_lock_parallelism(
        parallel_workload
    )
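`STRESS_TEST_MODE` and `PARALLEL_WORKERS` are used here but defined elsewhere in the file; one plausible environment-driven definition, an assumption rather than the file's actual values:

```python
import os

# Hypothetical configuration knobs: flip stress mode and worker count via env vars.
STRESS_TEST_MODE = os.getenv("STRESS_TEST_MODE", "0").lower() in ("1", "true", "yes")
PARALLEL_WORKERS = int(os.getenv("PARALLEL_WORKERS", "10"))
```

With the default of three workers, `chr(97 + i)` yields the workspace names ws_a, ws_b, and ws_c.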
@@ -236,8 +236,12 @@ async def test_lock_mechanism():
    )

    print("✅ PASSED: Lock Mechanism - Parallel (Different Workspaces)")
-    print(f" Locks overlapped for different workspaces (max concurrency={max_parallel})")
-    print(f" Performance: {metrics['total_duration']:.3f}s for {metrics['num_workers']} workers")
+    print(
+        f" Locks overlapped for different workspaces (max concurrency={max_parallel})"
+    )
+    print(
+        f" Performance: {metrics['total_duration']:.3f}s for {metrics['num_workers']} workers"
+    )

    # Test 2.2: Same workspace should serialize
    print("\nTest 2.2: Same workspace locks should serialize")
@@ -245,9 +249,11 @@ async def test_lock_mechanism():
        ("serial_run_1", "ws_same", "test_namespace"),
        ("serial_run_2", "ws_same", "test_namespace"),
    ]
-    max_parallel_serial, timeline_serial, metrics_serial = await _measure_lock_parallelism(
-        serial_workload
-    )
+    (
+        max_parallel_serial,
+        timeline_serial,
+        metrics_serial,
+    ) = await _measure_lock_parallelism(serial_workload)
    assert max_parallel_serial == 1, (
        "Same workspace locks should not overlap; "
        f"observed {max_parallel_serial} with timeline {timeline_serial}"
@@ -256,7 +262,9 @@ async def test_lock_mechanism():
    print("✅ PASSED: Lock Mechanism - Serial (Same Workspace)")
    print(" Same workspace operations executed sequentially with no overlap")
-    print(f" Performance: {metrics_serial['total_duration']:.3f}s for {metrics_serial['num_workers']} tasks")
+    print(
+        f" Performance: {metrics_serial['total_duration']:.3f}s for {metrics_serial['num_workers']} tasks"
+    )


# =============================================================================
@@ -519,8 +527,12 @@ async def test_different_namespace_lock_isolation():
    )

    print("✅ PASSED: Different Namespace Lock Isolation")
-    print(f" Different namespace locks ran in parallel (max concurrency={max_parallel})")
-    print(f" Performance: {metrics['total_duration']:.3f}s for {metrics['num_workers']} namespaces")
+    print(
+        f" Different namespace locks ran in parallel (max concurrency={max_parallel})"
+    )
+    print(
+        f" Performance: {metrics['total_duration']:.3f}s for {metrics['num_workers']} namespaces"
+    )


# =============================================================================