This commit is contained in:
hzywhite 2025-09-02 03:54:20 +08:00
parent 9b7ed84e05
commit d8b2264d8b
77 changed files with 2829 additions and 1102 deletions


@ -58,6 +58,8 @@ from lightrag.kg.shared_storage import (
)
from fastapi.security import OAuth2PasswordRequestForm
from lightrag.api.auth import auth_handler
from lightrag.ragmanager import RAGManager
from raganything import RAGAnything, RAGAnythingConfig
# use the .env that is inside the current folder
# allows to use different .env file for each lightrag instance
@ -504,10 +506,144 @@ def create_app(args):
ollama_server_infos=ollama_server_infos,
)
# Initialize RAGAnything with comprehensive error handling
rag_anything = None
raganything_enabled = False
raganything_error_message = None
try:
api_key = get_env_value("LLM_BINDING_API_KEY", "", str)
base_url = get_env_value("LLM_BINDING_HOST", "", str)
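# Illustrative .env entries for the two settings read above (values are placeholders):
#   LLM_BINDING_API_KEY=sk-...
#   LLM_BINDING_HOST=https://api.openai.com/v1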
# Validate required configuration
if not api_key:
raise ValueError(
"LLM_BINDING_API_KEY is required for RAGAnything functionality"
)
if not base_url:
raise ValueError(
"LLM_BINDING_HOST is required for RAGAnything functionality"
)
config = RAGAnythingConfig(
working_dir=args.working_dir or "./rag_storage",
parser="mineru", # Parser selection: mineru or docling
parse_method="auto", # Parse method: auto, ocr, or txt
enable_image_processing=True,
enable_table_processing=True,
enable_equation_processing=True,
)
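# Note: the processing calls later in this commit also pass parse_method="auto"
# explicitly (see pipeline_index_files_raganything and the upload handler).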
# Define LLM model function
def llm_model_func(prompt, system_prompt=None, history_messages=[], **kwargs):
return openai_complete_if_cache(
"gpt-4o-mini",
prompt,
system_prompt=system_prompt,
history_messages=history_messages,
api_key=api_key,
base_url=base_url,
**kwargs,
)
# Define vision model function for image processing
def vision_model_func(
prompt, system_prompt=None, history_messages=[], image_data=None, **kwargs
):
if image_data:
return openai_complete_if_cache(
"gpt-4o",
"",
system_prompt=None,
history_messages=[],
# Only include a system message when a system prompt is given, so the
# messages list never contains a None entry
messages=(
[{"role": "system", "content": system_prompt}] if system_prompt else []
)
+ [
{
"role": "user",
"content": [
{"type": "text", "text": prompt},
{
"type": "image_url",
"image_url": {
"url": f"data:image/jpeg;base64,{image_data}"
},
},
],
}
],
api_key=api_key,
base_url=base_url,
**kwargs,
)
else:
return llm_model_func(prompt, system_prompt, history_messages, **kwargs)
# Define embedding function
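# Note: embedding_dim=3072 matches OpenAI's text-embedding-3-large; adjust both
# values together if a different embedding model is configured.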
raganything_embedding_func = EmbeddingFunc(
embedding_dim=3072,
max_token_size=8192,
func=lambda texts: openai_embed(
texts,
model="text-embedding-3-large",
api_key=api_key,
base_url=base_url,
),
)
# Initialize RAGAnything with new dataclass structure
logger.info("Initializing RAGAnything functionality...")
rag_anything = RAGAnything(
lightrag=rag,
config=config,
llm_model_func=llm_model_func,
vision_model_func=vision_model_func,
embedding_func=raganything_embedding_func,
)
logger.info("Check the download status of the RAGANything parser...")
rag_anything.verify_parser_installation_once()
RAGManager.set_rag(rag_anything)
raganything_enabled = True
logger.info(
"✅ The RAGAnything feature has been successfully enabled, supporting multimodal document processing functionality"
)
except ImportError as e:
raganything_error_message = (
f"RAGAnything dependency package not installed: {str(e)}"
)
logger.warning(f"⚠️ {raganything_error_message}")
logger.info(
"💡 Please run 'pip install raganything' to install dependency packages to enable multimodal document processing functionality"
)
except ValueError as e:
raganything_error_message = f"RAGAnything configuration error: {str(e)}"
logger.warning(f"⚠️ {raganything_error_message}")
logger.info(
"💡 Please check if the environment variables LLM-BINDING_API_KEY and LLM-BINDING_HOST are set correctly"
)
except Exception as e:
raganything_error_message = f"RAGAnything initialization failed: {str(e)}"
logger.error(f"{raganything_error_message}")
logger.info(
"💡 The system will run in basic mode and only support standard document processing functions"
)
if not raganything_enabled:
logger.info(
"🔄 The system has been downgraded to basic mode, but LightRAG core functions are still available"
)
# Add routes
app.include_router(
create_document_routes(
rag,
rag_anything,
doc_manager,
api_key,
)


@ -3,6 +3,9 @@ This module contains all document-related routes for the LightRAG API.
"""
import asyncio
import json
import uuid
from lightrag.utils import logger, get_pinyin_sort_key
import aiofiles
import shutil
@ -18,6 +21,7 @@ from fastapi import (
File,
HTTPException,
UploadFile,
Form,
)
from pydantic import BaseModel, Field, field_validator
@ -26,6 +30,7 @@ from lightrag.base import DeletionResult, DocProcessingStatus, DocStatus
from lightrag.utils import generate_track_id
from lightrag.api.utils_api import get_combined_auth_dependency
from ..config import global_args
from raganything import RAGAnything
# Function to format datetime to ISO format string with timezone information
@ -107,6 +112,80 @@ def sanitize_filename(filename: str, input_dir: Path) -> str:
return clean_name
class ScanRequest(BaseModel):
"""Request model for document scanning operations.
Attributes:
framework (str | None): Processing framework to use for scanning.
Can be "lightrag" or "raganything". If None, uses default framework.
"""
framework: str | None = None
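# Illustrative request body for the scan endpoint defined below:
#   {"framework": "raganything"}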
class SchemeConfig(BaseModel):
"""Configuration model for processing schemes.
Defines the processing framework and optional extractor to use for document processing.
Attributes:
framework (Literal['lightrag', 'raganything']): Processing framework to use.
- "lightrag": Standard LightRAG processing for text-based documents
- "raganything": Advanced multimodal processing with image/table/equation support
extractor (Literal['mineru', 'docling', '']): Document extraction tool to use.
- "mineru": MinerU parser for comprehensive document parsing
- "docling": Docling parser for office document processing
- "": Default/automatic extractor selection
"""
framework: Literal["lightrag", "raganything"]
extractor: Literal["mineru", "docling", ""] = ""  # defaults to automatic extractor selection
class Scheme(BaseModel):
"""Base model for processing schemes.
Attributes:
name (str): Human-readable name for the processing scheme
config (SchemeConfig): Configuration settings for the scheme
"""
name: str
config: SchemeConfig
class Scheme_include_id(Scheme):
"""Scheme model with unique identifier included.
Extends the base Scheme model to include a unique ID field for
identification and management operations.
Attributes:
id (int): Unique identifier for the scheme
name (str): Inherited from Scheme
config (SchemeConfig): Inherited from Scheme
"""
id: int
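# Illustrative entry as persisted in ./examples/schemes.json (the id is made up):
#   {"id": 123456, "name": "Multimodal PDF", "config": {"framework": "raganything", "extractor": "mineru"}}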
class SchemesResponse(BaseModel):
"""Response model for scheme management operations.
Used for all scheme-related endpoints to provide consistent response format
for scheme retrieval, creation, update, and deletion operations.
Attributes:
status (str): Operation status ("success", "error")
message (Optional[str]): Additional message with operation details
data (Optional[List[Dict[str, Any]]]): List of scheme objects when retrieving schemes
"""
status: str = Field(..., description="Operation status")
message: Optional[str] = Field(None, description="Additional message")
data: Optional[List[Dict[str, Any]]] = Field(None, description="List of schemes")
class ScanResponse(BaseModel):
"""Response model for document scanning operation
@ -372,12 +451,20 @@ class DocStatusResponse(BaseModel):
default=None, description="Additional metadata about the document"
)
file_path: str = Field(description="Path to the document file")
scheme_name: Optional[str] = Field(
default=None, description="Name of the processing scheme used for this document"
)
multimodal_content: Optional[list[dict[str, Any]]] = Field(
default=None, description="Multimodal content of the document"
)
class Config:
json_schema_extra = {
"example": {
"id": "doc_123456",
"content_summary": "Research paper on machine learning",
"scheme_name": "lightrag",
"multimodal_content": None,
"content_length": 15240,
"status": "PROCESSED",
"created_at": "2025-03-31T12:34:56",
@ -411,6 +498,8 @@ class DocsStatusesResponse(BaseModel):
{
"id": "doc_123",
"content_summary": "Pending document",
"scheme_name": "lightrag",
"multimodal_content": None,
"content_length": 5000,
"status": "PENDING",
"created_at": "2025-03-31T10:00:00",
@ -426,6 +515,8 @@ class DocsStatusesResponse(BaseModel):
{
"id": "doc_456",
"content_summary": "Processed document",
"scheme_name": "lightrag",
"multimodal_content": None,
"content_length": 8000,
"status": "PROCESSED",
"created_at": "2025-03-31T09:00:00",
@ -1297,6 +1388,57 @@ async def pipeline_index_files(
logger.error(traceback.format_exc())
async def pipeline_index_files_raganything(
rag_anything: RAGAnything,
file_paths: List[Path],
track_id: str = None,
scheme_name: str = None,
):
"""Index multiple files using RAGAnything framework for multimodal processing.
Args:
rag_anything (RAGAnything): RAGAnything instance for multimodal document processing
file_paths (List[Path]): List of file paths to be processed
track_id (str, optional): Tracking ID for batch monitoring. Defaults to None.
scheme_name (str, optional): Processing scheme name for categorization.
Defaults to None.
Note:
- Uses RAGAnything's process_document_complete_with_multimodal_content method for each file
- Supports multimodal content processing (images, tables, equations)
- Files are processed with "auto" parse method and "modelscope" source
- Output is saved to "./output" directory
- Errors are logged but don't stop processing of remaining files
"""
if not file_paths:
return
try:
# Use get_pinyin_sort_key for Chinese pinyin sorting
sorted_file_paths = sorted(
file_paths, key=lambda p: get_pinyin_sort_key(str(p))
)
# Process files sequentially with track_id
for file_path in sorted_file_paths:
success = (
await rag_anything.process_document_complete_with_multimodal_content(
file_path=str(file_path),
output_dir="./output",
parse_method="auto",
source="modelscope",
scheme_name=scheme_name,
)
)
if success:
logger.info(f"Successfully processed {file_path} with RAGAnything")
except Exception as e:
error_msg = f"Error indexing files: {str(e)}"
logger.error(error_msg)
logger.error(traceback.format_exc())
async def pipeline_index_texts(
rag: LightRAG,
texts: List[str],
@ -1326,7 +1468,11 @@ async def pipeline_index_texts(
async def run_scanning_process(
rag: LightRAG, doc_manager: DocumentManager, track_id: str = None
rag: LightRAG,
rag_anything: RAGAnything,
doc_manager: DocumentManager,
track_id: str = None,
scheme_name: str = None,
):
"""Background task to scan and index documents
@ -1342,8 +1488,47 @@ async def run_scanning_process(
if new_files:
# Process all files at once with track_id
await pipeline_index_files(rag, new_files, track_id)
logger.info(f"Scanning process completed: {total_files} files Processed.")
if scheme_name == "lightrag":
await pipeline_index_files(
rag, new_files, track_id, scheme_name=scheme_name
)
logger.info(
f"Scanning process completed with lightrag: {total_files} files Processed."
)
elif scheme_name == "raganything":
from lightrag.kg.shared_storage import get_namespace_data
try:
pipeline_status = await get_namespace_data("pipeline_status")
is_scan_disabled = pipeline_status.get("scan_disabled", False)
is_pipeline_busy = pipeline_status.get("busy", False)
if is_pipeline_busy:
logger.info(
"Pipeline is currently busy, skipping raganything processing to avoid conflicts..."
)
return
if is_scan_disabled:
logger.info(
"Scanning is currently disabled, skipping raganything processing to avoid conflicts..."
)
else:
await pipeline_index_files_raganything(
rag_anything, new_files, track_id, scheme_name=scheme_name
)
logger.info(
f"Scanning proces completed with raganything: {total_files} files Processed."
)
except Exception as e:
logger.debug(
f"Could not check pipeline status, proceeding with processing: {e}"
)
await pipeline_index_files_raganything(
rag_anything, new_files, track_id, scheme_name=scheme_name
)
logger.info(
f"Scanning process completed with raganything: {total_files} files Processed."
)
else:
# No new files to index, check if there are any documents in the queue
logger.info(
@ -1554,15 +1739,245 @@ async def background_delete_documents(
def create_document_routes(
rag: LightRAG, doc_manager: DocumentManager, api_key: Optional[str] = None
rag: LightRAG,
rag_anything: RAGAnything,
doc_manager: DocumentManager,
api_key: Optional[str] = None,
):
# Create combined auth dependency for document routes
combined_auth = get_combined_auth_dependency(api_key)
@router.get(
"/schemes",
response_model=SchemesResponse,
dependencies=[Depends(combined_auth)],
)
async def get_all_schemes():
"""Get all available processing schemes.
Retrieves the complete list of processing schemes from the schemes.json file.
Each scheme defines a processing framework (lightrag/raganything) and
optional extractor configuration (mineru/docling).
Returns:
SchemesResponse: Response containing:
- status (str): Operation status ("success")
- message (str): Success message
- data (List[Dict]): List of all available schemes with their configurations
Raises:
HTTPException: If file reading fails or JSON parsing errors occur (500)
"""
SCHEMES_FILE = Path("./examples/schemes.json")
if SCHEMES_FILE.exists():
with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
try:
current_data = json.load(f)
except json.JSONDecodeError:
current_data = []
else:
current_data = []
SCHEMES_FILE.parent.mkdir(parents=True, exist_ok=True)
with open(SCHEMES_FILE, "w") as f:
json.dump(current_data, f)
return SchemesResponse(
status="success",
message="Schemes retrieved successfully",
data=current_data,
)
@router.post(
"/schemes",
response_model=SchemesResponse,
dependencies=[Depends(combined_auth)],
)
async def save_schemes(schemes: list[Scheme_include_id]):
"""Save/update processing schemes in batch.
Updates existing schemes with new configuration data. This endpoint performs
a partial update by modifying existing schemes based on their IDs while
preserving other schemes in the file.
Args:
schemes (list[Scheme_include_id]): List of schemes to update, each containing:
- id (int): Unique identifier of the scheme to update
- name (str): Display name for the scheme
- config (SchemeConfig): Configuration object with framework and extractor settings
Returns:
SchemesResponse: Response containing:
- status (str): Operation status ("success")
- message (str): Success message with count of saved schemes
- data (List[Dict]): Updated list of all schemes after modification
Raises:
HTTPException: If file operations fail or JSON processing errors occur (500)
"""
try:
SCHEMES_FILE = Path("./examples/schemes.json")
if SCHEMES_FILE.exists():
with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
try:
current_data = json.load(f)
except json.JSONDecodeError:
current_data = []
else:
current_data = []
# Update each submitted scheme in place, matched by id
for updated in schemes:
for item in current_data:
if item["id"] == updated.id:
item["name"] = updated.name
item["config"]["framework"] = updated.config.framework
item["config"]["extractor"] = updated.config.extractor
break
# Write the updated list back to the file
with open(SCHEMES_FILE, "w", encoding="utf-8") as f:
json.dump(current_data, f, indent=4)
# Return the response (re-read from the file to ensure consistency)
with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
data = json.load(f)
return SchemesResponse(
status="success",
message=f"Successfully saved {len(schemes)} schemes",
data=data,
)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.post(
"/schemes/add",
response_model=Scheme_include_id,
dependencies=[Depends(combined_auth)],
)
async def add_scheme(scheme: Scheme):
"""Add a new processing scheme.
Creates a new processing scheme with auto-generated ID and saves it to the
schemes configuration file. The new scheme will be available for document
processing operations.
Args:
scheme (Scheme): New scheme to add, containing:
- name (str): Display name for the scheme
- config (SchemeConfig): Configuration with framework and extractor settings
Returns:
Scheme_include_id: The created scheme with auto-generated ID, containing:
- id (int): Auto-generated unique identifier
- name (str): Display name of the scheme
- config (SchemeConfig): Processing configuration
Raises:
HTTPException: If file operations fail or ID generation conflicts occur (500)
"""
try:
SCHEMES_FILE = Path("./examples/schemes.json")
if SCHEMES_FILE.exists():
with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
try:
current_data = json.load(f)
except json.JSONDecodeError:
current_data = []
else:
current_data = []
# Generate a new ID (simple approach; a real deployment should use a database auto-increment ID)
existing_ids = {scheme["id"] for scheme in current_data}
new_id = uuid.uuid4().int >> 96  # shift to get a smaller integer ID
while new_id in existing_ids:
new_id = uuid.uuid4().int >> 96
new_scheme = {
"id": new_id,
"name": scheme.name,
"config": {
"framework": scheme.config.framework,
"extractor": scheme.config.extractor,
},
}
current_data.append(new_scheme)
with open(SCHEMES_FILE, "w", encoding="utf-8") as f:
json.dump(current_data, f, ensure_ascii=False, indent=2)
return new_scheme
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.delete(
"/schemes/{scheme_id}",
response_model=Dict[str, str],
dependencies=[Depends(combined_auth)],
)
async def delete_scheme(scheme_id: int):
"""Delete a specific processing scheme by ID.
Removes a processing scheme from the configuration file. Once deleted,
the scheme will no longer be available for document processing operations.
Args:
scheme_id (int): Unique identifier of the scheme to delete
Returns:
Dict[str, str]: Success message containing:
- message (str): Confirmation message with the deleted scheme ID
Raises:
HTTPException:
- 404: If the scheme with the specified ID is not found
- 500: If file operations fail or other errors occur
"""
try:
SCHEMES_FILE = Path("./examples/schemes.json")
if SCHEMES_FILE.exists():
with open(SCHEMES_FILE, "r", encoding="utf-8") as f:
try:
current_data = json.load(f)
except json.JSONDecodeError:
current_data = []
else:
current_data = []
current_data_dict = {scheme["id"]: scheme for scheme in current_data}
if scheme_id not in current_data_dict:  # check whether the id exists
raise HTTPException(status_code=404, detail="Scheme not found")
for i, scheme in enumerate(current_data):
if scheme["id"] == scheme_id:
del current_data[i]  # remove the matching scheme from the list
break
with open(SCHEMES_FILE, "w", encoding="utf-8") as f:
json.dump(current_data, f, ensure_ascii=False, indent=2)
return {"message": f"Scheme {scheme_id} deleted successfully"}
except HTTPException:
# Re-raise HTTP errors (e.g. the 404 above) instead of masking them as 500
raise
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.post(
"/scan", response_model=ScanResponse, dependencies=[Depends(combined_auth)]
)
async def scan_for_new_documents(background_tasks: BackgroundTasks):
async def scan_for_new_documents(
request: ScanRequest, background_tasks: BackgroundTasks
):
"""
Trigger the scanning process for new documents.
@ -1573,11 +1988,19 @@ def create_document_routes(
Returns:
ScanResponse: A response object containing the scanning status and track_id
"""
scheme_name = request.framework
# Generate track_id with "scan" prefix for scanning operation
track_id = generate_track_id("scan")
# Start the scanning process in the background with track_id
background_tasks.add_task(run_scanning_process, rag, doc_manager, track_id)
background_tasks.add_task(
run_scanning_process,
rag,
rag_anything,
doc_manager,
track_id,
scheme_name=scheme_name,
)
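# Illustrative client call (host, port, and the /documents prefix are assumptions):
#   curl -X POST http://localhost:9621/documents/scan \
#        -H "Content-Type: application/json" -d '{"framework": "raganything"}'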
return ScanResponse(
status="scanning_started",
message="Scanning process has been initiated in the background",
@ -1588,7 +2011,9 @@ def create_document_routes(
"/upload", response_model=InsertResponse, dependencies=[Depends(combined_auth)]
)
async def upload_to_input_dir(
background_tasks: BackgroundTasks, file: UploadFile = File(...)
background_tasks: BackgroundTasks,
file: UploadFile = File(...),
schemeId: str = Form(...),
):
"""
Upload a file to the input directory and index it.
@ -1600,6 +2025,8 @@ def create_document_routes(
Args:
background_tasks: FastAPI BackgroundTasks for async processing
file (UploadFile): The file to be uploaded. It must have an allowed extension.
schemeId (str): ID of the processing scheme to use for this file. The scheme
determines whether to use LightRAG or RAGAnything framework for processing.
Returns:
InsertResponse: A response object containing the upload status and a message.
@ -1632,8 +2059,59 @@ def create_document_routes(
track_id = generate_track_id("upload")
# Add to background tasks and get track_id
background_tasks.add_task(pipeline_index_file, rag, file_path, track_id)
def load_config():
try:
SCHEMES_FILE = Path("./examples/schemes.json")
with open(SCHEMES_FILE, "r") as f:
schemes = json.load(f)
for scheme in schemes:
if str(scheme.get("id")) == schemeId:
return scheme.get("config", {})
return {}
except Exception as e:
logger.error(
f"Failed to load config for scheme {schemeId}: {str(e)}"
)
return {}
config = load_config()
current_framework = config.get("framework")
doc_pre_id = f"doc-pre-{safe_filename}"
if current_framework == "lightrag":
# Add to background tasks and get track_id
background_tasks.add_task(
pipeline_index_file,
rag,
file_path,
track_id,
scheme_name=current_framework,
)
else:
background_tasks.add_task(
rag_anything.process_document_complete_with_multimodal_content,
file_path=str(file_path),
output_dir="./output",
parse_method="auto",
source="modelscope",
scheme_name=current_framework,
)
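# Record a provisional status entry (READY) so the uploaded document appears in the
# status listing before background processing updates it.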
await rag.doc_status.upsert(
{
doc_pre_id: {
"status": DocStatus.READY,
"content": "",
"content_summary": "",
"multimodal_content": [],
"scheme_name": current_framework,
"content_length": 0,
"created_at": "",
"updated_at": "",
"file_path": safe_filename,
}
}
)
return InsertResponse(
status="success",
@ -1854,6 +2332,42 @@ def create_document_routes(
f"Successfully dropped all {storage_success_count} storage components"
)
# Clean all parse_cache entries after successful storage drops
if storage_success_count > 0:
try:
if "history_messages" in pipeline_status:
pipeline_status["history_messages"].append(
"Cleaning parse_cache entries"
)
parse_cache_result = await rag.aclean_all_parse_cache()
if parse_cache_result.get("error"):
cache_error_msg = f"Warning: Failed to clean parse_cache: {parse_cache_result['error']}"
logger.warning(cache_error_msg)
if "history_messages" in pipeline_status:
pipeline_status["history_messages"].append(cache_error_msg)
else:
deleted_count = parse_cache_result.get("deleted_count", 0)
if deleted_count > 0:
cache_success_msg = f"Successfully cleaned {deleted_count} parse_cache entries"
logger.info(cache_success_msg)
if "history_messages" in pipeline_status:
pipeline_status["history_messages"].append(
cache_success_msg
)
else:
cache_empty_msg = "No parse_cache entries to clean"
logger.info(cache_empty_msg)
if "history_messages" in pipeline_status:
pipeline_status["history_messages"].append(
cache_empty_msg
)
except Exception as cache_error:
cache_error_msg = f"Warning: Exception while cleaning parse_cache: {str(cache_error)}"
logger.warning(cache_error_msg)
if "history_messages" in pipeline_status:
pipeline_status["history_messages"].append(cache_error_msg)
# If all storage operations failed, return error status and don't proceed with file deletion
if storage_success_count == 0 and storage_error_count > 0:
error_message = "All storage drop operations failed. Aborting document clearing process."
@ -2004,7 +2518,7 @@ def create_document_routes(
Get the status of all documents in the system.
This endpoint retrieves the current status of all documents, grouped by their
processing status (PENDING, PROCESSING, PROCESSED, FAILED).
processing status (READY, HANDLING, PENDING, PROCESSING, PROCESSED, FAILED).
Returns:
DocsStatusesResponse: A response object containing a dictionary where keys are
@ -2016,6 +2530,8 @@ def create_document_routes(
"""
try:
statuses = (
DocStatus.READY,
DocStatus.HANDLING,
DocStatus.PENDING,
DocStatus.PROCESSING,
DocStatus.PROCESSED,
@ -2045,6 +2561,7 @@ def create_document_routes(
error_msg=doc_status.error_msg,
metadata=doc_status.metadata,
file_path=doc_status.file_path,
scheme_name=doc_status.scheme_name,
)
)
return response
@ -2381,6 +2898,8 @@ def create_document_routes(
error_msg=doc.error_msg,
metadata=doc.metadata,
file_path=doc.file_path,
scheme_name=doc.scheme_name,
multimodal_content=doc.multimodal_content,
)
)

View file

@ -1 +1 @@
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-BrMEyGA7.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-CAxUo7Zk.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-B85sKvyK.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-D1lQkuud.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};

File diff suppressed because one or more lines are too long


@ -1 +1 @@
import{_ as l}from"./mermaid-vendor-CAxUo7Zk.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};
import{_ as l}from"./mermaid-vendor-D1lQkuud.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};


@ -1 +1 @@
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-CAxUo7Zk.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-D1lQkuud.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};


@ -1 +1 @@
import{_ as s}from"./mermaid-vendor-CAxUo7Zk.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};
import{_ as s}from"./mermaid-vendor-D1lQkuud.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};


@ -1 +1 @@
import{_ as a,d as o}from"./mermaid-vendor-CAxUo7Zk.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};
import{_ as a,d as o}from"./mermaid-vendor-D1lQkuud.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};


@ -1,4 +1,4 @@
import{_ as e}from"./mermaid-vendor-CAxUo7Zk.js";var l=e(()=>`
import{_ as e}from"./mermaid-vendor-D1lQkuud.js";var l=e(()=>`
/* Font Awesome icon styling - consolidated */
.label-icon {
display: inline-block;


@ -1 +1 @@
import{_ as a,e as w,l as x}from"./mermaid-vendor-CAxUo7Zk.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};
import{_ as a,e as w,l as x}from"./mermaid-vendor-D1lQkuud.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};


@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-B1y3co0w.js";import{_ as i}from"./mermaid-vendor-D1lQkuud.js";import"./chunk-E2GYISFI-D0usJ_Ag.js";import"./chunk-BFAMUDN2-CnPPFHVQ.js";import"./chunk-SKB7J2MH-B9Dxe08c.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-9GI7VVtw.js";import{_ as i}from"./mermaid-vendor-CAxUo7Zk.js";import"./chunk-E2GYISFI-DReIXiCg.js";import"./chunk-BFAMUDN2-DQGv_fhY.js";import"./chunk-SKB7J2MH-D-vU5iV6.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-B1y3co0w.js";import{_ as i}from"./mermaid-vendor-D1lQkuud.js";import"./chunk-E2GYISFI-D0usJ_Ag.js";import"./chunk-BFAMUDN2-CnPPFHVQ.js";import"./chunk-SKB7J2MH-B9Dxe08c.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-9GI7VVtw.js";import{_ as i}from"./mermaid-vendor-CAxUo7Zk.js";import"./chunk-E2GYISFI-DReIXiCg.js";import"./chunk-BFAMUDN2-DQGv_fhY.js";import"./chunk-SKB7J2MH-D-vU5iV6.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -1 +0,0 @@
import{b as r}from"./_baseUniq-BrMEyGA7.js";var e=4;function a(o){return r(o,e)}export{a as c};


@ -0,0 +1 @@
import{b as r}from"./_baseUniq-B85sKvyK.js";var e=4;function a(o){return r(o,e)}export{a as c};


@ -1,4 +1,4 @@
import{p as y}from"./chunk-353BL4L5-DAdaHWGH.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-CAxUo7Zk.js";import{p as N}from"./treemap-75Q7IDZK-BAguqzAo.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-BrMEyGA7.js";import"./_basePickBy-BEWgwF1U.js";import"./clone-D60V_qjf.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte 
end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
import{p as y}from"./chunk-353BL4L5-F2PUrx5p.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-D1lQkuud.js";import{p as N}from"./treemap-75Q7IDZK-sLoCiDs8.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-B85sKvyK.js";import"./_basePickBy-BHlXv27S.js";import"./clone-DIEb_6FD.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte 
end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
.packetByte {
font-size: ${e.byteFontSize};
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1,4 +1,4 @@
import{g as q1}from"./chunk-E2GYISFI-DReIXiCg.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-CAxUo7Zk.js";import{g as ct}from"./chunk-BFAMUDN2-DQGv_fhY.js";import{s as ht}from"./chunk-SKB7J2MH-D-vU5iV6.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
import{g as q1}from"./chunk-E2GYISFI-D0usJ_Ag.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-D1lQkuud.js";import{g as ct}from"./chunk-BFAMUDN2-CnPPFHVQ.js";import{s as ht}from"./chunk-SKB7J2MH-B9Dxe08c.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
`)?p=A+`
`:p=`{
`+A+`

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1,2 +1,2 @@
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-CAxUo7Zk.js";import{p as m}from"./treemap-75Q7IDZK-BAguqzAo.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-BrMEyGA7.js";import"./_basePickBy-BEWgwF1U.js";import"./clone-D60V_qjf.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-D1lQkuud.js";import{p as m}from"./treemap-75Q7IDZK-sLoCiDs8.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-B85sKvyK.js";import"./_basePickBy-BHlXv27S.js";import"./clone-DIEb_6FD.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
`+r);const t=i(a);n(t,100,400,!0),t.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size",32).style("text-anchor","middle").text(`v${s}`)},"draw"),f={draw:l},L={parser:g,db:c,renderer:f};export{L as diagram};


@ -1,4 +1,4 @@
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-CPez3pRp.js";import{g as kt}from"./chunk-E2GYISFI-DReIXiCg.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-CAxUo7Zk.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-DEv48gUD.js";import{g as kt}from"./chunk-E2GYISFI-D0usJ_Ag.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-D1lQkuud.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
`+_.showPosition()+`
Expecting `+z.join(", ")+", got '"+(this.terminals_[b]||b)+"'":X="Parse error on line "+(C+1)+": Unexpected "+(b==Q?"end of input":"'"+(this.terminals_[b]||b)+"'"),this.parseError(X,{text:_.match,token:this.terminals_[b]||b,line:_.yylineno,loc:Y,expected:z})}if(T[0]instanceof Array&&T.length>1)throw new Error("Parse Error: multiple actions possible at state: "+A+", token: "+b);switch(T[0]){case 1:l.push(b),p.push(_.yytext),o.push(_.yylloc),l.push(T[1]),b=null,K=_.yyleng,k=_.yytext,C=_.yylineno,Y=_.yylloc;break;case 2:if(M=this.productions_[T[1]][1],F.$=p[p.length-M],F._$={first_line:o[o.length-(M||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(M||1)].first_column,last_column:o[o.length-1].last_column},ft&&(F._$.range=[o[o.length-(M||1)].range[0],o[o.length-1].range[1]]),q=this.performAction.apply(F,[k,K,C,I.yy,T[1],p,o].concat(yt)),typeof q<"u")return q;M&&(l=l.slice(0,-1*M*2),p=p.slice(0,-1*M),o=o.slice(0,-1*M)),l.push(this.productions_[T[1]][0]),p.push(F.$),o.push(F._$),tt=v[l[l.length-2]][l[l.length-1]],l.push(tt);break;case 3:return!0}}return!0},"parse")},m=function(){var h={EOF:1,parseError:r(function(a,l){if(this.yy.parser)this.yy.parser.parseError(a,l);else throw new Error(a)},"parseError"),setInput:r(function(n,a){return this.yy=a||this.yy||{},this._input=n,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:r(function(){var n=this._input[0];this.yytext+=n,this.yyleng++,this.offset++,this.match+=n,this.matched+=n;var a=n.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),n},"input"),unput:r(function(n){var a=n.length,l=n.split(/(?:\r\n?|\n)/g);this._input=n+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var d=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),l.length-1&&(this.yylineno-=l.length-1);var p=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:l?(l.length===d.length?this.yylloc.first_column:0)+d[d.length-l.length].length-l[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[p[0],p[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:r(function(){return this._more=!0,this},"more"),reject:r(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:r(function(n){this.unput(this.match.slice(n))},"less"),pastInput:r(function(){var n=this.matched.substr(0,this.matched.length-this.match.length);return(n.length>20?"...":"")+n.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:r(function(){var n=this.match;return n.length<20&&(n+=this._input.substr(0,20-n.length)),(n.substr(0,20)+(n.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:r(function(){var n=this.pastInput(),a=new Array(n.length+1).join("-");return n+this.upcomingInput()+`


@ -1,4 +1,4 @@
import{g as fe}from"./chunk-E2GYISFI-DReIXiCg.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-CAxUo7Zk.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 
31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
import{g as fe}from"./chunk-E2GYISFI-D0usJ_Ag.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-D1lQkuud.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 
31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
`+b.showPosition()+`
Expecting `+X.join(", ")+", got '"+(this.terminals_[k]||k)+"'":Z="Parse error on line "+(z+1)+": Unexpected "+(k==re?"end of input":"'"+(this.terminals_[k]||k)+"'"),this.parseError(Z,{text:b.match,token:this.terminals_[k]||k,line:b.yylineno,loc:q,expected:X})}if(v[0]instanceof Array&&v.length>1)throw new Error("Parse Error: multiple actions possible at state: "+R+", token: "+k);switch(v[0]){case 1:a.push(k),u.push(b.yytext),e.push(b.yylloc),a.push(v[1]),k=null,ie=b.yyleng,l=b.yytext,z=b.yylineno,q=b.yylloc;break;case 2:if(I=this.productions_[v[1]][1],F.$=u[u.length-I],F._$={first_line:e[e.length-(I||1)].first_line,last_line:e[e.length-1].last_line,first_column:e[e.length-(I||1)].first_column,last_column:e[e.length-1].last_column},de&&(F._$.range=[e[e.length-(I||1)].range[0],e[e.length-1].range[1]]),Q=this.performAction.apply(F,[l,ie,z,T.yy,v[1],u,e].concat(ge)),typeof Q<"u")return Q;I&&(a=a.slice(0,-1*I*2),u=u.slice(0,-1*I),e=e.slice(0,-1*I)),a.push(this.productions_[v[1]][0]),u.push(F.$),e.push(F._$),oe=B[a[a.length-2]][a[a.length-1]],a.push(oe);break;case 3:return!0}}return!0},"parse")},m=function(){var _={EOF:1,parseError:c(function(n,a){if(this.yy.parser)this.yy.parser.parseError(n,a);else throw new Error(n)},"parseError"),setInput:c(function(s,n){return this.yy=n||this.yy||{},this._input=s,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:c(function(){var s=this._input[0];this.yytext+=s,this.yyleng++,this.offset++,this.match+=s,this.matched+=s;var n=s.match(/(?:\r\n?|\n).*/g);return n?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),s},"input"),unput:c(function(s){var n=s.length,a=s.split(/(?:\r\n?|\n)/g);this._input=s+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-n),this.offset-=n;var o=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),a.length-1&&(this.yylineno-=a.length-1);var u=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:a?(a.length===o.length?this.yylloc.first_column:0)+o[o.length-a.length].length-a[0].length:this.yylloc.first_column-n},this.options.ranges&&(this.yylloc.range=[u[0],u[0]+this.yyleng-n]),this.yyleng=this.yytext.length,this},"unput"),more:c(function(){return this._more=!0,this},"more"),reject:c(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:c(function(s){this.unput(this.match.slice(s))},"less"),pastInput:c(function(){var s=this.matched.substr(0,this.matched.length-this.match.length);return(s.length>20?"...":"")+s.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:c(function(){var s=this.match;return s.length<20&&(s+=this._input.substr(0,20-s.length)),(s.substr(0,20)+(s.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:c(function(){var s=this.pastInput(),n=new Array(s.length+1).join("-");return s+this.upcomingInput()+`

File diff suppressed because one or more lines are too long


@ -1,4 +1,4 @@
import{p as N}from"./chunk-353BL4L5-DAdaHWGH.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-CAxUo7Zk.js";import{p as rt}from"./treemap-75Q7IDZK-BAguqzAo.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-BrMEyGA7.js";import"./_basePickBy-BEWgwF1U.js";import"./clone-D60V_qjf.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
import{p as N}from"./chunk-353BL4L5-F2PUrx5p.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-D1lQkuud.js";import{p as rt}from"./treemap-75Q7IDZK-sLoCiDs8.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-B85sKvyK.js";import"./_basePickBy-BHlXv27S.js";import"./clone-DIEb_6FD.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
.pieCircle{
stroke: ${t.pieStrokeColor};
stroke-width : ${t.pieStrokeWidth};


@ -1 +1 @@
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-Bpuvralp.js";import{_ as s}from"./mermaid-vendor-CAxUo7Zk.js";import"./chunk-BFAMUDN2-DQGv_fhY.js";import"./chunk-SKB7J2MH-D-vU5iV6.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-3k-Fc-vC.js";import{_ as s}from"./mermaid-vendor-D1lQkuud.js";import"./chunk-BFAMUDN2-CnPPFHVQ.js";import"./chunk-SKB7J2MH-B9Dxe08c.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};


@ -1,4 +1,4 @@
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-CAxUo7Zk.js";import"./feature-graph-C6IuADHZ.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-D1lQkuud.js";import"./feature-graph-O__kUpH1.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
`+_.showPosition()+`
Expecting `+G.join(", ")+", got '"+(this.terminals_[w]||w)+"'":Q="Parse error on line "+(M+1)+": Unexpected "+(w==J?"end of input":"'"+(this.terminals_[w]||w)+"'"),this.parseError(Q,{text:_.match,token:this.terminals_[w]||w,line:_.yylineno,loc:v,expected:G})}if(I[0]instanceof Array&&I.length>1)throw new Error("Parse Error: multiple actions possible at state: "+H+", token: "+w);switch(I[0]){case 1:h.push(w),y.push(_.yytext),o.push(_.yylloc),h.push(I[1]),w=null,C=_.yyleng,k=_.yytext,M=_.yylineno,v=_.yylloc;break;case 2:if(P=this.productions_[I[1]][1],F.$=y[y.length-P],F._$={first_line:o[o.length-(P||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(P||1)].first_column,last_column:o[o.length-1].last_column},$&&(F._$.range=[o[o.length-(P||1)].range[0],o[o.length-1].range[1]]),K=this.performAction.apply(F,[k,C,M,N.yy,I[1],y,o].concat(O)),typeof K<"u")return K;P&&(h=h.slice(0,-1*P*2),y=y.slice(0,-1*P),o=o.slice(0,-1*P)),h.push(this.productions_[I[1]][0]),y.push(F.$),o.push(F._$),et=S[h[h.length-2]][h[h.length-1]],h.push(et);break;case 3:return!0}}return!0},"parse")},x=function(){var f={EOF:1,parseError:s(function(a,h){if(this.yy.parser)this.yy.parser.parseError(a,h);else throw new Error(a)},"parseError"),setInput:s(function(r,a){return this.yy=a||this.yy||{},this._input=r,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:s(function(){var r=this._input[0];this.yytext+=r,this.yyleng++,this.offset++,this.match+=r,this.matched+=r;var a=r.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),r},"input"),unput:s(function(r){var a=r.length,h=r.split(/(?:\r\n?|\n)/g);this._input=r+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var u=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),h.length-1&&(this.yylineno-=h.length-1);var y=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:h?(h.length===u.length?this.yylloc.first_column:0)+u[u.length-h.length].length-h[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[y[0],y[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:s(function(){return this._more=!0,this},"more"),reject:s(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:s(function(r){this.unput(this.match.slice(r))},"less"),pastInput:s(function(){var r=this.matched.substr(0,this.matched.length-this.match.length);return(r.length>20?"...":"")+r.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:s(function(){var r=this.match;return r.length<20&&(r+=this._input.substr(0,20-r.length)),(r.substr(0,20)+(r.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:s(function(){var r=this.pastInput(),a=new Array(r.length+1).join("-");return r+this.upcomingInput()+`


@ -1,27 +1,27 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="Cache-Control" content="no-cache, no-store, must-revalidate" />
<meta http-equiv="Pragma" content="no-cache" />
<meta http-equiv="Expires" content="0" />
<link rel="icon" type="image/png" href="favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Lightrag</title>
<script type="module" crossorigin src="/webui/assets/index-Ctn6Ym96.js"></script>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="Cache-Control" content="no-cache, no-store, must-revalidate" />
<meta http-equiv="Pragma" content="no-cache" />
<meta http-equiv="Expires" content="0" />
<link rel="icon" type="image/png" href="favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Lightrag</title>
<script type="module" crossorigin src="/webui/assets/index-CG6xnoZg.js"></script>
<link rel="modulepreload" crossorigin href="/webui/assets/react-vendor-DEwriMA6.js">
<link rel="modulepreload" crossorigin href="/webui/assets/ui-vendor-CeCm8EER.js">
<link rel="modulepreload" crossorigin href="/webui/assets/graph-vendor-B-X5JegA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/utils-vendor-BysuhMZA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-C6IuADHZ.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-BSJWpkhB.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-CAxUo7Zk.js">
<link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-DmIvJdn7.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-kyTSZozC.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-O__kUpH1.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-YVkYGMC7.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-D1lQkuud.js">
<link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-04BBRGLL.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-BPsB1OIe.js">
<link rel="stylesheet" crossorigin href="/webui/assets/feature-graph-BipNuM18.css">
<link rel="stylesheet" crossorigin href="/webui/assets/index-CafJWW1u.css">
</head>
<body>
<div id="root"></div>
</body>
</html>
<link rel="stylesheet" crossorigin href="/webui/assets/index-B4ejae5O.css">
</head>
<body>
<div id="root"></div>
</body>
</html>

File diff suppressed because one or more lines are too long



@ -675,6 +675,8 @@ class BaseGraphStorage(StorageNameSpace, ABC):
class DocStatus(str, Enum):
"""Document processing status"""
READY = "ready"
HANDLING = "handling"
PENDING = "pending"
PROCESSING = "processing"
PROCESSED = "processed"
@ -707,6 +709,12 @@ class DocProcessingStatus:
"""Error message if failed"""
metadata: dict[str, Any] = field(default_factory=dict)
"""Additional metadata"""
multimodal_content: list[dict[str, Any]] | None = None
"""RAGAnything: multimodal content attached to this document"""
multimodal_processed: bool | None = None
"""RAGAnything: whether the multimodal content has been processed"""
scheme_name: str | None = None
"""Processing scheme: lightrag or raganything"""
@dataclass


@ -9,6 +9,7 @@ import warnings
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
from functools import partial
from pathlib import Path
from typing import (
Any,
AsyncIterator,
@ -92,6 +93,7 @@ from .utils import (
)
from .types import KnowledgeGraph
from dotenv import load_dotenv
from .ragmanager import RAGManager
# use the .env that is inside the current folder
# allows to use different .env file for each lightrag instance
@ -129,6 +131,9 @@ class LightRAG:
doc_status_storage: str = field(default="JsonDocStatusStorage")
"""Storage type for tracking document processing statuses."""
input_dir: str = field(default_factory=lambda: os.getenv("INPUT_DIR", "./input"))
"""Directory containing input documents"""
# Workspace
# ---
@ -821,11 +826,15 @@ class LightRAG:
def insert(
self,
input: str | list[str],
multimodal_content: list[dict[str, Any]]
| list[list[dict[str, Any]]]
| None = None,
split_by_character: str | None = None,
split_by_character_only: bool = False,
ids: str | list[str] | None = None,
file_paths: str | list[str] | None = None,
track_id: str | None = None,
scheme_name: str | None = None,
) -> str:
"""Sync Insert documents with checkpoint support
@ -847,26 +856,34 @@ class LightRAG:
self.ainsert(
input,
multimodal_content,
split_by_character,
split_by_character_only,
ids,
file_paths,
track_id,
scheme_name,
)
)
async def ainsert(
self,
input: str | list[str],
multimodal_content: list[dict[str, Any]]
| list[list[dict[str, Any]]]
| None = None,
split_by_character: str | None = None,
split_by_character_only: bool = False,
ids: str | list[str] | None = None,
file_paths: str | list[str] | None = None,
track_id: str | None = None,
scheme_name: str | None = None,
) -> str:
"""Async Insert documents with checkpoint support
Args:
input: Single document string or list of document strings
multimodal_content (list[dict[str, Any]] | list[list[dict[str, Any]]] | None, optional):
Multimodal content (images, tables, equations) associated with documents
split_by_character: if split_by_character is not None, split the string by character, if chunk longer than
chunk_token_size, it will be split again by token size.
split_by_character_only: if split_by_character_only is True, split the string by character only, when
@ -874,6 +891,7 @@ class LightRAG:
ids: list of unique document IDs, if not provided, MD5 hash IDs will be generated
file_paths: list of file paths corresponding to each document, used for citation
track_id: tracking ID for monitoring processing status, if not provided, will be generated
scheme_name (str | None, optional): Scheme name for categorizing documents
Returns:
str: tracking ID for monitoring processing status
@ -882,7 +900,29 @@ class LightRAG:
if track_id is None:
track_id = generate_track_id("insert")
await self.apipeline_enqueue_documents(input, ids, file_paths, track_id)
paths_to_check = [file_paths] if isinstance(file_paths, str) else file_paths
base_input_dir = Path(self.input_dir)
if self.workspace:
current_input_dir = base_input_dir / self.workspace
else:
current_input_dir = base_input_dir
await self.apipeline_enqueue_documents(
input,
multimodal_content,
ids,
file_paths,
track_id,
scheme_name=scheme_name,
)
for file_path in paths_to_check or []:
current_file_path = current_input_dir / file_path
if current_file_path.exists():
self.move_file_to_enqueue(current_file_path)
await self.apipeline_process_enqueue_documents(
split_by_character, split_by_character_only
)
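A minimal sketch of how the extended insert() call above might look, assuming `rag` is an already-initialized LightRAG instance; the keyword names come from this diff, while the item layout inside multimodal_content is an assumption about the RAGAnything content-list format rather than something this change defines:

track_id = rag.insert(
    input="Full text extracted from the quarterly report ...",
    multimodal_content=[
        # assumed item shape; adjust to the actual RAGAnything schema
        {"type": "image", "img_path": "figures/revenue.png"},
        {"type": "table", "table_body": "| region | revenue |\n|---|---|\n| EU | 1.2M |"},
    ],
    file_paths="quarterly_report.pdf",
    scheme_name="raganything",
)
print(f"monitor processing with track_id: {track_id}")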
@ -964,9 +1004,11 @@ class LightRAG:
async def apipeline_enqueue_documents(
self,
input: str | list[str],
multimodal_content: list[dict[str, Any]] | None = None,
ids: list[str] | None = None,
file_paths: str | list[str] | None = None,
track_id: str | None = None,
scheme_name: str | None = None,
) -> str:
"""
Pipeline for Processing Documents
@ -978,9 +1020,12 @@ class LightRAG:
Args:
input: Single document string or list of document strings
multimodal_content (list[dict[str, Any]] | list[list[dict[str, Any]]] | None, optional):
Multimodal content (images, tables, equations) associated with documents
ids: list of unique document IDs, if not provided, MD5 hash IDs will be generated
file_paths: list of file paths corresponding to each document, used for citation
track_id: tracking ID for monitoring processing status, if not provided, will be generated with "enqueue" prefix
scheme_name (str | None, optional): Scheme name for categorizing documents
Returns:
str: tracking ID for monitoring processing status
@ -1051,13 +1096,15 @@ class LightRAG:
id_: {
"status": DocStatus.PENDING,
"content_summary": get_content_summary(content_data["content"]),
"multimodal_content": multimodal_content,
"content_length": len(content_data["content"]),
"created_at": datetime.now(timezone.utc).isoformat(),
"updated_at": datetime.now(timezone.utc).isoformat(),
"file_path": content_data[
"file_path"
], # Store file path in document status
"track_id": track_id, # Store track_id in document status
"track_id": track_id,
"scheme_name": scheme_name, # Store track_id in document status
}
for id_, content_data in contents.items()
}
@ -1088,6 +1135,12 @@ class LightRAG:
if doc_id in new_docs
}
new_docs_idList = [
f"doc-pre-{new_docs[doc_id]['file_path']}"
for doc_id in unique_new_doc_ids
if doc_id in new_docs
]
if not new_docs:
logger.warning("No new unique documents were found.")
return
@ -1105,6 +1158,10 @@ class LightRAG:
# Store document status (without content)
await self.doc_status.upsert(new_docs)
logger.debug(f"Stored {len(new_docs)} new unique documents")
await self.doc_status.index_done_callback()
await self.doc_status.delete(new_docs_idList)
logger.info(f"Deleted {new_docs_idList} Successful")
return track_id
@ -1280,6 +1337,7 @@ class LightRAG:
docs_to_reset[doc_id] = {
"status": DocStatus.PENDING,
"content_summary": status_doc.content_summary,
"multimodal_content": status_doc.multimodal_content,
"content_length": status_doc.content_length,
"created_at": status_doc.created_at,
"updated_at": datetime.now(timezone.utc).isoformat(),
@ -1288,11 +1346,15 @@ class LightRAG:
# Clear any error messages and processing metadata
"error_msg": "",
"metadata": {},
"scheme_name": status_doc.scheme_name,
}
# Update the status in to_process_docs as well
status_doc.status = DocStatus.PENDING
reset_count += 1
logger.info(
f"Document {status_doc.file_path} from PROCESSING/FAILED to PENDING status"
)
# Update doc_status storage if there are documents to reset
if docs_to_reset:
@ -1506,6 +1568,7 @@ class LightRAG:
chunks.keys()
), # Save chunks list
"content_summary": status_doc.content_summary,
"multimodal_content": status_doc.multimodal_content,
"content_length": status_doc.content_length,
"created_at": status_doc.created_at,
"updated_at": datetime.now(
@ -1516,6 +1579,7 @@ class LightRAG:
"metadata": {
"processing_start_time": processing_start_time
},
"scheme_name": status_doc.scheme_name,
}
}
)
@ -1581,6 +1645,7 @@ class LightRAG:
"status": DocStatus.FAILED,
"error_msg": str(e),
"content_summary": status_doc.content_summary,
"multimodal_content": status_doc.multimodal_content,
"content_length": status_doc.content_length,
"created_at": status_doc.created_at,
"updated_at": datetime.now(
@ -1592,6 +1657,7 @@ class LightRAG:
"processing_start_time": processing_start_time,
"processing_end_time": processing_end_time,
},
"scheme_name": status_doc.scheme_name,
}
}
)
@ -1624,10 +1690,11 @@ class LightRAG:
await self.doc_status.upsert(
{
doc_id: {
"status": DocStatus.PROCESSED,
"status": DocStatus.PROCESSING,
"chunks_count": len(chunks),
"chunks_list": list(chunks.keys()),
"content_summary": status_doc.content_summary,
"multimodal_content": status_doc.multimodal_content,
"content_length": status_doc.content_length,
"created_at": status_doc.created_at,
"updated_at": datetime.now(
@ -1639,6 +1706,32 @@ class LightRAG:
"processing_start_time": processing_start_time,
"processing_end_time": processing_end_time,
},
"scheme_name": status_doc.scheme_name,
}
}
)
if (
status_doc.multimodal_content
and len(status_doc.multimodal_content) > 0
):
raganything_instance = RAGManager.get_rag()
await raganything_instance._process_multimodal_content(
status_doc.multimodal_content,
status_doc.file_path,
doc_id,
pipeline_status=pipeline_status,
pipeline_status_lock=pipeline_status_lock,
)
current_doc_status = await self.doc_status.get_by_id(
doc_id
)
await self.doc_status.upsert(
{
doc_id: {
**current_doc_status,
"status": DocStatus.PROCESSED,
}
}
)
@ -1682,6 +1775,7 @@ class LightRAG:
"status": DocStatus.FAILED,
"error_msg": str(e),
"content_summary": status_doc.content_summary,
"multimodal_content": status_doc.multimodal_content,
"content_length": status_doc.content_length,
"created_at": status_doc.created_at,
"updated_at": datetime.now().isoformat(),
@ -1691,6 +1785,7 @@ class LightRAG:
"processing_start_time": processing_start_time,
"processing_end_time": processing_end_time,
},
"scheme_name": status_doc.scheme_name,
}
}
)
@ -2170,6 +2265,156 @@ class LightRAG:
# Return the dictionary containing statuses only for the found document IDs
return found_statuses
async def aclean_parse_cache_by_doc_ids(
self, doc_ids: str | list[str]
) -> dict[str, Any]:
"""异步清理指定文档ID的parse_cache条目
Args:
doc_ids: 单个文档ID字符串或文档ID列表
Returns:
包含清理结果的字典:
- deleted_entries: 已删除的缓存条目列表
- not_found: 未找到的文档ID列表
- error: 错误信息如果操作失败
"""
import json
from pathlib import Path
# Normalize the input to a list
if isinstance(doc_ids, str):
doc_ids = [doc_ids]
result = {"deleted_entries": [], "not_found": [], "error": None}
try:
# Build the parse_cache file path from the instance's storage location settings
if self.workspace:
# If a workspace is set, use the workspace subdirectory
cache_file_path = (
Path(self.working_dir)
/ self.workspace
/ "kv_store_parse_cache.json"
)
else:
# Otherwise default to working_dir
cache_file_path = Path(self.working_dir) / "kv_store_parse_cache.json"
# Check whether the parse_cache file exists
if not cache_file_path.exists():
logger.warning(f"Parse cache文件未找到: {cache_file_path}")
result["not_found"] = doc_ids.copy()
return result
# Read the current parse_cache data
with open(cache_file_path, "r", encoding="utf-8") as f:
cache_data = json.load(f)
# Find the entries to delete and record which doc_ids were found
entries_to_delete = []
doc_ids_set = set(doc_ids)
found_doc_ids = set()
for cache_key, cache_entry in cache_data.items():
if (
isinstance(cache_entry, dict)
and cache_entry.get("doc_id") in doc_ids_set
):
entries_to_delete.append(cache_key)
result["deleted_entries"].append(cache_key)
found_doc_ids.add(cache_entry.get("doc_id"))
# Delete the entries that were found
for cache_key in entries_to_delete:
del cache_data[cache_key]
# Determine which doc_ids were not found
result["not_found"] = list(doc_ids_set - found_doc_ids)
# Write the updated cache data back to disk
with open(cache_file_path, "w", encoding="utf-8") as f:
json.dump(cache_data, f, indent=2, ensure_ascii=False)
logger.info(
f"已删除 {len(entries_to_delete)} 个parse_cache条目文档ID: {doc_ids}"
)
except Exception as e:
error_msg = f"清理parse_cache时出错: {str(e)}"
logger.error(error_msg)
result["error"] = error_msg
return result
def clean_parse_cache_by_doc_ids(self, doc_ids: str | list[str]) -> dict[str, Any]:
"""同步清理指定文档ID的parse_cache条目
Args:
doc_ids: 单个文档ID字符串或文档ID列表
Returns:
包含清理结果的字典
"""
loop = always_get_an_event_loop()
return loop.run_until_complete(self.aclean_parse_cache_by_doc_ids(doc_ids))
async def aclean_all_parse_cache(self) -> dict[str, Any]:
"""异步清理所有parse_cache条目
Returns:
包含清理结果的字典:
- deleted_count: 删除的条目数量
- error: 错误信息如果操作失败
"""
import json
from pathlib import Path
result = {"deleted_count": 0, "error": None}
try:
# Build the parse_cache file path
if self.workspace:
cache_file_path = (
Path(self.working_dir)
/ self.workspace
/ "kv_store_parse_cache.json"
)
else:
cache_file_path = Path(self.working_dir) / "kv_store_parse_cache.json"
if not cache_file_path.exists():
logger.warning(f"Parse cache文件未找到: {cache_file_path}")
return result
# Read the current cache to count the entries
with open(cache_file_path, "r", encoding="utf-8") as f:
cache_data = json.load(f)
result["deleted_count"] = len(cache_data)
# Clear all entries
with open(cache_file_path, "w", encoding="utf-8") as f:
json.dump({}, f, indent=2)
logger.info(f"已清空所有 {result['deleted_count']} 个parse_cache条目")
except Exception as e:
error_msg = f"清空parse_cache时出错: {str(e)}"
logger.error(error_msg)
result["error"] = error_msg
return result
def clean_all_parse_cache(self) -> dict[str, Any]:
"""同步清理所有parse_cache条目
Returns:
包含清理结果的字典
"""
loop = always_get_an_event_loop()
return loop.run_until_complete(self.aclean_all_parse_cache())
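A hedged usage sketch for the two parse-cache cleanup entry points above, assuming `rag` is an already-initialized LightRAG instance:

# Remove cached parse results for specific documents
result = rag.clean_parse_cache_by_doc_ids(["doc-123", "doc-456"])
print(result["deleted_entries"], result["not_found"], result["error"])

# Or wipe the whole parse cache
summary = rag.clean_all_parse_cache()
print(f"removed {summary['deleted_count']} cached parse entries")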
async def adelete_by_doc_id(self, doc_id: str) -> DeletionResult:
"""Delete a document and all its related data, including chunks, graph elements, and cached entries.


@ -1402,7 +1402,29 @@ async def merge_nodes_and_edges(
if full_entities_storage and full_relations_storage and doc_id:
try:
# Merge all entities: original entities + entities added during edge processing
final_entity_names = set()
existing_entites_data = None
existing_relations_data = None
try:
existing_entites_data = await full_entities_storage.get_by_id(doc_id)
existing_relations_data = await full_relations_storage.get_by_id(doc_id)
except Exception as e:
logger.debug(
f"Could not retrieve existing entity/relation data for {doc_id}: {e}"
)
existing_entites_names = set()
if existing_entites_data and existing_entites_data.get("entity_names"):
existing_entites_names.update(existing_entites_data["entity_names"])
existing_relation_pairs = set()
if existing_relations_data and existing_relations_data.get(
"relation_pairs"
):
for pair in existing_relations_data["relation_pairs"]:
existing_relation_pairs.add(tuple(sorted(pair)))
final_entity_names = existing_entites_names.copy()
# Add original processed entities
for entity_data in processed_entities:
@ -1415,7 +1437,7 @@ async def merge_nodes_and_edges(
final_entity_names.add(added_entity["entity_name"])
# Collect all relation pairs
final_relation_pairs = set()
final_relation_pairs = existing_relation_pairs.copy()
for edge_data in processed_edges:
if edge_data:
src_id = edge_data.get("src_id")
@ -1425,6 +1447,12 @@ async def merge_nodes_and_edges(
final_relation_pairs.add(relation_pair)
log_message = f"Phase 3: Updating final {len(final_entity_names)}({len(processed_entities)}+{len(all_added_entities)}) entities and {len(final_relation_pairs)} relations from {doc_id}"
new_entities_count = len(final_entity_names) - len(existing_entites_names)
new_relation_count = len(final_relation_pairs) - len(
existing_relation_pairs
)
log_message = f"Phase 3: Merging storage - existing: {len(existing_entites_names)} entitites, {len(existing_relation_pairs)} relations; new: {new_entities_count} entities. {new_relation_count} relations; total: {len(final_entity_names)} entities, {len(final_relation_pairs)} relations"
logger.info(log_message)
async with pipeline_status_lock:
pipeline_status["latest_message"] = log_message
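The merge bookkeeping above reduces to set unions over entity names and sorted relation pairs; a standalone sketch of the counting logic, independent of the storage classes:

existing_entities = {"Alice", "Bob"}
newly_processed = {"Bob", "Carol"}
final_entities = existing_entities | newly_processed              # {"Alice", "Bob", "Carol"}

existing_pairs = {tuple(sorted(("Alice", "Bob")))}
new_pairs = {tuple(sorted(("Carol", "Bob")))}
final_pairs = existing_pairs | new_pairs

new_entity_count = len(final_entities) - len(existing_entities)   # 1
new_relation_count = len(final_pairs) - len(existing_pairs)       # 1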

lightrag/ragmanager.py (new file, 18 lines)

@ -0,0 +1,18 @@
class RAGManager:
"""Process-wide holder for the shared RAGAnything instance used by the document pipeline."""
_instance = None
_rag = None
def __new__(cls):
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
@classmethod
def set_rag(cls, rag_instance):
cls._rag = rag_instance
@classmethod
def get_rag(cls):
if cls._rag is None:
raise ValueError("RAG instance not initialized!")
return cls._rag
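A minimal usage sketch of the singleton holder above; `rag_anything` stands in for a constructed RAGAnything object and is an assumption of this example:

from lightrag.ragmanager import RAGManager

RAGManager.set_rag(rag_anything)   # register once after construction
instance = RAGManager.get_rag()    # fetch later, e.g. inside the processing pipeline
# get_rag() raises ValueError if set_rag() was never called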


@ -15,6 +15,8 @@ import GraphViewer from '@/features/GraphViewer'
import DocumentManager from '@/features/DocumentManager'
import RetrievalTesting from '@/features/RetrievalTesting'
import ApiSite from '@/features/ApiSite'
import { SchemeProvider } from '@/contexts/SchemeContext';
import { Tabs, TabsContent } from '@/components/ui/Tabs'
@ -204,9 +206,11 @@ function App() {
>
<SiteHeader />
<div className="relative grow">
<TabsContent value="documents" className="absolute top-0 right-0 bottom-0 left-0 overflow-auto">
<DocumentManager />
</TabsContent>
<SchemeProvider>
<TabsContent value="documents" className="absolute top-0 right-0 bottom-0 left-0 overflow-auto">
<DocumentManager />
</TabsContent>
</SchemeProvider>
<TabsContent value="knowledge-graph" className="absolute top-0 right-0 bottom-0 left-0 overflow-hidden">
<GraphViewer />
</TabsContent>


@ -158,7 +158,7 @@ export type DeleteDocResponse = {
doc_id: string
}
export type DocStatus = 'pending' | 'processing' | 'processed' | 'failed'
export type DocStatus = 'pending' | 'processing' | 'processed' | 'ready' | 'handling' | 'failed'
export type DocStatusResponse = {
id: string
@ -172,6 +172,7 @@ export type DocStatusResponse = {
error_msg?: string
metadata?: Record<string, any>
file_path: string
scheme_name: string
}
export type DocsStatusesResponse = {
@ -249,6 +250,23 @@ export type LoginResponse = {
webui_description?: string
}
export type Scheme = {
id: number;
name: string;
config: {
framework: 'lightrag' | 'raganything';
extractor?: 'mineru' | 'docling' | undefined; // Optional extractor field
};
};
type AddSchemeParams = Omit<Scheme, 'id'>;
export type SchemesResponse = {
status: string;
message: string;
data: Scheme[];
};
export const InvalidApiKeyError = 'Invalid API Key'
export const RequireApiKeError = 'API Key required'
@ -302,6 +320,32 @@ axiosInstance.interceptors.response.use(
)
// API methods
export const getSchemes = async (): Promise<SchemesResponse> => {
const response = await axiosInstance.get('/documents/schemes');
return response.data;
};
export const saveSchemes = async (schemes: Scheme[]): Promise<{ message: string }> => {
const response = await axiosInstance.post('/documents/schemes', schemes);
return response.data;
};
export const addScheme = async (scheme: AddSchemeParams): Promise<Scheme> => {
try {
const response = await axiosInstance.post('/documents/schemes/add', scheme);
// Optionally validate that the response matches the Scheme type (depends on the axios configuration)
return response.data;
} catch (error) {
console.error('Failed to add scheme:', error);
throw error; // Re-throw so the caller can handle it
}
};
export const deleteScheme = async (schemeId: number): Promise<{ message: string }> => {
const response = await axiosInstance.delete(`/documents/schemes/${schemeId}`);
return response.data;
};
export const queryGraphs = async (
label: string,
maxDepth: number,
@ -335,8 +379,10 @@ export const getDocuments = async (): Promise<DocsStatusesResponse> => {
return response.data
}
export const scanNewDocuments = async (): Promise<ScanResponse> => {
const response = await axiosInstance.post('/documents/scan')
export const scanNewDocuments = async (framework?: string): Promise<ScanResponse> => {
const response = await axiosInstance.post('/documents/scan', {
framework
})
return response.data
}
@ -547,11 +593,12 @@ export const insertTexts = async (texts: string[]): Promise<DocActionResponse> =
export const uploadDocument = async (
file: File,
schemeId: number | '',
onUploadProgress?: (percentCompleted: number) => void
): Promise<DocActionResponse> => {
const formData = new FormData()
formData.append('file', file)
formData.append('schemeId', schemeId.toString())
const response = await axiosInstance.post('/documents/upload', formData, {
headers: {
'Content-Type': 'multipart/form-data'
@ -570,11 +617,12 @@ export const uploadDocument = async (
export const batchUploadDocuments = async (
files: File[],
schemeId: number | '',
onUploadProgress?: (fileName: string, percentCompleted: number) => void
): Promise<DocActionResponse[]> => {
return await Promise.all(
files.map(async (file) => {
return await uploadDocument(file, (percentCompleted) => {
return await uploadDocument(file, schemeId, (percentCompleted) => {
onUploadProgress?.(file.name, percentCompleted)
})
})
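The scheme endpoints wired up above can also be exercised outside the WebUI; a hedged Python sketch using requests, with the paths taken from this diff and the payload/response shapes assumed from the TypeScript types (the base URL and any auth headers are placeholders):

import requests

BASE = "http://localhost:9621"  # placeholder LightRAG API address

schemes = requests.get(f"{BASE}/documents/schemes").json()  # {status, message, data}
created = requests.post(
    f"{BASE}/documents/schemes/add",
    json={"name": "pdf-mineru", "config": {"framework": "raganything", "extractor": "mineru"}},
).json()
requests.post(f"{BASE}/documents/scan", json={"framework": "raganything"})
requests.delete(f"{BASE}/documents/schemes/{created['id']}")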


@ -0,0 +1,165 @@
.scheme-manager-container {
font-family: Arial, sans-serif;
max-width: 1000px;
margin: 0 auto;
}
.toggle-button {
padding: 8px 16px;
background-color: #4CAF50;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
font-size: 14px;
margin-bottom: 20px;
}
.toggle-button:hover {
background-color: #45a049;
}
.scheme-modal {
border: 1px solid #ddd;
border-radius: 8px;
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
overflow: hidden;
}
.modal-header {
display: flex;
justify-content: space-between;
align-items: center;
padding: 16px;
background-color: #f5f5f5;
border-bottom: 1px solid #ddd;
}
.modal-header h2 {
margin: 0;
font-size: 18px;
}
.close-button {
background: none;
border: none;
font-size: 20px;
cursor: pointer;
color: #999;
}
.close-button:hover {
color: #333;
}
.modal-content {
display: flex;
height: 500px;
}
.left-panel {
flex: 0 0 300px;
padding: 16px;
border-right: 1px solid #ddd;
overflow-y: auto;
}
.right-panel {
flex: 1;
padding: 16px;
overflow-y: auto;
}
.add-scheme-form {
display: flex;
margin-bottom: 16px;
gap: 8px;
}
.scheme-name-input {
flex: 1;
padding: 8px;
border: 1px solid #ddd;
border-radius: 4px;
}
.add-button {
padding: 8px 12px;
background-color: #2196F3;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
}
.add-button:hover {
background-color: #0b7dda;
}
.scheme-list ul {
list-style: none;
padding: 0;
margin: 0;
}
.scheme-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 12px;
margin-bottom: 8px;
background-color: #f9f9f9;
border-radius: 4px;
cursor: pointer;
transition: background-color 0.2s;
}
.scheme-item:hover {
background-color: #f0f0f0;
}
.scheme-item.active {
background-color: #e3f2fd;
border-left: 3px solid #2196F3;
}
.delete-button {
background: none;
border: none;
color: #f44336;
cursor: pointer;
font-size: 16px;
padding: 0 4px;
}
.delete-button:hover {
color: #d32f2f;
}
.empty-message, .select-message {
color: #666;
text-align: center;
padding: 20px;
}
.config-form {
margin-top: 16px;
}
.form-group {
margin-bottom: 16px;
}
.form-group label {
display: block;
margin-bottom: 8px;
font-weight: bold;
}
.form-group input,
.form-group select {
width: 100%;
padding: 8px;
border: 1px solid #ddd;
border-radius: 4px;
}


@ -0,0 +1,282 @@
import React, { useRef, useState, useEffect } from "react";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogTrigger
} from '@/components/ui/Dialog';
import Button from "@/components/ui/Button";
import { PlusIcon } from "lucide-react";
import { Alert, AlertDescription } from "@/components/ui/Alert";
import { AlertCircle } from "lucide-react";
import {
getSchemes,
saveSchemes,
addScheme,
deleteScheme,
Scheme
} from '@/api/lightrag';
import { useScheme } from '@/contexts/SchemeContext';
import { useTranslation } from 'react-i18next';
interface SchemeConfig {
framework: 'lightrag' | 'raganything';
extractor?: 'mineru' | 'docling';
}
const SchemeManagerDialog = () => {
const { t } = useTranslation();
const [open, setOpen] = useState(false);
const [schemes, setSchemes] = useState<Scheme[]>([]);
const [newSchemeName, setNewSchemeName] = useState("");
const [error, setError] = useState<string | undefined>();
const [isLoading, setIsLoading] = useState(true);
const scrollRef = useRef<HTMLDivElement>(null);
const { selectedScheme, setSelectedScheme } = useScheme();
// Load scheme data
useEffect(() => {
const loadSchemes = async () => {
try {
setIsLoading(true);
const response = await getSchemes();
setSchemes(response.data);
const storedId = localStorage.getItem('selectedSchemeId');
if (storedId) {
setSelectedScheme(response.data.find(s => s.id === Number(storedId)));
}
} catch (err) {
setError(err instanceof Error ? err.message : t('schemeManager.errors.loadFailed'));
} finally {
setIsLoading(false);
}
};
loadSchemes();
}, []);
// Keep the selection in sync and auto-scroll to the bottom when the scheme list changes
useEffect(() => {
if (selectedScheme) {
handleSelectScheme(selectedScheme.id);
}
if (!scrollRef.current) return;
const scrollToBottom = () => {
const container = scrollRef.current!;
const { scrollHeight } = container;
container.scrollTop = scrollHeight;
};
setTimeout(scrollToBottom, 0);
}, [schemes]);
// Check whether the scheme name already exists
const isNameTaken = (name: string): boolean => {
return schemes.some(scheme => scheme.name.trim() === name.trim());
};
// Select a scheme (updates the Context)
const handleSelectScheme = (schemeId: number) => {
const scheme = schemes.find((s) => s.id === schemeId);
if (scheme) {
setSelectedScheme(scheme);
localStorage.setItem('selectedSchemeId', String(scheme.id));
}
};
// Add a new scheme
const handleAddScheme = async () => {
const trimmedName = newSchemeName.trim();
if (!trimmedName) {
setError(t('schemeManager.errors.nameEmpty'));
return;
}
if (isNameTaken(trimmedName)) {
setError(t('schemeManager.errors.nameExists'));
return;
}
try {
const newScheme = await addScheme({
name: trimmedName,
config: { framework: 'lightrag', extractor: undefined },
});
// Update the scheme list
setSchemes((prevSchemes) => [...prevSchemes, newScheme]);
// Select the new scheme
setSelectedScheme(newScheme);
// Clear the input and any error
setNewSchemeName("");
setError(undefined);
} catch (err) {
setError(err instanceof Error ? err.message : t('schemeManager.errors.addFailed'));
}
};
// Delete a scheme
const handleDeleteScheme = async (schemeId: number) => {
try {
await deleteScheme(schemeId);
setSchemes(schemes.filter(s => s.id !== schemeId));
if (selectedScheme?.id === schemeId) {
setSelectedScheme(undefined); // Clear the selected state in the Context
}
} catch (err) {
setError(err instanceof Error ? err.message : t('schemeManager.errors.deleteFailed'));
}
};
// Update the scheme configuration
const handleConfigChange = async (updates: Partial<SchemeConfig>) => {
if (!selectedScheme) return;
const updatedScheme = {
...selectedScheme,
config: {
...selectedScheme.config,
...updates,
framework: updates.framework ?? selectedScheme.config?.framework ?? 'lightrag',
extractor: updates.extractor || selectedScheme.config?.extractor || (updates.framework === 'raganything' ? 'mineru' : undefined),
},
};
setSchemes(schemes.map(s => s.id === selectedScheme.id ? updatedScheme : s));
setSelectedScheme(updatedScheme); // keep the Context in sync so the form reflects the change
await saveSchemes([updatedScheme]);
};
if (isLoading) {
return (
<div className="flex justify-center items-center h-screen">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-500" />
</div>
);
}
return (
<Dialog open={open} onOpenChange={setOpen}>
<DialogTrigger asChild>
<Button variant="default" side="bottom" size="sm">
<PlusIcon className="size-4" />
{t('schemeManager.button')}
</Button>
</DialogTrigger>
<DialogContent className="sm:max-w-[800px]">
<DialogHeader>
<DialogTitle>{t('schemeManager.title')}</DialogTitle>
<DialogDescription>{t('schemeManager.description')}</DialogDescription>
</DialogHeader>
<div className="flex h-[500px] gap-4">
{/* Left: scheme list */}
<div className="w-1/3 rounded-lg border p-4 bg-gray-50 flex flex-col">
<h3 className="mb-4 font-semibold">{t('schemeManager.schemeList')}</h3>
{/* New scheme name input */}
<div className="flex gap-2 mb-4">
<input
type="text"
value={newSchemeName}
onChange={(e) => {
if (e.target.value.length > 50) return;
setNewSchemeName(e.target.value);
setError(undefined);
}}
onKeyDown={(e) => e.key === 'Enter' && handleAddScheme()}
placeholder={t('schemeManager.inputPlaceholder')}
className="w-full px-3 py-1.5 border rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
/>
<Button onClick={handleAddScheme} size="sm">
<PlusIcon className="size-4" />
</Button>
</div>
{/* Error message */}
{error && (
<Alert variant="destructive" className="mb-4">
<AlertCircle className="size-4" />
<AlertDescription>{error}</AlertDescription>
</Alert>
)}
{/* Scheme list */}
<div ref={scrollRef} className="flex-1 overflow-y-auto border rounded-md p-1 bg-white">
{schemes.length === 0 ? (
<p className="text-gray-500 text-center py-4">{t('schemeManager.emptySchemes')}</p>
) : (
<div className="space-y-2">
{schemes.map((scheme) => (
<div
key={scheme.id}
className={`flex items-center justify-between p-2 rounded-md cursor-pointer transition-colors truncate ${
selectedScheme?.id === scheme.id
? "bg-blue-100 text-blue-700"
: "hover:bg-gray-100"
}`}
onClick={() => handleSelectScheme(scheme.id)}
>
<div className="flex-1 truncate mr-2" title={scheme.name}>
{scheme.name}
</div>
<button
onClick={(e) => {
e.stopPropagation();
handleDeleteScheme(scheme.id);
}}
className="ml-2 text-red-500 hover:text-red-700 hover:bg-red-100 rounded-full p-1 transition-colors"
title={t('schemeManager.deleteTooltip')}
>
</button>
</div>
))}
</div>
)}
</div>
</div>
{/* Right: scheme configuration */}
<div className="flex-1 rounded-lg border p-4 bg-gray-50">
<h3 className="mb-4 font-semibold">{t('schemeManager.schemeConfig')}</h3>
{selectedScheme ? (
<div className="space-y-4">
<div>
<label className="block text-sm mb-1">{t('schemeManager.processingFramework')}</label>
<select
value={selectedScheme.config?.framework || "lightrag"}
onChange={(e) => handleConfigChange({ framework: e.target.value as 'lightrag' | 'raganything' })}
className="w-full px-3 py-1.5 border rounded-md focus:outline-none"
>
<option value="lightrag">LightRAG</option>
<option value="raganything">RAGAnything</option>
</select>
</div>
{selectedScheme.config?.framework === "raganything" && (
<div>
<label className="block text-sm mb-1">{t('schemeManager.extractionTool')}</label>
<select
value={selectedScheme.config?.extractor || "mineru"}
onChange={(e) => handleConfigChange({ extractor: e.target.value as 'mineru' | 'docling' })}
className="w-full px-3 py-1.5 border rounded-md focus:outline-none"
>
<option value="mineru">Mineru</option>
<option value="docling">DocLing</option>
</select>
</div>
)}
</div>
) : (
<div className="flex flex-col items-center justify-center h-[70%] text-gray-500">
<AlertCircle className="size-12 mb-4 opacity-50" />
<p>{t('schemeManager.selectSchemePrompt')}</p>
</div>
)}
</div>
</div>
</DialogContent>
</Dialog>
);
};
export default SchemeManagerDialog;
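For reference, a hedged reconstruction of the `Scheme` type and the client helpers this dialog imports from '@/api/lightrag', inferred purely from how they are used above; the real definitions live in that module and may differ:
// Sketch only: shapes inferred from usage in SchemeManagerDialog, not copied from the API client.
export interface SchemeConfig {
  framework: 'lightrag' | 'raganything'
  extractor?: 'mineru' | 'docling'
}

export interface Scheme {
  id: number
  name: string
  config?: SchemeConfig
}

// Inferred client signatures (return shapes match how the dialog consumes them):
export declare function getSchemes(): Promise<{ data: Scheme[] }>
export declare function addScheme(scheme: { name: string; config?: SchemeConfig }): Promise<Scheme>
export declare function deleteScheme(schemeId: number): Promise<void>
export declare function saveSchemes(schemes: Scheme[]): Promise<void>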

View file

@ -16,6 +16,7 @@ import { uploadDocument } from '@/api/lightrag'
import { UploadIcon } from 'lucide-react'
import { useTranslation } from 'react-i18next'
import { useScheme } from '@/contexts/SchemeContext';
interface UploadDocumentsDialogProps {
onDocumentsUploaded?: () => Promise<void>
@ -27,6 +28,7 @@ export default function UploadDocumentsDialog({ onDocumentsUploaded }: UploadDoc
const [isUploading, setIsUploading] = useState(false)
const [progresses, setProgresses] = useState<Record<string, number>>({})
const [fileErrors, setFileErrors] = useState<Record<string, string>>({})
const { selectedScheme } = useScheme();
const handleRejectedFiles = useCallback(
(rejectedFiles: FileRejection[]) => {
@ -58,6 +60,11 @@ export default function UploadDocumentsDialog({ onDocumentsUploaded }: UploadDoc
const handleDocumentsUpload = useCallback(
async (filesToUpload: File[]) => {
if (!selectedScheme) {
toast.error(t('schemeManager.upload.noSchemeSelected'));
return;
}
setIsUploading(true)
let hasSuccessfulUpload = false
@ -95,7 +102,7 @@ export default function UploadDocumentsDialog({ onDocumentsUploaded }: UploadDoc
[file.name]: 0
}))
const result = await uploadDocument(file, (percentCompleted: number) => {
const result = await uploadDocument(file, selectedScheme?.id, (percentCompleted: number) => {
console.debug(t('documentPanel.uploadDocuments.single.uploading', { name: file.name, percent: percentCompleted }))
setProgresses((pre) => ({
...pre,
@ -175,7 +182,7 @@ export default function UploadDocumentsDialog({ onDocumentsUploaded }: UploadDoc
setIsUploading(false)
}
},
[setIsUploading, setProgresses, setFileErrors, t, onDocumentsUploaded]
[setIsUploading, setProgresses, setFileErrors, t, onDocumentsUploaded, selectedScheme]
)
return (
@ -201,7 +208,11 @@ export default function UploadDocumentsDialog({ onDocumentsUploaded }: UploadDoc
<DialogHeader>
<DialogTitle>{t('documentPanel.uploadDocuments.title')}</DialogTitle>
<DialogDescription>
{t('documentPanel.uploadDocuments.description')}
{selectedScheme ? (
<>{t('schemeManager.upload.currentScheme')}<strong>{selectedScheme.name}</strong></>
) : (
t('schemeManager.upload.noSchemeMessage')
)}
</DialogDescription>
</DialogHeader>
<FileUploader

View file

@ -0,0 +1,28 @@
// contexts/SchemeContext.tsx
import React, { createContext, useContext, useState } from 'react';
import { Scheme } from '@/api/lightrag';
interface SchemeContextType {
selectedScheme: Scheme | undefined;
setSelectedScheme: (scheme: Scheme | undefined) => void;
}
const SchemeContext = createContext<SchemeContextType | undefined>(undefined);
export const SchemeProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
const [selectedScheme, setSelectedScheme] = useState<Scheme | undefined>();
return (
<SchemeContext.Provider value={{ selectedScheme, setSelectedScheme }}>
{children}
</SchemeContext.Provider>
);
};
export const useScheme = () => {
const context = useContext(SchemeContext);
if (context === undefined) {
throw new Error('useScheme must be used within a SchemeProvider');
}
return context;
};
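A minimal consumption sketch for this context; the `SchemeBadge` component below is hypothetical (the real wiring appears in DocumentManager later in this diff), and `useScheme()` throws unless the tree is wrapped in `SchemeProvider`:
import React from 'react';
import { SchemeProvider, useScheme } from '@/contexts/SchemeContext';

// Hypothetical consumer: shows the currently selected scheme name.
const SchemeBadge: React.FC = () => {
  const { selectedScheme } = useScheme();
  return <span>{selectedScheme ? selectedScheme.name : 'No scheme selected'}</span>;
};

// Provider must wrap every component that calls useScheme().
export const Example: React.FC = () => (
  <SchemeProvider>
    <SchemeBadge />
  </SchemeProvider>
);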

View file

@ -18,6 +18,8 @@ import UploadDocumentsDialog from '@/components/documents/UploadDocumentsDialog'
import ClearDocumentsDialog from '@/components/documents/ClearDocumentsDialog'
import DeleteDocumentsDialog from '@/components/documents/DeleteDocumentsDialog'
import PaginationControls from '@/components/ui/PaginationControls'
import { SchemeProvider } from '@/contexts/SchemeContext';
import SchemeManager from '@/components/documents/SchemeManager/SchemeManager'
import {
scanNewDocuments,
@ -35,6 +37,8 @@ import { useBackendState } from '@/stores/state'
import { RefreshCwIcon, ActivityIcon, ArrowUpIcon, ArrowDownIcon, RotateCcwIcon, CheckSquareIcon, XIcon } from 'lucide-react'
import PipelineStatusDialog from '@/components/documents/PipelineStatusDialog'
import { useScheme } from '@/contexts/SchemeContext';
type StatusFilter = DocStatus | 'all';
@ -148,6 +152,8 @@ type SortField = 'created_at' | 'updated_at' | 'id' | 'file_path';
type SortDirection = 'asc' | 'desc';
export default function DocumentManager() {
const { selectedScheme } = useScheme();
// Track component mount status
const isMountedRef = useRef(true);
@ -209,6 +215,8 @@ export default function DocumentManager() {
processing: 1,
pending: 1,
failed: 1,
ready: 1,
handling: 1
});
// State for document selection
@ -255,6 +263,8 @@ export default function DocumentManager() {
processing: 1,
pending: 1,
failed: 1,
ready: 1,
handling: 1
});
};
@ -390,7 +400,9 @@ export default function DocumentManager() {
const prevStatusCounts = useRef({
processed: 0,
processing: 0,
handling: 0,
pending: 0,
ready: 0,
failed: 0
})
@ -469,11 +481,39 @@ export default function DocumentManager() {
};
}, [docs]);
// Utility function to update component state
const updateComponentState = useCallback((response: any) => {
setPagination(response.pagination);
setCurrentPageDocs(response.documents);
setStatusCounts(response.status_counts);
// Update legacy docs state for backward compatibility
const legacyDocs: DocsStatusesResponse = {
statuses: {
processed: response.documents.filter((doc: DocStatusResponse) => doc.status === 'processed'),
processing: response.documents.filter((doc: DocStatusResponse) => doc.status === 'processing'),
pending: response.documents.filter((doc: DocStatusResponse) => doc.status === 'pending'),
failed: response.documents.filter((doc: DocStatusResponse) => doc.status === 'failed'),
ready: response.documents.filter((doc: DocStatusResponse) => doc.status === 'ready'),
handling: response.documents.filter((doc: DocStatusResponse) => doc.status === 'handling')
}
};
setDocs(response.pagination.total_count > 0 ? legacyDocs : null);
}, []);
// New paginated data fetching function
const fetchPaginatedDocuments = useCallback(async (
page: number,
pageSize: number,
statusFilter: StatusFilter
) => {
try {
if (!isMountedRef.current) return;
@ -566,7 +606,14 @@ export default function DocumentManager() {
// Check if component is still mounted before starting the request
if (!isMountedRef.current) return;
const { status, message, track_id: _track_id } = await scanNewDocuments(); // eslint-disable-line @typescript-eslint/no-unused-vars
if (!selectedScheme) {
toast.error(t('documentPanel.documentManager.errors.missingSchemeId'));
return; // Return early and skip the scan
}
const framework = selectedScheme.config?.framework;
const { status, message, track_id: _track_id } = await scanNewDocuments(framework); // eslint-disable-line @typescript-eslint/no-unused-vars
// Check again if component is still mounted after the request completes
if (!isMountedRef.current) return;
@ -595,7 +642,7 @@ export default function DocumentManager() {
toast.error(t('documentPanel.documentManager.errors.scanFailed', { error: errorMessage(err) }));
}
}
}, [t, startPollingInterval, currentTab, health, statusCounts])
}, [t, startPollingInterval, currentTab, health, statusCounts, selectedScheme])
// Handle page size change - update state and save to store
const handlePageSizeChange = useCallback((newPageSize: number) => {
@ -611,6 +658,8 @@ export default function DocumentManager() {
processing: 1,
pending: 1,
failed: 1,
ready: 1,
handling: 1
});
setPagination(prev => ({ ...prev, page: 1, page_size: newPageSize }));
@ -650,8 +699,9 @@ export default function DocumentManager() {
processed: response.documents.filter(doc => doc.status === 'processed'),
processing: response.documents.filter(doc => doc.status === 'processing'),
pending: response.documents.filter(doc => doc.status === 'pending'),
failed: response.documents.filter(doc => doc.status === 'failed')
}
failed: response.documents.filter(doc => doc.status === 'failed'),
ready: response.documents.filter(doc => doc.status === 'ready'),
handling: response.documents.filter(doc => doc.status === 'handling')
}
};
if (response.pagination.total_count > 0) {
@ -716,7 +766,9 @@ export default function DocumentManager() {
const newStatusCounts = {
processed: docs?.statuses?.processed?.length || 0,
processing: docs?.statuses?.processing?.length || 0,
handling: docs?.statuses?.handling?.length || 0,
pending: docs?.statuses?.pending?.length || 0,
ready: docs?.statuses?.ready?.length || 0,
failed: docs?.statuses?.failed?.length || 0
}
@ -904,6 +956,7 @@ export default function DocumentManager() {
<ClearDocumentsDialog onDocumentsCleared={handleDocumentsCleared} />
) : null}
<UploadDocumentsDialog onDocumentsUploaded={fetchDocuments} />
<SchemeManager />
<PipelineStatusDialog
open={showPipelineStatus}
onOpenChange={setShowPipelineStatus}
@ -952,6 +1005,17 @@ export default function DocumentManager() {
>
{t('documentPanel.documentManager.status.processing')} ({statusCounts.PROCESSING || statusCounts.processing || 0})
</Button>
<Button
size="sm"
variant={statusFilter === 'handling' ? 'secondary' : 'outline'}
onClick={() => setStatusFilter('handling')}
className={cn(
documentCounts.handling > 0 ? 'text-purple-600' : 'text-gray-500',
statusFilter === 'handling' && 'bg-purple-100 dark:bg-purple-900/30 font-medium border border-purple-400 dark:border-purple-600 shadow-sm'
)}
>
{t('documentPanel.documentManager.status.handling')} ({statusCounts.HANDLING || statusCounts.handling || 0})
</Button>
<Button
size="sm"
variant={statusFilter === 'pending' ? 'secondary' : 'outline'}
@ -964,6 +1028,17 @@ export default function DocumentManager() {
>
{t('documentPanel.documentManager.status.pending')} ({statusCounts.PENDING || statusCounts.pending || 0})
</Button>
<Button
size="sm"
variant={statusFilter === 'ready' ? 'secondary' : 'outline'}
onClick={() => setStatusFilter('ready')}
className={cn(
documentCounts.ready > 0 ? 'text-gray-600' : 'text-gray-500',
statusFilter === 'ready' && 'bg-gray-100 dark:bg-gray-900/30 font-medium border border-gray-400 dark:border-gray-600 shadow-sm'
)}
>
{t('documentPanel.documentManager.status.ready')} ({statusCounts.READY || statusCounts.ready || 0})
</Button>
<Button
size="sm"
variant={statusFilter === 'failed' ? 'secondary' : 'outline'}
@ -1044,6 +1119,7 @@ export default function DocumentManager() {
</div>
</TableHead>
<TableHead>{t('documentPanel.documentManager.columns.summary')}</TableHead>
<TableHead>{t('documentPanel.documentManager.columns.handler')}</TableHead>
<TableHead>{t('documentPanel.documentManager.columns.status')}</TableHead>
<TableHead>{t('documentPanel.documentManager.columns.length')}</TableHead>
<TableHead>{t('documentPanel.documentManager.columns.chunks')}</TableHead>
@ -1115,6 +1191,9 @@ export default function DocumentManager() {
</div>
</div>
</TableCell>
<TableCell className="truncate max-w-[150px]">
{doc.scheme_name || '-'}
</TableCell>
<TableCell>
{doc.status === 'processed' && (
<span className="text-green-600">{t('documentPanel.documentManager.status.completed')}</span>
@ -1128,6 +1207,12 @@ export default function DocumentManager() {
{doc.status === 'failed' && (
<span className="text-red-600">{t('documentPanel.documentManager.status.failed')}</span>
)}
{doc.status === 'ready' && (
<span className="text-purple-600">{t('documentPanel.documentManager.status.ready')}</span>
)}
{doc.status === 'handling' && (
<span className="text-gray-600">{t('documentPanel.documentManager.status.handling')}</span>
)}
{doc.error_msg && (
<span className="ml-2 text-red-500" title={doc.error_msg}>
@ -1137,10 +1222,10 @@ export default function DocumentManager() {
<TableCell>{doc.content_length ?? '-'}</TableCell>
<TableCell>{doc.chunks_count ?? '-'}</TableCell>
<TableCell className="truncate">
{new Date(doc.created_at).toLocaleString()}
{doc.created_at ? new Date(doc.created_at).toLocaleString() : '-'}
</TableCell>
<TableCell className="truncate">
{new Date(doc.updated_at).toLocaleString()}
{doc.updated_at ? new Date(doc.updated_at).toLocaleString() : '-'}
</TableCell>
<TableCell className="text-center">
<Checkbox

View file

@ -126,6 +126,7 @@
"id": "المعرف",
"fileName": "اسم الملف",
"summary": "الملخص",
"handler": "المعالج",
"status": "الحالة",
"length": "الطول",
"chunks": "الأجزاء",
@ -138,7 +139,9 @@
"all": "الكل",
"completed": "مكتمل",
"processing": "قيد المعالجة",
"handling": "استخراج",
"pending": "معلق",
"ready": "جاهز",
"failed": "فشل"
},
"errors": {
@ -410,5 +413,30 @@
"prevPage": "الصفحة السابقة",
"nextPage": "الصفحة التالية",
"lastPage": "الصفحة الأخيرة"
},
"schemeManager": {
"button": "مخططات معالجة المستندات",
"title": "مدير المخططات",
"description": "إنشاء مخططات جديدة وتكوين الخيارات",
"schemeList": "قائمة المخططات",
"schemeConfig": "تكوين المخطط",
"inputPlaceholder": "أدخل اسم المخطط",
"deleteTooltip": "حذف المخطط",
"emptySchemes": "لا توجد مخططات متاحة",
"selectSchemePrompt": "يرجى تحديد أو إنشاء مخطط أولاً",
"processingFramework": "إطار المعالجة",
"extractionTool": "أداة الاستخراج",
"errors": {
"loadFailed": "فشل تحميل المخططات",
"nameEmpty": "لا يمكن أن يكون اسم المخطط فارغًا",
"nameExists": "اسم المخطط موجود بالفعل",
"addFailed": "فشل إضافة المخطط",
"deleteFailed": "فشل حذف المخطط"
},
"upload": {
"noSchemeSelected": "لم يتم تحديد مخطط معالجة، يرجى تحديد مخطط معالجة المستندات أولاً!",
"currentScheme": "المخطط الحالي: ",
"noSchemeMessage": "لم يتم تحديد مخطط معالجة، يرجى إضافة وتحديد واحد أولاً"
}
}
}

View file

@ -126,6 +126,7 @@
"id": "ID",
"fileName": "File Name",
"summary": "Summary",
"handler": "Handler",
"status": "Status",
"length": "Length",
"chunks": "Chunks",
@ -138,7 +139,9 @@
"all": "All",
"completed": "Completed",
"processing": "Processing",
"handling": "Handling",
"pending": "Pending",
"ready": "Ready",
"failed": "Failed"
},
"errors": {
@ -410,5 +413,30 @@
"prevPage": "Previous Page",
"nextPage": "Next Page",
"lastPage": "Last Page"
},
"schemeManager": {
"button": "Document Processing Schemes",
"title": "Scheme Manager",
"description": "Create new schemes and configure options",
"schemeList": "Scheme List",
"schemeConfig": "Scheme Configuration",
"inputPlaceholder": "Enter scheme name",
"deleteTooltip": "Delete scheme",
"emptySchemes": "No schemes available",
"selectSchemePrompt": "Please select or create a scheme first",
"processingFramework": "Processing Framework",
"extractionTool": "Extraction Tool",
"errors": {
"loadFailed": "Failed to load schemes",
"nameEmpty": "Scheme name cannot be empty",
"nameExists": "Scheme name already exists",
"addFailed": "Failed to add scheme",
"deleteFailed": "Failed to delete scheme"
},
"upload": {
"noSchemeSelected": "No processing scheme selected, please select a document processing scheme first!",
"currentScheme": "Current scheme: ",
"noSchemeMessage": "No processing scheme selected, please add and select one"
}
}
}

View file

@ -126,6 +126,7 @@
"id": "ID",
"fileName": "Nom du fichier",
"summary": "Résumé",
"handler": "Gestionnaire",
"status": "Statut",
"length": "Longueur",
"chunks": "Fragments",
@ -138,7 +139,9 @@
"all": "Tous",
"completed": "Terminé",
"processing": "En traitement",
"handling": "Extraction",
"pending": "En attente",
"ready": "Prêt",
"failed": "Échoué"
},
"errors": {
@ -410,5 +413,30 @@
"prevPage": "Page précédente",
"nextPage": "Page suivante",
"lastPage": "Dernière page"
},
"schemeManager": {
"button": "Schémas de traitement de documents",
"title": "Gestionnaire de schémas",
"description": "Créer de nouveaux schémas et configurer les options",
"schemeList": "Liste des schémas",
"schemeConfig": "Configuration du schéma",
"inputPlaceholder": "Entrer le nom du schéma",
"deleteTooltip": "Supprimer le schéma",
"emptySchemes": "Aucun schéma disponible",
"selectSchemePrompt": "Veuillez d'abord sélectionner ou créer un schéma",
"processingFramework": "Framework de traitement",
"extractionTool": "Outil d'extraction",
"errors": {
"loadFailed": "Échec du chargement des schémas",
"nameEmpty": "Le nom du schéma ne peut pas être vide",
"nameExists": "Le nom du schéma existe déjà",
"addFailed": "Échec de l'ajout du schéma",
"deleteFailed": "Échec de la suppression du schéma"
},
"upload": {
"noSchemeSelected": "Aucun schéma de traitement sélectionné, veuillez d'abord sélectionner un schéma de traitement de documents !",
"currentScheme": "Schéma actuel : ",
"noSchemeMessage": "Aucun schéma de traitement sélectionné, veuillez d'abord en ajouter et en sélectionner un"
}
}
}

View file

@ -126,6 +126,7 @@
"id": "ID",
"fileName": "文件名",
"summary": "摘要",
"handler": "处理方案",
"status": "状态",
"length": "长度",
"chunks": "分块",
@ -138,7 +139,9 @@
"all": "全部",
"completed": "已完成",
"processing": "处理中",
"handling": "提取中",
"pending": "等待中",
"ready": "准备中",
"failed": "失败"
},
"errors": {
@ -410,5 +413,30 @@
"prevPage": "上一页",
"nextPage": "下一页",
"lastPage": "末页"
},
"schemeManager": {
"button": "文档处理方案",
"title": "方案管理器",
"description": "创建新方案并配置选项",
"schemeList": "方案列表",
"schemeConfig": "方案配置",
"inputPlaceholder": "输入方案名称",
"deleteTooltip": "删除方案",
"emptySchemes": "暂无方案",
"selectSchemePrompt": "请先选择或创建一个方案",
"processingFramework": "处理框架",
"extractionTool": "提取工具",
"errors": {
"loadFailed": "加载方案失败",
"nameEmpty": "方案名称不能为空",
"nameExists": "方案名称已存在",
"addFailed": "添加方案失败",
"deleteFailed": "删除方案失败"
},
"upload": {
"noSchemeSelected": "未选择处理方案,请先选择文档处理方案!",
"currentScheme": "当前方案:",
"noSchemeMessage": "未选择处理方案,请先添加选择"
}
}
}

View file

@ -126,6 +126,7 @@
"id": "ID",
"fileName": "檔案名稱",
"summary": "摘要",
"handler": "處理方案",
"status": "狀態",
"length": "長度",
"chunks": "分塊",
@ -138,7 +139,9 @@
"all": "全部",
"completed": "已完成",
"processing": "處理中",
"handling": "提取中",
"pending": "等待中",
"ready": "準備中",
"failed": "失敗"
},
"errors": {
@ -410,5 +413,30 @@
"prevPage": "上一頁",
"nextPage": "下一頁",
"lastPage": "最後一頁"
},
"schemeManager": {
"button": "文件處理方案",
"title": "方案管理器",
"description": "建立新方案並設定選項",
"schemeList": "方案清單",
"schemeConfig": "方案設定",
"inputPlaceholder": "輸入方案名稱",
"deleteTooltip": "刪除方案",
"emptySchemes": "暫無方案",
"selectSchemePrompt": "請先選擇或建立一個方案",
"processingFramework": "處理框架",
"extractionTool": "提取工具",
"errors": {
"loadFailed": "載入方案失敗",
"nameEmpty": "方案名稱不能為空",
"nameExists": "方案名稱已存在",
"addFailed": "新增方案失敗",
"deleteFailed": "刪除方案失敗"
},
"upload": {
"noSchemeSelected": "未選擇處理方案,請先選擇文件處理方案!",
"currentScheme": "目前方案:",
"noSchemeMessage": "未選擇處理方案,請先新增選擇"
}
}
}