Merge branch 'main' into fix/dark-mode-graph-text-colors

Roman Marchuk 2025-10-01 17:41:28 -04:00 committed by GitHub
commit 1bd84f0005
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
149 changed files with 9564 additions and 4866 deletions


@ -35,6 +35,18 @@ jobs:
echo "Found tag: $TAG"
echo "tag=$TAG" >> $GITHUB_OUTPUT
- name: Check if pre-release
id: check_prerelease
run: |
TAG="${{ steps.get_tag.outputs.tag }}"
if [[ "$TAG" == *"rc"* ]] || [[ "$TAG" == *"dev"* ]]; then
echo "is_prerelease=true" >> $GITHUB_OUTPUT
echo "This is a pre-release version: $TAG"
else
echo "is_prerelease=false" >> $GITHUB_OUTPUT
echo "This is a stable release: $TAG"
fi
- name: Update version in __init__.py
run: |
sed -i "s/__version__ = \".*\"/__version__ = \"${{ steps.get_tag.outputs.tag }}\"/" lightrag/__init__.py
@ -48,7 +60,7 @@ jobs:
images: ghcr.io/${{ github.repository }}
tags: |
type=raw,value=${{ steps.get_tag.outputs.tag }}
type=raw,value=latest
type=raw,value=latest,enable=${{ steps.check_prerelease.outputs.is_prerelease == 'false' }}
- name: Build and push Docker image
uses: docker/build-push-action@v5
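The net effect of the two hunks above: a tag containing `rc` or `dev` is treated as a pre-release and is published only under its own tag, while stable tags also move `latest`. A minimal Python sketch of that rule (illustrative only, not part of the workflow):
```python
# Sketch of the pre-release rule used by the workflow above.
def docker_tags(tag: str) -> list[str]:
    is_prerelease = "rc" in tag or "dev" in tag
    return [tag] if is_prerelease else [tag, "latest"]

assert docker_tags("v1.4.9rc1") == ["v1.4.9rc1"]          # pre-release: no "latest"
assert docker_tags("v1.4.9.1") == ["v1.4.9.1", "latest"]  # stable: "latest" moves
```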


@ -335,15 +335,12 @@ class QueryParam:
ll_keywords: list[str] = field(default_factory=list)
"""List of low-level keywords to refine retrieval focus."""
# History messages are only sent to the LLM for context, not used for retrieval
conversation_history: list[dict[str, str]] = field(default_factory=list)
"""Stores past conversation history to maintain context.
Format: [{"role": "user/assistant", "content": "message"}].
"""
# Deprecated: history messages have a negative effect on query performance
history_turns: int = 0
"""Number of complete conversation turns (user-assistant pairs) to consider in the response context."""
ids: list[str] | None = None
"""List of ids to filter the results."""
@ -355,7 +352,8 @@ class QueryParam:
user_prompt: str | None = None
"""User-provided prompt for the query.
If provided, this will be used instead of the default value from the prompt template.
Additional instructions for the LLM. If provided, they will be injected into the prompt template.
Their purpose is to let the user customize the way the LLM generates the response.
"""
enable_rerank: bool = True
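A short usage sketch of the fields documented above (hypothetical values; `QueryParam` is re-exported from the package root, as the `__init__.py` diff below shows):
```python
from lightrag import QueryParam

# Hypothetical query: prior turns are passed only as LLM context,
# and user_prompt adds extra response-shaping instructions.
param = QueryParam(
    conversation_history=[
        {"role": "user", "content": "What does LightRAG index?"},
        {"role": "assistant", "content": "Entities, relations, and text chunks."},
    ],
    user_prompt="Answer in one short paragraph.",
)
```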


@ -336,15 +336,12 @@ class QueryParam:
max_total_tokens: int = int(os.getenv("MAX_TOTAL_TOKENS", "30000"))
"""Maximum total tokens budget for the entire query context (entities + relations + chunks + system prompt)."""
# History messages are only sent to the LLM for context, not used for retrieval
conversation_history: list[dict[str, str]] = field(default_factory=list)
"""Stores past conversation history to maintain context.
Format: [{"role": "user/assistant", "content": "message"}].
"""
# Deprecated: history messages have a negative effect on query performance
history_turns: int = 0
"""Number of complete conversation turns (user-assistant pairs) to consider in the response context."""
ids: list[str] | None = None
"""List of ids to filter the results."""
@ -356,7 +353,8 @@ class QueryParam:
user_prompt: str | None = None
"""User-provided prompt for the query.
If provided, this will be used instead of the default value from the prompt template.
Additional instructions for the LLM. If provided, they will be injected into the prompt template.
Their purpose is to let the user customize the way the LLM generates the response.
"""
enable_rerank: bool = True


@ -125,7 +125,7 @@ ENABLE_LLM_CACHE_FOR_EXTRACT=true
SUMMARY_LANGUAGE=English
### Entity types that the LLM will attempt to recognize
# ENTITY_TYPES='["Person", "Organization", "Location", "Event", "Concept", "Method", "Content", "Data", "Artifact", "NaturalObject"]'
# ENTITY_TYPES='["Person", "Creature", "Organization", "Location", "Event", "Concept", "Method", "Content", "Data", "Artifact", "NaturalObject"]'
### Chunk size for document splitting, 500~1500 is recommended
# CHUNK_SIZE=1200
@ -175,6 +175,8 @@ LLM_BINDING_API_KEY=your_api_key
# LLM_BINDING=openai
### OpenAI Compatible API Specific Parameters
### Increased temperature values may mitigate infinite inference loops in certain LLMs, such as Qwen3-30B.
# OPENAI_LLM_TEMPERATURE=0.9
### Set max_tokens to mitigate endless output from some LLMs (less than LLM_TIMEOUT * llm_output_tokens/second, i.e. 9000 = 180s * 50 tokens/s)
### Typically, max_tokens does not include prompt content, though some models, such as Gemini models, are exceptions
### For vLLM/SGLang-deployed models, or most OpenAI-compatible API providers
@ -183,17 +185,19 @@ LLM_BINDING_API_KEY=your_api_key
OPENAI_LLM_MAX_COMPLETION_TOKENS=9000
#### OpenAI's new API utilizes max_completion_tokens instead of max_tokens
# OPENAI_LLM_MAX_TOKENS=9000
# OPENAI_LLM_MAX_COMPLETION_TOKENS=9000
### use the following command to see all supported options for OpenAI, azure_openai or OpenRouter
### lightrag-server --llm-binding openai --help
### OpenAI Specific Parameters
# OPENAI_LLM_REASONING_EFFORT=minimal
### OpenRouter Specific Parameters
# OPENAI_LLM_EXTRA_BODY='{"reasoning": {"enabled": false}}'
### Qwen3 Specific Parameters (deployed by vLLM)
# OPENAI_LLM_EXTRA_BODY='{"chat_template_kwargs": {"enable_thinking": false}}'
### use the following command to see all supported options for OpenAI, azure_openai or OpenRouter
### lightrag-server --llm-binding openai --help
### use the following command to see all supported options for Ollama LLM
### lightrag-server --llm-binding ollama --help
### Ollama Server Specific Parameters
### OLLAMA_LLM_NUM_CTX must be provided, and should be larger than MAX_TOTAL_TOKENS + 2000
OLLAMA_LLM_NUM_CTX=32768
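A quick sanity check of the constraint above, assuming the default MAX_TOTAL_TOKENS of 30000 shown earlier in this diff (illustrative, not part of the .env file):
```python
MAX_TOTAL_TOKENS = 30000       # default from the QueryParam diff above
OLLAMA_LLM_NUM_CTX = 32768     # value set here
assert OLLAMA_LLM_NUM_CTX >= MAX_TOTAL_TOKENS + 2000  # 32768 >= 32000
```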
@ -201,8 +205,6 @@ OLLAMA_LLM_NUM_CTX=32768
# OLLAMA_LLM_NUM_PREDICT=9000
### Stop sequences for Ollama LLM
# OLLAMA_LLM_STOP='["</s>", "<|EOT|>"]'
### use the following command to see all supported options for Ollama LLM
### lightrag-server --llm-binding ollama --help
### Bedrock Specific Parameters
# BEDROCK_LLM_TEMPERATURE=1.0
@ -312,7 +314,7 @@ POSTGRES_IVFFLAT_LISTS=100
NEO4J_URI=neo4j+s://xxxxxxxx.databases.neo4j.io
NEO4J_USERNAME=neo4j
NEO4J_PASSWORD='your_password'
NEO4J_DATABASE=noe4j
NEO4J_DATABASE=neo4j
NEO4J_MAX_CONNECTION_POOL_SIZE=100
NEO4J_CONNECTION_TIMEOUT=30
NEO4J_CONNECTION_ACQUISITION_TIMEOUT=30


@ -1,5 +1,5 @@
from .lightrag import LightRAG as LightRAG, QueryParam as QueryParam
__version__ = "1.4.8.1"
__version__ = "1.4.9.1"
__author__ = "Zirui Guo"
__url__ = "https://github.com/HKUDS/LightRAG"


@ -140,18 +140,6 @@ docker compose up
```
> You can get the official docker compose file here: [docker-compose.yml](https://raw.githubusercontent.com/HKUDS/LightRAG/refs/heads/main/docker-compose.yml). For historical versions of LightRAG Docker images, visit this link: [LightRAG Docker Images](https://github.com/HKUDS/LightRAG/pkgs/container/lightrag)
### Auto scan on startup
When starting the LightRAG Server with the `--auto-scan-at-startup` parameter, the system will automatically:
1. Scan for new files in the input directory
2. Index new documents that are not yet in the database
3. Make all content immediately available for RAG queries
This working mode makes it convenient to launch an ad-hoc RAG task.
> The `--input-dir` parameter specifies the input directory to scan. You can trigger an input directory scan from the Web UI.
### Starting Multiple LightRAG Instances
There are two ways to start multiple LightRAG instances. The first is to configure a completely independent working environment for each instance. This requires creating a separate working directory for each instance and placing a dedicated `.env` configuration file in it. The server listening ports configured for different instances must not conflict. Then start the service by running lightrag-server in each working directory.
@ -290,7 +278,17 @@ LIGHTRAG_API_KEY=your-secure-api-key-here
WHITELIST_PATHS=/health,/api/*
```
> Health check and Ollama emulation endpoints are excluded from API key checks by default.
> Health check and Ollama emulation endpoints are excluded from API key checks by default. For security reasons, remove `/api/*` from WHITELIST_PATHS if the Ollama service is not required.
The API key is passed using the request header `X-API-Key`. Below is an example of accessing the LightRAG Server via the API:
```
curl -X 'POST' \
'http://localhost:9621/documents/scan' \
-H 'accept: application/json' \
-H 'X-API-Key: your-secure-api-key-here-123' \
-d ''
```
* Account credentials (the Web UI requires login before access can be granted)
@ -432,7 +430,6 @@ LIGHTRAG_DOC_STATUS_STORAGE=PGDocStatusStorage
| --ssl-keyfile | None | Path to SSL private key file (required if --ssl is enabled) |
| --llm-binding | ollama | LLM binding type (lollms, ollama, openai, openai-ollama, azure_openai, aws_bedrock) |
| --embedding-binding | ollama | Embedding binding type (lollms, ollama, openai, azure_openai, aws_bedrock) |
| --auto-scan-at-startup | - | Scan input directory for new files and start indexing |
### Reranking Configuration


@ -143,18 +143,6 @@ docker compose up
> You can get the official docker compose file from here: [docker-compose.yml](https://raw.githubusercontent.com/HKUDS/LightRAG/refs/heads/main/docker-compose.yml). For historical versions of LightRAG docker images, visit this link: [LightRAG Docker Images](https://github.com/HKUDS/LightRAG/pkgs/container/lightrag)
### Auto scan on startup
When starting the LightRAG Server with the `--auto-scan-at-startup` parameter, the system will automatically:
1. Scan for new files in the input directory
2. Index new documents that aren't already in the database
3. Make all content immediately available for RAG queries
This offers an efficient method for deploying ad-hoc RAG processes.
> The `--input-dir` parameter specifies the input directory to scan. You can trigger the input directory scan from the Web UI.
### Starting Multiple LightRAG Instances
There are two ways to start multiple LightRAG instances. The first way is to configure a completely independent working environment for each instance. This requires creating a separate working directory for each instance and placing a dedicated `.env` configuration file in that directory. The server listening ports in the configuration files of different instances cannot be the same. Then, you can start the service by running `lightrag-server` in the working directory.
@ -292,7 +280,17 @@ LIGHTRAG_API_KEY=your-secure-api-key-here
WHITELIST_PATHS=/health,/api/*
```
> Health check and Ollama emulation endpoints are excluded from API Key check by default.
> Health check and Ollama emulation endpoints are excluded from API Key check by default. For security reasons, remove `/api/*` from `WHITELIST_PATHS` if the Ollama service is not required.
The API key is passed using the request header `X-API-Key`. Below is an example of accessing the LightRAG Server via API:
```
curl -X 'POST' \
'http://localhost:9621/documents/scan' \
-H 'accept: application/json' \
-H 'X-API-Key: your-secure-api-key-here-123' \
-d ''
```
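The same request in Python, as a sketch assuming the `requests` package and the default server address:
```python
import requests

# Trigger a document scan, authenticating via the X-API-Key header
resp = requests.post(
    "http://localhost:9621/documents/scan",
    headers={
        "accept": "application/json",
        "X-API-Key": "your-secure-api-key-here-123",
    },
    timeout=30,
)
print(resp.status_code, resp.json())
```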
* Account credentials (the Web UI requires login before access can be granted):
@ -434,7 +432,6 @@ You cannot change storage implementation selection after adding documents to Lig
| --ssl-keyfile | None | Path to SSL private key file (required if --ssl is enabled) |
| --llm-binding | ollama | LLM binding type (lollms, ollama, openai, openai-ollama, azure_openai, aws_bedrock) |
| --embedding-binding | ollama | Embedding binding type (lollms, ollama, openai, azure_openai, aws_bedrock) |
| --auto-scan-at-startup| - | Scan input directory for new files and start indexing |
### Reranking Configuration


@ -1 +1 @@
__api_version__ = "0222"
__api_version__ = "0235"


@ -206,13 +206,6 @@ def parse_args() -> argparse.Namespace:
help="Default workspace for all storage",
)
parser.add_argument(
    "--auto-scan-at-startup",
    action="store_true",
    default=False,
    help="Enable automatic scanning when the program starts",
)
# Server workers configuration
parser.add_argument(
"--workers",


@ -2,8 +2,9 @@
LightRAG FastAPI Server
"""
from fastapi import FastAPI, Depends, HTTPException
import asyncio
from fastapi import FastAPI, Depends, HTTPException, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
import os
import logging
import logging.config
@ -45,7 +46,6 @@ from lightrag.constants import (
from lightrag.api.routers.document_routes import (
DocumentManager,
create_document_routes,
run_scanning_process,
)
from lightrag.api.routers.query_routes import create_query_routes
from lightrag.api.routers.graph_routes import create_graph_routes
@ -54,7 +54,6 @@ from lightrag.api.routers.ollama_api import OllamaAPI
from lightrag.utils import logger, set_verbose_debug
from lightrag.kg.shared_storage import (
get_namespace_data,
get_pipeline_status_lock,
initialize_pipeline_status,
cleanup_keyed_lock,
finalize_share_data,
@ -212,24 +211,6 @@ def create_app(args):
# Data migration regardless of storage implementation
await rag.check_and_migrate_data()
pipeline_status = await get_namespace_data("pipeline_status")

should_start_autoscan = False
async with get_pipeline_status_lock():
    # Auto scan documents if enabled
    if args.auto_scan_at_startup:
        if not pipeline_status.get("autoscanned", False):
            pipeline_status["autoscanned"] = True
            should_start_autoscan = True

# Only run auto scan when no other process started it first
if should_start_autoscan:
    # Create background task
    task = asyncio.create_task(run_scanning_process(rag, doc_manager))
    app.state.background_tasks.add(task)
    task.add_done_callback(app.state.background_tasks.discard)
    logger.info(f"Process {os.getpid()} auto scan task started at startup.")

ASCIIColors.green("\nServer is ready to accept connections! 🚀\n")

yield
@ -266,6 +247,35 @@ def create_app(args):
app = FastAPI(**app_kwargs)
# Add custom validation error handler for /query/data endpoint
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(
    request: Request, exc: RequestValidationError
):
    # Check if this is a request to /query/data endpoint
    if request.url.path.endswith("/query/data"):
        # Extract error details
        error_details = []
        for error in exc.errors():
            field_path = " -> ".join(str(loc) for loc in error["loc"])
            error_details.append(f"{field_path}: {error['msg']}")
        error_message = "; ".join(error_details)
        # Return in the expected format for /query/data
        return JSONResponse(
            status_code=400,
            content={
                "status": "failure",
                "message": f"Validation error: {error_message}",
                "data": {},
                "metadata": {},
            },
        )
    else:
        # For other endpoints, return the default FastAPI validation error
        return JSONResponse(status_code=422, content={"detail": exc.errors()})
def get_cors_origins():
"""Get allowed origins from global_args
Returns a list of allowed origins, defaults to ["*"] if not set


@ -745,6 +745,60 @@ class DocumentManager:
return any(filename.lower().endswith(ext) for ext in self.supported_extensions)
def validate_file_path_security(file_path_str: str, base_dir: Path) -> Optional[Path]:
    """
    Validate file path security to prevent Path Traversal attacks.

    Args:
        file_path_str: The file path string to validate
        base_dir: The base directory that the file must be within

    Returns:
        Path: Safe file path if valid, None if unsafe or invalid
    """
    if not file_path_str or not file_path_str.strip():
        return None

    try:
        # Clean the file path string
        clean_path_str = file_path_str.strip()

        # Check for obvious path traversal patterns before processing
        # This catches both Unix (..) and Windows (..\) style traversals
        if ".." in clean_path_str:
            # Additional check for Windows-style backslash traversal
            if (
                "\\..\\" in clean_path_str
                or clean_path_str.startswith("..\\")
                or clean_path_str.endswith("\\..")
            ):
                # logger.warning(
                #     f"Security violation: Windows path traversal attempt detected - {file_path_str}"
                # )
                return None

        # Normalize path separators (convert backslashes to forward slashes)
        # This helps handle Windows-style paths on Unix systems
        normalized_path = clean_path_str.replace("\\", "/")

        # Create path object and resolve it (handles symlinks and relative paths)
        candidate_path = (base_dir / normalized_path).resolve()
        base_dir_resolved = base_dir.resolve()

        # Check if the resolved path is within the base directory
        if not candidate_path.is_relative_to(base_dir_resolved):
            # logger.warning(
            #     f"Security violation: Path traversal attempt detected - {file_path_str}"
            # )
            return None

        return candidate_path

    except (OSError, ValueError, Exception) as e:
        logger.warning(f"Invalid file path detected: {file_path_str} - {str(e)}")
        return None
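A brief usage sketch of the validator above, with illustrative paths (the `inputs` base directory is hypothetical):
```python
from pathlib import Path

base = Path("./inputs")

# A normal relative path resolves inside the base directory
assert validate_file_path_security("docs/report.pdf", base) is not None
# Unix-style traversal resolves outside the base directory and is rejected
assert validate_file_path_security("../../etc/passwd", base) is None
# Windows-style backslash traversal is rejected by the explicit pattern check
assert validate_file_path_security("..\\secrets.txt", base) is None
```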
def get_unique_filename_in_enqueued(target_dir: Path, original_name: str) -> str:
"""Generate a unique filename in the target directory by adding numeric suffixes if needed
@ -1341,9 +1395,37 @@ async def run_scanning_process(
logger.info(f"Found {total_files} files to index.")
if new_files:
# Process all files at once with track_id
await pipeline_index_files(rag, new_files, track_id)
logger.info(f"Scanning process completed: {total_files} files Processed.")
# Check for files with PROCESSED status and filter them out
valid_files = []
processed_files = []
for file_path in new_files:
filename = file_path.name
existing_doc_data = await rag.doc_status.get_doc_by_file_path(filename)
if existing_doc_data and existing_doc_data.get("status") == "processed":
# File is already PROCESSED, skip it with warning
processed_files.append(filename)
logger.warning(f"Skipping already processed file: {filename}")
else:
# File is new or in non-PROCESSED status, add to processing list
valid_files.append(file_path)
# Process valid files (new files + non-PROCESSED status files)
if valid_files:
await pipeline_index_files(rag, valid_files, track_id)
if processed_files:
logger.info(
f"Scanning process completed: {len(valid_files)} files Processed {len(processed_files)} skipped."
)
else:
logger.info(
f"Scanning process completed: {len(valid_files)} files Processed."
)
else:
logger.info(
"No files to process after filtering already processed files."
)
else:
# No new files to index, check if there are any documents in the queue
logger.info(
@ -1429,51 +1511,37 @@ async def background_delete_documents(
):
try:
deleted_files = []
# check and delete files from input_dir directory
file_path = doc_manager.input_dir / result.file_path
if file_path.exists():
try:
file_path.unlink()
deleted_files.append(file_path.name)
file_delete_msg = f"Successfully deleted input_dir file: {result.file_path}"
logger.info(file_delete_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = (
file_delete_msg
)
pipeline_status["history_messages"].append(
file_delete_msg
)
except Exception as file_error:
file_error_msg = f"Failed to delete input_dir file {result.file_path}: {str(file_error)}"
logger.debug(file_error_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = (
file_error_msg
)
pipeline_status["history_messages"].append(
file_error_msg
)
# SECURITY FIX: Use secure path validation to prevent arbitrary file deletion
safe_file_path = validate_file_path_security(
result.file_path, doc_manager.input_dir
)
# Also check and delete files from __enqueued__ directory
enqueued_dir = doc_manager.input_dir / "__enqueued__"
if enqueued_dir.exists():
# Look for files with the same name or similar names (with numeric suffixes)
base_name = Path(result.file_path).stem
extension = Path(result.file_path).suffix
# Search for exact match and files with numeric suffixes
for enqueued_file in enqueued_dir.glob(
f"{base_name}*{extension}"
):
if safe_file_path is None:
# Security violation detected - log and skip file deletion
security_msg = f"Security violation: Unsafe file path detected for deletion - {result.file_path}"
logger.warning(security_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = security_msg
pipeline_status["history_messages"].append(
security_msg
)
else:
# check and delete files from input_dir directory
if safe_file_path.exists():
try:
enqueued_file.unlink()
deleted_files.append(enqueued_file.name)
logger.info(
f"Successfully deleted enqueued file: {enqueued_file.name}"
)
except Exception as enqueued_error:
file_error_msg = f"Failed to delete enqueued file {enqueued_file.name}: {str(enqueued_error)}"
safe_file_path.unlink()
deleted_files.append(safe_file_path.name)
file_delete_msg = f"Successfully deleted input_dir file: {result.file_path}"
logger.info(file_delete_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = (
file_delete_msg
)
pipeline_status["history_messages"].append(
file_delete_msg
)
except Exception as file_error:
file_error_msg = f"Failed to delete input_dir file {result.file_path}: {str(file_error)}"
logger.debug(file_error_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = (
@ -1483,8 +1551,47 @@ async def background_delete_documents(
file_error_msg
)
# Also check and delete files from __enqueued__ directory
enqueued_dir = doc_manager.input_dir / "__enqueued__"
if enqueued_dir.exists():
# SECURITY FIX: Validate that the file path is safe before processing
# Only proceed if the original path validation passed
base_name = Path(result.file_path).stem
extension = Path(result.file_path).suffix
# Search for exact match and files with numeric suffixes
for enqueued_file in enqueued_dir.glob(
f"{base_name}*{extension}"
):
# Additional security check: ensure enqueued file is within enqueued directory
safe_enqueued_path = (
validate_file_path_security(
enqueued_file.name, enqueued_dir
)
)
if safe_enqueued_path is not None:
try:
enqueued_file.unlink()
deleted_files.append(enqueued_file.name)
logger.info(
f"Successfully deleted enqueued file: {enqueued_file.name}"
)
except Exception as enqueued_error:
file_error_msg = f"Failed to delete enqueued file {enqueued_file.name}: {str(enqueued_error)}"
logger.debug(file_error_msg)
async with pipeline_status_lock:
pipeline_status[
"latest_message"
] = file_error_msg
pipeline_status[
"history_messages"
].append(file_error_msg)
else:
security_msg = f"Security violation: Unsafe enqueued file path detected - {enqueued_file.name}"
logger.warning(security_msg)
if deleted_files == []:
file_error_msg = f"File deletion skipped, missing file: {result.file_path}"
file_error_msg = f"File deletion skipped, missing or unsafe file: {result.file_path}"
logger.warning(file_error_msg)
async with pipeline_status_lock:
pipeline_status["latest_message"] = file_error_msg
@ -1618,8 +1725,19 @@ def create_document_routes(
detail=f"Unsupported file type. Supported types: {doc_manager.supported_extensions}",
)
# Check if filename already exists in doc_status storage
existing_doc_data = await rag.doc_status.get_doc_by_file_path(safe_filename)
if existing_doc_data:
    # Get document status information for error message
    status = existing_doc_data.get("status", "unknown")
    return InsertResponse(
        status="duplicated",
        message=f"File '{safe_filename}' already exists in document storage (Status: {status}).",
        track_id="",
    )

file_path = doc_manager.input_dir / safe_filename

# Check if file already exists
# Check if file already exists in file system
if file_path.exists():
return InsertResponse(
status="duplicated",
@ -1669,6 +1787,24 @@ def create_document_routes(
HTTPException: If an error occurs during text processing (500).
"""
try:
# Check if file_source already exists in doc_status storage
if (
    request.file_source
    and request.file_source.strip()
    and request.file_source != "unknown_source"
):
    existing_doc_data = await rag.doc_status.get_doc_by_file_path(
        request.file_source
    )
    if existing_doc_data:
        # Get document status information for error message
        status = existing_doc_data.get("status", "unknown")
        return InsertResponse(
            status="duplicated",
            message=f"File source '{request.file_source}' already exists in document storage (Status: {status}).",
            track_id="",
        )
# Generate track_id for text insertion
track_id = generate_track_id("insert")
@ -1715,6 +1851,26 @@ def create_document_routes(
HTTPException: If an error occurs during text processing (500).
"""
try:
# Check if any file_sources already exist in doc_status storage
if request.file_sources:
    for file_source in request.file_sources:
        if (
            file_source
            and file_source.strip()
            and file_source != "unknown_source"
        ):
            existing_doc_data = await rag.doc_status.get_doc_by_file_path(
                file_source
            )
            if existing_doc_data:
                # Get document status information for error message
                status = existing_doc_data.get("status", "unknown")
                return InsertResponse(
                    status="duplicated",
                    message=f"File source '{file_source}' already exists in document storage (Status: {status}).",
                    track_id="",
                )
# Generate track_id for texts insertion
track_id = generate_track_id("insert")
@ -2017,20 +2173,24 @@ def create_document_routes(
logger.error(traceback.format_exc())
raise HTTPException(status_code=500, detail=str(e))
# TODO: Deprecated
@router.get(
    "", response_model=DocsStatusesResponse, dependencies=[Depends(combined_auth)]
)
async def documents() -> DocsStatusesResponse:
    """
    Get the status of all documents in the system.
    Get the status of all documents in the system. This endpoint is deprecated; use /documents/paginated instead.
    To prevent excessive resource consumption, a maximum of 1,000 records is returned.

    This endpoint retrieves the current status of all documents, grouped by their
    processing status (PENDING, PROCESSING, PROCESSED, FAILED).
    processing status (PENDING, PROCESSING, PROCESSED, FAILED). The results are
    limited to 1000 total documents with fair distribution across all statuses.

    Returns:
        DocsStatusesResponse: A response object containing a dictionary where keys are
                              DocStatus values and values are lists of DocStatusResponse
                              objects representing documents in each status category.
                              Maximum 1000 documents total will be returned.

    Raises:
        HTTPException: If an error occurs while retrieving document statuses (500).
@ -2047,12 +2207,45 @@ def create_document_routes(
results: List[Dict[str, DocProcessingStatus]] = await asyncio.gather(*tasks)

response = DocsStatusesResponse()
total_documents = 0
max_documents = 1000

# Convert results to lists for easier processing
status_documents = []
for idx, result in enumerate(results):
    status = statuses[idx]
    docs_list = []
    for doc_id, doc_status in result.items():
        docs_list.append((doc_id, doc_status))
    status_documents.append((status, docs_list))

# Fair distribution: round-robin across statuses
status_indices = [0] * len(status_documents)  # Track current index for each status
current_status_idx = 0

while total_documents < max_documents:
    # Check if we have any documents left to process
    has_remaining = False
    for status_idx, (status, docs_list) in enumerate(status_documents):
        if status_indices[status_idx] < len(docs_list):
            has_remaining = True
            break
    if not has_remaining:
        break

    # Try to get a document from the current status
    status, docs_list = status_documents[current_status_idx]
    current_index = status_indices[current_status_idx]

    if current_index < len(docs_list):
        doc_id, doc_status = docs_list[current_index]
        if status not in response.statuses:
            response.statuses[status] = []
        response.statuses[status].append(
            DocStatusResponse(
                id=doc_id,
@ -2068,6 +2261,13 @@ def create_document_routes(
                file_path=doc_status.file_path,
            )
        )
        status_indices[current_status_idx] += 1
        total_documents += 1

    # Move to next status (round-robin)
    current_status_idx = (current_status_idx + 1) % len(status_documents)

return response
except Exception as e:
logger.error(f"Error GET /documents: {str(e)}")


@ -45,6 +45,56 @@ def create_graph_routes(rag, api_key: Optional[str] = None):
status_code=500, detail=f"Error getting graph labels: {str(e)}"
)
@router.get("/graph/label/popular", dependencies=[Depends(combined_auth)])
async def get_popular_labels(
limit: int = Query(
300, description="Maximum number of popular labels to return", ge=1, le=1000
),
):
"""
Get popular labels by node degree (most connected entities)
Args:
limit (int): Maximum number of labels to return (default: 300, max: 1000)
Returns:
List[str]: List of popular labels sorted by degree (highest first)
"""
try:
return await rag.chunk_entity_relation_graph.get_popular_labels(limit)
except Exception as e:
logger.error(f"Error getting popular labels: {str(e)}")
logger.error(traceback.format_exc())
raise HTTPException(
status_code=500, detail=f"Error getting popular labels: {str(e)}"
)
@router.get("/graph/label/search", dependencies=[Depends(combined_auth)])
async def search_labels(
q: str = Query(..., description="Search query string"),
limit: int = Query(
50, description="Maximum number of search results to return", ge=1, le=100
),
):
"""
Search labels with fuzzy matching
Args:
q (str): Search query string
limit (int): Maximum number of results to return (default: 50, max: 100)
Returns:
List[str]: List of matching labels sorted by relevance
"""
try:
return await rag.chunk_entity_relation_graph.search_labels(q, limit)
except Exception as e:
logger.error(f"Error searching labels with query '{q}': {str(e)}")
logger.error(traceback.format_exc())
raise HTTPException(
status_code=500, detail=f"Error searching labels: {str(e)}"
)
@router.get("/graphs", dependencies=[Depends(combined_auth)])
async def get_knowledge_graph(
label: str = Query(..., description="Label to get knowledge graph for"),
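A client-side sketch for the two new label endpoints above (assuming the default port and the `requests` package; the `X-API-Key` header is only needed when an API key is configured):
```python
import requests

BASE = "http://localhost:9621"
HEADERS = {"X-API-Key": "your-secure-api-key-here"}

# Top 10 most connected entity labels
popular = requests.get(
    f"{BASE}/graph/label/popular", params={"limit": 10}, headers=HEADERS, timeout=30
).json()

# Fuzzy search for labels matching a query string
matches = requests.get(
    f"{BASE}/graph/label/search", params={"q": "alice", "limit": 5}, headers=HEADERS, timeout=30
).json()
print(popular, matches)
```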


@ -483,6 +483,12 @@ class OllamaAPI:
if not messages:
raise HTTPException(status_code=400, detail="No messages provided")
# Validate that the last message is from a user
if messages[-1].role != "user":
    raise HTTPException(
        status_code=400, detail="Last message must be from user role"
    )
# Get the last message as query and previous messages as history
query = messages[-1].content
# Convert OllamaMessage objects to dictionaries
@ -499,7 +505,7 @@ class OllamaAPI:
prompt_tokens = estimate_tokens(cleaned_query)
param_dict = {
"mode": mode,
"mode": mode.value,
"stream": request.stream,
"only_need_context": only_need_context,
"conversation_history": conversation_history,
@ -510,12 +516,6 @@ class OllamaAPI:
if user_prompt is not None:
param_dict["user_prompt"] = user_prompt
if (
hasattr(self.rag, "args")
and self.rag.args.history_turns is not None
):
param_dict["history_turns"] = self.rag.args.history_turns
query_param = QueryParam(**param_dict)
if request.stream:

File diff suppressed because it is too large


@ -218,8 +218,6 @@ def display_splash_screen(args: argparse.Namespace) -> None:
ASCIIColors.yellow(f"{args.log_level}")
ASCIIColors.white(" ├─ Verbose Debug: ", end="")
ASCIIColors.yellow(f"{args.verbose}")
ASCIIColors.white(" ├─ History Turns: ", end="")
ASCIIColors.yellow(f"{args.history_turns}")
ASCIIColors.white(" ├─ API Key: ", end="")
ASCIIColors.yellow("Set" if args.key else "Not Set")
ASCIIColors.white(" └─ JWT Auth: ", end="")


@ -1 +0,0 @@
import{e as v,c as b,g as m,k as O,h as P,j as p,l as w,m as c,n as x,t as A,o as N}from"./_baseUniq-CN7ubuxw.js";import{aU as g,aq as _,aV as $,aW as E,aX as F,aY as I,aZ as M,a_ as y,a$ as B,b0 as T}from"./mermaid-vendor-CuJcWrH0.js";var S=/\s/;function q(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var G=/^\s+/;function H(n){return n&&n.slice(0,q(n)+1).replace(G,"")}var o=NaN,L=/^[-+]0x[0-9a-f]+$/i,R=/^0b[01]+$/i,W=/^0o[0-7]+$/i,X=parseInt;function Y(n){if(typeof n=="number")return n;if(v(n))return o;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=H(n);var t=R.test(n);return t||W.test(n)?X(n.slice(2),t?2:8):L.test(n)?o:+n}var z=1/0,C=17976931348623157e292;function K(n){if(!n)return n===0?n:0;if(n=Y(n),n===z||n===-1/0){var r=n<0?-1:1;return r*C}return n===n?n:0}function U(n){var r=K(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?b(n):[]}var l=Object.prototype,Z=l.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,a=e>2?r[2]:void 0;for(a&&$(r[0],r[1],a)&&(e=1);++t<e;)for(var f=r[t],i=E(f),s=-1,d=i.length;++s<d;){var u=i[s],h=n[u];(h===void 0||F(h,l[u])&&!Z.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function D(n){return function(r,t,e){var a=Object(r);if(!I(r)){var f=m(t);r=O(r),t=function(s){return f(a[s],s,a)}}var i=n(r,t,e);return i>-1?a[f?r[i]:i]:void 0}}var J=Math.max;function Q(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var a=t==null?0:U(t);return a<0&&(a=J(e+a,0)),P(n,m(r),a)}var hn=D(Q);function V(n,r){var t=-1,e=I(n)?Array(n.length):[];return p(n,function(a,f,i){e[++t]=r(a,f,i)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,m(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function mn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,a=n.length;++e<a;){var f=n[e],i=r(f);if(i!=null&&(s===void 0?i===i&&!v(i):t(i,s)))var s=i,d=f}return d}function on(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=x(r,n);for(var a=-1,f=r.length,i=f-1,s=n;s!=null&&++a<f;){var d=A(r[a]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(a!=i){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[a+1])?[]:{})}T(s,d,u),s=s[d]}return n}function vn(n,r,t){for(var e=-1,a=r.length,f={};++e<a;){var i=r[e],s=N(n,i);t(s,i)&&an(f,x(i,n),s)}return f}export{rn as a,tn as b,V as c,vn as d,on as e,fn as f,hn as g,mn as h,dn as i,U as j,un as l,gn as m,K as t};


@ -0,0 +1 @@
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-D5S4RK2K.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-kG8Trx8h.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1 +1 @@
import{_ as l}from"./mermaid-vendor-CuJcWrH0.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};
import{_ as l}from"./mermaid-vendor-kG8Trx8h.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};


@ -1 +1 @@
import{_ as n,a1 as x,j as l}from"./mermaid-vendor-CuJcWrH0.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-kG8Trx8h.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};


@ -1 +1 @@
import{_ as s}from"./mermaid-vendor-CuJcWrH0.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};
import{_ as s}from"./mermaid-vendor-kG8Trx8h.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};

File diff suppressed because one or more lines are too long


@ -0,0 +1 @@
import{_ as a,d as o}from"./mermaid-vendor-kG8Trx8h.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};


@ -0,0 +1,15 @@
import{_ as e}from"./mermaid-vendor-kG8Trx8h.js";var l=e(()=>`
/* Font Awesome icon styling - consolidated */
.label-icon {
display: inline-block;
height: 1em;
overflow: visible;
vertical-align: -0.125em;
}
.node .label-icon path {
fill: currentColor;
stroke: revert;
stroke-width: revert;
}
`,"getIconStyles");export{l as g};

File diff suppressed because one or more lines are too long


@ -1 +0,0 @@
import{_ as n,d as r,e as d,l as g}from"./mermaid-vendor-CuJcWrH0.js";var u=n((e,t)=>{let o;return t==="sandbox"&&(o=r("#i"+e)),(t==="sandbox"?r(o.nodes()[0].contentDocument.body):r("body")).select(`[id="${e}"]`)},"getDiagramElement"),b=n((e,t,o,i)=>{e.attr("class",o);const{width:a,height:s,x:h,y:x}=l(e,t);d(e,s,a,i);const c=w(h,x,a,s,t);e.attr("viewBox",c),g.debug(`viewBox configured: ${c} with padding: ${t}`)},"setupViewPortForSVG"),l=n((e,t)=>{var i;const o=((i=e.node())==null?void 0:i.getBBox())||{width:0,height:0,x:0,y:0};return{width:o.width+t*2,height:o.height+t*2,x:o.x,y:o.y}},"calculateDimensionsWithPadding"),w=n((e,t,o,i,a)=>`${e-a} ${t-a} ${o} ${i}`,"createViewBox");export{u as g,b as s};


@ -0,0 +1 @@
import{_ as a,e as w,l as x}from"./mermaid-vendor-kG8Trx8h.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};


@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-A2AXSNBT-DGt_KYJH.js";import{_ as i}from"./mermaid-vendor-CuJcWrH0.js";import"./chunk-RZ5BOZE2-D3gEMbCJ.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{f as diagram};


@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-BmUQcRTs.js";import{_ as i}from"./mermaid-vendor-kG8Trx8h.js";import"./chunk-E2GYISFI-Cp-cMcdp.js";import"./chunk-BFAMUDN2-ILoF6WWe.js";import"./chunk-SKB7J2MH-CCgDhfF0.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-A2AXSNBT-DGt_KYJH.js";import{_ as i}from"./mermaid-vendor-CuJcWrH0.js";import"./chunk-RZ5BOZE2-D3gEMbCJ.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{f as diagram};


@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-BmUQcRTs.js";import{_ as i}from"./mermaid-vendor-kG8Trx8h.js";import"./chunk-E2GYISFI-Cp-cMcdp.js";import"./chunk-BFAMUDN2-ILoF6WWe.js";import"./chunk-SKB7J2MH-CCgDhfF0.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};


@ -0,0 +1 @@
import{b as r}from"./_baseUniq-D5S4RK2K.js";var e=4;function a(o){return r(o,e)}export{a as c};


@ -1 +0,0 @@
import{b as r}from"./_baseUniq-CN7ubuxw.js";var e=4;function a(o){return r(o,e)}export{a as c};


@ -0,0 +1,24 @@
import{p as y}from"./chunk-353BL4L5-CG4Wb5Ni.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-kG8Trx8h.js";import{p as N}from"./treemap-75Q7IDZK-M3vYzjwI.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D5S4RK2K.js";import"./_basePickBy-DEiY070O.js";import"./clone-D1RenvQh.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte 
end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
.packetByte {
font-size: ${e.byteFontSize};
}
.packetByte.start {
fill: ${e.startByteColor};
}
.packetByte.end {
fill: ${e.endByteColor};
}
.packetLabel {
fill: ${e.labelColor};
font-size: ${e.labelFontSize};
}
.packetTitle {
fill: ${e.titleColor};
font-size: ${e.titleFontSize};
}
.packetBlock {
stroke: ${e.blockStrokeColor};
stroke-width: ${e.blockStrokeWidth};
fill: ${e.blockFillColor};
}
`},"styles"),lt={parser:q,db:u,renderer:X,styles:J};export{lt as diagram};


@ -1,43 +0,0 @@
import{p as k}from"./chunk-4BMEZGHF-lnpW8Ujf.js";import{_ as l,s as R,g as F,t as I,q as _,a as E,b as D,K as G,z,F as y,G as C,H as P,l as H,Q as V}from"./mermaid-vendor-CuJcWrH0.js";import{p as W}from"./radar-MK3ICKWK-bzElGvQt.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-CN7ubuxw.js";import"./_basePickBy-4-C6UZHi.js";import"./clone-Du5yQfgG.js";var h={showLegend:!0,ticks:5,max:null,min:0,graticule:"circle"},w={axes:[],curves:[],options:h},g=structuredClone(w),B=P.radar,j=l(()=>y({...B,...C().radar}),"getConfig"),b=l(()=>g.axes,"getAxes"),q=l(()=>g.curves,"getCurves"),K=l(()=>g.options,"getOptions"),N=l(a=>{g.axes=a.map(t=>({name:t.name,label:t.label??t.name}))},"setAxes"),Q=l(a=>{g.curves=a.map(t=>({name:t.name,label:t.label??t.name,entries:U(t.entries)}))},"setCurves"),U=l(a=>{if(a[0].axis==null)return a.map(e=>e.value);const t=b();if(t.length===0)throw new Error("Axes must be populated before curves for reference entries");return t.map(e=>{const r=a.find(s=>{var o;return((o=s.axis)==null?void 0:o.$refText)===e.name});if(r===void 0)throw new Error("Missing entry for axis "+e.label);return r.value})},"computeCurveEntries"),X=l(a=>{var e,r,s,o,i;const t=a.reduce((n,c)=>(n[c.name]=c,n),{});g.options={showLegend:((e=t.showLegend)==null?void 0:e.value)??h.showLegend,ticks:((r=t.ticks)==null?void 0:r.value)??h.ticks,max:((s=t.max)==null?void 0:s.value)??h.max,min:((o=t.min)==null?void 0:o.value)??h.min,graticule:((i=t.graticule)==null?void 0:i.value)??h.graticule}},"setOptions"),Y=l(()=>{z(),g=structuredClone(w)},"clear"),$={getAxes:b,getCurves:q,getOptions:K,setAxes:N,setCurves:Q,setOptions:X,getConfig:j,clear:Y,setAccTitle:D,getAccTitle:E,setDiagramTitle:_,getDiagramTitle:I,getAccDescription:F,setAccDescription:R},Z=l(a=>{k(a,$);const{axes:t,curves:e,options:r}=a;$.setAxes(t),$.setCurves(e),$.setOptions(r)},"populate"),J={parse:l(async a=>{const t=await W("radar",a);H.debug(t),Z(t)},"parse")},tt=l((a,t,e,r)=>{const s=r.db,o=s.getAxes(),i=s.getCurves(),n=s.getOptions(),c=s.getConfig(),d=s.getDiagramTitle(),u=G(t),p=et(u,c),m=n.max??Math.max(...i.map(f=>Math.max(...f.entries))),x=n.min,v=Math.min(c.width,c.height)/2;at(p,o,v,n.ticks,n.graticule),rt(p,o,v,c),M(p,o,i,x,m,n.graticule,c),T(p,i,n.showLegend,c),p.append("text").attr("class","radarTitle").text(d).attr("x",0).attr("y",-c.height/2-c.marginTop)},"draw"),et=l((a,t)=>{const e=t.width+t.marginLeft+t.marginRight,r=t.height+t.marginTop+t.marginBottom,s={x:t.marginLeft+t.width/2,y:t.marginTop+t.height/2};return a.attr("viewbox",`0 0 ${e} ${r}`).attr("width",e).attr("height",r),a.append("g").attr("transform",`translate(${s.x}, ${s.y})`)},"drawFrame"),at=l((a,t,e,r,s)=>{if(s==="circle")for(let o=0;o<r;o++){const i=e*(o+1)/r;a.append("circle").attr("r",i).attr("class","radarGraticule")}else if(s==="polygon"){const o=t.length;for(let i=0;i<r;i++){const n=e*(i+1)/r,c=t.map((d,u)=>{const p=2*u*Math.PI/o-Math.PI/2,m=n*Math.cos(p),x=n*Math.sin(p);return`${m},${x}`}).join(" ");a.append("polygon").attr("points",c).attr("class","radarGraticule")}}},"drawGraticule"),rt=l((a,t,e,r)=>{const s=t.length;for(let o=0;o<s;o++){const 
i=t[o].label,n=2*o*Math.PI/s-Math.PI/2;a.append("line").attr("x1",0).attr("y1",0).attr("x2",e*r.axisScaleFactor*Math.cos(n)).attr("y2",e*r.axisScaleFactor*Math.sin(n)).attr("class","radarAxisLine"),a.append("text").text(i).attr("x",e*r.axisLabelFactor*Math.cos(n)).attr("y",e*r.axisLabelFactor*Math.sin(n)).attr("class","radarAxisLabel")}},"drawAxes");function M(a,t,e,r,s,o,i){const n=t.length,c=Math.min(i.width,i.height)/2;e.forEach((d,u)=>{if(d.entries.length!==n)return;const p=d.entries.map((m,x)=>{const v=2*Math.PI*x/n-Math.PI/2,f=A(m,r,s,c),O=f*Math.cos(v),S=f*Math.sin(v);return{x:O,y:S}});o==="circle"?a.append("path").attr("d",L(p,i.curveTension)).attr("class",`radarCurve-${u}`):o==="polygon"&&a.append("polygon").attr("points",p.map(m=>`${m.x},${m.y}`).join(" ")).attr("class",`radarCurve-${u}`)})}l(M,"drawCurves");function A(a,t,e,r){const s=Math.min(Math.max(a,t),e);return r*(s-t)/(e-t)}l(A,"relativeRadius");function L(a,t){const e=a.length;let r=`M${a[0].x},${a[0].y}`;for(let s=0;s<e;s++){const o=a[(s-1+e)%e],i=a[s],n=a[(s+1)%e],c=a[(s+2)%e],d={x:i.x+(n.x-o.x)*t,y:i.y+(n.y-o.y)*t},u={x:n.x-(c.x-i.x)*t,y:n.y-(c.y-i.y)*t};r+=` C${d.x},${d.y} ${u.x},${u.y} ${n.x},${n.y}`}return`${r} Z`}l(L,"closedRoundCurve");function T(a,t,e,r){if(!e)return;const s=(r.width/2+r.marginRight)*3/4,o=-(r.height/2+r.marginTop)*3/4,i=20;t.forEach((n,c)=>{const d=a.append("g").attr("transform",`translate(${s}, ${o+c*i})`);d.append("rect").attr("width",12).attr("height",12).attr("class",`radarLegendBox-${c}`),d.append("text").attr("x",16).attr("y",0).attr("class","radarLegendText").text(n.label)})}l(T,"drawLegend");var st={draw:tt},nt=l((a,t)=>{let e="";for(let r=0;r<a.THEME_COLOR_LIMIT;r++){const s=a[`cScale${r}`];e+=`
.radarCurve-${r} {
color: ${s};
fill: ${s};
fill-opacity: ${t.curveOpacity};
stroke: ${s};
stroke-width: ${t.curveStrokeWidth};
}
.radarLegendBox-${r} {
fill: ${s};
fill-opacity: ${t.curveOpacity};
stroke: ${s};
}
`}return e},"genIndexStyles"),ot=l(a=>{const t=V(),e=C(),r=y(t,e.themeVariables),s=y(r.radar,a);return{themeVariables:r,radarOptions:s}},"buildRadarStyleOptions"),it=l(({radar:a}={})=>{const{themeVariables:t,radarOptions:e}=ot(a);return`
.radarTitle {
font-size: ${t.fontSize};
color: ${t.titleColor};
dominant-baseline: hanging;
text-anchor: middle;
}
.radarAxisLine {
stroke: ${e.axisColor};
stroke-width: ${e.axisStrokeWidth};
}
.radarAxisLabel {
dominant-baseline: middle;
text-anchor: middle;
font-size: ${e.axisLabelFontSize}px;
color: ${e.axisColor};
}
.radarGraticule {
fill: ${e.graticuleColor};
fill-opacity: ${e.graticuleOpacity};
stroke: ${e.graticuleColor};
stroke-width: ${e.graticuleStrokeWidth};
}
.radarLegendText {
text-anchor: start;
font-size: ${e.legendFontSize}px;
dominant-baseline: hanging;
}
${nt(t,e)}
`},"styles"),ft={parser:J,db:$,renderer:st,styles:it};export{ft as diagram};

File diff suppressed because one or more lines are too long


@ -1,24 +0,0 @@
import{p as w}from"./chunk-4BMEZGHF-lnpW8Ujf.js";import{_ as n,s as B,g as S,t as F,q as z,a as P,b as W,F as x,K as T,e as D,z as _,G as A,H as E,l as v}from"./mermaid-vendor-CuJcWrH0.js";import{p as N}from"./radar-MK3ICKWK-bzElGvQt.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-CN7ubuxw.js";import"./_basePickBy-4-C6UZHi.js";import"./clone-Du5yQfgG.js";var C={packet:[]},h=structuredClone(C),L=E.packet,Y=n(()=>{const t=x({...L,...A().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=n(()=>h.packet,"getPacket"),H=n(t=>{t.length>0&&h.packet.push(t)},"pushWord"),I=n(()=>{_(),h=structuredClone(C)},"clear"),m={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:W,getAccTitle:P,setDiagramTitle:z,getDiagramTitle:F,getAccDescription:S,setAccDescription:B},K=1e4,M=n(t=>{w(t,m);let e=-1,o=[],s=1;const{bitsPerRow:i}=m.getConfig();for(let{start:a,end:r,label:p}of t.blocks){if(r&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);for(e=r??a,v.debug(`Packet block ${a} - ${e} with label ${p}`);o.length<=i+1&&m.getPacket().length<K;){const[b,c]=O({start:a,end:r,label:p},s,i);if(o.push(b),b.end+1===s*i&&(m.pushWord(o),o=[],s++),!c)break;({start:a,end:r,label:p}=c)}}m.pushWord(o)},"populate"),O=n((t,e,o)=>{if(t.end===void 0&&(t.end=t.start),t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);return t.end+1<=e*o?[t,void 0]:[{start:t.start,end:e*o-1,label:t.label},{start:e*o,end:t.end,label:t.label}]},"getNextFittingBlock"),q={parse:n(async t=>{const e=await N("packet",t);v.debug(e),M(e)},"parse")},R=n((t,e,o,s)=>{const i=s.db,a=i.getConfig(),{rowHeight:r,paddingY:p,bitWidth:b,bitsPerRow:c}=a,u=i.getPacket(),l=i.getDiagramTitle(),g=r+p,d=g*(u.length+1)-(l?0:r),k=b*c+2,f=T(e);f.attr("viewbox",`0 0 ${k} ${d}`),D(f,d,k,a.useMaxWidth);for(const[$,y]of u.entries())U(f,y,$,a);f.append("text").text(l).attr("x",k/2).attr("y",d-g/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=n((t,e,o,{rowHeight:s,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:p,showBits:b})=>{const c=t.append("g"),u=o*(s+a)+a;for(const l of e){const g=l.start%p*r+1,d=(l.end-l.start+1)*r-i;if(c.append("rect").attr("x",g).attr("y",u).attr("width",d).attr("height",s).attr("class","packetBlock"),c.append("text").attr("x",g+d/2).attr("y",u+s/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(l.label),!b)continue;const k=l.end===l.start,f=u-2;c.append("text").attr("x",g+(k?d/2:0)).attr("y",f).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",k?"middle":"start").text(l.start),k||c.append("text").attr("x",g+d).attr("y",f).attr("class","packetByte end").attr("dominant-baseline","auto").attr("text-anchor","end").text(l.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=n(({packet:t}={})=>{const e=x(j,t);return`
.packetByte {
font-size: ${e.byteFontSize};
}
.packetByte.start {
fill: ${e.startByteColor};
}
.packetByte.end {
fill: ${e.endByteColor};
}
.packetLabel {
fill: ${e.labelColor};
font-size: ${e.labelFontSize};
}
.packetTitle {
fill: ${e.titleColor};
font-size: ${e.titleFontSize};
}
.packetBlock {
stroke: ${e.blockStrokeColor};
stroke-width: ${e.blockStrokeWidth};
fill: ${e.blockFillColor};
}
`},"styles"),it={parser:q,db:m,renderer:X,styles:J};export{it as diagram};

File diff suppressed because one or more lines are too long (repeated for 20 files)


@@ -0,0 +1,2 @@
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-kG8Trx8h.js";import{p as m}from"./treemap-75Q7IDZK-M3vYzjwI.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D5S4RK2K.js";import"./_basePickBy-DEiY070O.js";import"./clone-D1RenvQh.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
`+r);const t=i(a);n(t,100,400,!0),t.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size",32).style("text-anchor","middle").text(`v${s}`)},"draw"),f={draw:l},L={parser:g,db:c,renderer:f};export{L as diagram};


@@ -1,2 +0,0 @@
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-CuJcWrH0.js";import{p as m}from"./radar-MK3ICKWK-bzElGvQt.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-CN7ubuxw.js";import"./_basePickBy-4-C6UZHi.js";import"./clone-Du5yQfgG.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
`+r);const t=i(a);n(t,100,400,!0),t.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size",32).style("text-anchor","middle").text(`v${s}`)},"draw"),f={draw:l},L={parser:g,db:c,renderer:f};export{L as diagram};

File diff suppressed because one or more lines are too long (repeated for 11 files)


@@ -1,4 +1,4 @@
import{p as N}from"./chunk-4BMEZGHF-lnpW8Ujf.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-CuJcWrH0.js";import{p as rt}from"./radar-MK3ICKWK-bzElGvQt.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-CN7ubuxw.js";import"./_basePickBy-4-C6UZHi.js";import"./clone-Du5yQfgG.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
import{p as N}from"./chunk-353BL4L5-CG4Wb5Ni.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-kG8Trx8h.js";import{p as rt}from"./treemap-75Q7IDZK-M3vYzjwI.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D5S4RK2K.js";import"./_basePickBy-DEiY070O.js";import"./clone-D1RenvQh.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
.pieCircle{
stroke: ${t.pieStrokeColor};
stroke-width : ${t.pieStrokeWidth};

File diff suppressed because one or more lines are too long (repeated for 5 files)


@@ -0,0 +1 @@
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-7_NWvBKK.js";import{_ as s}from"./mermaid-vendor-kG8Trx8h.js";import"./chunk-BFAMUDN2-ILoF6WWe.js";import"./chunk-SKB7J2MH-CCgDhfF0.js";import"./feature-graph-7h-VS-iL.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};


@@ -1 +0,0 @@
import{s as r,b as e,a,S as s}from"./chunk-AEK57VVT-6am85ODh.js";import{_ as i}from"./mermaid-vendor-CuJcWrH0.js";import"./chunk-RZ5BOZE2-D3gEMbCJ.js";import"./feature-graph-CeEpF_Yb.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var b={parser:a,get db(){return new s(2)},renderer:e,styles:r,init:i(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{b as diagram};

File diff suppressed because one or more lines are too long (repeated for 5 files)


@@ -8,18 +8,18 @@
<link rel="icon" type="image/png" href="favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Lightrag</title>
<script type="module" crossorigin src="/webui/assets/index-KW-eZWPA.js"></script>
<script type="module" crossorigin src="/webui/assets/index-DbjaBP8L.js"></script>
<link rel="modulepreload" crossorigin href="/webui/assets/react-vendor-DEwriMA6.js">
<link rel="modulepreload" crossorigin href="/webui/assets/ui-vendor-CeCm8EER.js">
<link rel="modulepreload" crossorigin href="/webui/assets/graph-vendor-B-X5JegA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/utils-vendor-BysuhMZA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-CeEpF_Yb.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-DaeP3jJU.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-CuJcWrH0.js">
<link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-BKOfNAUO.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-CTawnaAw.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-7h-VS-iL.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-Btp1BS17.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-kG8Trx8h.js">
<link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-Dv0NSOeH.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-DsN5eBDA.js">
<link rel="stylesheet" crossorigin href="/webui/assets/feature-graph-BipNuM18.css">
<link rel="stylesheet" crossorigin href="/webui/assets/index-BvrNHAMA.css">
<link rel="stylesheet" crossorigin href="/webui/assets/index-DzTVXLx_.css">
</head>
<body>
<div id="root"></div>


@@ -11,6 +11,10 @@ from typing import (
TypedDict,
TypeVar,
Callable,
Optional,
Dict,
List,
AsyncIterator,
)
from .utils import EmbeddingFunc
from .types import KnowledgeGraph
@@ -132,13 +136,13 @@ class QueryParam:
ll_keywords: list[str] = field(default_factory=list)
"""List of low-level keywords to refine retrieval focus."""
# TODO: Deprecated - history messages have a negative effect on query performance
# History messages are only sent to the LLM for context, not used for retrieval
conversation_history: list[dict[str, str]] = field(default_factory=list)
"""Stores past conversation history to maintain context.
Format: [{"role": "user/assistant", "content": "message"}].
"""
# TODO: Deprecated - history messages have a negative effect on query performance
# TODO: Deprecated. No longer used in the codebase; all conversation_history messages are sent to the LLM
history_turns: int = int(os.getenv("HISTORY_TURNS", str(DEFAULT_HISTORY_TURNS)))
"""Number of complete conversation turns (user-assistant pairs) to consider in the response context."""
@@ -150,7 +154,8 @@ class QueryParam:
user_prompt: str | None = None
"""User-provided prompt for the query.
If provided, this will be used instead of the default value from the prompt template.
Additional instructions for the LLM. If provided, these will be injected into the prompt template.
Their purpose is to let the user customize the way the LLM generates the response.
"""
enable_rerank: bool = os.getenv("RERANK_BY_DEFAULT", "true").lower() == "true"
@@ -158,6 +163,12 @@ class QueryParam:
Default is True to enable reranking when rerank model is available.
"""
include_references: bool = False
"""If True, includes reference list in the response for supported endpoints.
This parameter controls whether the API response includes a references field
containing citation information for the retrieved content.
"""
@dataclass
class StorageNameSpace(ABC):
@@ -629,6 +640,7 @@ class BaseGraphStorage(StorageNameSpace, ABC):
edges: List of edges to be deleted, each edge is a (source, target) tuple
"""
# TODO: deprecated
@abstractmethod
async def get_all_labels(self) -> list[str]:
"""Get all labels in the graph.
@@ -671,6 +683,29 @@ class BaseGraphStorage(StorageNameSpace, ABC):
A list of all edges, where each edge is a dictionary of its properties
"""
@abstractmethod
async def get_popular_labels(self, limit: int = 300) -> list[str]:
"""Get popular labels by node degree (most connected entities)
Args:
limit: Maximum number of labels to return
Returns:
List of labels sorted by degree (highest first)
"""
@abstractmethod
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
"""Search labels with fuzzy matching
Args:
query: Search query string
limit: Maximum number of results to return
Returns:
List of matching labels sorted by relevance
"""
class DocStatus(str, Enum):
"""Document processing status"""
@@ -759,6 +794,18 @@ class DocStatusStorage(BaseKVStorage, ABC):
Dictionary mapping status names to counts
"""
@abstractmethod
async def get_doc_by_file_path(self, file_path: str) -> dict[str, Any] | None:
"""Get document by file path
Args:
file_path: The file path to search for
Returns:
dict[str, Any] | None: Document data if found, None otherwise
Returns the same format as the get_by_ids method
"""
class StoragesStatus(str, Enum):
"""Storages status"""
@@ -778,3 +825,68 @@ class DeletionResult:
message: str
status_code: int = 200
file_path: str | None = None
# Unified Query Result Data Structures for Reference List Support
@dataclass
class QueryResult:
"""
Unified query result data structure for all query modes.
Attributes:
content: Text content for non-streaming responses
response_iterator: Streaming response iterator for streaming responses
raw_data: Complete structured data including references and metadata
is_streaming: Whether this is a streaming result
"""
content: Optional[str] = None
response_iterator: Optional[AsyncIterator[str]] = None
raw_data: Optional[Dict[str, Any]] = None
is_streaming: bool = False
@property
def reference_list(self) -> List[Dict[str, str]]:
"""
Convenient property to extract reference list from raw_data.
Returns:
List[Dict[str, str]]: Reference list in format:
[{"reference_id": "1", "file_path": "/path/to/file.pdf"}, ...]
"""
if self.raw_data:
return self.raw_data.get("data", {}).get("references", [])
return []
@property
def metadata(self) -> Dict[str, Any]:
"""
Convenient property to extract metadata from raw_data.
Returns:
Dict[str, Any]: Query metadata including query_mode, keywords, etc.
"""
if self.raw_data:
return self.raw_data.get("metadata", {})
return {}
@dataclass
class QueryContextResult:
"""
Unified query context result data structure.
Attributes:
context: LLM context string
raw_data: Complete structured data including reference_list
"""
context: str
raw_data: Dict[str, Any]
@property
def reference_list(self) -> List[Dict[str, str]]:
"""Convenient property to extract reference list from raw_data."""
return self.raw_data.get("data", {}).get("references", [])
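A consumption sketch for QueryResult (the helper is illustrative; it relies only on the fields and properties defined above):

# Sketch: handling streaming and non-streaming results uniformly
async def print_answer(result: QueryResult) -> None:
    if result.is_streaming and result.response_iterator is not None:
        async for chunk in result.response_iterator:
            print(chunk, end="", flush=True)
        print()
    else:
        print(result.content or "")
    # References and metadata are read from raw_data in either mode
    for ref in result.reference_list:
        print(f'[{ref["reference_id"]}] {ref["file_path"]}')
    print("query_mode:", result.metadata.get("query_mode"))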


@@ -25,6 +25,7 @@ DEFAULT_SUMMARY_CONTEXT_SIZE = 12000
# Default entities to extract if ENTITY_TYPES is not specified in .env
DEFAULT_ENTITY_TYPES = [
"Person",
"Creature",
"Organization",
"Location",
"Event",
@@ -48,7 +49,8 @@ DEFAULT_MAX_TOTAL_TOKENS = 30000
DEFAULT_COSINE_THRESHOLD = 0.2
DEFAULT_RELATED_CHUNK_NUMBER = 5
DEFAULT_KG_CHUNK_PICK_METHOD = "VECTOR"
# Deprecated: history messages have a negative effect on query performance
# TODO: Deprecated. All conversation_history messages are sent to the LLM.
DEFAULT_HISTORY_TURNS = 0
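A sketch of the .env fallback described at the top of this hunk; the exact format of the ENTITY_TYPES variable (JSON list vs. comma-separated) is an assumption, so treat the parsing as illustrative:

# Sketch: fall back to DEFAULT_ENTITY_TYPES when ENTITY_TYPES is absent from .env
import os

raw = os.getenv("ENTITY_TYPES", "")  # format assumed to be comma-separated here
entity_types = [t.strip() for t in raw.split(",") if t.strip()] or DEFAULT_ENTITY_TYPES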
# Rerank configuration defaults


@@ -323,6 +323,27 @@ class JsonDocStatusStorage(DocStatusStorage):
if any_deleted:
await set_all_update_flags(self.final_namespace)
async def get_doc_by_file_path(self, file_path: str) -> Union[dict[str, Any], None]:
"""Get document by file path
Args:
file_path: The file path to search for
Returns:
Union[dict[str, Any], None]: Document data if found, None otherwise
Returns the same format as get_by_ids method
"""
if self._storage_lock is None:
raise StorageNotInitializedError("JsonDocStatusStorage")
async with self._storage_lock:
for doc_id, doc_data in self._data.items():
if doc_data.get("file_path") == file_path:
# Return complete document data, consistent with get_by_ids method
return doc_data
return None
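Note the linear scan: file_path is not indexed in the JSON backend, so this lookup is O(n) in the number of stored documents. A usage sketch (the storage instance and any field names beyond file_path are assumptions):

# Sketch: resolving a document's status record by its original file path
doc = await doc_status_storage.get_doc_by_file_path("docs/report.pdf")
if doc is not None:
    print(doc.get("status"))  # field name assumed for illustration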
async def drop(self) -> dict[str, str]:
"""Drop all document status data from storage and clean up resources


@@ -1089,3 +1089,100 @@ class MemgraphStorage(BaseGraphStorage):
edges.append(edge_properties)
await result.consume()
return edges
async def get_popular_labels(self, limit: int = 300) -> list[str]:
"""Get popular labels by node degree (most connected entities)
Args:
limit: Maximum number of labels to return
Returns:
List of labels sorted by degree (highest first)
"""
if self._driver is None:
raise RuntimeError(
"Memgraph driver is not initialized. Call 'await initialize()' first."
)
try:
workspace_label = self._get_workspace_label()
async with self._driver.session(
database=self._DATABASE, default_access_mode="READ"
) as session:
query = f"""
MATCH (n:`{workspace_label}`)
WHERE n.entity_id IS NOT NULL
OPTIONAL MATCH (n)-[r]-()
WITH n.entity_id AS label, count(r) AS degree
ORDER BY degree DESC, label ASC
LIMIT {limit}
RETURN label
"""
result = await session.run(query)
labels = []
async for record in result:
labels.append(record["label"])
await result.consume()
logger.debug(
f"[{self.workspace}] Retrieved {len(labels)} popular labels (limit: {limit})"
)
return labels
except Exception as e:
logger.error(f"[{self.workspace}] Error getting popular labels: {str(e)}")
return []
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
"""Search labels with fuzzy matching
Args:
query: Search query string
limit: Maximum number of results to return
Returns:
List of matching labels sorted by relevance
"""
if self._driver is None:
raise RuntimeError(
"Memgraph driver is not initialized. Call 'await initialize()' first."
)
query_lower = query.lower().strip()
if not query_lower:
return []
try:
workspace_label = self._get_workspace_label()
async with self._driver.session(
database=self._DATABASE, default_access_mode="READ"
) as session:
cypher_query = f"""
MATCH (n:`{workspace_label}`)
WHERE n.entity_id IS NOT NULL
WITH n.entity_id AS label, toLower(n.entity_id) AS label_lower
WHERE label_lower CONTAINS $query_lower
WITH label, label_lower,
CASE
WHEN label_lower = $query_lower THEN 1000
WHEN label_lower STARTS WITH $query_lower THEN 500
ELSE 100 - size(label)
END AS score
ORDER BY score DESC, label ASC
LIMIT {limit}
RETURN label
"""
result = await session.run(cypher_query, query_lower=query_lower)
labels = []
async for record in result:
labels.append(record["label"])
await result.consume()
logger.debug(
f"[{self.workspace}] Search query '{query}' returned {len(labels)} results (limit: {limit})"
)
return labels
except Exception as e:
logger.error(f"[{self.workspace}] Error searching labels: {str(e)}")
return []
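A usage sketch for the two Memgraph methods (driver setup and the prior await initialize() call are assumed to have happened elsewhere):

# Sketch: feeding a label picker from the Memgraph backend
popular = await graph_storage.get_popular_labels(limit=50)      # most-connected entities first
matches = await graph_storage.search_labels("alice", limit=10)  # exact > prefix > substring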

Some files were not shown because too many files have changed in this diff.