Merge branch 'main' into pg-optimization
commit f76d926512
67 changed files with 1117 additions and 477 deletions
@@ -123,7 +123,7 @@ MAX_PARALLEL_INSERT=2
 ###########################################################
 ### LLM Configuration
-### LLM_BINDING type: openai, ollama, lollms, azure_openai
+### LLM_BINDING type: openai, ollama, lollms, azure_openai, aws_bedrock
 ###########################################################
 ### LLM temperature setting for all llm binding (openai, azure_openai, ollama)
 # TEMPERATURE=1.0
@@ -40,6 +40,7 @@ LightRAG needs to integrate both an LLM (Large Language Model) and an embedding model to effectively
 * lollms
 * openai or openai compatible
 * azure_openai
+* aws_bedrock

 It is recommended to configure the LightRAG Server with environment variables. The project root contains a sample environment variable file named `env.example`. Copy this file to the startup directory and rename it to `.env`; you can then modify the LLM- and embedding-model-related parameters in the `.env` file. Note that the LightRAG Server loads the environment variables from `.env` into the system environment variables on every startup. **The LightRAG Server gives precedence to settings in the system environment variables**.
@@ -359,6 +360,7 @@ LightRAG supports binding to various LLM/embedding backends:
 * openai and openai compatible
 * azure_openai
 * lollms
+* aws_bedrock

 Use the environment variable `LLM_BINDING` or the CLI argument `--llm-binding` to select the LLM backend type. Use the environment variable `EMBEDDING_BINDING` or the CLI argument `--embedding-binding` to select the embedding backend type.
@@ -40,6 +40,7 @@ LightRAG necessitates the integration of both an LLM (Large Language Model) and
 * lollms
 * openai or openai compatible
 * azure_openai
+* aws_bedrock

 It is recommended to use environment variables to configure the LightRAG Server. There is an example environment variable file named `env.example` in the root directory of the project. Please copy this file to the startup directory and rename it to `.env`. After that, you can modify the parameters related to the LLM and Embedding models in the `.env` file. It is important to note that the LightRAG Server will load the environment variables from `.env` into the system environment variables each time it starts. **The LightRAG Server will prioritize the settings in the system environment variables over those in the `.env` file.**
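For reference, a minimal `.env` fragment that selects the newly added `aws_bedrock` LLM binding might look like the following; the model IDs are illustrative placeholders, not values required by LightRAG:

```env
LLM_BINDING=aws_bedrock
LLM_MODEL=anthropic.claude-3-haiku-20240307-v1:0
EMBEDDING_BINDING=ollama
EMBEDDING_MODEL=bge-m3:latest
```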
@@ -362,6 +363,7 @@ LightRAG supports binding to various LLM/Embedding backends:
 * openai & openai compatible
 * azure_openai
 * lollms
+* aws_bedrock

 Use the environment variable `LLM_BINDING` or the CLI argument `--llm-binding` to select the LLM backend type. Use the environment variable `EMBEDDING_BINDING` or the CLI argument `--embedding-binding` to select the Embedding backend type.
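The same selection can also be made on the command line; a sketch, assuming the `lightrag-server` entry point installed with the API package:

```sh
lightrag-server --llm-binding aws_bedrock --embedding-binding jina
```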
@ -461,8 +463,8 @@ You cannot change storage implementation selection after adding documents to Lig
|
|||
| --ssl-keyfile | None | Path to SSL private key file (required if --ssl is enabled) |
|
||||
| --top-k | 50 | Number of top-k items to retrieve; corresponds to entities in "local" mode and relationships in "global" mode. |
|
||||
| --cosine-threshold | 0.4 | The cosine threshold for nodes and relation retrieval, works with top-k to control the retrieval of nodes and relations. |
|
||||
| --llm-binding | ollama | LLM binding type (lollms, ollama, openai, openai-ollama, azure_openai) |
|
||||
| --embedding-binding | ollama | Embedding binding type (lollms, ollama, openai, azure_openai) |
|
||||
| --llm-binding | ollama | LLM binding type (lollms, ollama, openai, openai-ollama, azure_openai, aws_bedrock) |
|
||||
| --embedding-binding | ollama | Embedding binding type (lollms, ollama, openai, azure_openai, aws_bedrock) |
|
||||
| --auto-scan-at-startup| - | Scan input directory for new files and start indexing |
|
||||
|
||||
### Additional Ollama Binding Options
|
||||
|
|
|
|||
|
|
@@ -1 +1 @@
-__api_version__ = "0201"
+__api_version__ = "0202"
@@ -209,14 +209,21 @@ def parse_args() -> argparse.Namespace:
         "--llm-binding",
         type=str,
         default=get_env_value("LLM_BINDING", "ollama"),
-        choices=["lollms", "ollama", "openai", "openai-ollama", "azure_openai"],
+        choices=[
+            "lollms",
+            "ollama",
+            "openai",
+            "openai-ollama",
+            "azure_openai",
+            "aws_bedrock",
+        ],
         help="LLM binding type (default: from env or ollama)",
     )
     parser.add_argument(
         "--embedding-binding",
         type=str,
         default=get_env_value("EMBEDDING_BINDING", "ollama"),
-        choices=["lollms", "ollama", "openai", "azure_openai"],
+        choices=["lollms", "ollama", "openai", "azure_openai", "aws_bedrock", "jina"],
         help="Embedding binding type (default: from env or ollama)",
     )
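Because the argparse defaults above are computed through `get_env_value`, the effective precedence is: explicit CLI flag, then environment variable, then built-in default. A minimal sketch of that behavior (not code from the commit):

```python
import os

# With no --embedding-binding flag on the command line,
# the environment variable fills the argparse default.
os.environ["EMBEDDING_BINDING"] = "jina"
args = parse_args()
assert args.embedding_binding == "jina"
```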
@@ -104,8 +104,8 @@ def create_app(args):
         "lollms",
         "ollama",
         "openai",
         "openai-ollama",
         "azure_openai",
+        "aws_bedrock",
     ]:
         raise Exception("llm binding not supported")
@@ -114,6 +114,7 @@ def create_app(args):
         "ollama",
         "openai",
         "azure_openai",
+        "aws_bedrock",
         "jina",
     ]:
         raise Exception("embedding binding not supported")
@@ -188,10 +189,12 @@ def create_app(args):
     # Initialize FastAPI
     app_kwargs = {
         "title": "LightRAG Server API",
-        "description": "Providing API for LightRAG core, Web UI and Ollama Model Emulation"
-        + "(With authentication)"
-        if api_key
-        else "",
+        "description": (
+            "Providing API for LightRAG core, Web UI and Ollama Model Emulation"
+            + "(With authentication)"
+            if api_key
+            else ""
+        ),
         "version": __api_version__,
         "openapi_url": "/openapi.json",  # Explicitly set OpenAPI schema URL
         "docs_url": "/docs",  # Explicitly set docs URL
@@ -244,9 +247,9 @@ def create_app(args):
         azure_openai_complete_if_cache,
         azure_openai_embed,
     )
     if args.llm_binding == "openai-ollama" or args.embedding_binding == "ollama":
         from lightrag.llm.openai import openai_complete_if_cache
         from lightrag.llm.ollama import ollama_embed
+    if args.llm_binding == "aws_bedrock" or args.embedding_binding == "aws_bedrock":
+        from lightrag.llm.bedrock import bedrock_complete_if_cache, bedrock_embed
     if args.embedding_binding == "ollama":
         from lightrag.llm.binding_options import OllamaEmbeddingOptions
     if args.embedding_binding == "jina":
         from lightrag.llm.jina import jina_embed
@@ -312,41 +315,80 @@ def create_app(args):
             **kwargs,
         )

+    async def bedrock_model_complete(
+        prompt,
+        system_prompt=None,
+        history_messages=None,
+        keyword_extraction=False,
+        **kwargs,
+    ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
+        if history_messages is None:
+            history_messages = []
+
+        # Use global temperature for Bedrock
+        kwargs["temperature"] = args.temperature
+
+        return await bedrock_complete_if_cache(
+            args.llm_model,
+            prompt,
+            system_prompt=system_prompt,
+            history_messages=history_messages,
+            **kwargs,
+        )
+
     embedding_func = EmbeddingFunc(
         embedding_dim=args.embedding_dim,
-        func=lambda texts: lollms_embed(
-            texts,
-            embed_model=args.embedding_model,
-            host=args.embedding_binding_host,
-            api_key=args.embedding_binding_api_key,
-        )
-        if args.embedding_binding == "lollms"
-        else ollama_embed(
-            texts,
-            embed_model=args.embedding_model,
-            host=args.embedding_binding_host,
-            api_key=args.embedding_binding_api_key,
-            options=OllamaEmbeddingOptions.options_dict(args),
-        )
-        if args.embedding_binding == "ollama"
-        else azure_openai_embed(
-            texts,
-            model=args.embedding_model,  # no host is used for openai,
-            api_key=args.embedding_binding_api_key,
-        )
-        if args.embedding_binding == "azure_openai"
-        else jina_embed(
-            texts,
-            dimensions=args.embedding_dim,
-            base_url=args.embedding_binding_host,
-            api_key=args.embedding_binding_api_key,
-        )
-        if args.embedding_binding == "jina"
-        else openai_embed(
-            texts,
-            model=args.embedding_model,
-            base_url=args.embedding_binding_host,
-            api_key=args.embedding_binding_api_key,
-        ),
+        func=lambda texts: (
+            lollms_embed(
+                texts,
+                embed_model=args.embedding_model,
+                host=args.embedding_binding_host,
+                api_key=args.embedding_binding_api_key,
+            )
+            if args.embedding_binding == "lollms"
+            else (
+                ollama_embed(
+                    texts,
+                    embed_model=args.embedding_model,
+                    host=args.embedding_binding_host,
+                    api_key=args.embedding_binding_api_key,
+                    options=OllamaEmbeddingOptions.options_dict(args),
+                )
+                if args.embedding_binding == "ollama"
+                else (
+                    azure_openai_embed(
+                        texts,
+                        model=args.embedding_model,  # no host is used for openai,
+                        api_key=args.embedding_binding_api_key,
+                    )
+                    if args.embedding_binding == "azure_openai"
+                    else (
+                        bedrock_embed(
+                            texts,
+                            model=args.embedding_model,
+                        )
+                        if args.embedding_binding == "aws_bedrock"
+                        else (
+                            jina_embed(
+                                texts,
+                                dimensions=args.embedding_dim,
+                                base_url=args.embedding_binding_host,
+                                api_key=args.embedding_binding_api_key,
+                            )
+                            if args.embedding_binding == "jina"
+                            else openai_embed(
+                                texts,
+                                model=args.embedding_model,
+                                base_url=args.embedding_binding_host,
+                                api_key=args.embedding_binding_api_key,
+                            )
+                        )
+                    )
+                )
+            )
+        ),
     )
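The nested conditional expression above selects exactly one embedding backend based on `args.embedding_binding`. The same dispatch can also be written as a lookup table; this is only a sketch that reuses the names imported in this file, not code from the commit:

```python
def make_embedding_callable(args):
    # Sketch only: a name -> factory table equivalent to the nested ternary above.
    table = {
        "lollms": lambda texts: lollms_embed(
            texts,
            embed_model=args.embedding_model,
            host=args.embedding_binding_host,
            api_key=args.embedding_binding_api_key,
        ),
        "ollama": lambda texts: ollama_embed(
            texts,
            embed_model=args.embedding_model,
            host=args.embedding_binding_host,
            api_key=args.embedding_binding_api_key,
            options=OllamaEmbeddingOptions.options_dict(args),
        ),
        "azure_openai": lambda texts: azure_openai_embed(
            texts,
            model=args.embedding_model,
            api_key=args.embedding_binding_api_key,
        ),
        "aws_bedrock": lambda texts: bedrock_embed(
            texts,
            model=args.embedding_model,
        ),
        "jina": lambda texts: jina_embed(
            texts,
            dimensions=args.embedding_dim,
            base_url=args.embedding_binding_host,
            api_key=args.embedding_binding_api_key,
        ),
    }
    # Anything else falls through to plain OpenAI, mirroring the final else branch.
    return table.get(
        args.embedding_binding,
        lambda texts: openai_embed(
            texts,
            model=args.embedding_model,
            base_url=args.embedding_binding_host,
            api_key=args.embedding_binding_api_key,
        ),
    )
```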
@@ -386,28 +428,36 @@ def create_app(args):
         )

     # Initialize RAG
-    if args.llm_binding in ["lollms", "ollama", "openai"]:
+    if args.llm_binding in ["lollms", "ollama", "openai", "aws_bedrock"]:
         rag = LightRAG(
             working_dir=args.working_dir,
             workspace=args.workspace,
-            llm_model_func=lollms_model_complete
-            if args.llm_binding == "lollms"
-            else ollama_model_complete
-            if args.llm_binding == "ollama"
-            else openai_alike_model_complete,
+            llm_model_func=(
+                lollms_model_complete
+                if args.llm_binding == "lollms"
+                else (
+                    ollama_model_complete
+                    if args.llm_binding == "ollama"
+                    else bedrock_model_complete
+                    if args.llm_binding == "aws_bedrock"
+                    else openai_alike_model_complete
+                )
+            ),
             llm_model_name=args.llm_model,
             llm_model_max_async=args.max_async,
             summary_max_tokens=args.max_tokens,
             chunk_token_size=int(args.chunk_size),
             chunk_overlap_token_size=int(args.chunk_overlap_size),
-            llm_model_kwargs={
-                "host": args.llm_binding_host,
-                "timeout": args.timeout,
-                "options": OllamaLLMOptions.options_dict(args),
-                "api_key": args.llm_binding_api_key,
-            }
-            if args.llm_binding == "lollms" or args.llm_binding == "ollama"
-            else {},
+            llm_model_kwargs=(
+                {
+                    "host": args.llm_binding_host,
+                    "timeout": args.timeout,
+                    "options": OllamaLLMOptions.options_dict(args),
+                    "api_key": args.llm_binding_api_key,
+                }
+                if args.llm_binding == "lollms" or args.llm_binding == "ollama"
+                else {}
+            ),
             embedding_func=embedding_func,
             kv_storage=args.kv_storage,
             graph_storage=args.graph_storage,
@@ -734,7 +734,7 @@ class DocumentManager:
         new_files = []
         for ext in self.supported_extensions:
             logger.debug(f"Scanning for {ext} files in {self.input_dir}")
-            for file_path in self.input_dir.rglob(f"*{ext}"):
+            for file_path in self.input_dir.glob(f"*{ext}"):
                 if file_path not in self.indexed_files:
                     new_files.append(file_path)
         return new_files
@@ -746,6 +746,39 @@ class DocumentManager:
         return any(filename.lower().endswith(ext) for ext in self.supported_extensions)


+def get_unique_filename_in_enqueued(target_dir: Path, original_name: str) -> str:
+    """Generate a unique filename in the target directory by adding numeric suffixes if needed
+
+    Args:
+        target_dir: Target directory path
+        original_name: Original filename
+
+    Returns:
+        str: Unique filename (may have numeric suffix added)
+    """
+    from pathlib import Path
+    import time
+
+    original_path = Path(original_name)
+    base_name = original_path.stem
+    extension = original_path.suffix
+
+    # Try original name first
+    if not (target_dir / original_name).exists():
+        return original_name
+
+    # Try with numeric suffixes 001-999
+    for i in range(1, 1000):
+        suffix = f"{i:03d}"
+        new_name = f"{base_name}_{suffix}{extension}"
+        if not (target_dir / new_name).exists():
+            return new_name
+
+    # Fallback with timestamp if all 999 slots are taken
+    timestamp = int(time.time())
+    return f"{base_name}_{timestamp}{extension}"
+
+
 async def pipeline_enqueue_file(
     rag: LightRAG, file_path: Path, track_id: str = None
 ) -> tuple[bool, str]:
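A minimal usage sketch for the helper above; the directory and file names are hypothetical:

```python
from pathlib import Path

enqueued = Path("/tmp/__enqueued__")  # hypothetical scratch directory
enqueued.mkdir(exist_ok=True)
(enqueued / "report.pdf").touch()

# "report.pdf" is taken, so the first free numeric suffix is used.
print(get_unique_filename_in_enqueued(enqueued, "report.pdf"))  # report_001.pdf
print(get_unique_filename_in_enqueued(enqueued, "notes.md"))    # notes.md (no clash)
```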
@@ -759,201 +792,446 @@ async def pipeline_enqueue_file(
         tuple: (success: bool, track_id: str)
     """

+    # Generate track_id if not provided
+    if track_id is None:
+        track_id = generate_track_id("unknown")
+
     try:
         content = ""
         ext = file_path.suffix.lower()
+        file_size = 0
+
+        # Get file size for error reporting
+        try:
+            file_size = file_path.stat().st_size
+        except Exception:
+            file_size = 0

         file = None
-        async with aiofiles.open(file_path, "rb") as f:
-            file = await f.read()
+        try:
+            async with aiofiles.open(file_path, "rb") as f:
+                file = await f.read()
+        except PermissionError as e:
+            error_files = [
+                {
+                    "file_path": str(file_path.name),
+                    "error_description": "Permission denied - cannot read file",
+                    "original_error": str(e),
+                    "file_size": file_size,
+                }
+            ]
+            await rag.apipeline_enqueue_error_documents(error_files, track_id)
+            logger.error(f"Permission denied reading file: {file_path.name}")
+            return False, track_id
+        except FileNotFoundError as e:
+            error_files = [
+                {
+                    "file_path": str(file_path.name),
+                    "error_description": "File not found",
+                    "original_error": str(e),
+                    "file_size": file_size,
+                }
+            ]
+            await rag.apipeline_enqueue_error_documents(error_files, track_id)
+            logger.error(f"File not found: {file_path.name}")
+            return False, track_id
+        except Exception as e:
+            error_files = [
+                {
+                    "file_path": str(file_path.name),
+                    "error_description": "File reading error",
+                    "original_error": str(e),
+                    "file_size": file_size,
+                }
+            ]
+            await rag.apipeline_enqueue_error_documents(error_files, track_id)
+            logger.error(f"Error reading file {file_path.name}: {str(e)}")
+            return False, track_id

         # Process based on file type
-        match ext:
-            case (
-                ".txt"
-                | ".md"
-                | ".html"
-                | ".htm"
-                | ".tex"
-                | ".json"
-                | ".xml"
-                | ".yaml"
-                | ".yml"
-                | ".rtf"
-                | ".odt"
-                | ".epub"
-                | ".csv"
-                | ".log"
-                | ".conf"
-                | ".ini"
-                | ".properties"
-                | ".sql"
-                | ".bat"
-                | ".sh"
-                | ".c"
-                | ".cpp"
-                | ".py"
-                | ".java"
-                | ".js"
-                | ".ts"
-                | ".swift"
-                | ".go"
-                | ".rb"
-                | ".php"
-                | ".css"
-                | ".scss"
-                | ".less"
-            ):
-                try:
-                    # Try to decode as UTF-8
-                    content = file.decode("utf-8")
-
-                    # Validate content
-                    if not content or len(content.strip()) == 0:
-                        logger.error(f"Empty content in file: {file_path.name}")
-                        return False, ""
-
-                    # Check if content looks like binary data string representation
-                    if content.startswith("b'") or content.startswith('b"'):
-                        logger.error(
-                            f"File {file_path.name} appears to contain binary data representation instead of text"
-                        )
-                        return False, ""
-
-                except UnicodeDecodeError:
-                    logger.error(
-                        f"File {file_path.name} is not valid UTF-8 encoded text. Please convert it to UTF-8 before processing."
-                    )
-                    return False, ""
-            case ".pdf":
-                if global_args.document_loading_engine == "DOCLING":
-                    if not pm.is_installed("docling"):  # type: ignore
-                        pm.install("docling")
-                    from docling.document_converter import DocumentConverter  # type: ignore
-
-                    converter = DocumentConverter()
-                    result = converter.convert(file_path)
-                    content = result.document.export_to_markdown()
-                else:
-                    if not pm.is_installed("pypdf2"):  # type: ignore
-                        pm.install("pypdf2")
-                    from PyPDF2 import PdfReader  # type: ignore
-                    from io import BytesIO
-
-                    pdf_file = BytesIO(file)
-                    reader = PdfReader(pdf_file)
-                    for page in reader.pages:
-                        content += page.extract_text() + "\n"
-            case ".docx":
-                if global_args.document_loading_engine == "DOCLING":
-                    if not pm.is_installed("docling"):  # type: ignore
-                        pm.install("docling")
-                    from docling.document_converter import DocumentConverter  # type: ignore
-
-                    converter = DocumentConverter()
-                    result = converter.convert(file_path)
-                    content = result.document.export_to_markdown()
-                else:
-                    if not pm.is_installed("python-docx"):  # type: ignore
-                        try:
-                            pm.install("python-docx")
-                        except Exception:
-                            pm.install("docx")
-                    from docx import Document  # type: ignore
-                    from io import BytesIO
-
-                    docx_file = BytesIO(file)
-                    doc = Document(docx_file)
-                    content = "\n".join(
-                        [paragraph.text for paragraph in doc.paragraphs]
-                    )
-            case ".pptx":
-                if global_args.document_loading_engine == "DOCLING":
-                    if not pm.is_installed("docling"):  # type: ignore
-                        pm.install("docling")
-                    from docling.document_converter import DocumentConverter  # type: ignore
-
-                    converter = DocumentConverter()
-                    result = converter.convert(file_path)
-                    content = result.document.export_to_markdown()
-                else:
-                    if not pm.is_installed("python-pptx"):  # type: ignore
-                        pm.install("pptx")
-                    from pptx import Presentation  # type: ignore
-                    from io import BytesIO
-
-                    pptx_file = BytesIO(file)
-                    prs = Presentation(pptx_file)
-                    for slide in prs.slides:
-                        for shape in slide.shapes:
-                            if hasattr(shape, "text"):
-                                content += shape.text + "\n"
-            case ".xlsx":
-                if global_args.document_loading_engine == "DOCLING":
-                    if not pm.is_installed("docling"):  # type: ignore
-                        pm.install("docling")
-                    from docling.document_converter import DocumentConverter  # type: ignore
-
-                    converter = DocumentConverter()
-                    result = converter.convert(file_path)
-                    content = result.document.export_to_markdown()
-                else:
-                    if not pm.is_installed("openpyxl"):  # type: ignore
-                        pm.install("openpyxl")
-                    from openpyxl import load_workbook  # type: ignore
-                    from io import BytesIO
-
-                    xlsx_file = BytesIO(file)
-                    wb = load_workbook(xlsx_file)
-                    for sheet in wb:
-                        content += f"Sheet: {sheet.title}\n"
-                        for row in sheet.iter_rows(values_only=True):
-                            content += (
-                                "\t".join(
-                                    str(cell) if cell is not None else ""
-                                    for cell in row
-                                )
-                                + "\n"
-                            )
-                        content += "\n"
-            case _:
-                logger.error(
-                    f"Unsupported file type: {file_path.name} (extension {ext})"
-                )
-                return False, ""
+        try:
+            match ext:
+                case (
+                    ".txt"
+                    | ".md"
+                    | ".html"
+                    | ".htm"
+                    | ".tex"
+                    | ".json"
+                    | ".xml"
+                    | ".yaml"
+                    | ".yml"
+                    | ".rtf"
+                    | ".odt"
+                    | ".epub"
+                    | ".csv"
+                    | ".log"
+                    | ".conf"
+                    | ".ini"
+                    | ".properties"
+                    | ".sql"
+                    | ".bat"
+                    | ".sh"
+                    | ".c"
+                    | ".cpp"
+                    | ".py"
+                    | ".java"
+                    | ".js"
+                    | ".ts"
+                    | ".swift"
+                    | ".go"
+                    | ".rb"
+                    | ".php"
+                    | ".css"
+                    | ".scss"
+                    | ".less"
+                ):
+                    try:
+                        # Try to decode as UTF-8
+                        content = file.decode("utf-8")
+
+                        # Validate content
+                        if not content or len(content.strip()) == 0:
+                            error_files = [
+                                {
+                                    "file_path": str(file_path.name),
+                                    "error_description": "Empty file content",
+                                    "original_error": "File contains no content or only whitespace",
+                                    "file_size": file_size,
+                                }
+                            ]
+                            await rag.apipeline_enqueue_error_documents(
+                                error_files, track_id
+                            )
+                            logger.error(f"Empty content in file: {file_path.name}")
+                            return False, track_id
+
+                        # Check if content looks like binary data string representation
+                        if content.startswith("b'") or content.startswith('b"'):
+                            error_files = [
+                                {
+                                    "file_path": str(file_path.name),
+                                    "error_description": "Binary data in text file",
+                                    "original_error": "File appears to contain binary data representation instead of text",
+                                    "file_size": file_size,
+                                }
+                            ]
+                            await rag.apipeline_enqueue_error_documents(
+                                error_files, track_id
+                            )
+                            logger.error(
+                                f"File {file_path.name} appears to contain binary data representation instead of text"
+                            )
+                            return False, track_id
+
+                    except UnicodeDecodeError as e:
+                        error_files = [
+                            {
+                                "file_path": str(file_path.name),
+                                "error_description": "UTF-8 encoding error",
+                                "original_error": f"File is not valid UTF-8 encoded text: {str(e)}",
+                                "file_size": file_size,
+                            }
+                        ]
+                        await rag.apipeline_enqueue_error_documents(
+                            error_files, track_id
+                        )
+                        logger.error(
+                            f"File {file_path.name} is not valid UTF-8 encoded text. Please convert it to UTF-8 before processing."
+                        )
+                        return False, track_id
+
+                case ".pdf":
+                    try:
+                        if global_args.document_loading_engine == "DOCLING":
+                            if not pm.is_installed("docling"):  # type: ignore
+                                pm.install("docling")
+                            from docling.document_converter import DocumentConverter  # type: ignore
+
+                            converter = DocumentConverter()
+                            result = converter.convert(file_path)
+                            content = result.document.export_to_markdown()
+                        else:
+                            if not pm.is_installed("pypdf2"):  # type: ignore
+                                pm.install("pypdf2")
+                            from PyPDF2 import PdfReader  # type: ignore
+                            from io import BytesIO
+
+                            pdf_file = BytesIO(file)
+                            reader = PdfReader(pdf_file)
+                            for page in reader.pages:
+                                content += page.extract_text() + "\n"
+                    except Exception as e:
+                        error_files = [
+                            {
+                                "file_path": str(file_path.name),
+                                "error_description": "PDF processing error",
+                                "original_error": f"Failed to extract text from PDF: {str(e)}",
+                                "file_size": file_size,
+                            }
+                        ]
+                        await rag.apipeline_enqueue_error_documents(
+                            error_files, track_id
+                        )
+                        logger.error(f"Error processing PDF {file_path.name}: {str(e)}")
+                        return False, track_id
+
+                case ".docx":
+                    try:
+                        if global_args.document_loading_engine == "DOCLING":
+                            if not pm.is_installed("docling"):  # type: ignore
+                                pm.install("docling")
+                            from docling.document_converter import DocumentConverter  # type: ignore
+
+                            converter = DocumentConverter()
+                            result = converter.convert(file_path)
+                            content = result.document.export_to_markdown()
+                        else:
+                            if not pm.is_installed("python-docx"):  # type: ignore
+                                try:
+                                    pm.install("python-docx")
+                                except Exception:
+                                    pm.install("docx")
+                            from docx import Document  # type: ignore
+                            from io import BytesIO
+
+                            docx_file = BytesIO(file)
+                            doc = Document(docx_file)
+                            content = "\n".join(
+                                [paragraph.text for paragraph in doc.paragraphs]
+                            )
+                    except Exception as e:
+                        error_files = [
+                            {
+                                "file_path": str(file_path.name),
+                                "error_description": "DOCX processing error",
+                                "original_error": f"Failed to extract text from DOCX: {str(e)}",
+                                "file_size": file_size,
+                            }
+                        ]
+                        await rag.apipeline_enqueue_error_documents(
+                            error_files, track_id
+                        )
+                        logger.error(
+                            f"Error processing DOCX {file_path.name}: {str(e)}"
+                        )
+                        return False, track_id
+
+                case ".pptx":
+                    try:
+                        if global_args.document_loading_engine == "DOCLING":
+                            if not pm.is_installed("docling"):  # type: ignore
+                                pm.install("docling")
+                            from docling.document_converter import DocumentConverter  # type: ignore
+
+                            converter = DocumentConverter()
+                            result = converter.convert(file_path)
+                            content = result.document.export_to_markdown()
+                        else:
+                            if not pm.is_installed("python-pptx"):  # type: ignore
+                                pm.install("pptx")
+                            from pptx import Presentation  # type: ignore
+                            from io import BytesIO
+
+                            pptx_file = BytesIO(file)
+                            prs = Presentation(pptx_file)
+                            for slide in prs.slides:
+                                for shape in slide.shapes:
+                                    if hasattr(shape, "text"):
+                                        content += shape.text + "\n"
+                    except Exception as e:
+                        error_files = [
+                            {
+                                "file_path": str(file_path.name),
+                                "error_description": "PPTX processing error",
+                                "original_error": f"Failed to extract text from PPTX: {str(e)}",
+                                "file_size": file_size,
+                            }
+                        ]
+                        await rag.apipeline_enqueue_error_documents(
+                            error_files, track_id
+                        )
+                        logger.error(
+                            f"Error processing PPTX {file_path.name}: {str(e)}"
+                        )
+                        return False, track_id
+
+                case ".xlsx":
+                    try:
+                        if global_args.document_loading_engine == "DOCLING":
+                            if not pm.is_installed("docling"):  # type: ignore
+                                pm.install("docling")
+                            from docling.document_converter import DocumentConverter  # type: ignore
+
+                            converter = DocumentConverter()
+                            result = converter.convert(file_path)
+                            content = result.document.export_to_markdown()
+                        else:
+                            if not pm.is_installed("openpyxl"):  # type: ignore
+                                pm.install("openpyxl")
+                            from openpyxl import load_workbook  # type: ignore
+                            from io import BytesIO
+
+                            xlsx_file = BytesIO(file)
+                            wb = load_workbook(xlsx_file)
+                            for sheet in wb:
+                                content += f"Sheet: {sheet.title}\n"
+                                for row in sheet.iter_rows(values_only=True):
+                                    content += (
+                                        "\t".join(
+                                            str(cell) if cell is not None else ""
+                                            for cell in row
+                                        )
+                                        + "\n"
+                                    )
+                                content += "\n"
+                    except Exception as e:
+                        error_files = [
+                            {
+                                "file_path": str(file_path.name),
+                                "error_description": "XLSX processing error",
+                                "original_error": f"Failed to extract text from XLSX: {str(e)}",
+                                "file_size": file_size,
+                            }
+                        ]
+                        await rag.apipeline_enqueue_error_documents(
+                            error_files, track_id
+                        )
+                        logger.error(
+                            f"Error processing XLSX {file_path.name}: {str(e)}"
+                        )
+                        return False, track_id
+
+                case _:
+                    error_files = [
+                        {
+                            "file_path": str(file_path.name),
+                            "error_description": f"Unsupported file type: {ext}",
+                            "original_error": f"File extension {ext} is not supported",
+                            "file_size": file_size,
+                        }
+                    ]
+                    await rag.apipeline_enqueue_error_documents(error_files, track_id)
+                    logger.error(
+                        f"Unsupported file type: {file_path.name} (extension {ext})"
+                    )
+                    return False, track_id
+
+        except Exception as e:
+            error_files = [
+                {
+                    "file_path": str(file_path.name),
+                    "error_description": "File format processing error",
+                    "original_error": f"Unexpected error during file extracting: {str(e)}",
+                    "file_size": file_size,
+                }
+            ]
+            await rag.apipeline_enqueue_error_documents(error_files, track_id)
+            logger.error(
+                f"Unexpected error during {file_path.name} extracting: {str(e)}"
+            )
+            return False, track_id

         # Insert into the RAG queue
         if content:
             # Check if content contains only whitespace characters
             if not content.strip():
+                error_files = [
+                    {
+                        "file_path": str(file_path.name),
+                        "error_description": "File contains only whitespace",
+                        "original_error": "File content contains only whitespace characters",
+                        "file_size": file_size,
+                    }
+                ]
+                await rag.apipeline_enqueue_error_documents(error_files, track_id)
                 logger.warning(
-                    f"File contains only whitespace characters. file_paths={file_path.name}"
+                    f"File contains only whitespace characters: {file_path.name}"
                 )
                 return False, track_id

-            # Generate track_id if not provided
-            if track_id is None:
-                track_id = generate_track_id("unkown")
-
-            await rag.apipeline_enqueue_documents(
-                content, file_paths=file_path.name, track_id=track_id
-            )
-
-            logger.info(f"Successfully fetched and enqueued file: {file_path.name}")
-            return True, track_id
+            try:
+                await rag.apipeline_enqueue_documents(
+                    content, file_paths=file_path.name, track_id=track_id
+                )
+
+                logger.info(f"Successfully fetched and enqueued file: {file_path.name}")
+
+                # Move file to __enqueued__ directory after enqueuing
+                try:
+                    enqueued_dir = file_path.parent / "__enqueued__"
+                    enqueued_dir.mkdir(exist_ok=True)
+
+                    # Generate unique filename to avoid conflicts
+                    unique_filename = get_unique_filename_in_enqueued(
+                        enqueued_dir, file_path.name
+                    )
+                    target_path = enqueued_dir / unique_filename
+
+                    # Move the file
+                    file_path.rename(target_path)
+                    logger.debug(
+                        f"Moved file to enqueued directory: {file_path.name} -> {unique_filename}"
+                    )
+
+                except Exception as move_error:
+                    logger.error(
+                        f"Failed to move file {file_path.name} to __enqueued__ directory: {move_error}"
+                    )
+                    # Don't affect the main function's success status
+
+                return True, track_id
+
+            except Exception as e:
+                error_files = [
+                    {
+                        "file_path": str(file_path.name),
+                        "error_description": "Document enqueue error",
+                        "original_error": f"Failed to enqueue document: {str(e)}",
+                        "file_size": file_size,
+                    }
+                ]
+                await rag.apipeline_enqueue_error_documents(error_files, track_id)
+                logger.error(f"Error enqueueing document {file_path.name}: {str(e)}")
+                return False, track_id
         else:
-            logger.error(f"No content could be extracted from file: {file_path.name}")
-            return False, ""
+            error_files = [
+                {
+                    "file_path": str(file_path.name),
+                    "error_description": "No content extracted",
+                    "original_error": "No content could be extracted from file",
+                    "file_size": file_size,
+                }
+            ]
+            await rag.apipeline_enqueue_error_documents(error_files, track_id)
+            logger.error(f"No content extracted from file: {file_path.name}")
+            return False, track_id

     except Exception as e:
-        logger.error(f"Error processing or enqueueing file {file_path.name}: {str(e)}")
+        # Catch-all for any unexpected errors
+        try:
+            file_size = file_path.stat().st_size if file_path.exists() else 0
+        except Exception:
+            file_size = 0
+
+        error_files = [
+            {
+                "file_path": str(file_path.name),
+                "error_description": "Unexpected processing error",
+                "original_error": f"Unexpected error: {str(e)}",
+                "file_size": file_size,
+            }
+        ]
+        await rag.apipeline_enqueue_error_documents(error_files, track_id)
+        logger.error(f"Enqueuing file {file_path.name} error: {str(e)}")
         logger.error(traceback.format_exc())
-        return False, ""
+        return False, track_id
     finally:
         if file_path.name.startswith(temp_prefix):
             try:
                 file_path.unlink()
             except Exception as e:
                 logger.error(f"Error deleting file {file_path}: {str(e)}")


 async def pipeline_index_file(rag: LightRAG, file_path: Path, track_id: str = None):
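Since the reworked function records every failure through `apipeline_enqueue_error_documents` and always returns a `track_id` (even on failure), callers can surface errors per track. A sketch, inside an async context and with a hypothetical path:

```python
ok, track_id = await pipeline_enqueue_file(rag, Path("./inputs/report.pdf"))
if not ok:
    logger.warning(f"Enqueue failed; error documents recorded under track {track_id}")
```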
@@ -1 +1 @@
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-Bd5eQEyI.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-J2ju872p.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-D1ajN3w6.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-pCi8nQvB.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};
File diff suppressed because one or more lines are too long (4 files)
@@ -1 +1 @@
import{_ as l}from"./mermaid-vendor-J2ju872p.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};
import{_ as l}from"./mermaid-vendor-pCi8nQvB.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};
@@ -1 +1 @@
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-J2ju872p.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-pCi8nQvB.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};
@@ -1 +1 @@
import{_ as s}from"./mermaid-vendor-J2ju872p.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};
import{_ as s}from"./mermaid-vendor-pCi8nQvB.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};
@@ -1 +1 @@
import{_ as a,d as o}from"./mermaid-vendor-J2ju872p.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};
import{_ as a,d as o}from"./mermaid-vendor-pCi8nQvB.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};
@@ -1,4 +1,4 @@
import{_ as e}from"./mermaid-vendor-J2ju872p.js";var l=e(()=>`
import{_ as e}from"./mermaid-vendor-pCi8nQvB.js";var l=e(()=>`
/* Font Awesome icon styling - consolidated */
.label-icon {
display: inline-block;
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
import{_ as a,e as w,l as x}from"./mermaid-vendor-J2ju872p.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};
import{_ as a,e as w,l as x}from"./mermaid-vendor-pCi8nQvB.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-DomaNiTF.js";import{_ as i}from"./mermaid-vendor-J2ju872p.js";import"./chunk-E2GYISFI-DtDXRKC8.js";import"./chunk-BFAMUDN2-nPRHB0S3.js";import"./chunk-SKB7J2MH-Cgz1KI3w.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};
lightrag/api/webui/assets/classDiagram-M3E45YP4-CIje27eO.js (1 line, generated, new file)
@@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-BpuXKCOy.js";import{_ as i}from"./mermaid-vendor-pCi8nQvB.js";import"./chunk-E2GYISFI-wNdHFpBQ.js";import"./chunk-BFAMUDN2-ioRYrumN.js";import"./chunk-SKB7J2MH-BsoIgbf0.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};
@@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-DomaNiTF.js";import{_ as i}from"./mermaid-vendor-J2ju872p.js";import"./chunk-E2GYISFI-DtDXRKC8.js";import"./chunk-BFAMUDN2-nPRHB0S3.js";import"./chunk-SKB7J2MH-Cgz1KI3w.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};
lightrag/api/webui/assets/classDiagram-v2-YAWTLIQI-CIje27eO.js (1 line, generated, new file)
@@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-BpuXKCOy.js";import{_ as i}from"./mermaid-vendor-pCi8nQvB.js";import"./chunk-E2GYISFI-wNdHFpBQ.js";import"./chunk-BFAMUDN2-ioRYrumN.js";import"./chunk-SKB7J2MH-BsoIgbf0.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};
lightrag/api/webui/assets/clone-CzjUKlzi.js (1 line, generated)
@@ -1 +0,0 @@
import{b as r}from"./_baseUniq-Bd5eQEyI.js";var e=4;function a(o){return r(o,e)}export{a as c};
lightrag/api/webui/assets/clone-DLVgXiWv.js (1 line, generated, new file)
@@ -0,0 +1 @@
import{b as r}from"./_baseUniq-D1ajN3w6.js";var e=4;function a(o){return r(o,e)}export{a as c};
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
import{p as y}from"./chunk-353BL4L5-leSB0xif.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-J2ju872p.js";import{p as N}from"./treemap-75Q7IDZK-BiruUsnx.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-Bd5eQEyI.js";import"./_basePickBy-C-Sjt21Q.js";import"./clone-CzjUKlzi.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
import{p as y}from"./chunk-353BL4L5-DjMQec0j.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-pCi8nQvB.js";import{p as N}from"./treemap-75Q7IDZK-DhCk0PFk.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D1ajN3w6.js";import"./_basePickBy-C9JQvwyt.js";import"./clone-DLVgXiWv.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
.packetByte {
font-size: ${e.byteFontSize};
}
File diff suppressed because one or more lines are too long (4 files)
lightrag/api/webui/assets/feature-documents-XY-qP1x8.js (87 lines, generated, new file)
File diff suppressed because one or more lines are too long (3 files)
lightrag/api/webui/assets/feature-retrieval-lTh8tRF9.js (10 lines, generated, new file)
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
import{g as q1}from"./chunk-E2GYISFI-DtDXRKC8.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-J2ju872p.js";import{g as ct}from"./chunk-BFAMUDN2-nPRHB0S3.js";import{s as ht}from"./chunk-SKB7J2MH-Cgz1KI3w.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
|
||||
import{g as q1}from"./chunk-E2GYISFI-wNdHFpBQ.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-pCi8nQvB.js";import{g as ct}from"./chunk-BFAMUDN2-ioRYrumN.js";import{s as ht}from"./chunk-SKB7J2MH-BsoIgbf0.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
|
||||
`)?p=A+`
|
||||
`:p=`{
|
||||
`+A+`
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1,2 +1,2 @@
|
|||
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-J2ju872p.js";import{p as m}from"./treemap-75Q7IDZK-BiruUsnx.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-Bd5eQEyI.js";import"./_basePickBy-C-Sjt21Q.js";import"./clone-CzjUKlzi.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
|
||||
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-pCi8nQvB.js";import{p as m}from"./treemap-75Q7IDZK-DhCk0PFk.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D1ajN3w6.js";import"./_basePickBy-C9JQvwyt.js";import"./clone-DLVgXiWv.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
|
||||
`+r);const t=i(a);n(t,100,400,!0),t.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size",32).style("text-anchor","middle").text(`v${s}`)},"draw"),f={draw:l},L={parser:g,db:c,renderer:f};export{L as diagram};
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-Dq7l-UYX.js";import{g as kt}from"./chunk-E2GYISFI-DtDXRKC8.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-J2ju872p.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
|
||||
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-BytGFags.js";import{g as kt}from"./chunk-E2GYISFI-wNdHFpBQ.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-pCi8nQvB.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
|
||||
`+_.showPosition()+`
|
||||
Expecting `+z.join(", ")+", got '"+(this.terminals_[b]||b)+"'":X="Parse error on line "+(C+1)+": Unexpected "+(b==Q?"end of input":"'"+(this.terminals_[b]||b)+"'"),this.parseError(X,{text:_.match,token:this.terminals_[b]||b,line:_.yylineno,loc:Y,expected:z})}if(T[0]instanceof Array&&T.length>1)throw new Error("Parse Error: multiple actions possible at state: "+A+", token: "+b);switch(T[0]){case 1:l.push(b),p.push(_.yytext),o.push(_.yylloc),l.push(T[1]),b=null,K=_.yyleng,k=_.yytext,C=_.yylineno,Y=_.yylloc;break;case 2:if(M=this.productions_[T[1]][1],F.$=p[p.length-M],F._$={first_line:o[o.length-(M||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(M||1)].first_column,last_column:o[o.length-1].last_column},ft&&(F._$.range=[o[o.length-(M||1)].range[0],o[o.length-1].range[1]]),q=this.performAction.apply(F,[k,K,C,I.yy,T[1],p,o].concat(yt)),typeof q<"u")return q;M&&(l=l.slice(0,-1*M*2),p=p.slice(0,-1*M),o=o.slice(0,-1*M)),l.push(this.productions_[T[1]][0]),p.push(F.$),o.push(F._$),tt=v[l[l.length-2]][l[l.length-1]],l.push(tt);break;case 3:return!0}}return!0},"parse")},m=function(){var h={EOF:1,parseError:r(function(a,l){if(this.yy.parser)this.yy.parser.parseError(a,l);else throw new Error(a)},"parseError"),setInput:r(function(n,a){return this.yy=a||this.yy||{},this._input=n,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:r(function(){var n=this._input[0];this.yytext+=n,this.yyleng++,this.offset++,this.match+=n,this.matched+=n;var a=n.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),n},"input"),unput:r(function(n){var a=n.length,l=n.split(/(?:\r\n?|\n)/g);this._input=n+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var d=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),l.length-1&&(this.yylineno-=l.length-1);var p=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:l?(l.length===d.length?this.yylloc.first_column:0)+d[d.length-l.length].length-l[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[p[0],p[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:r(function(){return this._more=!0,this},"more"),reject:r(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
|
||||
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:r(function(n){this.unput(this.match.slice(n))},"less"),pastInput:r(function(){var n=this.matched.substr(0,this.matched.length-this.match.length);return(n.length>20?"...":"")+n.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:r(function(){var n=this.match;return n.length<20&&(n+=this._input.substr(0,20-n.length)),(n.substr(0,20)+(n.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:r(function(){var n=this.pastInput(),a=new Array(n.length+1).join("-");return n+this.upcomingInput()+`
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
import{g as fe}from"./chunk-E2GYISFI-DtDXRKC8.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-J2ju872p.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 
31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
|
||||
import{g as fe}from"./chunk-E2GYISFI-wNdHFpBQ.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-pCi8nQvB.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 
31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
|
||||
`+b.showPosition()+`
|
||||
Expecting `+X.join(", ")+", got '"+(this.terminals_[k]||k)+"'":Z="Parse error on line "+(z+1)+": Unexpected "+(k==re?"end of input":"'"+(this.terminals_[k]||k)+"'"),this.parseError(Z,{text:b.match,token:this.terminals_[k]||k,line:b.yylineno,loc:q,expected:X})}if(v[0]instanceof Array&&v.length>1)throw new Error("Parse Error: multiple actions possible at state: "+R+", token: "+k);switch(v[0]){case 1:a.push(k),u.push(b.yytext),e.push(b.yylloc),a.push(v[1]),k=null,ie=b.yyleng,l=b.yytext,z=b.yylineno,q=b.yylloc;break;case 2:if(I=this.productions_[v[1]][1],F.$=u[u.length-I],F._$={first_line:e[e.length-(I||1)].first_line,last_line:e[e.length-1].last_line,first_column:e[e.length-(I||1)].first_column,last_column:e[e.length-1].last_column},de&&(F._$.range=[e[e.length-(I||1)].range[0],e[e.length-1].range[1]]),Q=this.performAction.apply(F,[l,ie,z,T.yy,v[1],u,e].concat(ge)),typeof Q<"u")return Q;I&&(a=a.slice(0,-1*I*2),u=u.slice(0,-1*I),e=e.slice(0,-1*I)),a.push(this.productions_[v[1]][0]),u.push(F.$),e.push(F._$),oe=B[a[a.length-2]][a[a.length-1]],a.push(oe);break;case 3:return!0}}return!0},"parse")},m=function(){var _={EOF:1,parseError:c(function(n,a){if(this.yy.parser)this.yy.parser.parseError(n,a);else throw new Error(n)},"parseError"),setInput:c(function(s,n){return this.yy=n||this.yy||{},this._input=s,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:c(function(){var s=this._input[0];this.yytext+=s,this.yyleng++,this.offset++,this.match+=s,this.matched+=s;var n=s.match(/(?:\r\n?|\n).*/g);return n?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),s},"input"),unput:c(function(s){var n=s.length,a=s.split(/(?:\r\n?|\n)/g);this._input=s+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-n),this.offset-=n;var o=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),a.length-1&&(this.yylineno-=a.length-1);var u=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:a?(a.length===o.length?this.yylloc.first_column:0)+o[o.length-a.length].length-a[0].length:this.yylloc.first_column-n},this.options.ranges&&(this.yylloc.range=[u[0],u[0]+this.yyleng-n]),this.yyleng=this.yytext.length,this},"unput"),more:c(function(){return this._more=!0,this},"more"),reject:c(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
|
||||
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:c(function(s){this.unput(this.match.slice(s))},"less"),pastInput:c(function(){var s=this.matched.substr(0,this.matched.length-this.match.length);return(s.length>20?"...":"")+s.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:c(function(){var s=this.match;return s.length<20&&(s+=this._input.substr(0,20-s.length)),(s.substr(0,20)+(s.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:c(function(){var s=this.pastInput(),n=new Array(s.length+1).join("-");return s+this.upcomingInput()+`
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1,4 +1,4 @@
|
|||
import{p as N}from"./chunk-353BL4L5-leSB0xif.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-J2ju872p.js";import{p as rt}from"./treemap-75Q7IDZK-BiruUsnx.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-Bd5eQEyI.js";import"./_basePickBy-C-Sjt21Q.js";import"./clone-CzjUKlzi.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
|
||||
import{p as N}from"./chunk-353BL4L5-DjMQec0j.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-pCi8nQvB.js";import{p as rt}from"./treemap-75Q7IDZK-DhCk0PFk.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-D1ajN3w6.js";import"./_basePickBy-C9JQvwyt.js";import"./clone-DLVgXiWv.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
|
||||
.pieCircle{
|
||||
stroke: ${t.pieStrokeColor};
|
||||
stroke-width : ${t.pieStrokeWidth};
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1 +1 @@
|
|||
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-B9FViCRE.js";import{_ as s}from"./mermaid-vendor-J2ju872p.js";import"./chunk-BFAMUDN2-nPRHB0S3.js";import"./chunk-SKB7J2MH-Cgz1KI3w.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};
|
||||
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-WZhy3vze.js";import{_ as s}from"./mermaid-vendor-pCi8nQvB.js";import"./chunk-BFAMUDN2-ioRYrumN.js";import"./chunk-SKB7J2MH-BsoIgbf0.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-J2ju872p.js";import"./feature-graph-BgVuQYut.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
|
||||
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-pCi8nQvB.js";import"./feature-graph-O43AXICd.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
|
||||
`+_.showPosition()+`
|
||||
Expecting `+G.join(", ")+", got '"+(this.terminals_[w]||w)+"'":Q="Parse error on line "+(M+1)+": Unexpected "+(w==J?"end of input":"'"+(this.terminals_[w]||w)+"'"),this.parseError(Q,{text:_.match,token:this.terminals_[w]||w,line:_.yylineno,loc:v,expected:G})}if(I[0]instanceof Array&&I.length>1)throw new Error("Parse Error: multiple actions possible at state: "+H+", token: "+w);switch(I[0]){case 1:h.push(w),y.push(_.yytext),o.push(_.yylloc),h.push(I[1]),w=null,C=_.yyleng,k=_.yytext,M=_.yylineno,v=_.yylloc;break;case 2:if(P=this.productions_[I[1]][1],F.$=y[y.length-P],F._$={first_line:o[o.length-(P||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(P||1)].first_column,last_column:o[o.length-1].last_column},$&&(F._$.range=[o[o.length-(P||1)].range[0],o[o.length-1].range[1]]),K=this.performAction.apply(F,[k,C,M,N.yy,I[1],y,o].concat(O)),typeof K<"u")return K;P&&(h=h.slice(0,-1*P*2),y=y.slice(0,-1*P),o=o.slice(0,-1*P)),h.push(this.productions_[I[1]][0]),y.push(F.$),o.push(F._$),et=S[h[h.length-2]][h[h.length-1]],h.push(et);break;case 3:return!0}}return!0},"parse")},x=function(){var f={EOF:1,parseError:s(function(a,h){if(this.yy.parser)this.yy.parser.parseError(a,h);else throw new Error(a)},"parseError"),setInput:s(function(r,a){return this.yy=a||this.yy||{},this._input=r,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:s(function(){var r=this._input[0];this.yytext+=r,this.yyleng++,this.offset++,this.match+=r,this.matched+=r;var a=r.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),r},"input"),unput:s(function(r){var a=r.length,h=r.split(/(?:\r\n?|\n)/g);this._input=r+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var u=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),h.length-1&&(this.yylineno-=h.length-1);var y=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:h?(h.length===u.length?this.yylloc.first_column:0)+u[u.length-h.length].length-h[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[y[0],y[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:s(function(){return this._more=!0,this},"more"),reject:s(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
|
||||
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:s(function(r){this.unput(this.match.slice(r))},"less"),pastInput:s(function(){var r=this.matched.substr(0,this.matched.length-this.match.length);return(r.length>20?"...":"")+r.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:s(function(){var r=this.match;return r.length<20&&(r+=this._input.substr(0,20-r.length)),(r.substr(0,20)+(r.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:s(function(){var r=this.pastInput(),a=new Array(r.length+1).join("-");return r+this.upcomingInput()+`
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
10  lightrag/api/webui/index.html  (generated)

@@ -8,16 +8,16 @@
     <link rel="icon" type="image/png" href="favicon.png" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Lightrag</title>
-    <script type="module" crossorigin src="/webui/assets/index-MT1PAKNY.js"></script>
+    <script type="module" crossorigin src="/webui/assets/index-CfXratTv.js"></script>
     <link rel="modulepreload" crossorigin href="/webui/assets/react-vendor-DEwriMA6.js">
     <link rel="modulepreload" crossorigin href="/webui/assets/ui-vendor-CeCm8EER.js">
     <link rel="modulepreload" crossorigin href="/webui/assets/graph-vendor-B-X5JegA.js">
     <link rel="modulepreload" crossorigin href="/webui/assets/utils-vendor-BysuhMZA.js">
-    <link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-BgVuQYut.js">
-    <link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-DfpqEFJm.js">
-    <link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-J2ju872p.js">
+    <link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-O43AXICd.js">
+    <link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-XY-qP1x8.js">
+    <link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-pCi8nQvB.js">
     <link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-DmIvJdn7.js">
-    <link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-Q3pnpDUM.js">
+    <link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-lTh8tRF9.js">
     <link rel="stylesheet" crossorigin href="/webui/assets/feature-graph-BipNuM18.css">
     <link rel="stylesheet" crossorigin href="/webui/assets/index-CafJWW1u.css">
   </head>
lightrag/lightrag.py
@@ -971,11 +971,10 @@ class LightRAG:
         """
         Pipeline for Processing Documents

-        1. Validate ids if provided or generate MD5 hash IDs
-        2. Remove duplicate contents
-        3. Generate document initial status
-        4. Filter out already processed documents
-        5. Enqueue document in status
+        1. Validate ids if provided or generate MD5 hash IDs and remove duplicate contents
+        2. Generate document initial status
+        3. Filter out already processed documents
+        4. Enqueue document in status

         Args:
             input: Single document string or list of document strings
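For orientation, a minimal usage sketch of the revised enqueue pipeline (a sketch, not part of the diff: it assumes `rag` is an already-initialized LightRAG instance; the `input` and `file_paths` parameters and the returned track_id are taken from the hunks below, everything else is illustrative):

    async def enqueue_demo(rag):
        # Step 1 now deduplicates by cleaned content during enqueue,
        # so the repeated "doc A" below is stored only once.
        track_id = await rag.apipeline_enqueue_documents(
            input=["doc A", "doc A", "doc B"],
            file_paths=["a.txt", "a_copy.txt", "b.txt"],
        )
        return track_id  # groups the enqueued documents for later status queries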
@@ -1008,7 +1007,7 @@ class LightRAG:
             # If no file paths provided, use placeholder
             file_paths = ["unknown_source"] * len(input)

-        # 1. Validate ids if provided or generate MD5 hash IDs
+        # 1. Validate ids if provided or generate MD5 hash IDs and remove duplicate contents
         if ids is not None:
             # Check if the number of IDs matches the number of documents
             if len(ids) != len(input):
@@ -1018,22 +1017,25 @@ class LightRAG:
             if len(ids) != len(set(ids)):
                 raise ValueError("IDs must be unique")

-            # Generate contents dict of IDs provided by user and documents
+            # Generate contents dict and remove duplicates in one pass
+            unique_contents = {}
+            for id_, doc, path in zip(ids, input, file_paths):
+                cleaned_content = clean_text(doc)
+                if cleaned_content not in unique_contents:
+                    unique_contents[cleaned_content] = (id_, path)
+
+            # Reconstruct contents with unique content
             contents = {
-                id_: {"content": doc, "file_path": path}
-                for id_, doc, path in zip(ids, input, file_paths)
+                id_: {"content": content, "file_path": file_path}
+                for content, (id_, file_path) in unique_contents.items()
             }
         else:
-            # Clean input text and remove duplicates
-            cleaned_input = [
-                (clean_text(doc), path) for doc, path in zip(input, file_paths)
-            ]
+            # Clean input text and remove duplicates in one pass
             unique_content_with_paths = {}
-
-            # Keep track of unique content and their paths
-            for content, path in cleaned_input:
-                if content not in unique_content_with_paths:
-                    unique_content_with_paths[content] = path
+            for doc, path in zip(input, file_paths):
+                cleaned_content = clean_text(doc)
+                if cleaned_content not in unique_content_with_paths:
+                    unique_content_with_paths[cleaned_content] = path

             # Generate contents dict of MD5 hash IDs and documents with paths
             contents = {
@@ -1044,21 +1046,7 @@ class LightRAG:
                 for content, path in unique_content_with_paths.items()
             }

-        # 2. Remove duplicate contents
-        unique_contents = {}
-        for id_, content_data in contents.items():
-            content = content_data["content"]
-            file_path = content_data["file_path"]
-            if content not in unique_contents:
-                unique_contents[content] = (id_, file_path)
-
-        # Reconstruct contents with unique content
-        contents = {
-            id_: {"content": content, "file_path": file_path}
-            for content, (id_, file_path) in unique_contents.items()
-        }
-
-        # 3. Generate document initial status (without content)
+        # 2. Generate document initial status (without content)
         new_docs: dict[str, Any] = {
             id_: {
                 "status": DocStatus.PENDING,
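Taken together, the two hunks above fold deduplication into the construction of the contents dict, which makes the separate second pass deleted here redundant. A standalone sketch of the pattern (the helper name is illustrative; `clean` stands in for clean_text):

    def dedup_by_content(docs, paths, clean=str.strip):
        # First occurrence of each cleaned content wins; later copies are dropped.
        unique = {}
        for doc, path in zip(docs, paths):
            cleaned = clean(doc)
            if cleaned not in unique:
                unique[cleaned] = path
        return unique

    print(dedup_by_content(["a ", "a", "b"], ["1.txt", "2.txt", "3.txt"]))
    # {'a': '1.txt', 'b': '3.txt'}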
@@ -1074,22 +1062,24 @@ class LightRAG:
             for id_, content_data in contents.items()
         }

-        # 4. Filter out already processed documents
+        # 3. Filter out already processed documents
         # Get docs ids
         all_new_doc_ids = set(new_docs.keys())
-        # Exclude IDs of documents that are already in progress
+        # Exclude IDs of documents that are already enqueued
         unique_new_doc_ids = await self.doc_status.filter_keys(all_new_doc_ids)

-        # Log ignored document IDs
-        ignored_ids = [
-            doc_id for doc_id in unique_new_doc_ids if doc_id not in new_docs
-        ]
+        # Log ignored document IDs (documents that were filtered out because they already exist)
+        ignored_ids = list(all_new_doc_ids - unique_new_doc_ids)
         if ignored_ids:
-            logger.warning(
-                f"Ignoring {len(ignored_ids)} document IDs not found in new_docs"
-            )
             for doc_id in ignored_ids:
-                logger.warning(f"Ignored document ID: {doc_id}")
+                file_path = new_docs.get(doc_id, {}).get("file_path", "unknown_source")
+                logger.warning(
+                    f"Ignoring document ID (already exists): {doc_id} ({file_path})"
+                )
+            if len(ignored_ids) > 3:
+                logger.warning(
+                    f"Total Ignoring {len(ignored_ids)} document IDs that already exist in storage"
+                )

         # Filter new_docs to only include documents with unique IDs
         new_docs = {
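The new ignored_ids computation is a plain set difference between all candidate IDs and the IDs that filter_keys() reports as genuinely new; with illustrative values:

    all_new_doc_ids = {"doc-1", "doc-2", "doc-3"}
    unique_new_doc_ids = {"doc-2"}  # stand-in for await doc_status.filter_keys(...)
    ignored_ids = list(all_new_doc_ids - unique_new_doc_ids)
    # ignored_ids contains "doc-1" and "doc-3" (order unspecified): already enqueued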
@@ -1099,11 +1089,11 @@ class LightRAG:
         }

         if not new_docs:
-            logger.info("No new unique documents were found.")
+            logger.warning("No new unique documents were found.")
             return

-        # 5. Store document content in full_docs and status in doc_status
-        # Store full document content separately
+        # 4. Store document content in full_docs and status in doc_status
+        # Store full document content separately
         full_docs_data = {
             doc_id: {"content": contents[doc_id]["content"]}
             for doc_id in new_docs.keys()
@@ -1118,23 +1108,114 @@ class LightRAG:

         return track_id

+    async def apipeline_enqueue_error_documents(
+        self,
+        error_files: list[dict[str, Any]],
+        track_id: str | None = None,
+    ) -> None:
+        """
+        Record file extraction errors in doc_status storage.
+
+        This function creates error document entries in the doc_status storage for files
+        that failed during the extraction process. Each error entry contains information
+        about the failure to help with debugging and monitoring.
+
+        Args:
+            error_files: List of dictionaries containing error information for each failed file.
+                Each dictionary should contain:
+                - file_path: Original file name/path
+                - error_description: Brief error description (for content_summary)
+                - original_error: Full error message (for error_msg)
+                - file_size: File size in bytes (for content_length, 0 if unknown)
+            track_id: Optional tracking ID for grouping related operations
+
+        Returns:
+            None
+        """
+        if not error_files:
+            logger.debug("No error files to record")
+            return
+
+        # Generate track_id if not provided
+        if track_id is None or track_id.strip() == "":
+            track_id = generate_track_id("error")
+
+        error_docs: dict[str, Any] = {}
+        current_time = datetime.now(timezone.utc).isoformat()
+
+        for error_file in error_files:
+            file_path = error_file.get("file_path", "unknown_file")
+            error_description = error_file.get(
+                "error_description", "File extraction failed"
+            )
+            original_error = error_file.get("original_error", "Unknown error")
+            file_size = error_file.get("file_size", 0)
+
+            # Generate unique doc_id with "error-" prefix
+            doc_id_content = f"{file_path}-{error_description}"
+            doc_id = compute_mdhash_id(doc_id_content, prefix="error-")
+
+            error_docs[doc_id] = {
+                "status": DocStatus.FAILED,
+                "content_summary": error_description,
+                "content_length": file_size,
+                "error_msg": original_error,
+                "chunks_count": 0,  # No chunks for failed files
+                "created_at": current_time,
+                "updated_at": current_time,
+                "file_path": file_path,
+                "track_id": track_id,
+                "metadata": {
+                    "error_type": "file_extraction_error",
+                },
+            }
+
+        # Store error documents in doc_status
+        if error_docs:
+            await self.doc_status.upsert(error_docs)
+            # Log each error for debugging
+            for doc_id, error_doc in error_docs.items():
+                logger.error(
+                    f"File processing error: - ID: {doc_id} {error_doc['file_path']}"
+                )
+
     async def _validate_and_fix_document_consistency(
         self,
         to_process_docs: dict[str, DocProcessingStatus],
         pipeline_status: dict,
         pipeline_status_lock: asyncio.Lock,
     ) -> dict[str, DocProcessingStatus]:
-        """Validate and fix document data consistency by deleting inconsistent entries"""
+        """Validate and fix document data consistency by deleting inconsistent entries, but preserve failed documents"""
         inconsistent_docs = []
+        failed_docs_to_preserve = []

         # Check each document's data consistency
         for doc_id, status_doc in to_process_docs.items():
             # Check if corresponding content exists in full_docs
             content_data = await self.full_docs.get_by_id(doc_id)
             if not content_data:
-                inconsistent_docs.append(doc_id)
+                # Check if this is a failed document that should be preserved
+                if (
+                    hasattr(status_doc, "status")
+                    and status_doc.status == DocStatus.FAILED
+                ):
+                    failed_docs_to_preserve.append(doc_id)
+                else:
+                    inconsistent_docs.append(doc_id)

-        # Delete inconsistent document entries one by one
+        # Log information about failed documents that will be preserved
+        if failed_docs_to_preserve:
+            async with pipeline_status_lock:
+                preserve_message = f"Preserving {len(failed_docs_to_preserve)} failed document entries for manual review"
+                logger.info(preserve_message)
+                pipeline_status["latest_message"] = preserve_message
+                pipeline_status["history_messages"].append(preserve_message)
+
+            # Remove failed documents from processing list but keep them in doc_status
+            for doc_id in failed_docs_to_preserve:
+                to_process_docs.pop(doc_id, None)
+
+        # Delete inconsistent document entries(excluding failed documents)
         if inconsistent_docs:
             async with pipeline_status_lock:
                 summary_message = (
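The new apipeline_enqueue_error_documents above records extraction failures without enqueueing any content. A usage sketch (a sketch only: it assumes `rag` is an initialized LightRAG instance; the field names follow the docstring above, while the file name and error text are illustrative):

    async def record_failures(rag):
        await rag.apipeline_enqueue_error_documents(
            error_files=[
                {
                    "file_path": "reports/q3.pdf",          # illustrative
                    "error_description": "PDF extraction failed",
                    "original_error": "EOF marker not found",
                    "file_size": 1048576,
                }
            ],
            track_id=None,  # an "error"-prefixed track_id is generated when omitted
        )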
@@ -1156,7 +1237,9 @@ class LightRAG:

                     # Log successful deletion
                     async with pipeline_status_lock:
-                        log_message = f"Deleted entry: {doc_id} ({file_path})"
+                        log_message = (
+                            f"Deleted inconsistent entry: {doc_id} ({file_path})"
+                        )
                         logger.info(log_message)
                         pipeline_status["latest_message"] = log_message
                         pipeline_status["history_messages"].append(log_message)
@@ -1174,7 +1257,7 @@ class LightRAG:

         # Final summary log
         async with pipeline_status_lock:
-            final_message = f"Data consistency cleanup completed: successfully deleted {successful_deletions} entries"
+            final_message = f"Data consistency cleanup completed: successfully deleted {successful_deletions} inconsistent entries, preserved {len(failed_docs_to_preserve)} failed documents"
             logger.info(final_message)
             pipeline_status["latest_message"] = final_message
             pipeline_status["history_messages"].append(final_message)
lightrag/llm/bedrock.py
@@ -15,11 +15,25 @@ from tenacity import (
     retry_if_exception_type,
 )

+import sys
+
+if sys.version_info < (3, 9):
+    from typing import AsyncIterator
+else:
+    from collections.abc import AsyncIterator
+from typing import Union
+

 class BedrockError(Exception):
     """Generic error for issues related to Amazon Bedrock"""


+def _set_env_if_present(key: str, value):
+    """Set environment variable only if a non-empty value is provided."""
+    if value is not None and value != "":
+        os.environ[key] = value
+
+
 @retry(
     stop=stop_after_attempt(5),
     wait=wait_exponential(multiplier=1, max=60),
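The AsyncIterator/Union imports above support the widened return type introduced in the next hunk: bedrock_complete_if_cache now returns either a complete string or an async iterator of text chunks when stream=True is passed. A consumer-side sketch (the import path matches this diff's file; the model ID and call shape are illustrative assumptions):

    from lightrag.llm.bedrock import bedrock_complete_if_cache

    async def demo():
        result = await bedrock_complete_if_cache(
            "anthropic.claude-3-haiku-20240307-v1:0",  # illustrative model ID
            "Hello",
            stream=True,  # popped from kwargs below; selects converse_stream
        )
        if isinstance(result, str):
            print(result)                  # non-streaming: full response text
        else:
            async for chunk in result:     # streaming: AsyncIterator[str]
                print(chunk, end="")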
@@ -34,17 +48,34 @@ async def bedrock_complete_if_cache(
     aws_secret_access_key=None,
     aws_session_token=None,
     **kwargs,
-) -> str:
-    os.environ["AWS_ACCESS_KEY_ID"] = os.environ.get(
-        "AWS_ACCESS_KEY_ID", aws_access_key_id
-    )
-    os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ.get(
-        "AWS_SECRET_ACCESS_KEY", aws_secret_access_key
-    )
-    os.environ["AWS_SESSION_TOKEN"] = os.environ.get(
-        "AWS_SESSION_TOKEN", aws_session_token
-    )
+) -> Union[str, AsyncIterator[str]]:
+    # Respect existing env; only set if a non-empty value is available
+    access_key = os.environ.get("AWS_ACCESS_KEY_ID") or aws_access_key_id
+    secret_key = os.environ.get("AWS_SECRET_ACCESS_KEY") or aws_secret_access_key
+    session_token = os.environ.get("AWS_SESSION_TOKEN") or aws_session_token
+    _set_env_if_present("AWS_ACCESS_KEY_ID", access_key)
+    _set_env_if_present("AWS_SECRET_ACCESS_KEY", secret_key)
+    _set_env_if_present("AWS_SESSION_TOKEN", session_token)
+    # Region handling: prefer env, else kwarg (optional)
+    region = os.environ.get("AWS_REGION") or kwargs.pop("aws_region", None)
     kwargs.pop("hashing_kv", None)
+    # Capture stream flag (if provided) and remove from kwargs since it's not a Bedrock API parameter
+    # We'll use this to determine whether to call converse_stream or converse
+    stream = bool(kwargs.pop("stream", False))
+    # Remove unsupported args for Bedrock Converse API
+    for k in [
+        "response_format",
+        "tools",
+        "tool_choice",
+        "seed",
+        "presence_penalty",
+        "frequency_penalty",
+        "n",
+        "logprobs",
+        "top_logprobs",
+        "max_completion_tokens",
+    ]:
+        kwargs.pop(k, None)
     # Fix message history format
     messages = []
     for history_message in history_messages:
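To make the kwargs sanitation above concrete, here is a small standalone sketch (the parameter values are invented for illustration): OpenAI-style parameters are dropped and the stream flag is captured before the remaining kwargs are forwarded to the Converse API.

kwargs = {
    "temperature": 0.7,       # left in kwargs for later inference-param handling
    "seed": 42,               # OpenAI-only; not a Converse parameter
    "presence_penalty": 0.1,  # OpenAI-only; not a Converse parameter
    "stream": True,           # captured first, then removed
}
stream = bool(kwargs.pop("stream", False))
for k in [
    "response_format", "tools", "tool_choice", "seed",
    "presence_penalty", "frequency_penalty", "n", "logprobs",
    "top_logprobs", "max_completion_tokens",
]:
    kwargs.pop(k, None)
print(stream, kwargs)  # True {'temperature': 0.7}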
@@ -77,21 +109,131 @@ async def bedrock_complete_if_cache(
                 kwargs.pop(param)
             )
 
-    # Call model via Converse API
-    session = aioboto3.Session()
-    async with session.client("bedrock-runtime") as bedrock_async_client:
-        try:
-            response = await bedrock_async_client.converse(**args, **kwargs)
-        except Exception as e:
-            raise BedrockError(e)
+    # Import logging for error handling
+    import logging
 
-    return response["output"]["message"]["content"][0]["text"]
+    # For streaming responses, we need a different approach to keep the connection open
+    if stream:
+        # Create a session that will be used throughout the streaming process
+        session = aioboto3.Session()
+        client = None
+
+        # Define the generator function that will manage the client lifecycle
+        async def stream_generator():
+            nonlocal client
+
+            # Create the client outside the generator to ensure it stays open
+            client = await session.client(
+                "bedrock-runtime", region_name=region
+            ).__aenter__()
+            event_stream = None
+            iteration_started = False
+
+            try:
+                # Make the API call
+                response = await client.converse_stream(**args, **kwargs)
+                event_stream = response.get("stream")
+                iteration_started = True
+
+                # Process the stream
+                async for event in event_stream:
+                    # Validate event structure
+                    if not event or not isinstance(event, dict):
+                        continue
+
+                    if "contentBlockDelta" in event:
+                        delta = event["contentBlockDelta"].get("delta", {})
+                        text = delta.get("text")
+                        if text:
+                            yield text
+                    # Handle other event types that might indicate stream end
+                    elif "messageStop" in event:
+                        break
+
+            except Exception as e:
+                # Log the specific error for debugging
+                logging.error(f"Bedrock streaming error: {e}")
+
+                # Try to clean up resources if possible
+                if (
+                    iteration_started
+                    and event_stream
+                    and hasattr(event_stream, "aclose")
+                    and callable(getattr(event_stream, "aclose", None))
+                ):
+                    try:
+                        await event_stream.aclose()
+                    except Exception as close_error:
+                        logging.warning(
+                            f"Failed to close Bedrock event stream: {close_error}"
+                        )
+
+                raise BedrockError(f"Streaming error: {e}")
+
+            finally:
+                # Clean up the event stream
+                if (
+                    iteration_started
+                    and event_stream
+                    and hasattr(event_stream, "aclose")
+                    and callable(getattr(event_stream, "aclose", None))
+                ):
+                    try:
+                        await event_stream.aclose()
+                    except Exception as close_error:
+                        logging.warning(
+                            f"Failed to close Bedrock event stream in finally block: {close_error}"
+                        )
+
+                # Clean up the client
+                if client:
+                    try:
+                        await client.__aexit__(None, None, None)
+                    except Exception as client_close_error:
+                        logging.warning(
+                            f"Failed to close Bedrock client: {client_close_error}"
+                        )
+
+        # Return the generator that manages its own lifecycle
+        return stream_generator()
+
+    # For non-streaming responses, use the standard async context manager pattern
+    session = aioboto3.Session()
+    async with session.client(
+        "bedrock-runtime", region_name=region
+    ) as bedrock_async_client:
+        try:
+            # Use converse for non-streaming responses
+            response = await bedrock_async_client.converse(**args, **kwargs)
+
+            # Validate response structure
+            if (
+                not response
+                or "output" not in response
+                or "message" not in response["output"]
+                or "content" not in response["output"]["message"]
+                or not response["output"]["message"]["content"]
+            ):
+                raise BedrockError("Invalid response structure from Bedrock API")
+
+            content = response["output"]["message"]["content"][0]["text"]
+
+            if not content or content.strip() == "":
+                raise BedrockError("Received empty content from Bedrock API")
+
+            return content
+
+        except Exception as e:
+            if isinstance(e, BedrockError):
+                raise
+            else:
+                raise BedrockError(f"Bedrock API error: {e}")
 
 
 # Generic Bedrock completion function
 async def bedrock_complete(
     prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
-) -> str:
+) -> Union[str, AsyncIterator[str]]:
     kwargs.pop("keyword_extraction", None)
     model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
     result = await bedrock_complete_if_cache(
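With stream support, bedrock_complete_if_cache now returns either a str or an AsyncIterator[str], so callers branch on the stream flag. A hedged usage sketch, assuming the function's leading positional parameters are (model, prompt, ...) as in LightRAG's other bindings, valid AWS credentials and region in the environment, and a purely illustrative model id:

import asyncio

async def demo():
    # Non-streaming: the full completion comes back as a single str
    text = await bedrock_complete_if_cache(
        "anthropic.claude-3-haiku-20240307-v1:0",  # illustrative model id
        "Summarize LightRAG in one sentence.",
    )
    print(text)

    # Streaming: an async iterator of text chunks is returned instead
    chunks = await bedrock_complete_if_cache(
        "anthropic.claude-3-haiku-20240307-v1:0",
        "Summarize LightRAG in one sentence.",
        stream=True,
    )
    async for chunk in chunks:
        print(chunk, end="", flush=True)

asyncio.run(demo())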
@@ -117,18 +259,21 @@ async def bedrock_embed(
     aws_secret_access_key=None,
     aws_session_token=None,
 ) -> np.ndarray:
-    os.environ["AWS_ACCESS_KEY_ID"] = os.environ.get(
-        "AWS_ACCESS_KEY_ID", aws_access_key_id
-    )
-    os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ.get(
-        "AWS_SECRET_ACCESS_KEY", aws_secret_access_key
-    )
-    os.environ["AWS_SESSION_TOKEN"] = os.environ.get(
-        "AWS_SESSION_TOKEN", aws_session_token
-    )
+    # Respect existing env; only set if a non-empty value is available
+    access_key = os.environ.get("AWS_ACCESS_KEY_ID") or aws_access_key_id
+    secret_key = os.environ.get("AWS_SECRET_ACCESS_KEY") or aws_secret_access_key
+    session_token = os.environ.get("AWS_SESSION_TOKEN") or aws_session_token
+    _set_env_if_present("AWS_ACCESS_KEY_ID", access_key)
+    _set_env_if_present("AWS_SECRET_ACCESS_KEY", secret_key)
+    _set_env_if_present("AWS_SESSION_TOKEN", session_token)
+
+    # Region handling: prefer env
+    region = os.environ.get("AWS_REGION")
 
     session = aioboto3.Session()
-    async with session.client("bedrock-runtime") as bedrock_async_client:
+    async with session.client(
+        "bedrock-runtime", region_name=region
+    ) as bedrock_async_client:
         if (model_provider := model.split(".")[0]) == "amazon":
             embed_texts = []
             for text in texts:
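And a matching call sketch for the embedding path (keyword arguments are used because the parameter order is not visible in this hunk; the model id is illustrative, and AWS_REGION plus credentials are assumed to be set in the environment):

import asyncio
import numpy as np

async def demo_embed():
    vectors = await bedrock_embed(
        texts=["hello world", "LightRAG with Bedrock"],
        model="amazon.titan-embed-text-v2:0",  # illustrative model id
    )
    # Per the annotation in the diff, the result is an np.ndarray
    assert isinstance(vectors, np.ndarray)
    print(vectors.shape)  # e.g. (2, embedding_dim)

asyncio.run(demo_embed())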
@@ -1,4 +1,4 @@
-import { useState, useCallback, useEffect } from 'react'
+import { useState, useCallback, useEffect, useRef } from 'react'
 import Button from '@/components/ui/Button'
 import {
   Dialog,
@@ -15,10 +15,10 @@ import { toast } from 'sonner'
 import { errorMessage } from '@/lib/utils'
 import { clearDocuments, clearCache } from '@/api/lightrag'
 
-import { EraserIcon, AlertTriangleIcon } from 'lucide-react'
+import { EraserIcon, AlertTriangleIcon, Loader2Icon } from 'lucide-react'
 import { useTranslation } from 'react-i18next'
 
-// 简单的Label组件
+// Simple Label component
 const Label = ({
   htmlFor,
   className,
@@ -43,18 +43,51 @@ export default function ClearDocumentsDialog({ onDocumentsCleared }: ClearDocume
   const [open, setOpen] = useState(false)
   const [confirmText, setConfirmText] = useState('')
   const [clearCacheOption, setClearCacheOption] = useState(false)
+  const [isClearing, setIsClearing] = useState(false)
+  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
   const isConfirmEnabled = confirmText.toLowerCase() === 'yes'
 
-  // 重置状态当对话框关闭时
+  // Timeout constant (30 seconds)
+  const CLEAR_TIMEOUT = 30000
+
+  // Reset state when dialog closes
   useEffect(() => {
     if (!open) {
       setConfirmText('')
       setClearCacheOption(false)
+      setIsClearing(false)
+
+      // Clear timeout timer
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current)
+        timeoutRef.current = null
+      }
     }
   }, [open])
 
+  // Cleanup when component unmounts
+  useEffect(() => {
+    return () => {
+      // Clear timeout timer when component unmounts
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current)
+      }
+    }
+  }, [])
+
   const handleClear = useCallback(async () => {
-    if (!isConfirmEnabled) return
+    if (!isConfirmEnabled || isClearing) return
+
+    setIsClearing(true)
+
+    // Set timeout protection
+    timeoutRef.current = setTimeout(() => {
+      if (isClearing) {
+        toast.error(t('documentPanel.clearDocuments.timeout'))
+        setIsClearing(false)
+        setConfirmText('') // Reset confirmation text after timeout
+      }
+    }, CLEAR_TIMEOUT)
+
     try {
       const result = await clearDocuments()
@@ -81,13 +114,20 @@ export default function ClearDocumentsDialog({ onDocumentsCleared }: ClearDocume
         onDocumentsCleared().catch(console.error)
       }
 
-      // 所有操作成功后关闭对话框
+      // Close dialog after all operations succeed
       setOpen(false)
     } catch (err) {
       toast.error(t('documentPanel.clearDocuments.error', { error: errorMessage(err) }))
      setConfirmText('')
+    } finally {
+      // Clear timeout timer
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current)
+        timeoutRef.current = null
+      }
+      setIsClearing(false)
     }
-  }, [isConfirmEnabled, clearCacheOption, setOpen, t, onDocumentsCleared])
+  }, [isConfirmEnabled, isClearing, clearCacheOption, setOpen, t, onDocumentsCleared, CLEAR_TIMEOUT])
 
   return (
     <Dialog open={open} onOpenChange={setOpen}>
@@ -125,6 +165,7 @@ export default function ClearDocumentsDialog({ onDocumentsCleared }: ClearDocume
               onChange={(e: React.ChangeEvent<HTMLInputElement>) => setConfirmText(e.target.value)}
               placeholder={t('documentPanel.clearDocuments.confirmPlaceholder')}
               className="w-full"
+              disabled={isClearing}
             />
           </div>
 
@@ -133,6 +174,7 @@ export default function ClearDocumentsDialog({ onDocumentsCleared }: ClearDocume
               id="clear-cache"
               checked={clearCacheOption}
               onCheckedChange={(checked: boolean | 'indeterminate') => setClearCacheOption(checked === true)}
+              disabled={isClearing}
             />
             <Label htmlFor="clear-cache" className="text-sm font-medium cursor-pointer">
               {t('documentPanel.clearDocuments.clearCache')}
@@ -141,15 +183,26 @@ export default function ClearDocumentsDialog({ onDocumentsCleared }: ClearDocume
           </div>
 
           <DialogFooter>
-            <Button variant="outline" onClick={() => setOpen(false)}>
+            <Button
+              variant="outline"
+              onClick={() => setOpen(false)}
+              disabled={isClearing}
+            >
               {t('common.cancel')}
             </Button>
             <Button
               variant="destructive"
               onClick={handleClear}
-              disabled={!isConfirmEnabled}
+              disabled={!isConfirmEnabled || isClearing}
             >
-              {t('documentPanel.clearDocuments.confirmButton')}
+              {isClearing ? (
+                <>
+                  <Loader2Icon className="mr-2 h-4 w-4 animate-spin" />
+                  {t('documentPanel.clearDocuments.clearing')}
+                </>
+              ) : (
+                t('documentPanel.clearDocuments.confirmButton')
+              )}
             </Button>
           </DialogFooter>
         </DialogContent>
@@ -532,15 +532,25 @@ export default function DocumentManager() {
       // Reset health check timer with 1 second delay to avoid race condition
       useBackendState.getState().resetHealthCheckTimerDelayed(1000);
 
-      // Schedule a health check 2 seconds after successful scan
+      // Start fast refresh with 2-second interval immediately after scan
       startPollingInterval(2000);
+
+      // Set recovery timer to restore normal polling interval after 15 seconds
+      setTimeout(() => {
+        if (isMountedRef.current && currentTab === 'documents' && health) {
+          // Restore intelligent polling interval based on document status
+          const hasActiveDocuments = (statusCounts.processing || 0) > 0 || (statusCounts.pending || 0) > 0;
+          const normalInterval = hasActiveDocuments ? 5000 : 30000;
+          startPollingInterval(normalInterval);
+        }
+      }, 15000); // Restore after 15 seconds
     } catch (err) {
       // Only show error if component is still mounted
       if (isMountedRef.current) {
         toast.error(t('documentPanel.documentManager.errors.scanFailed', { error: errorMessage(err) }));
       }
     }
-  }, [t, startPollingInterval])
+  }, [t, startPollingInterval, currentTab, health, statusCounts])
 
   // Handle page size change - update state and save to store
   const handlePageSizeChange = useCallback((newPageSize: number) => {
@@ -50,6 +50,8 @@
       "confirmPlaceholder": "اكتب yes للتأكيد",
       "clearCache": "مسح كاش نموذج اللغة",
       "confirmButton": "نعم",
+      "clearing": "جارٍ المسح...",
+      "timeout": "انتهت مهلة عملية المسح، يرجى المحاولة مرة أخرى",
       "success": "تم مسح المستندات بنجاح",
       "cacheCleared": "تم مسح ذاكرة التخزين المؤقت بنجاح",
       "cacheClearFailed": "فشل مسح ذاكرة التخزين المؤقت:\n{{error}}",
@@ -50,6 +50,8 @@
       "confirmPlaceholder": "Type yes to confirm",
       "clearCache": "Clear LLM cache",
       "confirmButton": "YES",
+      "clearing": "Clearing...",
+      "timeout": "Clear operation timed out, please try again",
       "success": "Documents cleared successfully",
       "cacheCleared": "Cache cleared successfully",
       "cacheClearFailed": "Failed to clear cache:\n{{error}}",
@@ -50,6 +50,8 @@
       "confirmPlaceholder": "Tapez yes pour confirmer",
       "clearCache": "Effacer le cache LLM",
       "confirmButton": "OUI",
+      "clearing": "Effacement en cours...",
+      "timeout": "L'opération d'effacement a expiré, veuillez réessayer",
       "success": "Documents effacés avec succès",
       "cacheCleared": "Cache effacé avec succès",
       "cacheClearFailed": "Échec de l'effacement du cache :\n{{error}}",
@@ -50,6 +50,8 @@
       "confirmPlaceholder": "输入 yes 确认",
       "clearCache": "清空LLM缓存",
       "confirmButton": "确定",
+      "clearing": "正在清除...",
+      "timeout": "清除操作超时,请重试",
       "success": "文档清空成功",
       "cacheCleared": "缓存清空成功",
       "cacheClearFailed": "清空缓存失败:\n{{error}}",
@@ -50,6 +50,8 @@
      "confirmPlaceholder": "輸入 yes 以確認",
      "clearCache": "清空 LLM 快取",
      "confirmButton": "確定",
+      "clearing": "正在清除...",
+      "timeout": "清除操作逾時,請重試",
      "success": "文件清空成功",
      "cacheCleared": "快取清空成功",
      "cacheClearFailed": "清空快取失敗:\n{{error}}",