Compare commits
7 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7ca3e11566 | ||
|
|
a2e080c2d3 | ||
|
|
ad6f7fd4b0 | ||
|
|
2a0f835ffe | ||
|
|
13d8241eee | ||
|
|
1ddd11f045 | ||
|
|
81eb03d230 |
25 changed files with 926 additions and 204 deletions
374
common/crypto_utils.py
Normal file
374
common/crypto_utils.py
Normal file
|
|
@ -0,0 +1,374 @@
|
|||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import os
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
||||
|
||||
class BaseCrypto:
    """Base class for block-cipher algorithms with PKCS7 padding.

    Subclasses implement ``_encrypt``/``_decrypt`` for a concrete cipher.
    Encrypted payload layout: ``ENCRYPTED_MAGIC + iv + ciphertext``.

    NOTE: ``decrypt`` detects encrypted data purely by the magic header, so
    plaintext that happens to begin with the magic bytes would be mistaken
    for an encrypted payload.
    """

    # Magic header prepended by ``encrypt`` so ``decrypt`` can recognize
    # (and pass through) data that was never encrypted.
    ENCRYPTED_MAGIC = b'RAGF'

    def __init__(self, key, iv=None, block_size=16, key_length=32, iv_length=16):
        """
        Initialize cryptographic algorithm.

        Args:
            key: Encryption key (str or bytes); normalized via PBKDF2.
            iv: Initialization vector; a fresh random IV is generated per
                message when this is falsy.
            block_size: Cipher block size in bytes.
            key_length: Derived key length in bytes.
            iv_length: Initialization vector length in bytes.
        """
        self.block_size = block_size
        self.key_length = key_length
        self.iv_length = iv_length

        # Derive a key of exactly ``key_length`` bytes from the user key.
        self.key = self._normalize_key(key)
        self.iv = iv

    def _normalize_key(self, key):
        """Derive a fixed-length key from *key* with PBKDF2-HMAC-SHA256."""
        if isinstance(key, str):
            key = key.encode('utf-8')

        # Fixed salt keeps the derivation deterministic: the same user key
        # must always yield the same cipher key, or old data cannot be
        # decrypted after a restart.
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=self.key_length,
            salt=b"ragflow_crypto_salt",
            iterations=100000,
            backend=default_backend()
        )
        return kdf.derive(key)

    def encrypt(self, data):
        """
        Encrypt data (template method).

        Args:
            data: Data to encrypt (bytes).

        Returns:
            Encrypted data (bytes), format: magic_header + iv + encrypted_data.
        """
        # Use the configured IV if one was supplied, otherwise generate a
        # fresh random IV for this message.
        iv = os.urandom(self.iv_length) if not self.iv else self.iv

        # PKCS7-pad the plaintext up to a whole number of cipher blocks.
        padder = padding.PKCS7(self.block_size * 8).padder()
        padded_data = padder.update(data) + padder.finalize()

        # Delegate the actual cipher operation to the subclass.
        ciphertext = self._encrypt(padded_data, iv)

        # The IV is stored alongside the ciphertext so decrypt() is
        # self-contained.
        return self.ENCRYPTED_MAGIC + iv + ciphertext

    def decrypt(self, encrypted_data):
        """
        Decrypt data (template method).

        Args:
            encrypted_data: Encrypted data (bytes). Data without the magic
                header is assumed to be plaintext and returned unchanged.

        Returns:
            Decrypted data (bytes).

        Raises:
            ValueError: If the payload carries the magic header but is too
                short to contain a full IV (truncated/corrupted data).
        """
        # Data written before encryption was enabled carries no magic
        # header; pass it through untouched.
        if not encrypted_data.startswith(self.ENCRYPTED_MAGIC):
            return encrypted_data

        # Strip the magic header.
        encrypted_data = encrypted_data[len(self.ENCRYPTED_MAGIC):]

        # Fail fast on truncated payloads instead of handing a short IV to
        # the cipher library, which would raise a confusing internal error.
        if len(encrypted_data) < self.iv_length:
            raise ValueError("Encrypted payload is truncated: missing IV")

        # Separate IV and ciphertext.
        iv = encrypted_data[:self.iv_length]
        ciphertext = encrypted_data[self.iv_length:]

        # Delegate the actual cipher operation to the subclass.
        padded_data = self._decrypt(ciphertext, iv)

        # Remove the PKCS7 padding added by encrypt().
        unpadder = padding.PKCS7(self.block_size * 8).unpadder()
        data = unpadder.update(padded_data) + unpadder.finalize()

        return data

    def _encrypt(self, padded_data, iv):
        """
        Encrypt padded data with a specific algorithm.

        Args:
            padded_data: PKCS7-padded data to encrypt.
            iv: Initialization vector.

        Returns:
            Encrypted data.
        """
        raise NotImplementedError("_encrypt method must be implemented by subclass")

    def _decrypt(self, ciphertext, iv):
        """
        Decrypt ciphertext with a specific algorithm.

        Args:
            ciphertext: Ciphertext to decrypt.
            iv: Initialization vector.

        Returns:
            Decrypted (still padded) data.
        """
        raise NotImplementedError("_decrypt method must be implemented by subclass")
|
||||
|
||||
|
||||
class AESCrypto(BaseCrypto):
    """AES-CBC implementation of :class:`BaseCrypto`."""

    def __init__(self, key, iv=None, key_length=32):
        """
        Initialize AES cryptographic algorithm.

        Args:
            key: Encryption key.
            iv: Initialization vector, automatically generated if None.
            key_length: Key length (16 for AES-128, 32 for AES-256).
        """
        super().__init__(key, iv, block_size=16, key_length=key_length, iv_length=16)

    def _make_cipher(self, iv):
        """Build an AES-CBC cipher object bound to the given IV."""
        return Cipher(
            algorithms.AES(self.key),
            modes.CBC(iv),
            backend=default_backend()
        )

    def _encrypt(self, padded_data, iv):
        """AES encryption implementation."""
        encryptor = self._make_cipher(iv).encryptor()
        return encryptor.update(padded_data) + encryptor.finalize()

    def _decrypt(self, ciphertext, iv):
        """AES decryption implementation."""
        decryptor = self._make_cipher(iv).decryptor()
        return decryptor.update(ciphertext) + decryptor.finalize()
|
||||
|
||||
|
||||
class AES128CBC(AESCrypto):
    """AES-128-CBC cryptographic algorithm (16-byte derived key)."""

    def __init__(self, key, iv=None):
        """
        Initialize AES-128-CBC cryptographic algorithm.

        Args:
            key: Encryption key.
            iv: Initialization vector, automatically generated if None.
        """
        super().__init__(key=key, iv=iv, key_length=16)
|
||||
|
||||
|
||||
class AES256CBC(AESCrypto):
    """AES-256-CBC cryptographic algorithm (32-byte derived key)."""

    def __init__(self, key, iv=None):
        """
        Initialize AES-256-CBC cryptographic algorithm.

        Args:
            key: Encryption key.
            iv: Initialization vector, automatically generated if None.
        """
        super().__init__(key=key, iv=iv, key_length=32)
|
||||
|
||||
|
||||
class SM4CBC(BaseCrypto):
    """SM4-CBC cryptographic algorithm using cryptography library for better performance."""

    def __init__(self, key, iv=None):
        """
        Initialize SM4-CBC cryptographic algorithm.

        Args:
            key: Encryption key.
            iv: Initialization vector, automatically generated if None.
        """
        super().__init__(key, iv, block_size=16, key_length=16, iv_length=16)

    def _make_cipher(self, iv):
        """Build an SM4-CBC cipher object bound to the given IV."""
        return Cipher(
            algorithms.SM4(self.key),
            modes.CBC(iv),
            backend=default_backend()
        )

    def _encrypt(self, padded_data, iv):
        """SM4 encryption implementation using cryptography library."""
        encryptor = self._make_cipher(iv).encryptor()
        return encryptor.update(padded_data) + encryptor.finalize()

    def _decrypt(self, ciphertext, iv):
        """SM4 decryption implementation using cryptography library."""
        decryptor = self._make_cipher(iv).decryptor()
        return decryptor.update(ciphertext) + decryptor.finalize()
|
||||
|
||||
|
||||
class CryptoUtil:
    """Cryptographic utility class, using factory pattern to create cryptographic algorithm instances."""

    # Supported cryptographic algorithms mapping.
    SUPPORTED_ALGORITHMS = {
        "aes-128-cbc": AES128CBC,
        "aes-256-cbc": AES256CBC,
        "sm4-cbc": SM4CBC
    }

    def __init__(self, algorithm="aes-256-cbc", key=None, iv=None):
        """
        Initialize cryptographic utility.

        Args:
            algorithm: Cryptographic algorithm, default is aes-256-cbc.
            key: Encryption key, uses RAGFLOW_CRYPTO_KEY environment variable if None.
            iv: Initialization vector, automatically generated if None.

        Raises:
            ValueError: If the algorithm is unsupported, or no key is
                available from either the argument or the environment.
        """
        if algorithm not in self.SUPPORTED_ALGORITHMS:
            raise ValueError(f"Unsupported algorithm: {algorithm}")

        # Fall back to the environment variable, as documented above;
        # previously the code raised without ever consulting it.
        if not key:
            key = os.environ.get("RAGFLOW_CRYPTO_KEY")
        if not key:
            raise ValueError("Encryption key not provided and RAGFLOW_CRYPTO_KEY environment variable not set")

        # Create the concrete cryptographic algorithm instance.
        self.algorithm_name = algorithm
        self.crypto = self.SUPPORTED_ALGORITHMS[algorithm](key=key, iv=iv)

    def encrypt(self, data):
        """
        Encrypt data.

        Args:
            data: Data to encrypt (bytes).

        Returns:
            Encrypted data (bytes).
        """
        return self.crypto.encrypt(data)

    def decrypt(self, encrypted_data):
        """
        Decrypt data.

        Args:
            encrypted_data: Encrypted data (bytes).

        Returns:
            Decrypted data (bytes).
        """
        return self.crypto.decrypt(encrypted_data)
|
||||
|
||||
|
||||
# Test code
if __name__ == "__main__":
    # Round-trip through the CryptoUtil facade with AES-256-CBC.
    sample = b"Hello, RAGFlow! This is a test for encryption."
    util = CryptoUtil(algorithm="aes-256-cbc", key="test_key_123456")

    ciphered = util.encrypt(sample)
    restored = util.decrypt(ciphered)

    print("AES Test:")
    print(f"Original: {sample}")
    print(f"Encrypted: {ciphered}")
    print(f"Decrypted: {restored}")
    print(f"Success: {sample == restored}")
    print()

    # Round-trip through the facade with SM4-CBC (may be unavailable on
    # some OpenSSL builds, hence the try/except).
    try:
        sm4_util = CryptoUtil(algorithm="sm4-cbc", key="test_key_123456")
        sm4_ciphered = sm4_util.encrypt(sample)
        sm4_restored = sm4_util.decrypt(sm4_ciphered)

        print("SM4 Test:")
        print(f"Original: {sample}")
        print(f"Encrypted: {sm4_ciphered}")
        print(f"Decrypted: {sm4_restored}")
        print(f"Success: {sample == sm4_restored}")
    except Exception as e:
        print(f"SM4 Test Failed: {e}")
        import traceback
        traceback.print_exc()

    # Exercise each algorithm class directly, bypassing the facade.
    print("\nDirect Algorithm Class Test:")

    for label, algo_cls in (("AES-128-CBC", AES128CBC), ("AES-256-CBC", AES256CBC)):
        algo = algo_cls(key="test_key_123456")
        round_trip = algo.decrypt(algo.encrypt(sample))
        print(f"{label} test: {'passed' if round_trip == sample else 'failed'}")

    # SM4 direct test, again guarded against missing cipher support.
    try:
        sm4_algo = SM4CBC(key="test_key_123456")
        sm4_round_trip = sm4_algo.decrypt(sm4_algo.encrypt(sample))
        print(f"SM4-CBC test: {'passed' if sm4_round_trip == sample else 'failed'}")
    except Exception as e:
        print(f"SM4-CBC test failed: {e}")
|
||||
|
|
@ -269,7 +269,27 @@ def init_settings():
|
|||
GCS = get_base_config("gcs", {})
|
||||
|
||||
global STORAGE_IMPL
|
||||
STORAGE_IMPL = StorageFactory.create(Storage[STORAGE_IMPL_TYPE])
|
||||
storage_impl = StorageFactory.create(Storage[STORAGE_IMPL_TYPE])
|
||||
|
||||
# Define crypto settings
|
||||
crypto_enabled = os.environ.get("RAGFLOW_CRYPTO_ENABLED", "false").lower() == "true"
|
||||
|
||||
# Check if encryption is enabled
|
||||
if crypto_enabled:
|
||||
try:
|
||||
from rag.utils.encrypted_storage import create_encrypted_storage
|
||||
algorithm = os.environ.get("RAGFLOW_CRYPTO_ALGORITHM", "aes-256-cbc")
|
||||
crypto_key = os.environ.get("RAGFLOW_CRYPTO_KEY")
|
||||
|
||||
STORAGE_IMPL = create_encrypted_storage(storage_impl,
|
||||
algorithm=algorithm,
|
||||
key=crypto_key,
|
||||
encryption_enabled=crypto_enabled)
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to initialize encrypted storage: {e}")
|
||||
STORAGE_IMPL = storage_impl
|
||||
else:
|
||||
STORAGE_IMPL = storage_impl
|
||||
|
||||
global retriever, kg_retriever
|
||||
retriever = search.Dealer(docStoreConn)
|
||||
|
|
|
|||
|
|
@ -240,7 +240,10 @@ MINERU_EXECUTABLE="$HOME/uv_tools/.venv/bin/mineru"
|
|||
# MINERU_DELETE_OUTPUT=0 # keep output directory
|
||||
# MINERU_BACKEND=pipeline # or another backend you prefer
|
||||
|
||||
|
||||
|
||||
# pptx support
|
||||
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
|
||||
|
||||
# crypto utils
|
||||
# RAGFLOW_CRYPTO_ENABLED=true
|
||||
# RAGFLOW_CRYPTO_ALGORITHM=aes-256-cbc # one of aes-256-cbc, aes-128-cbc, sm4-cbc
|
||||
# RAGFLOW_CRYPTO_KEY=ragflow-crypto-key
|
||||
|
|
|
|||
|
|
@ -209,23 +209,42 @@ function ensure_mineru() {
|
|||
local default_prefix="/ragflow/uv_tools"
|
||||
local venv_dir="${default_prefix}/.venv"
|
||||
local exe="${MINERU_EXECUTABLE:-${venv_dir}/bin/mineru}"
|
||||
local mineru_backend="${MINERU_BACKEND:-pipeline}"
|
||||
local mineru_pkg="mineru[core]"
|
||||
|
||||
if [[ "${mineru_backend}" == vlm-* ]]; then
|
||||
mineru_pkg="mineru[core,vlm]"
|
||||
fi
|
||||
|
||||
if [[ -x "${exe}" ]]; then
|
||||
echo "[mineru] found: ${exe}"
|
||||
echo "[mineru] found: ${exe} (MINERU_BACKEND=${mineru_backend})"
|
||||
export MINERU_EXECUTABLE="${exe}"
|
||||
|
||||
if [[ "${mineru_backend}" == vlm-* ]]; then
|
||||
if ! "${venv_dir}/bin/python3" -c "import importlib.util, sys; sys.exit(0 if importlib.util.find_spec('vllm') else 1)" >/dev/null 2>&1; then
|
||||
echo "[mineru] vllm not found for MINERU_BACKEND=${mineru_backend}, installing ${mineru_pkg} ..."
|
||||
(
|
||||
set -e
|
||||
source "${venv_dir}/bin/activate"
|
||||
uv pip install -U "${mineru_pkg}" -i https://mirrors.aliyun.com/pypi/simple --extra-index-url https://pypi.org/simple
|
||||
deactivate
|
||||
) || return 1
|
||||
fi
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "[mineru] not found, bootstrapping with uv ..."
|
||||
echo "[mineru] not found, bootstrapping with uv ... (MINERU_BACKEND=${mineru_backend}, pkg=${mineru_pkg})"
|
||||
|
||||
(
|
||||
set -e
|
||||
mkdir -p "${default_prefix}"
|
||||
cd "${default_prefix}"
|
||||
[[ -d "${venv_dir}" ]] || uv venv "${venv_dir}"
|
||||
[[ -d "${venv_dir}" ]] || { echo "[mineru] creating venv at ${venv_dir} ..."; uv venv "${venv_dir}"; }
|
||||
|
||||
echo "[mineru] installing ${mineru_pkg} into ${venv_dir} ..."
|
||||
source "${venv_dir}/bin/activate"
|
||||
uv pip install -U "mineru[core]" -i https://mirrors.aliyun.com/pypi/simple --extra-index-url https://pypi.org/simple
|
||||
uv pip install -U "${mineru_pkg}" -i https://mirrors.aliyun.com/pypi/simple --extra-index-url https://pypi.org/simple
|
||||
deactivate
|
||||
)
|
||||
export MINERU_EXECUTABLE="${exe}"
|
||||
|
|
|
|||
|
|
@ -23,6 +23,10 @@ We use gVisor to isolate code execution from the host system. Please follow [the
|
|||
|
||||
RAGFlow Sandbox is a secure, pluggable code execution backend. It serves as the code executor for the **Code** component. Please follow the [instructions here](https://github.com/infiniflow/ragflow/tree/main/sandbox) to install RAGFlow Sandbox.
|
||||
|
||||
:::note Docker client version
|
||||
The executor manager image now bundles Docker CLI `29.1.0` (API 1.44+). Older images shipped Docker 24.x and will fail against newer Docker daemons with `client version 1.43 is too old`. Pull the latest `infiniflow/sandbox-executor-manager:latest` or rebuild `./sandbox/executor_manager` if you encounter this error.
|
||||
:::
|
||||
|
||||
:::tip NOTE
|
||||
If your RAGFlow Sandbox is not working, please be sure to consult the [Troubleshooting](#troubleshooting) section in this document. We assure you that it addresses 99.99% of the issues!
|
||||
:::
|
||||
|
|
@ -122,6 +126,22 @@ docker pull infiniflow/sandbox-base-nodejs:latest
|
|||
docker pull infiniflow/sandbox-base-python:latest
|
||||
```
|
||||
|
||||
### `docker: Error response from daemon: client version 1.43 is too old. Minimum supported API version is 1.44`
|
||||
|
||||
**Root cause**
|
||||
|
||||
The executor manager image you are running includes Docker CLI 24.x (API 1.43) while the host Docker daemon now requires API 1.44+ (e.g., Docker 25+ / 29.x).
|
||||
|
||||
**Solution**
|
||||
|
||||
Pull the refreshed image or rebuild locally so the embedded Docker client is new enough:
|
||||
|
||||
```bash
|
||||
docker pull infiniflow/sandbox-executor-manager:latest
|
||||
# or
|
||||
docker build -t sandbox-executor-manager:latest ./sandbox/executor_manager
|
||||
```
|
||||
|
||||
### `HTTPConnectionPool(host='none', port=9385): Max retries exceeded.`
|
||||
|
||||
**Root cause**
|
||||
|
|
|
|||
|
|
@ -23,11 +23,13 @@ The architecture consists of isolated Docker base images for each supported lang
|
|||
|
||||
- Linux distribution compatible with gVisor.
|
||||
- gVisor installed and configured.
|
||||
- Docker version 24.0.0 or higher.
|
||||
- Docker version 25.0 or higher (API 1.44+). The executor manager image now ships Docker CLI `29.1.0` to stay compatible with newer Docker daemons.
|
||||
- Docker Compose version 2.26.1 or higher (similar to RAGFlow requirements).
|
||||
- uv package and project manager installed.
|
||||
- (Optional) GNU Make for simplified command-line management.
|
||||
|
||||
> **Note:** If you see `client version 1.43 is too old. Minimum supported API version is 1.44`, pull the latest `infiniflow/sandbox-executor-manager:latest` from Docker Hub (or rebuild `./sandbox/executor_manager`). Older images embedded Docker 24.x, which cannot talk to upgraded Docker daemons.
|
||||
|
||||
## Build Docker base images
|
||||
|
||||
The sandbox uses isolated base images for secure containerised execution environments.
|
||||
|
|
|
|||
|
|
@ -154,6 +154,7 @@ dependencies = [
|
|||
"exceptiongroup>=1.3.0,<2.0.0",
|
||||
"ffmpeg-python>=0.2.0",
|
||||
"imageio-ffmpeg>=0.6.0",
|
||||
"cryptography==46.0.3",
|
||||
"reportlab>=4.4.1",
|
||||
"jinja2>=3.1.0",
|
||||
"boxsdk>=10.1.0",
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ from openai import OpenAI
|
|||
from zhipuai import ZhipuAI
|
||||
|
||||
from common.log_utils import log_exception
|
||||
from common.token_utils import num_tokens_from_string, truncate
|
||||
from common.token_utils import num_tokens_from_string, truncate, total_token_count_from_response
|
||||
from common import settings
|
||||
import logging
|
||||
import base64
|
||||
|
|
@ -118,7 +118,7 @@ class OpenAIEmbed(Base):
|
|||
res = self.client.embeddings.create(input=texts[i : i + batch_size], model=self.model_name, encoding_format="float", extra_body={"drop_params": True})
|
||||
try:
|
||||
ress.extend([d.embedding for d in res.data])
|
||||
total_tokens += self.total_token_count(res)
|
||||
total_tokens += total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -127,7 +127,7 @@ class OpenAIEmbed(Base):
|
|||
def encode_queries(self, text):
|
||||
res = self.client.embeddings.create(input=[truncate(text, 8191)], model=self.model_name, encoding_format="float",extra_body={"drop_params": True})
|
||||
try:
|
||||
return np.array(res.data[0].embedding), self.total_token_count(res)
|
||||
return np.array(res.data[0].embedding), self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -216,7 +216,7 @@ class QWenEmbed(Base):
|
|||
for e in resp["output"]["embeddings"]:
|
||||
embds[e["text_index"]] = e["embedding"]
|
||||
res.extend(embds)
|
||||
token_count += self.total_token_count(resp)
|
||||
token_count += self.total_token_count_from_response(resp)
|
||||
except Exception as _e:
|
||||
log_exception(_e, resp)
|
||||
raise
|
||||
|
|
@ -225,7 +225,7 @@ class QWenEmbed(Base):
|
|||
def encode_queries(self, text):
|
||||
resp = dashscope.TextEmbedding.call(model=self.model_name, input=text[:2048], api_key=self.key, text_type="query")
|
||||
try:
|
||||
return np.array(resp["output"]["embeddings"][0]["embedding"]), self.total_token_count(resp)
|
||||
return np.array(resp["output"]["embeddings"][0]["embedding"]), self.total_token_count_from_response(resp)
|
||||
except Exception as _e:
|
||||
log_exception(_e, resp)
|
||||
raise Exception(f"Error: {resp}")
|
||||
|
|
@ -253,7 +253,7 @@ class ZhipuEmbed(Base):
|
|||
res = self.client.embeddings.create(input=txt, model=self.model_name)
|
||||
try:
|
||||
arr.append(res.data[0].embedding)
|
||||
tks_num += self.total_token_count(res)
|
||||
tks_num += self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -262,7 +262,7 @@ class ZhipuEmbed(Base):
|
|||
def encode_queries(self, text):
|
||||
res = self.client.embeddings.create(input=text, model=self.model_name)
|
||||
try:
|
||||
return np.array(res.data[0].embedding), self.total_token_count(res)
|
||||
return np.array(res.data[0].embedding), self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -323,7 +323,7 @@ class XinferenceEmbed(Base):
|
|||
try:
|
||||
res = self.client.embeddings.create(input=texts[i : i + batch_size], model=self.model_name)
|
||||
ress.extend([d.embedding for d in res.data])
|
||||
total_tokens += self.total_token_count(res)
|
||||
total_tokens += self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -333,7 +333,7 @@ class XinferenceEmbed(Base):
|
|||
res = None
|
||||
try:
|
||||
res = self.client.embeddings.create(input=[text], model=self.model_name)
|
||||
return np.array(res.data[0].embedding), self.total_token_count(res)
|
||||
return np.array(res.data[0].embedding), self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
raise Exception(f"Error: {res}")
|
||||
|
|
@ -409,7 +409,7 @@ class JinaMultiVecEmbed(Base):
|
|||
|
||||
ress.append(chunk_emb)
|
||||
|
||||
token_count += self.total_token_count(res)
|
||||
token_count += self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, response)
|
||||
raise Exception(f"Error: {response}")
|
||||
|
|
@ -443,7 +443,7 @@ class MistralEmbed(Base):
|
|||
try:
|
||||
res = self.client.embeddings(input=texts[i : i + batch_size], model=self.model_name)
|
||||
ress.extend([d.embedding for d in res.data])
|
||||
token_count += self.total_token_count(res)
|
||||
token_count += self.total_token_count_from_response(res)
|
||||
break
|
||||
except Exception as _e:
|
||||
if retry_max == 1:
|
||||
|
|
@ -460,7 +460,7 @@ class MistralEmbed(Base):
|
|||
while retry_max > 0:
|
||||
try:
|
||||
res = self.client.embeddings(input=[truncate(text, 8196)], model=self.model_name)
|
||||
return np.array(res.data[0].embedding), self.total_token_count(res)
|
||||
return np.array(res.data[0].embedding), self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
if retry_max == 1:
|
||||
log_exception(_e)
|
||||
|
|
@ -595,7 +595,7 @@ class NvidiaEmbed(Base):
|
|||
try:
|
||||
res = response.json()
|
||||
ress.extend([d["embedding"] for d in res["data"]])
|
||||
token_count += self.total_token_count(res)
|
||||
token_count += self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, response)
|
||||
raise Exception(f"Error: {response}")
|
||||
|
|
@ -732,7 +732,7 @@ class SILICONFLOWEmbed(Base):
|
|||
try:
|
||||
res = response.json()
|
||||
ress.extend([d["embedding"] for d in res["data"]])
|
||||
token_count += self.total_token_count(res)
|
||||
token_count += self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, response)
|
||||
raise Exception(f"Error: {response}")
|
||||
|
|
@ -748,7 +748,7 @@ class SILICONFLOWEmbed(Base):
|
|||
response = requests.post(self.base_url, json=payload, headers=self.headers)
|
||||
try:
|
||||
res = response.json()
|
||||
return np.array(res["data"][0]["embedding"]), self.total_token_count(res)
|
||||
return np.array(res["data"][0]["embedding"]), self.total_token_count_from_response(res)
|
||||
except Exception as _e:
|
||||
log_exception(_e, response)
|
||||
raise Exception(f"Error: {response}")
|
||||
|
|
@ -794,7 +794,7 @@ class BaiduYiyanEmbed(Base):
|
|||
try:
|
||||
return (
|
||||
np.array([r["embedding"] for r in res["data"]]),
|
||||
self.total_token_count(res),
|
||||
self.total_token_count_from_response(res),
|
||||
)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
|
|
@ -805,7 +805,7 @@ class BaiduYiyanEmbed(Base):
|
|||
try:
|
||||
return (
|
||||
np.array([r["embedding"] for r in res["data"]]),
|
||||
self.total_token_count(res),
|
||||
self.total_token_count_from_response(res),
|
||||
)
|
||||
except Exception as _e:
|
||||
log_exception(_e, res)
|
||||
|
|
|
|||
|
|
@ -34,18 +34,28 @@ class MinerUOcrModel(Base, MinerUParser):
|
|||
|
||||
def __init__(self, key: str | dict, model_name: str, **kwargs):
|
||||
Base.__init__(self, key, model_name, **kwargs)
|
||||
config = {}
|
||||
raw_config = {}
|
||||
if key:
|
||||
try:
|
||||
config = json.loads(key)
|
||||
raw_config = json.loads(key)
|
||||
except Exception:
|
||||
config = {}
|
||||
config = config["api_key"]
|
||||
self.mineru_api = config.get("mineru_apiserver", os.environ.get("MINERU_APISERVER", ""))
|
||||
self.mineru_output_dir = config.get("mineru_output_dir", os.environ.get("MINERU_OUTPUT_DIR", ""))
|
||||
self.mineru_backend = config.get("mineru_backend", os.environ.get("MINERU_BACKEND", "pipeline"))
|
||||
self.mineru_server_url = config.get("mineru_server_url", os.environ.get("MINERU_SERVER_URL", ""))
|
||||
self.mineru_delete_output = bool(int(config.get("mineru_delete_output", os.environ.get("MINERU_DELETE_OUTPUT", 1))))
|
||||
raw_config = {}
|
||||
|
||||
# nested {"api_key": {...}} from UI
|
||||
# flat {"MINERU_*": "..."} payload auto-provisioned from env vars
|
||||
config = raw_config.get("api_key", raw_config)
|
||||
if not isinstance(config, dict):
|
||||
config = {}
|
||||
|
||||
def _resolve_config(key: str, env_key: str, default=""):
|
||||
# lower-case keys (UI), upper-case MINERU_* (env auto-provision), env vars
|
||||
return config.get(key, config.get(env_key, os.environ.get(env_key, default)))
|
||||
|
||||
self.mineru_api = _resolve_config("mineru_apiserver", "MINERU_APISERVER", "")
|
||||
self.mineru_output_dir = _resolve_config("mineru_output_dir", "MINERU_OUTPUT_DIR", "")
|
||||
self.mineru_backend = _resolve_config("mineru_backend", "MINERU_BACKEND", "pipeline")
|
||||
self.mineru_server_url = _resolve_config("mineru_server_url", "MINERU_SERVER_URL", "")
|
||||
self.mineru_delete_output = bool(int(_resolve_config("mineru_delete_output", "MINERU_DELETE_OUTPUT", 1)))
|
||||
self.mineru_executable = os.environ.get("MINERU_EXECUTABLE", "mineru")
|
||||
|
||||
logging.info(f"Parsed MinerU config: {config}")
|
||||
|
|
|
|||
266
rag/utils/encrypted_storage.py
Normal file
266
rag/utils/encrypted_storage.py
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
#
|
||||
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import logging
|
||||
from common.crypto_utils import CryptoUtil
|
||||
# from common.decorator import singleton
|
||||
|
||||
class EncryptedStorageWrapper:
|
||||
"""Encrypted storage wrapper that wraps existing storage implementations to provide transparent encryption"""
|
||||
|
||||
def __init__(self, storage_impl, algorithm="aes-256-cbc", key=None, iv=None):
|
||||
"""
|
||||
Initialize encrypted storage wrapper
|
||||
|
||||
Args:
|
||||
storage_impl: Original storage implementation instance
|
||||
algorithm: Encryption algorithm, default is aes-256-cbc
|
||||
key: Encryption key, uses RAGFLOW_CRYPTO_KEY environment variable if None
|
||||
iv: Initialization vector, automatically generated if None
|
||||
"""
|
||||
self.storage_impl = storage_impl
|
||||
self.crypto = CryptoUtil(algorithm=algorithm, key=key, iv=iv)
|
||||
self.encryption_enabled = True
|
||||
|
||||
# Check if storage implementation has required methods
|
||||
# todo: Consider abstracting a storage base class to ensure these methods exist
|
||||
required_methods = ["put", "get", "rm", "obj_exist", "health"]
|
||||
for method in required_methods:
|
||||
if not hasattr(storage_impl, method):
|
||||
raise AttributeError(f"Storage implementation missing required method: {method}")
|
||||
|
||||
logging.info(f"EncryptedStorageWrapper initialized with algorithm: {algorithm}")
|
||||
|
||||
def put(self, bucket, fnm, binary, tenant_id=None):
|
||||
"""
|
||||
Encrypt and store data
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name
|
||||
binary: Original binary data
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Storage result
|
||||
"""
|
||||
if not self.encryption_enabled:
|
||||
return self.storage_impl.put(bucket, fnm, binary, tenant_id)
|
||||
|
||||
try:
|
||||
encrypted_binary = self.crypto.encrypt(binary)
|
||||
|
||||
return self.storage_impl.put(bucket, fnm, encrypted_binary, tenant_id)
|
||||
except Exception as e:
|
||||
logging.exception(f"Failed to encrypt and store data: {bucket}/{fnm}, error: {str(e)}")
|
||||
raise
|
||||
|
||||
def get(self, bucket, fnm, tenant_id=None):
|
||||
"""
|
||||
Retrieve and decrypt data
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Decrypted binary data
|
||||
"""
|
||||
try:
|
||||
# Get encrypted data
|
||||
encrypted_binary = self.storage_impl.get(bucket, fnm, tenant_id)
|
||||
|
||||
if encrypted_binary is None:
|
||||
return None
|
||||
|
||||
if not self.encryption_enabled:
|
||||
return encrypted_binary
|
||||
|
||||
# Decrypt data
|
||||
decrypted_binary = self.crypto.decrypt(encrypted_binary)
|
||||
return decrypted_binary
|
||||
|
||||
except Exception as e:
|
||||
logging.exception(f"Failed to get and decrypt data: {bucket}/{fnm}, error: {str(e)}")
|
||||
raise
|
||||
|
||||
def rm(self, bucket, fnm, tenant_id=None):
|
||||
"""
|
||||
Delete data (same as original storage implementation, no decryption needed)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Deletion result
|
||||
"""
|
||||
return self.storage_impl.rm(bucket, fnm, tenant_id)
|
||||
|
||||
def obj_exist(self, bucket, fnm, tenant_id=None):
|
||||
"""
|
||||
Check if object exists (same as original storage implementation, no decryption needed)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Whether the object exists
|
||||
"""
|
||||
return self.storage_impl.obj_exist(bucket, fnm, tenant_id)
|
||||
|
||||
def health(self):
|
||||
"""
|
||||
Health check (uses the original storage implementation's method)
|
||||
|
||||
Returns:
|
||||
Health check result
|
||||
"""
|
||||
return self.storage_impl.health()
|
||||
|
||||
def bucket_exists(self, bucket):
|
||||
"""
|
||||
Check if bucket exists (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
|
||||
Returns:
|
||||
Whether the bucket exists
|
||||
"""
|
||||
if hasattr(self.storage_impl, "bucket_exists"):
|
||||
return self.storage_impl.bucket_exists(bucket)
|
||||
return False
|
||||
|
||||
def get_presigned_url(self, bucket, fnm, expires, tenant_id=None):
|
||||
"""
|
||||
Get presigned URL (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name
|
||||
expires: Expiration time
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Presigned URL
|
||||
"""
|
||||
if hasattr(self.storage_impl, "get_presigned_url"):
|
||||
return self.storage_impl.get_presigned_url(bucket, fnm, expires, tenant_id)
|
||||
return None
|
||||
|
||||
def scan(self, bucket, fnm, tenant_id=None):
|
||||
"""
|
||||
Scan objects (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
fnm: File name prefix
|
||||
tenant_id: Tenant ID (optional)
|
||||
|
||||
Returns:
|
||||
Scan results
|
||||
"""
|
||||
if hasattr(self.storage_impl, "scan"):
|
||||
return self.storage_impl.scan(bucket, fnm, tenant_id)
|
||||
return None
|
||||
|
||||
def copy(self, src_bucket, src_path, dest_bucket, dest_path):
|
||||
"""
|
||||
Copy object (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
src_bucket: Source bucket name
|
||||
src_path: Source file path
|
||||
dest_bucket: Destination bucket name
|
||||
dest_path: Destination file path
|
||||
|
||||
Returns:
|
||||
Copy result
|
||||
"""
|
||||
if hasattr(self.storage_impl, "copy"):
|
||||
return self.storage_impl.copy(src_bucket, src_path, dest_bucket, dest_path)
|
||||
return False
|
||||
|
||||
def move(self, src_bucket, src_path, dest_bucket, dest_path):
|
||||
"""
|
||||
Move object (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
src_bucket: Source bucket name
|
||||
src_path: Source file path
|
||||
dest_bucket: Destination bucket name
|
||||
dest_path: Destination file path
|
||||
|
||||
Returns:
|
||||
Move result
|
||||
"""
|
||||
if hasattr(self.storage_impl, "move"):
|
||||
return self.storage_impl.move(src_bucket, src_path, dest_bucket, dest_path)
|
||||
return False
|
||||
|
||||
def remove_bucket(self, bucket):
|
||||
"""
|
||||
Remove bucket (if the original storage implementation has this method)
|
||||
|
||||
Args:
|
||||
bucket: Bucket name
|
||||
|
||||
Returns:
|
||||
Remove result
|
||||
"""
|
||||
if hasattr(self.storage_impl, "remove_bucket"):
|
||||
return self.storage_impl.remove_bucket(bucket)
|
||||
return False
|
||||
|
||||
def enable_encryption(self):
|
||||
"""Enable encryption"""
|
||||
self.encryption_enabled = True
|
||||
logging.info("Encryption enabled")
|
||||
|
||||
def disable_encryption(self):
|
||||
"""Disable encryption"""
|
||||
self.encryption_enabled = False
|
||||
logging.info("Encryption disabled")
|
||||
|
||||
# Create singleton wrapper function
|
||||
def create_encrypted_storage(storage_impl, algorithm=None, key=None, encryption_enabled=True):
    """Build an encrypted storage wrapper around *storage_impl*.

    Args:
        storage_impl: Original storage implementation instance.
        algorithm: Encryption algorithm; when None the wrapper falls back to
            the environment variable RAGFLOW_CRYPTO_ALGORITHM or its default.
        key: Encryption key; when None the wrapper falls back to the
            environment variable RAGFLOW_CRYPTO_KEY.
        encryption_enabled: Whether encryption functionality is active.

    Returns:
        Configured encrypted storage wrapper instance.
    """
    wrapper = EncryptedStorageWrapper(storage_impl, algorithm=algorithm, key=key)
    wrapper.encryption_enabled = encryption_enabled

    msg = (
        "Encryption enabled in storage wrapper"
        if encryption_enabled
        else "Encryption disabled in storage wrapper"
    )
    logging.info(msg)

    return wrapper
|
||||
|
|
@ -24,7 +24,7 @@ A secure, pluggable code execution backend for RAGFlow and beyond.
|
|||
|
||||
- Linux distro compatible with gVisor
|
||||
- [gVisor](https://gvisor.dev/docs/user_guide/install/)
|
||||
- Docker >= `24.0.0`
|
||||
- Docker >= `25.0` (API 1.44+) — executor manager now bundles Docker CLI `29.1.0` to match newer daemons.
|
||||
- Docker Compose >= `v2.26.1` like [RAGFlow](https://github.com/infiniflow/ragflow)
|
||||
- [uv](https://docs.astral.sh/uv/) as package and project manager
|
||||
|
||||
|
|
@ -34,6 +34,10 @@ A secure, pluggable code execution backend for RAGFlow and beyond.
|
|||
|
||||
---
|
||||
|
||||
> ⚠️ **New Docker CLI requirement**
|
||||
>
|
||||
> If you see `client version 1.43 is too old. Minimum supported API version is 1.44`, pull the latest `infiniflow/sandbox-executor-manager:latest` (rebuilt with Docker CLI `29.1.0`) or rebuild `./sandbox/executor_manager` locally. Older images shipped Docker 24.x, which cannot talk to newer Docker daemons.
|
||||
|
||||
### 🐳 Build Docker Base Images
|
||||
|
||||
We use isolated base images for secure containerized execution:
|
||||
|
|
@ -292,6 +296,22 @@ Follow this checklist to troubleshoot:
|
|||
127.0.0.1 es01 infinity mysql minio redis sandbox-executor-manager
|
||||
```
|
||||
|
||||
- [ ] **Are you running the latest executor manager image?**
|
||||
|
||||
**Common error:**
|
||||
|
||||
`docker: Error response from daemon: client version 1.43 is too old. Minimum supported API version is 1.44`
|
||||
|
||||
**Fix:**
|
||||
|
||||
Pull the refreshed image that bundles Docker CLI `29.1.0`, or rebuild locally:
|
||||
|
||||
```bash
|
||||
docker pull infiniflow/sandbox-executor-manager:latest
|
||||
# or
|
||||
docker build -t sandbox-executor-manager:latest ./sandbox/executor_manager
|
||||
```
|
||||
|
||||
- [ ] **Have you enabled sandbox-related configurations in RAGFlow?**
|
||||
|
||||
Double-check that all sandbox settings are correctly enabled in your RAGFlow configuration.
|
||||
|
|
|
|||
2
uv.lock
generated
2
uv.lock
generated
|
|
@ -6136,6 +6136,7 @@ dependencies = [
|
|||
{ name = "cn2an" },
|
||||
{ name = "cohere" },
|
||||
{ name = "crawl4ai" },
|
||||
{ name = "cryptography" },
|
||||
{ name = "dashscope" },
|
||||
{ name = "datrie" },
|
||||
{ name = "debugpy" },
|
||||
|
|
@ -6309,6 +6310,7 @@ requires-dist = [
|
|||
{ name = "cn2an", specifier = "==0.5.22" },
|
||||
{ name = "cohere", specifier = "==5.6.2" },
|
||||
{ name = "crawl4ai", specifier = ">=0.4.0,<1.0.0" },
|
||||
{ name = "cryptography", specifier = "==46.0.3" },
|
||||
{ name = "dashscope", specifier = "==1.20.11" },
|
||||
{ name = "datrie", specifier = ">=0.8.3,<0.9.0" },
|
||||
{ name = "debugpy", specifier = ">=1.8.13" },
|
||||
|
|
|
|||
|
|
@ -196,8 +196,8 @@ export default {
|
|||
learnMore: 'Built-in pipeline introduction',
|
||||
general: 'General',
|
||||
chunkMethodTab: 'Chunk method',
|
||||
testResults: 'Test results',
|
||||
testSetting: 'Test setting',
|
||||
testResults: 'Results',
|
||||
testSetting: 'Setting',
|
||||
retrievalTesting: 'Retrieval testing',
|
||||
retrievalTestingDescription:
|
||||
'Conduct a retrieval test to check if RAGFlow can recover the intended content for the LLM.',
|
||||
|
|
@ -338,7 +338,7 @@ export default {
|
|||
autoParse: 'Auto parse',
|
||||
rebuildTip:
|
||||
'Re-downloads files from the linked data source and parses them again.',
|
||||
baseInfo: 'Basic info',
|
||||
baseInfo: 'Basic',
|
||||
globalIndex: 'Global index',
|
||||
dataSource: 'Data source',
|
||||
linkSourceSetTip: 'Manage data source linkage with this dataset',
|
||||
|
|
@ -2006,28 +2006,28 @@ Important structured information may include: names, dates, locations, events, k
|
|||
schema: 'Schema',
|
||||
response: 'Response',
|
||||
executionMode: 'Execution mode',
|
||||
authMethods: 'Authentication Methods',
|
||||
authType: 'Authentication Type',
|
||||
limit: 'Request Limit',
|
||||
per: 'Time Period',
|
||||
maxBodySize: 'Maximum Body Size',
|
||||
ipWhitelist: 'IP Whitelist',
|
||||
tokenHeader: 'Token Header',
|
||||
tokenValue: 'Token Value',
|
||||
authMethods: 'Authentication methods',
|
||||
authType: 'Authentication type',
|
||||
limit: 'Request limit',
|
||||
per: 'Time period',
|
||||
maxBodySize: 'Maximum body size',
|
||||
ipWhitelist: 'Ip whitelist',
|
||||
tokenHeader: 'Token header',
|
||||
tokenValue: 'Token value',
|
||||
username: 'Username',
|
||||
password: 'Password',
|
||||
algorithm: 'Algorithm',
|
||||
secret: 'Secret',
|
||||
issuer: 'Issuer',
|
||||
audience: 'Audience',
|
||||
requiredClaims: 'Required Claims',
|
||||
requiredClaims: 'Required claims',
|
||||
header: 'Header',
|
||||
status: 'Status',
|
||||
headersTemplate: 'Headers Template',
|
||||
bodyTemplate: 'Body Template',
|
||||
headersTemplate: 'Headers template',
|
||||
bodyTemplate: 'Body template',
|
||||
basic: 'Basic',
|
||||
bearer: 'Bearer',
|
||||
apiKey: 'Api Key',
|
||||
apiKey: 'Api key',
|
||||
queryParameters: 'Query parameters',
|
||||
headerParameters: 'Header parameters',
|
||||
requestBodyParameters: 'Request body parameters',
|
||||
|
|
|
|||
|
|
@ -327,7 +327,7 @@ export const useSendAgentMessage = ({
|
|||
async (body: { id?: string; inputs: Record<string, BeginQuery> }) => {
|
||||
addNewestOneQuestion({
|
||||
content: Object.entries(body.inputs)
|
||||
.map(([key, val]) => `${key}: ${val.value}`)
|
||||
.map(([, val]) => `${val.name}: ${val.value}`)
|
||||
.join('<br/>'),
|
||||
role: MessageType.User,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1037,3 +1037,9 @@ export enum WebhookRequestParameters {
|
|||
Number = TypesWithArray.Number,
|
||||
Boolean = TypesWithArray.Boolean,
|
||||
}
|
||||
|
||||
export enum WebhookStatus {
|
||||
Testing = 'testing',
|
||||
Live = 'live',
|
||||
Stopped = 'stopped',
|
||||
}
|
||||
|
|
|
|||
|
|
@ -83,7 +83,7 @@ function BeginForm({ node }: INextOperatorForm) {
|
|||
});
|
||||
|
||||
return (
|
||||
<section className="px-5 space-y-5">
|
||||
<section className="px-5 space-y-5 pb-4">
|
||||
<Form {...form}>
|
||||
<FormField
|
||||
control={form.control}
|
||||
|
|
|
|||
|
|
@ -69,10 +69,10 @@ export const BeginFormSchema = z.object({
|
|||
response: z
|
||||
.object({
|
||||
status: z.number(),
|
||||
headers_template: z.array(
|
||||
z.object({ key: z.string(), value: z.string() }),
|
||||
),
|
||||
body_template: z.array(z.object({ key: z.string(), value: z.string() })),
|
||||
// headers_template: z.array(
|
||||
// z.object({ key: z.string(), value: z.string() }),
|
||||
// ),
|
||||
body_template: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
execution_mode: z.string().optional(),
|
||||
|
|
|
|||
|
|
@ -15,7 +15,9 @@ const initialFormValuesMap = {
|
|||
schema: {},
|
||||
'security.auth_type': WebhookSecurityAuthType.Basic,
|
||||
'security.rate_limit.per': RateLimitPerList[0],
|
||||
'security.rate_limit.limit': 10,
|
||||
'security.max_body_size': WebhookMaxBodySize[0],
|
||||
'response.status': 200,
|
||||
execution_mode: WebhookExecutionMode.Immediately,
|
||||
content_types: WebhookContentType.ApplicationJson,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -9,8 +9,12 @@ import { loader } from '@monaco-editor/react';
|
|||
import { omit } from 'lodash';
|
||||
import { X } from 'lucide-react';
|
||||
import { ReactNode } from 'react';
|
||||
import { useFieldArray, useFormContext } from 'react-hook-form';
|
||||
import { TypesWithArray, WebhookRequestParameters } from '../../../constant';
|
||||
import { useFieldArray, useFormContext, useWatch } from 'react-hook-form';
|
||||
import {
|
||||
TypesWithArray,
|
||||
WebhookContentType,
|
||||
WebhookRequestParameters,
|
||||
} from '../../../constant';
|
||||
import { DynamicFormHeader } from '../../components/dynamic-fom-header';
|
||||
|
||||
loader.config({ paths: { vs: '/vs' } });
|
||||
|
|
@ -44,6 +48,12 @@ export function DynamicRequest({
|
|||
isObject = false,
|
||||
}: SelectKeysProps) {
|
||||
const form = useFormContext();
|
||||
const contentType = useWatch({
|
||||
name: 'content_types',
|
||||
control: form.control,
|
||||
});
|
||||
const isFormDataContentType =
|
||||
contentType === WebhookContentType.MultipartFormData;
|
||||
|
||||
const { fields, remove, append } = useFieldArray({
|
||||
name: name,
|
||||
|
|
@ -84,7 +94,9 @@ export function DynamicRequest({
|
|||
onChange={(val) => {
|
||||
field.onChange(val);
|
||||
}}
|
||||
options={buildParametersOptions(isObject)}
|
||||
options={buildParametersOptions(
|
||||
isObject && isFormDataContentType,
|
||||
)}
|
||||
></SelectWithSearch>
|
||||
)}
|
||||
</RAGFlowFormItem>
|
||||
|
|
|
|||
|
|
@ -11,13 +11,9 @@ import { Editor, loader } from '@monaco-editor/react';
|
|||
import { X } from 'lucide-react';
|
||||
import { ReactNode, useCallback } from 'react';
|
||||
import { useFieldArray, useFormContext } from 'react-hook-form';
|
||||
import { InputMode, TypesWithArray } from '../../../constant';
|
||||
import {
|
||||
InputModeOptions,
|
||||
buildConversationVariableSelectOptions,
|
||||
} from '../../../utils';
|
||||
import { TypesWithArray } from '../../../constant';
|
||||
import { buildConversationVariableSelectOptions } from '../../../utils';
|
||||
import { DynamicFormHeader } from '../../components/dynamic-fom-header';
|
||||
import { QueryVariable } from '../../components/query-variable';
|
||||
|
||||
loader.config({ paths: { vs: '/vs' } });
|
||||
|
||||
|
|
@ -33,8 +29,6 @@ type SelectKeysProps = {
|
|||
|
||||
const VariableTypeOptions = buildConversationVariableSelectOptions();
|
||||
|
||||
const modeField = 'input_mode';
|
||||
|
||||
const ConstantValueMap = {
|
||||
[TypesWithArray.Boolean]: true,
|
||||
[TypesWithArray.Number]: 0,
|
||||
|
|
@ -63,71 +57,46 @@ export function DynamicResponse({
|
|||
});
|
||||
|
||||
const initializeValue = useCallback(
|
||||
(mode: string, variableType: string, valueFieldAlias: string) => {
|
||||
if (mode === InputMode.Variable) {
|
||||
form.setValue(valueFieldAlias, '', { shouldDirty: true });
|
||||
} else {
|
||||
const val = ConstantValueMap[variableType as TypesWithArray];
|
||||
form.setValue(valueFieldAlias, val, { shouldDirty: true });
|
||||
}
|
||||
(variableType: string, valueFieldAlias: string) => {
|
||||
const val = ConstantValueMap[variableType as TypesWithArray];
|
||||
form.setValue(valueFieldAlias, val, { shouldDirty: true });
|
||||
},
|
||||
[form],
|
||||
);
|
||||
|
||||
const handleModeChange = useCallback(
|
||||
(mode: string, valueFieldAlias: string, operatorFieldAlias: string) => {
|
||||
const variableType = form.getValues(operatorFieldAlias);
|
||||
initializeValue(mode, variableType, valueFieldAlias);
|
||||
},
|
||||
[form, initializeValue],
|
||||
);
|
||||
|
||||
const handleVariableTypeChange = useCallback(
|
||||
(variableType: string, valueFieldAlias: string, modeFieldAlias: string) => {
|
||||
const mode = form.getValues(modeFieldAlias);
|
||||
|
||||
initializeValue(mode, variableType, valueFieldAlias);
|
||||
(variableType: string, valueFieldAlias: string) => {
|
||||
initializeValue(variableType, valueFieldAlias);
|
||||
},
|
||||
[form, initializeValue],
|
||||
[initializeValue],
|
||||
);
|
||||
|
||||
const renderParameter = useCallback(
|
||||
(operatorFieldName: string, modeFieldName: string) => {
|
||||
const mode = form.getValues(modeFieldName);
|
||||
(operatorFieldName: string) => {
|
||||
const logicalOperator = form.getValues(operatorFieldName);
|
||||
|
||||
if (mode === InputMode.Constant) {
|
||||
if (logicalOperator === TypesWithArray.Boolean) {
|
||||
return <BoolSegmented></BoolSegmented>;
|
||||
}
|
||||
if (logicalOperator === TypesWithArray.Boolean) {
|
||||
return <BoolSegmented></BoolSegmented>;
|
||||
}
|
||||
|
||||
if (logicalOperator === TypesWithArray.Number) {
|
||||
return <Input className="w-full" type="number"></Input>;
|
||||
}
|
||||
if (logicalOperator === TypesWithArray.Number) {
|
||||
return <Input className="w-full" type="number"></Input>;
|
||||
}
|
||||
|
||||
if (logicalOperator === TypesWithArray.String) {
|
||||
return <Textarea></Textarea>;
|
||||
}
|
||||
|
||||
return (
|
||||
<Editor
|
||||
height={300}
|
||||
theme={isDarkTheme ? 'vs-dark' : 'vs'}
|
||||
language={'json'}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
if (logicalOperator === TypesWithArray.String) {
|
||||
return <Textarea></Textarea>;
|
||||
}
|
||||
|
||||
return (
|
||||
<QueryVariable
|
||||
types={[logicalOperator]}
|
||||
hideLabel
|
||||
pureQuery
|
||||
></QueryVariable>
|
||||
<Editor
|
||||
height={300}
|
||||
theme={isDarkTheme ? 'vs-dark' : 'vs'}
|
||||
language={'json'}
|
||||
options={{
|
||||
minimap: { enabled: false },
|
||||
automaticLayout: true,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
},
|
||||
[form, isDarkTheme],
|
||||
|
|
@ -142,7 +111,6 @@ export function DynamicResponse({
|
|||
append({
|
||||
[keyField]: '',
|
||||
[valueField]: '',
|
||||
[modeField]: InputMode.Constant,
|
||||
[operatorField]: TypesWithArray.String,
|
||||
})
|
||||
}
|
||||
|
|
@ -152,7 +120,6 @@ export function DynamicResponse({
|
|||
const keyFieldAlias = `${name}.${index}.${keyField}`;
|
||||
const valueFieldAlias = `${name}.${index}.${valueField}`;
|
||||
const operatorFieldAlias = `${name}.${index}.${operatorField}`;
|
||||
const modeFieldAlias = `${name}.${index}.${modeField}`;
|
||||
|
||||
return (
|
||||
<section key={field.id} className="flex gap-2">
|
||||
|
|
@ -167,11 +134,7 @@ export function DynamicResponse({
|
|||
<SelectWithSearch
|
||||
value={field.value}
|
||||
onChange={(val) => {
|
||||
handleVariableTypeChange(
|
||||
val,
|
||||
valueFieldAlias,
|
||||
modeFieldAlias,
|
||||
);
|
||||
handleVariableTypeChange(val, valueFieldAlias);
|
||||
field.onChange(val);
|
||||
}}
|
||||
options={VariableTypeOptions}
|
||||
|
|
@ -179,25 +142,9 @@ export function DynamicResponse({
|
|||
)}
|
||||
</RAGFlowFormItem>
|
||||
<Separator className="w-2" />
|
||||
<RAGFlowFormItem name={modeFieldAlias} className="flex-1">
|
||||
{(field) => (
|
||||
<SelectWithSearch
|
||||
value={field.value}
|
||||
onChange={(val) => {
|
||||
handleModeChange(
|
||||
val,
|
||||
valueFieldAlias,
|
||||
operatorFieldAlias,
|
||||
);
|
||||
field.onChange(val);
|
||||
}}
|
||||
options={InputModeOptions}
|
||||
></SelectWithSearch>
|
||||
)}
|
||||
</RAGFlowFormItem>
|
||||
</div>
|
||||
<RAGFlowFormItem name={valueFieldAlias} className="w-full">
|
||||
{renderParameter(operatorFieldAlias, modeFieldAlias)}
|
||||
{renderParameter(operatorFieldAlias)}
|
||||
</RAGFlowFormItem>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -1,16 +1,15 @@
|
|||
import { Collapse } from '@/components/collapse';
|
||||
import CopyToClipboard from '@/components/copy-to-clipboard';
|
||||
import { SelectWithSearch } from '@/components/originui/select-with-search';
|
||||
import { RAGFlowFormItem } from '@/components/ragflow-form';
|
||||
import { Input } from '@/components/ui/input';
|
||||
import { MultiSelect } from '@/components/ui/multi-select';
|
||||
import { Textarea } from '@/components/ui/textarea';
|
||||
import { buildOptions } from '@/utils/form';
|
||||
import { useFormContext, useWatch } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { useParams } from 'umi';
|
||||
import {
|
||||
RateLimitPerList,
|
||||
WebhookContentType,
|
||||
WebhookExecutionMode,
|
||||
WebhookMaxBodySize,
|
||||
WebhookMethod,
|
||||
WebhookSecurityAuthType,
|
||||
|
|
@ -24,15 +23,16 @@ const RateLimitPerOptions = buildOptions(RateLimitPerList);
|
|||
|
||||
export function WebHook() {
|
||||
const { t } = useTranslation();
|
||||
const form = useFormContext();
|
||||
const { id } = useParams();
|
||||
|
||||
const executionMode = useWatch({
|
||||
control: form.control,
|
||||
name: 'execution_mode',
|
||||
});
|
||||
const text = `${location.protocol}//${location.host}/api/v1/webhook/${id}`;
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="bg-bg-card p-1 rounded-md flex gap-2">
|
||||
<span className="flex-1 truncate">{text}</span>
|
||||
<CopyToClipboard text={text}></CopyToClipboard>
|
||||
</div>
|
||||
<RAGFlowFormItem name="methods" label={t('flow.webhook.methods')}>
|
||||
{(field) => (
|
||||
<MultiSelect
|
||||
|
|
@ -45,14 +45,7 @@ export function WebHook() {
|
|||
/>
|
||||
)}
|
||||
</RAGFlowFormItem>
|
||||
<RAGFlowFormItem
|
||||
name="content_types"
|
||||
label={t('flow.webhook.contentTypes')}
|
||||
>
|
||||
<SelectWithSearch
|
||||
options={buildOptions(WebhookContentType)}
|
||||
></SelectWithSearch>
|
||||
</RAGFlowFormItem>
|
||||
|
||||
<Collapse title={<div>Security</div>}>
|
||||
<section className="space-y-4">
|
||||
<RAGFlowFormItem
|
||||
|
|
@ -98,17 +91,8 @@ export function WebHook() {
|
|||
>
|
||||
<Textarea></Textarea>
|
||||
</RAGFlowFormItem>
|
||||
<RAGFlowFormItem
|
||||
name="execution_mode"
|
||||
label={t('flow.webhook.executionMode')}
|
||||
>
|
||||
<SelectWithSearch
|
||||
options={buildOptions(WebhookExecutionMode)}
|
||||
></SelectWithSearch>
|
||||
</RAGFlowFormItem>
|
||||
{executionMode === WebhookExecutionMode.Immediately && (
|
||||
<WebhookResponse></WebhookResponse>
|
||||
)}
|
||||
|
||||
<WebhookResponse></WebhookResponse>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,8 @@
|
|||
import { Collapse } from '@/components/collapse';
|
||||
import { SelectWithSearch } from '@/components/originui/select-with-search';
|
||||
import { RAGFlowFormItem } from '@/components/ragflow-form';
|
||||
import { WebhookContentType } from '@/pages/agent/constant';
|
||||
import { buildOptions } from '@/utils/form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { DynamicRequest } from './dynamic-request';
|
||||
|
||||
|
|
@ -8,6 +12,14 @@ export function WebhookRequestSchema() {
|
|||
return (
|
||||
<Collapse title={<div>{t('flow.webhook.schema')}</div>}>
|
||||
<section className="space-y-4">
|
||||
<RAGFlowFormItem
|
||||
name="content_types"
|
||||
label={t('flow.webhook.contentTypes')}
|
||||
>
|
||||
<SelectWithSearch
|
||||
options={buildOptions(WebhookContentType)}
|
||||
></SelectWithSearch>
|
||||
</RAGFlowFormItem>
|
||||
<DynamicRequest
|
||||
name="schema.query"
|
||||
label={t('flow.webhook.queryParameters')}
|
||||
|
|
|
|||
|
|
@ -1,29 +1,58 @@
|
|||
import { Collapse } from '@/components/collapse';
|
||||
import { SelectWithSearch } from '@/components/originui/select-with-search';
|
||||
import { RAGFlowFormItem } from '@/components/ragflow-form';
|
||||
import { Input } from '@/components/ui/input';
|
||||
import { Textarea } from '@/components/ui/textarea';
|
||||
import { WebhookExecutionMode } from '@/pages/agent/constant';
|
||||
import { buildOptions } from '@/utils/form';
|
||||
import { useFormContext, useWatch } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { DynamicResponse } from './dynamic-response';
|
||||
|
||||
export function WebhookResponse() {
|
||||
const { t } = useTranslation();
|
||||
|
||||
const form = useFormContext();
|
||||
|
||||
const executionMode = useWatch({
|
||||
control: form.control,
|
||||
name: 'execution_mode',
|
||||
});
|
||||
|
||||
return (
|
||||
<Collapse title={<div>Response</div>}>
|
||||
<section className="space-y-4">
|
||||
<RAGFlowFormItem
|
||||
name={'response.status'}
|
||||
label={t('flow.webhook.status')}
|
||||
name="execution_mode"
|
||||
label={t('flow.webhook.executionMode')}
|
||||
>
|
||||
<Input type="number"></Input>
|
||||
<SelectWithSearch
|
||||
options={buildOptions(WebhookExecutionMode)}
|
||||
></SelectWithSearch>
|
||||
</RAGFlowFormItem>
|
||||
<DynamicResponse
|
||||
name="response.headers_template"
|
||||
label={t('flow.webhook.headersTemplate')}
|
||||
></DynamicResponse>
|
||||
<DynamicResponse
|
||||
name="response.body_template"
|
||||
label={t('flow.webhook.bodyTemplate')}
|
||||
></DynamicResponse>
|
||||
{executionMode === WebhookExecutionMode.Immediately && (
|
||||
<>
|
||||
<RAGFlowFormItem
|
||||
name={'response.status'}
|
||||
label={t('flow.webhook.status')}
|
||||
>
|
||||
<Input type="number"></Input>
|
||||
</RAGFlowFormItem>
|
||||
{/* <DynamicResponse
|
||||
name="response.headers_template"
|
||||
label={t('flow.webhook.headersTemplate')}
|
||||
></DynamicResponse> */}
|
||||
{/* <DynamicResponse
|
||||
name="response.body_template"
|
||||
label={t('flow.webhook.bodyTemplate')}
|
||||
></DynamicResponse> */}
|
||||
<RAGFlowFormItem
|
||||
name="response.body_template"
|
||||
label={t('flow.webhook.bodyTemplate')}
|
||||
>
|
||||
<Textarea></Textarea>
|
||||
</RAGFlowFormItem>
|
||||
</>
|
||||
)}
|
||||
</section>
|
||||
</Collapse>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -355,13 +355,6 @@ function transformBeginParams(params: BeginFormSchemaType) {
|
|||
...params.security,
|
||||
ip_whitelist: params.security?.ip_whitelist.map((x) => x.value),
|
||||
},
|
||||
response: {
|
||||
...params.response,
|
||||
headers_template: transformArrayToObject(
|
||||
params.response?.headers_template,
|
||||
),
|
||||
body_template: transformArrayToObject(params.response?.body_template),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -252,22 +252,7 @@ export default function DatasetSettings() {
|
|||
{t('knowledgeConfiguration.baseInfo')}
|
||||
</div>
|
||||
<GeneralForm></GeneralForm>
|
||||
<Divider />
|
||||
<div className="text-base font-medium text-text-primary">
|
||||
{t('knowledgeConfiguration.globalIndex')}
|
||||
</div>
|
||||
<GraphRagItems
|
||||
className="border-none p-0"
|
||||
data={graphRagGenerateData as IGenerateLogButtonProps}
|
||||
onDelete={() =>
|
||||
handleDeletePipelineTask(GenerateType.KnowledgeGraph)
|
||||
}
|
||||
></GraphRagItems>
|
||||
<Divider />
|
||||
<RaptorFormFields
|
||||
data={raptorGenerateData as IGenerateLogButtonProps}
|
||||
onDelete={() => handleDeletePipelineTask(GenerateType.Raptor)}
|
||||
></RaptorFormFields>
|
||||
|
||||
<Divider />
|
||||
<div className="text-base font-medium text-text-primary">
|
||||
{t('knowledgeConfiguration.dataPipeline')}
|
||||
|
|
@ -292,7 +277,6 @@ export default function DatasetSettings() {
|
|||
data={pipelineData}
|
||||
handleLinkOrEditSubmit={handleLinkOrEditSubmit}
|
||||
/> */}
|
||||
|
||||
<Divider />
|
||||
<LinkDataSource
|
||||
data={sourceData}
|
||||
|
|
@ -300,6 +284,22 @@ export default function DatasetSettings() {
|
|||
unbindFunc={unbindFunc}
|
||||
handleAutoParse={handleAutoParse}
|
||||
/>
|
||||
<Divider />
|
||||
<div className="text-base font-medium text-text-primary">
|
||||
{t('knowledgeConfiguration.globalIndex')}
|
||||
</div>
|
||||
<GraphRagItems
|
||||
className="border-none p-0"
|
||||
data={graphRagGenerateData as IGenerateLogButtonProps}
|
||||
onDelete={() =>
|
||||
handleDeletePipelineTask(GenerateType.KnowledgeGraph)
|
||||
}
|
||||
></GraphRagItems>
|
||||
<Divider />
|
||||
<RaptorFormFields
|
||||
data={raptorGenerateData as IGenerateLogButtonProps}
|
||||
onDelete={() => handleDeletePipelineTask(GenerateType.Raptor)}
|
||||
></RaptorFormFields>
|
||||
</MainContainer>
|
||||
</div>
|
||||
<div className="text-right items-center flex justify-end gap-3 w-[768px]">
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue