[OND211-2329]: Updated a few imports and fixed the http_api_auth function.

This commit is contained in:
Hetavi Shah 2025-12-03 13:05:32 +05:30
parent c0c22fbb8f
commit 6ed50acc5f
3 changed files with 50 additions and 30 deletions

View file

@@ -155,6 +155,9 @@ dependencies = [
"exceptiongroup>=1.3.0,<2.0.0", "exceptiongroup>=1.3.0,<2.0.0",
"ffmpeg-python>=0.2.0", "ffmpeg-python>=0.2.0",
"imageio-ffmpeg>=0.6.0", "imageio-ffmpeg>=0.6.0",
"reportlab>=4.4.3",
"hypothesis>=6.132.0",
] ]
[dependency-groups] [dependency-groups]
@@ -165,11 +168,11 @@ test = [
"pytest>=8.3.5", "pytest>=8.3.5",
"python-docx>=1.1.2", "python-docx>=1.1.2",
"python-pptx>=1.0.2", "python-pptx>=1.0.2",
"reportlab>=4.4.1",
"requests>=2.32.2", "requests>=2.32.2",
"requests-toolbelt>=1.0.0", "requests-toolbelt>=1.0.0",
"pycryptodomex==3.20.0", "pycryptodomex==3.20.0",
"strenum==0.4.15", "strenum==0.4.15",
"reportlab>=4.4.3",
] ]
[[tool.uv.index]] [[tool.uv.index]]

View file

@@ -43,12 +43,25 @@ from utils.file_utils import (
) )
@wait_for(30, 1, "Document parsing timeout") @wait_for(60, 1, "Document parsing timeout")
def condition(_auth, _dataset_id): def condition(_auth, _dataset_id):
res = list_documents(_auth, _dataset_id) res = list_documents(_auth, _dataset_id)
for doc in res["data"]["docs"]: docs = res.get("data", {}).get("docs", [])
if doc["run"] != "DONE": if not docs:
return False return "No documents found"
status_info = []
for doc in docs:
run_status = doc.get("run", "")
doc_id = doc.get("id", "unknown")
doc_name = doc.get("name", "unknown")
if run_status == "FAIL":
raise AssertionError(f"Document {doc_id} ({doc_name}) failed to parse: {doc.get('progress_msg', 'Unknown error')}")
if run_status != "DONE":
progress = doc.get("progress", 0)
progress_msg = doc.get("progress_msg", "")
status_info.append(f"doc {doc_id} ({doc_name}): status={run_status}, progress={progress}%, msg='{progress_msg}'")
if status_info:
return "; ".join(status_info)
return True return True
@@ -82,7 +95,7 @@ def ragflow_tmp_dir(request, tmp_path_factory):
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def http_api_auth(token): def HttpApiAuth(token):
return RAGFlowHttpApiAuth(token) return RAGFlowHttpApiAuth(token)
@@ -93,26 +106,26 @@ def web_api_auth(auth):
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
def clear_datasets(request, http_api_auth): def clear_datasets(request, HttpApiAuth):
def cleanup(): def cleanup():
delete_datasets(http_api_auth, {"ids": None}) delete_datasets(HttpApiAuth, {"ids": None})
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
def clear_chat_assistants(request, http_api_auth): def clear_chat_assistants(request, HttpApiAuth):
def cleanup(): def cleanup():
delete_chat_assistants(http_api_auth) delete_chat_assistants(HttpApiAuth)
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
def clear_session_with_chat_assistants(request, http_api_auth, add_chat_assistants): def clear_session_with_chat_assistants(request, HttpApiAuth, add_chat_assistants):
def cleanup(): def cleanup():
for chat_assistant_id in chat_assistant_ids: for chat_assistant_id in chat_assistant_ids:
delete_session_with_chat_assistants(http_api_auth, chat_assistant_id) delete_session_with_chat_assistants(HttpApiAuth, chat_assistant_id)
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
@@ -120,51 +133,51 @@ def clear_session_with_chat_assistants(request, http_api_auth, add_chat_assistan
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def add_dataset(request, http_api_auth): def add_dataset(request, HttpApiAuth):
def cleanup(): def cleanup():
delete_datasets(http_api_auth, {"ids": None}) delete_datasets(HttpApiAuth, {"ids": None})
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
dataset_ids = batch_create_datasets(http_api_auth, 1) dataset_ids = batch_create_datasets(HttpApiAuth, 1)
return dataset_ids[0] return dataset_ids[0]
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
def add_dataset_func(request, http_api_auth): def add_dataset_func(request, HttpApiAuth):
def cleanup(): def cleanup():
delete_datasets(http_api_auth, {"ids": None}) delete_datasets(HttpApiAuth, {"ids": None})
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
return batch_create_datasets(http_api_auth, 1)[0] return batch_create_datasets(HttpApiAuth, 1)[0]
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def add_document(http_api_auth, add_dataset, ragflow_tmp_dir): def add_document(HttpApiAuth, add_dataset, ragflow_tmp_dir):
dataset_id = add_dataset dataset_id = add_dataset
document_ids = bulk_upload_documents(http_api_auth, dataset_id, 1, ragflow_tmp_dir) document_ids = bulk_upload_documents(HttpApiAuth, dataset_id, 1, ragflow_tmp_dir)
return dataset_id, document_ids[0] return dataset_id, document_ids[0]
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def add_chunks(http_api_auth, add_document): def add_chunks(HttpApiAuth, add_document):
dataset_id, document_id = add_document dataset_id, document_id = add_document
parse_documents(http_api_auth, dataset_id, {"document_ids": [document_id]}) parse_documents(HttpApiAuth, dataset_id, {"document_ids": [document_id]})
condition(http_api_auth, dataset_id) condition(HttpApiAuth, dataset_id)
chunk_ids = batch_add_chunks(http_api_auth, dataset_id, document_id, 4) chunk_ids = batch_add_chunks(HttpApiAuth, dataset_id, document_id, 4)
sleep(1) # issues/6487 sleep(1) # issues/6487
return dataset_id, document_id, chunk_ids return dataset_id, document_id, chunk_ids
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def add_chat_assistants(request, http_api_auth, add_document): def add_chat_assistants(request, HttpApiAuth, add_document):
def cleanup(): def cleanup():
delete_chat_assistants(http_api_auth) delete_chat_assistants(HttpApiAuth)
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
dataset_id, document_id = add_document dataset_id, document_id = add_document
parse_documents(http_api_auth, dataset_id, {"document_ids": [document_id]}) parse_documents(HttpApiAuth, dataset_id, {"document_ids": [document_id]})
condition(http_api_auth, dataset_id) condition(HttpApiAuth, dataset_id)
return dataset_id, document_id, batch_create_chat_assistants(http_api_auth, 5) return dataset_id, document_id, batch_create_chat_assistants(HttpApiAuth, 5)

6
uv.lock generated
View file

@@ -5417,6 +5417,7 @@ dependencies = [
{ name = "html-text" }, { name = "html-text" },
{ name = "httpx", extra = ["socks"] }, { name = "httpx", extra = ["socks"] },
{ name = "huggingface-hub" }, { name = "huggingface-hub" },
{ name = "hypothesis" },
{ name = "imageio-ffmpeg" }, { name = "imageio-ffmpeg" },
{ name = "infinity-emb" }, { name = "infinity-emb" },
{ name = "infinity-sdk" }, { name = "infinity-sdk" },
@@ -5479,6 +5480,7 @@ dependencies = [
{ name = "ranx" }, { name = "ranx" },
{ name = "readability-lxml" }, { name = "readability-lxml" },
{ name = "replicate" }, { name = "replicate" },
{ name = "reportlab" },
{ name = "requests" }, { name = "requests" },
{ name = "roman-numbers" }, { name = "roman-numbers" },
{ name = "ruamel-base" }, { name = "ruamel-base" },
@@ -5584,6 +5586,7 @@ requires-dist = [
{ name = "html-text", specifier = "==0.6.2" }, { name = "html-text", specifier = "==0.6.2" },
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<0.29.0" }, { name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<0.29.0" },
{ name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" }, { name = "huggingface-hub", specifier = ">=0.25.0,<0.26.0" },
{ name = "hypothesis", specifier = ">=6.132.0" },
{ name = "imageio-ffmpeg", specifier = ">=0.6.0" }, { name = "imageio-ffmpeg", specifier = ">=0.6.0" },
{ name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" }, { name = "infinity-emb", specifier = ">=0.0.66,<0.0.67" },
{ name = "infinity-sdk", specifier = "==0.6.7" }, { name = "infinity-sdk", specifier = "==0.6.7" },
@@ -5646,6 +5649,7 @@ requires-dist = [
{ name = "ranx", specifier = "==0.3.20" }, { name = "ranx", specifier = "==0.3.20" },
{ name = "readability-lxml", specifier = "==0.8.1" }, { name = "readability-lxml", specifier = "==0.8.1" },
{ name = "replicate", specifier = "==0.31.0" }, { name = "replicate", specifier = "==0.31.0" },
{ name = "reportlab", specifier = ">=4.4.3" },
{ name = "requests", specifier = "==2.32.2" }, { name = "requests", specifier = "==2.32.2" },
{ name = "roman-numbers", specifier = "==1.0.2" }, { name = "roman-numbers", specifier = "==1.0.2" },
{ name = "ruamel-base", specifier = "==1.0.0" }, { name = "ruamel-base", specifier = "==1.0.0" },
@@ -5691,7 +5695,7 @@ test = [
{ name = "pytest", specifier = ">=8.3.5" }, { name = "pytest", specifier = ">=8.3.5" },
{ name = "python-docx", specifier = ">=1.1.2" }, { name = "python-docx", specifier = ">=1.1.2" },
{ name = "python-pptx", specifier = ">=1.0.2" }, { name = "python-pptx", specifier = ">=1.0.2" },
{ name = "reportlab", specifier = ">=4.4.1" }, { name = "reportlab", specifier = ">=4.4.3" },
{ name = "requests", specifier = ">=2.32.2" }, { name = "requests", specifier = ">=2.32.2" },
{ name = "requests-toolbelt", specifier = ">=1.0.0" }, { name = "requests-toolbelt", specifier = ">=1.0.0" },
{ name = "strenum", specifier = "==0.4.15" }, { name = "strenum", specifier = "==0.4.15" },