Merge branch 'ingestion-flow' into langflow-ingestion-modes

Edwin Jose 2025-09-08 10:38:31 -04:00 committed by GitHub
commit 5dd9959dd0
5 changed files with 340 additions and 151 deletions

File diff suppressed because one or more lines are too long


@@ -34,9 +34,7 @@ async def upload_user_file(
logger.debug("JWT token status", jwt_present=jwt_token is not None)
logger.debug("Calling langflow_file_service.upload_user_file")
result = await langflow_file_service.upload_user_file(
file_tuple, jwt_token=jwt_token
)
result = await langflow_file_service.upload_user_file(file_tuple, jwt_token)
logger.debug("Upload successful", result=result)
return JSONResponse(result, status_code=201)
except Exception as e:
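
For orientation, the hunk above simplifies the service call to pass the JWT positionally. A minimal sketch of how the surrounding FastAPI route might assemble the httpx-style file tuple and make that call; the UploadFile handling and variable names are assumptions, since that part sits outside the hunk:

# Sketch only; assumes a module-level `langflow_file_service` instance and
# FastAPI's UploadFile. Not part of the diff.
from fastapi import Request, UploadFile
from fastapi.responses import JSONResponse

async def upload_route_sketch(request: Request, file: UploadFile):
    content = await file.read()
    # httpx-style file tuple: (filename, bytes, content type)
    file_tuple = (file.filename, content, file.content_type)
    jwt_token = getattr(request.state, "jwt_token", None)
    result = await langflow_file_service.upload_user_file(file_tuple, jwt_token)
    return JSONResponse(result, status_code=201)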
@@ -99,15 +97,20 @@ async def run_ingestion(
# (search parameters are for retrieval, not document processing)
logger.debug("Final tweaks with settings applied", tweaks=tweaks)
# Include user JWT if available
jwt_token = getattr(request.state, "jwt_token", None)
if jwt_token:
# Set auth context for downstream services
from auth_context import set_auth_context
user_id = getattr(request.state, "user_id", None)
set_auth_context(user_id, jwt_token)
result = await langflow_file_service.run_ingestion_flow(
file_paths=file_paths or [],
jwt_token=jwt_token,
session_id=session_id,
tweaks=tweaks,
jwt_token=jwt_token,
)
return JSONResponse(result)
except Exception as e:
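
The auth_context module imported above is not shown in this diff. A minimal contextvars-based sketch of what set_auth_context could look like, as an assumed implementation rather than the project's actual module:

# auth_context.py, sketched; the real implementation is not in this commit.
from contextvars import ContextVar
from typing import Optional, Tuple

_auth_context: ContextVar[Tuple[Optional[str], Optional[str]]] = ContextVar(
    "auth_context", default=(None, None)
)

def set_auth_context(user_id: Optional[str], jwt_token: Optional[str]) -> None:
    # Stash (user_id, jwt_token) in the current async context so downstream
    # services can read it without threading parameters through every call.
    _auth_context.set((user_id, jwt_token))

def get_auth_context() -> Tuple[Optional[str], Optional[str]]:
    return _auth_context.get()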


@@ -322,6 +322,10 @@ class AppClients:
existing_headers = kwargs.pop("headers", {})
headers = {**default_headers, **existing_headers}
# Remove Content-Type if explicitly set to None (for file uploads)
if headers.get("Content-Type") is None:
headers.pop("Content-Type", None)
url = f"{LANGFLOW_URL}{endpoint}"
return await self.langflow_http_client.request(
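
The new lines above treat an explicit Content-Type of None as a sentinel: the default header is dropped, presumably so the underlying client (httpx, going by reason_phrase elsewhere in the diff) can emit multipart/form-data with its own boundary on file uploads. A standalone sketch of that merge-then-strip behavior, with illustrative defaults:

# Illustrative only; the real defaults live in AppClients and are not shown here.
def merge_request_headers(default_headers: dict, overrides: dict) -> dict:
    headers = {**default_headers, **overrides}
    # {"Content-Type": None} means "remove the default entirely" so the HTTP
    # client can set multipart/form-data with a generated boundary.
    if headers.get("Content-Type") is None:
        headers.pop("Content-Type", None)
    return headers

assert merge_request_headers(
    {"Content-Type": "application/json", "Accept": "application/json"},
    {"Content-Type": None},
) == {"Accept": "application/json"}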


@@ -3,11 +3,12 @@ from typing import Any, Dict, List, Optional
from config.settings import LANGFLOW_INGEST_FLOW_ID, clients
logger = logging.getLogger(__name__)
class LangflowFileService:
def __init__(self):
self.flow_id_ingest = LANGFLOW_INGEST_FLOW_ID
self.logger = logging.getLogger(__name__)
async def upload_user_file(
self, file_tuple, jwt_token: Optional[str] = None
@@ -15,15 +16,18 @@ class LangflowFileService:
"""Upload a file using Langflow Files API v2: POST /api/v2/files.
Returns JSON with keys: id, name, path, size, provider.
"""
self.logger.debug("[LF] Upload (v2) -> /api/v2/files")
logger.debug("[LF] Upload (v2) -> /api/v2/files")
resp = await clients.langflow_request(
"POST", "/api/v2/files", files={"file": file_tuple}
"POST",
"/api/v2/files",
files={"file": file_tuple},
headers={"Content-Type": None},
)
self.logger.debug(
logger.debug(
"[LF] Upload response: %s %s", resp.status_code, resp.reason_phrase
)
if resp.status_code >= 400:
self.logger.error(
logger.error(
"[LF] Upload failed: %s %s | body=%s",
resp.status_code,
resp.reason_phrase,
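
Per the docstring above, a call to the v2 upload looks roughly like this; the file name and contents are made up for illustration:

async def upload_example() -> dict:
    service = LangflowFileService()
    # httpx-style tuple: (filename, content, content type); values are examples.
    file_tuple = ("report.pdf", b"%PDF-1.4 example bytes", "application/pdf")
    result = await service.upload_user_file(file_tuple, jwt_token=None)
    # The docstring says the JSON result carries: id, name, path, size, provider.
    return result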
@@ -35,13 +39,13 @@
async def delete_user_file(self, file_id: str) -> None:
"""Delete a file by id using v2: DELETE /api/v2/files/{id}."""
# NOTE: use v2 root, not /api/v1
self.logger.debug("[LF] Delete (v2) -> /api/v2/files/%s", file_id)
logger.debug("[LF] Delete (v2) -> /api/v2/files/%s", file_id)
resp = await clients.langflow_request("DELETE", f"/api/v2/files/{file_id}")
self.logger.debug(
logger.debug(
"[LF] Delete response: %s %s", resp.status_code, resp.reason_phrase
)
if resp.status_code >= 400:
self.logger.error(
logger.error(
"[LF] Delete failed: %s %s | body=%s",
resp.status_code,
resp.reason_phrase,
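
And the matching cleanup call, which goes straight to the v2 root as the NOTE above stresses; file_id is whatever id the upload response returned:

async def delete_example(file_id: str) -> None:
    service = LangflowFileService()
    # Issues DELETE /api/v2/files/{file_id}; failures (>= 400) are logged above,
    # and any further error handling is cut off by the hunk boundary.
    await service.delete_user_file(file_id)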
@@ -52,15 +56,16 @@
async def run_ingestion_flow(
self,
file_paths: List[str],
jwt_token: str,
session_id: Optional[str] = None,
tweaks: Optional[Dict[str, Any]] = None,
jwt_token: Optional[str] = None,
) -> Dict[str, Any]:
"""
Trigger the ingestion flow with provided file paths.
The flow must expose a File component path in input schema or accept files parameter.
"""
if not self.flow_id_ingest:
logger.error("[LF] LANGFLOW_INGEST_FLOW_ID is not configured")
raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
payload: Dict[str, Any] = {
@@ -68,19 +73,26 @@ class LangflowFileService:
"input_type": "chat",
"output_type": "text", # Changed from "json" to "text"
}
if not tweaks:
tweaks = {}
# Pass files via tweaks to File component (File-PSU37 from the flow)
if file_paths:
if not tweaks:
tweaks = {}
tweaks["File-PSU37"] = {"path": file_paths}
# Pass JWT token via tweaks using the x-langflow-global-var- pattern
if jwt_token:
# Using the global variable pattern that Langflow expects for OpenSearch components
tweaks["OpenSearchHybrid-Ve6bS"] = {"jwt_token": jwt_token}
logger.error("[LF] Adding JWT token to tweaks for OpenSearch components")
else:
logger.error("[LF] No JWT token provided")
if tweaks:
payload["tweaks"] = tweaks
if session_id:
payload["session_id"] = session_id
self.logger.debug(
logger.debug(
"[LF] Run ingestion -> /run/%s | files=%s session_id=%s tweaks_keys=%s jwt_present=%s",
self.flow_id_ingest,
len(file_paths) if file_paths else 0,
@@ -90,16 +102,14 @@ class LangflowFileService:
)
# Log the full payload for debugging
self.logger.debug("[LF] Request payload: %s", payload)
logger.debug("[LF] Request payload: %s", payload)
resp = await clients.langflow_request(
"POST", f"/api/v1/run/{self.flow_id_ingest}", json=payload
)
self.logger.debug(
"[LF] Run response: %s %s", resp.status_code, resp.reason_phrase
)
logger.debug("[LF] Run response: %s %s", resp.status_code, resp.reason_phrase)
if resp.status_code >= 400:
self.logger.error(
logger.error(
"[LF] Run failed: %s %s | body=%s",
resp.status_code,
resp.reason_phrase,
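
Taken together, the tweak-building above produces a run request shaped roughly like the following; the component IDs come from the flow referenced in the code, while the path, session id, and token are placeholders:

# Illustrative body for POST /api/v1/run/{LANGFLOW_INGEST_FLOW_ID}.
payload = {
    # (the first key(s) of the payload dict fall outside this hunk)
    "input_type": "chat",
    "output_type": "text",
    "tweaks": {
        "File-PSU37": {"path": ["uploads/report.pdf"]},         # example path
        "OpenSearchHybrid-Ve6bS": {"jwt_token": "<user-jwt>"},   # placeholder token
    },
    "session_id": "session-123",                                 # optional
}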


@@ -5,7 +5,7 @@ import uuid
from typing import Dict
from models.tasks import FileTask, TaskStatus, UploadTask
from src.utils.gpu_detection import get_worker_count
from utils.gpu_detection import get_worker_count
from utils.logging_config import get_logger
logger = get_logger(__name__)