Changes to support JWT through Langflow

Thread the caller's JWT from the auth_token cookie through the endpoints and ChatService, and forward it to Langflow as an X-LANGFLOW-GLOBAL-VAR-JWT header. The upload-context path also passes user_id and jwt_token so the chat branch can set an auth context for its tools.
parent d5dd1c1183
commit 7ab8433aa8
4 changed files with 44 additions and 16 deletions

src/agent.py (20 changes)
@@ -11,7 +11,7 @@ def get_user_conversation(user_id: str):
     return user_conversations[user_id]
 
 # Generic async response function for streaming
-async def async_response_stream(client, prompt: str, model: str, previous_response_id: str = None, log_prefix: str = "response"):
+async def async_response_stream(client, prompt: str, model: str, extra_headers: dict = None, previous_response_id: str = None, log_prefix: str = "response"):
     print(f"user ==> {prompt}")
 
     try:
@@ -24,6 +24,8 @@ async def async_response_stream(client, prompt: str, model: str, previous_respon
     }
     if previous_response_id is not None:
         request_params["previous_response_id"] = previous_response_id
+    if extra_headers:
+        request_params["extra_headers"] = extra_headers
 
     response = await client.responses.create(**request_params)
 
@@ -74,7 +76,7 @@ async def async_response_stream(client, prompt: str, model: str, previous_respon
         raise
 
 # Generic async response function for non-streaming
-async def async_response(client, prompt: str, model: str, previous_response_id: str = None, log_prefix: str = "response"):
+async def async_response(client, prompt: str, model: str, extra_headers: dict = None, previous_response_id: str = None, log_prefix: str = "response"):
     print(f"user ==> {prompt}")
 
     # Build request parameters
@@ -86,6 +88,8 @@ async def async_response(client, prompt: str, model: str, previous_response_id:
     }
     if previous_response_id is not None:
         request_params["previous_response_id"] = previous_response_id
+    if extra_headers:
+        request_params["extra_headers"] = extra_headers
 
     response = await client.responses.create(**request_params)
 
@@ -98,20 +102,20 @@ async def async_response(client, prompt: str, model: str, previous_response_id:
     return response_text, response_id
 
 # Unified streaming function for both chat and langflow
-async def async_stream(client, prompt: str, model: str, previous_response_id: str = None, log_prefix: str = "response"):
-    async for chunk in async_response_stream(client, prompt, model, previous_response_id=previous_response_id, log_prefix=log_prefix):
+async def async_stream(client, prompt: str, model: str, extra_headers: dict = None, previous_response_id: str = None, log_prefix: str = "response"):
+    async for chunk in async_response_stream(client, prompt, model, extra_headers=extra_headers, previous_response_id=previous_response_id, log_prefix=log_prefix):
         yield chunk
 
 # Async langflow function (non-streaming only)
-async def async_langflow(langflow_client, flow_id: str, prompt: str, previous_response_id: str = None):
-    response_text, response_id = await async_response(langflow_client, prompt, flow_id, previous_response_id=previous_response_id, log_prefix="langflow")
+async def async_langflow(langflow_client, flow_id: str, prompt: str, extra_headers: dict = None, previous_response_id: str = None):
+    response_text, response_id = await async_response(langflow_client, prompt, flow_id, extra_headers=extra_headers, previous_response_id=previous_response_id, log_prefix="langflow")
     return response_text, response_id
 
 # Async langflow function for streaming (alias for compatibility)
-async def async_langflow_stream(langflow_client, flow_id: str, prompt: str, previous_response_id: str = None):
+async def async_langflow_stream(langflow_client, flow_id: str, prompt: str, extra_headers: dict = None, previous_response_id: str = None):
     print(f"[DEBUG] Starting langflow stream for prompt: {prompt}")
     try:
-        async for chunk in async_stream(langflow_client, prompt, flow_id, previous_response_id=previous_response_id, log_prefix="langflow"):
+        async for chunk in async_stream(langflow_client, prompt, flow_id, extra_headers=extra_headers, previous_response_id=previous_response_id, log_prefix="langflow"):
            print(f"[DEBUG] Yielding chunk from langflow_stream: {chunk[:100]}...")
            yield chunk
        print(f"[DEBUG] Langflow stream completed")
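
Taken together, these hunks thread one new optional parameter, extra_headers, through every request builder in src/agent.py. A minimal sketch of the resulting pattern, with the hypothetical name create_response and the base request keys ("model", "input") assumed, since the hunks only show the optional fields:

```python
from typing import Optional

async def create_response(client, prompt: str, model: str,
                          extra_headers: Optional[dict] = None,
                          previous_response_id: Optional[str] = None):
    # Optional fields are only attached when provided, mirroring the diff.
    request_params = {"model": model, "input": prompt}  # base keys assumed
    if previous_response_id is not None:
        request_params["previous_response_id"] = previous_response_id
    if extra_headers:
        # The OpenAI-style Python client accepts extra_headers per request
        # and sends them as additional HTTP headers on that call only; this
        # is what carries the JWT to the Langflow server.
        request_params["extra_headers"] = extra_headers
    return await client.responses.create(**request_params)
```
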
@@ -39,13 +39,16 @@ async def langflow_endpoint(request: Request, chat_service, session_manager):
     previous_response_id = data.get("previous_response_id")
     stream = data.get("stream", False)
 
+    # Get JWT token from request cookie
+    jwt_token = request.cookies.get("auth_token")
+
     if not prompt:
         return JSONResponse({"error": "Prompt is required"}, status_code=400)
 
     try:
         if stream:
             return StreamingResponse(
-                await chat_service.langflow_chat(prompt, previous_response_id=previous_response_id, stream=True),
+                await chat_service.langflow_chat(prompt, jwt_token, previous_response_id=previous_response_id, stream=True),
                 media_type="text/event-stream",
                 headers={
                     "Cache-Control": "no-cache",
@@ -55,7 +58,7 @@ async def langflow_endpoint(request: Request, chat_service, session_manager):
                 }
             )
         else:
-            result = await chat_service.langflow_chat(prompt, previous_response_id=previous_response_id, stream=False)
+            result = await chat_service.langflow_chat(prompt, jwt_token, previous_response_id=previous_response_id, stream=False)
             return JSONResponse(result)
 
     except Exception as e:
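
With the endpoint now reading the auth_token cookie, the flow can be exercised end to end. A hedged example, assuming the route is mounted at /langflow on a local server (route registration is not part of this diff):

```python
import asyncio
import httpx

async def call_langflow(jwt: str) -> dict:
    # base_url and the /langflow path are assumptions for illustration.
    async with httpx.AsyncClient(base_url="http://localhost:8000",
                                 cookies={"auth_token": jwt}) as http:
        # Server side reads the cookie via request.cookies.get("auth_token").
        resp = await http.post("/langflow",
                               json={"prompt": "hello", "stream": False})
        resp.raise_for_status()
        return resp.json()

# asyncio.run(call_langflow("<your-jwt>"))
```
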
@@ -45,6 +45,13 @@ async def upload_context(request: Request, document_service, chat_service, sessi
     # Get optional parameters
     previous_response_id = form.get("previous_response_id")
     endpoint = form.get("endpoint", "langflow")
 
+    # Get JWT token from request cookie for authentication
+    jwt_token = request.cookies.get("auth_token")
+
+    # Get user info from request state (set by auth middleware)
+    user = request.state.user
+    user_id = user.user_id if user else None
+
     # Process document and extract content
     doc_result = await document_service.process_upload_context(upload_file, filename)
@@ -53,6 +60,8 @@ async def upload_context(request: Request, document_service, chat_service, sessi
     response_text, response_id = await chat_service.upload_context_chat(
         doc_result["content"],
         filename,
+        user_id=user_id,
+        jwt_token=jwt_token,
         previous_response_id=previous_response_id,
         endpoint=endpoint
     )
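
The upload path carries the same cookie plus the multipart form fields read above. A hypothetical client call, with the /upload-context path and the file field name assumed, since neither appears in these hunks:

```python
import httpx

async def upload_doc(jwt: str, path: str) -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000",
                                 cookies={"auth_token": jwt}) as http:
        with open(path, "rb") as fh:
            resp = await http.post(
                "/upload-context",              # route path assumed
                files={"file": fh},             # form field name assumed
                data={"endpoint": "langflow"},  # matches form.get("endpoint", "langflow")
            )
        resp.raise_for_status()
        return resp.json()
```
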
@@ -22,7 +22,7 @@ class ChatService:
             response_data["response_id"] = response_id
         return response_data
 
-    async def langflow_chat(self, prompt: str, previous_response_id: str = None, stream: bool = False):
+    async def langflow_chat(self, prompt: str, jwt_token: str = None, previous_response_id: str = None, stream: bool = False):
         """Handle Langflow chat requests"""
         if not prompt:
             raise ValueError("Prompt is required")
@@ -30,23 +30,35 @@ class ChatService:
         if not LANGFLOW_URL or not FLOW_ID or not LANGFLOW_KEY:
             raise ValueError("LANGFLOW_URL, FLOW_ID, and LANGFLOW_KEY environment variables are required")
 
+        # Prepare extra headers for JWT authentication
+        extra_headers = {}
+        if jwt_token:
+            extra_headers['X-LANGFLOW-GLOBAL-VAR-JWT'] = jwt_token
+
         if stream:
-            return async_langflow_stream(clients.langflow_client, FLOW_ID, prompt, previous_response_id=previous_response_id)
+            return async_langflow_stream(clients.langflow_client, FLOW_ID, prompt, extra_headers=extra_headers, previous_response_id=previous_response_id)
         else:
-            response_text, response_id = await async_langflow(clients.langflow_client, FLOW_ID, prompt, previous_response_id=previous_response_id)
+            response_text, response_id = await async_langflow(clients.langflow_client, FLOW_ID, prompt, extra_headers=extra_headers, previous_response_id=previous_response_id)
             response_data = {"response": response_text}
             if response_id:
                 response_data["response_id"] = response_id
             return response_data
 
     async def upload_context_chat(self, document_content: str, filename: str,
-                                  previous_response_id: str = None, endpoint: str = "langflow"):
+                                  user_id: str = None, jwt_token: str = None, previous_response_id: str = None, endpoint: str = "langflow"):
         """Send document content as user message to get proper response_id"""
         document_prompt = f"I'm uploading a document called '{filename}'. Here is its content:\n\n{document_content}\n\nPlease confirm you've received this document and are ready to answer questions about it."
 
         if endpoint == "langflow":
-            response_text, response_id = await async_langflow(clients.langflow_client, FLOW_ID, document_prompt, previous_response_id=previous_response_id)
+            # Prepare extra headers for JWT authentication
+            extra_headers = {}
+            if jwt_token:
+                extra_headers['X-LANGFLOW-GLOBAL-VAR-JWT'] = jwt_token
+            response_text, response_id = await async_langflow(clients.langflow_client, FLOW_ID, document_prompt, extra_headers=extra_headers, previous_response_id=previous_response_id)
         else: # chat
-            response_text, response_id = await async_chat(clients.patched_async_client, document_prompt, previous_response_id=previous_response_id)
+            # Set auth context for chat tools and provide user_id
+            if user_id and jwt_token:
+                set_auth_context(user_id, jwt_token)
+            response_text, response_id = await async_chat(clients.patched_async_client, document_prompt, user_id, previous_response_id=previous_response_id)
 
         return response_text, response_id
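
Two notes on this last hunk. The header name X-LANGFLOW-GLOBAL-VAR-JWT suggests the token is surfaced inside the flow as a Langflow global variable named JWT; that mapping happens server-side and is not visible in this diff. And set_auth_context is imported from elsewhere in the codebase; its definition is not shown here, but a contextvars-based sketch of what such a helper typically looks like:

```python
import contextvars
from typing import Optional

# Hypothetical sketch only; the real set_auth_context is defined elsewhere.
# A ContextVar keeps the auth pair scoped to the current asyncio task,
# which is safer than module-level globals under concurrent requests.
_auth: contextvars.ContextVar[Optional[dict]] = contextvars.ContextVar(
    "auth", default=None)

def set_auth_context(user_id: str, jwt_token: str) -> None:
    _auth.set({"user_id": user_id, "jwt_token": jwt_token})

def get_auth_context() -> Optional[dict]:
    return _auth.get()
```
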