"use client"

import { useState, useRef, useEffect } from "react"
import { Button } from "@/components/ui/button"
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
import { Input } from "@/components/ui/input"
import { MessageCircle, Send, Loader2, User, Bot, Zap, Settings, ChevronDown, ChevronRight, Upload } from "lucide-react"
import { ProtectedRoute } from "@/components/protected-route"
import { useAuth } from "@/contexts/auth-context"
import { useTask } from "@/contexts/task-context"

// One entry in the chat transcript (a user prompt or an assistant reply).
interface Message {
  role: "user" | "assistant"
  content: string
  timestamp: Date
  functionCalls?: FunctionCall[]
  isStreaming?: boolean
}

// A tool/function invocation observed while streaming an assistant reply.
// NOTE(review): the source had bare `Record` (invalid TS — type arguments were
// stripped); restored as Record<string, unknown>. Confirm against git history.
interface FunctionCall {
  name: string
  arguments?: Record<string, unknown>
  result?: Record<string, unknown> | ToolCallResult[]
  status: "pending" | "completed" | "error"
  // Raw argument JSON accumulated from streaming deltas before it parses.
  argumentsString?: string
  id?: string
  type?: string
}

// Shape of one element of a tool-call result list (loosely typed; the index
// signature admits whatever extra fields the backend sends).
interface ToolCallResult {
  text_key?: string
  data?: {
    file_path?: string
    text?: string
    [key: string]: unknown
  }
  default_value?: string
  [key: string]: unknown
}

type EndpointType = "chat" | "langflow"

// Body sent to /api/chat and /api/langflow.
interface RequestBody {
  prompt: string
  stream?: boolean
  previous_response_id?: string
}

function ChatPage() {
  // NOTE(review): the generic arguments on these hooks were stripped from the
  // mangled source (`useState>(new Set())`, `useRef(null)`); the restored
  // parameters below follow from how each value is used later in the file.
  const [messages, setMessages] = useState<Message[]>([])
  const [input, setInput] = useState("")
  const [loading, setLoading] = useState(false)
  const [endpoint, setEndpoint] = useState<EndpointType>("langflow")
  const [asyncMode, setAsyncMode] = useState(true)
  // In-flight assistant reply while streaming; null when idle.
  const [streamingMessage, setStreamingMessage] = useState<{
    content: string
    functionCalls: FunctionCall[]
    timestamp: Date
  } | null>(null)
  // IDs of function-call panels the user has expanded.
  const [expandedFunctionCalls, setExpandedFunctionCalls] = useState<Set<string>>(new Set())
  // Per-endpoint conversation continuity token returned by the backend.
  const [previousResponseIds, setPreviousResponseIds] = useState<{
    chat: string | null
    langflow: string | null
  }>({ chat: null, langflow: null })
  const [isUploading, setIsUploading] = useState(false)
  const [isDragOver, setIsDragOver] = useState(false)
  // Counts nested dragenter/dragleave pairs so child elements don't flicker the overlay.
  const dragCounterRef = useRef(0)
  const messagesEndRef = useRef<HTMLDivElement>(null)
  const inputRef = useRef<HTMLInputElement>(null)
  const { user } = useAuth()
  const
{ addTask } = useTask()

  // Keep the newest transcript entry scrolled into view.
  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" })
  }

  const handleEndpointChange = (newEndpoint: EndpointType) => {
    setEndpoint(newEndpoint)
    // Clear the conversation when switching endpoints to avoid response ID conflicts
    setMessages([])
    setPreviousResponseIds({ chat: null, langflow: null })
  }

  // Upload a document as additional conversation context. Progress is shown by
  // appending a placeholder assistant message and replacing it (prev.slice(0, -1))
  // as the upload advances.
  // NOTE(review): the slice(0, -1) replacement assumes the placeholder is still
  // the last message; a user message sent mid-upload would be clobbered — verify.
  const handleFileUpload = async (file: File) => {
    console.log("handleFileUpload called with file:", file.name)
    if (isUploading) return

    setIsUploading(true)

    // Add initial upload message
    const uploadStartMessage: Message = {
      role: "assistant",
      content: `🔄 Starting upload of **${file.name}**...`,
      timestamp: new Date()
    }
    setMessages(prev => [...prev, uploadStartMessage])

    try {
      const formData = new FormData()
      formData.append('file', file)
      formData.append('endpoint', endpoint)

      // Add previous_response_id if we have one for this endpoint
      const currentResponseId = previousResponseIds[endpoint]
      if (currentResponseId) {
        formData.append('previous_response_id', currentResponseId)
      }

      const response = await fetch('/api/upload_context', {
        method: 'POST',
        body: formData,
      })

      console.log("Upload response status:", response.status)

      if (!response.ok) {
        const errorText = await response.text()
        console.error("Upload failed with status:", response.status, "Response:", errorText)
        throw new Error(`Upload failed: ${response.status} - ${errorText}`)
      }

      const result = await response.json()
      console.log("Upload result:", result)

      if (response.status === 201) {
        // New flow: Got task ID, start tracking with centralized system
        const taskId = result.task_id || result.id
        if (!taskId) {
          console.error("No task ID in 201 response:", result)
          throw new Error("No task ID received from server")
        }

        // Add task to centralized tracking
        addTask(taskId)

        // Update message to show task is being tracked
        const pollingMessage: Message = {
          role: "assistant",
          content: `⏳ Upload initiated for **${file.name}**. Processing in background... (Task ID: ${taskId})`,
          timestamp: new Date()
        }
        setMessages(prev => [...prev.slice(0, -1), pollingMessage])
      } else if (response.ok) {
        // Original flow: Direct response
        const uploadMessage: Message = {
          role: "assistant",
          content: `📄 Document uploaded: **${result.filename}** (${result.pages} pages, ${result.content_length.toLocaleString()} characters)\n\n${result.confirmation}`,
          timestamp: new Date()
        }
        setMessages(prev => [...prev.slice(0, -1), uploadMessage])

        // Update the response ID for this endpoint
        if (result.response_id) {
          setPreviousResponseIds(prev => ({
            ...prev,
            [endpoint]: result.response_id
          }))
        }
      } else {
        // Unreachable in practice (the !response.ok branch above already threw)
        // but kept as a defensive backstop.
        throw new Error(`Upload failed: ${response.status}`)
      }
    } catch (error) {
      console.error('Upload failed:', error)
      const errorMessage: Message = {
        role: "assistant",
        content: `❌ Upload failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
        timestamp: new Date()
      }
      setMessages(prev => [...prev.slice(0, -1), errorMessage])
    } finally {
      setIsUploading(false)
    }
  }

  // Remove the old pollTaskStatus function since we're using centralized system

  // Drag-and-drop plumbing: the counter pairs enter/leave events from nested
  // children so the overlay only toggles at the outermost boundary.
  const handleDragEnter = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    dragCounterRef.current++
    if (dragCounterRef.current === 1) {
      setIsDragOver(true)
    }
  }

  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
  }

  const handleDragLeave = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    dragCounterRef.current--
    if (dragCounterRef.current === 0) {
      setIsDragOver(false)
    }
  }

  const handleDrop = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
    dragCounterRef.current = 0
    setIsDragOver(false)

    const files = Array.from(e.dataTransfer.files)
    if (files.length > 0) {
      handleFileUpload(files[0]) // Upload first file only
    }
  }

  useEffect(() => {
    scrollToBottom()
  }, [messages, streamingMessage])

  // Auto-focus the input on component mount
  useEffect(() => {
    inputRef.current?.focus()
  }, [])

  // Stream an assistant reply as newline-delimited JSON chunks over fetch.
  const handleSSEStream = async (userMessage: Message) => {
    const
apiEndpoint = endpoint === "chat" ? "/api/chat" : "/api/langflow"

    try {
      const requestBody: RequestBody = {
        prompt: userMessage.content,
        stream: true
      }

      // Add previous_response_id if we have one for this endpoint
      const currentResponseId = previousResponseIds[endpoint]
      if (currentResponseId) {
        requestBody.previous_response_id = currentResponseId
      }

      const response = await fetch(apiEndpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify(requestBody),
      })

      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`)
      }

      const reader = response.body?.getReader()
      if (!reader) {
        throw new Error("No reader available")
      }

      const decoder = new TextDecoder()
      let buffer = ""
      let currentContent = ""
      const currentFunctionCalls: FunctionCall[] = []
      let newResponseId: string | null = null

      // Initialize streaming message
      setStreamingMessage({
        content: "",
        functionCalls: [],
        timestamp: new Date()
      })

      try {
        while (true) {
          const { done, value } = await reader.read()
          if (done) break

          buffer += decoder.decode(value, { stream: true })

          // Process complete lines (JSON objects)
          const lines = buffer.split('\n')
          // Keep incomplete line in buffer.
          // NOTE(review): a final chunk not terminated by '\n' stays in `buffer`
          // and is never parsed after the stream ends — confirm the backend
          // always newline-terminates its last payload.
          buffer = lines.pop() || ""

          for (const line of lines) {
            if (line.trim()) {
              try {
                const chunk = JSON.parse(line)
                console.log("Received chunk:", chunk.type || chunk.object, chunk)

                // Extract response ID if present
                if (chunk.id) {
                  newResponseId = chunk.id
                } else if (chunk.response_id) {
                  newResponseId = chunk.response_id
                }

                // Handle OpenAI Chat Completions streaming format
                if (chunk.object === "response.chunk" && chunk.delta) {
                  // Handle function calls in delta
                  if (chunk.delta.function_call) {
                    console.log("Function call in delta:", chunk.delta.function_call)

                    // Check if this is a new function call
                    if (chunk.delta.function_call.name) {
                      console.log("New function call:", chunk.delta.function_call.name)
                      const functionCall: FunctionCall = {
                        name: chunk.delta.function_call.name,
                        arguments: undefined,
                        status: "pending",
                        argumentsString: chunk.delta.function_call.arguments || ""
                      }
                      currentFunctionCalls.push(functionCall)
                      console.log("Added function call:", functionCall)
                    }
                    // Or if this is arguments continuation
                    else if (chunk.delta.function_call.arguments) {
                      console.log("Function call arguments delta:", chunk.delta.function_call.arguments)
                      const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                      if (lastFunctionCall) {
                        if (!lastFunctionCall.argumentsString) {
                          lastFunctionCall.argumentsString = ""
                        }
                        lastFunctionCall.argumentsString += chunk.delta.function_call.arguments
                        console.log("Accumulated arguments:", lastFunctionCall.argumentsString)

                        // Try to parse arguments if they look complete
                        if (lastFunctionCall.argumentsString.includes("}")) {
                          try {
                            const parsed = JSON.parse(lastFunctionCall.argumentsString)
                            lastFunctionCall.arguments = parsed
                            lastFunctionCall.status = "completed"
                            console.log("Parsed function arguments:", parsed)
                          } catch (e) {
                            console.log("Arguments not yet complete or invalid JSON:", e)
                          }
                        }
                      }
                    }
                  }
                  // Handle tool calls in delta
                  else if (chunk.delta.tool_calls && Array.isArray(chunk.delta.tool_calls)) {
                    console.log("Tool calls in delta:", chunk.delta.tool_calls)
                    for (const toolCall of chunk.delta.tool_calls) {
                      if (toolCall.function) {
                        // Check if this is a new tool call
                        if (toolCall.function.name) {
                          console.log("New tool call:", toolCall.function.name)
                          const functionCall: FunctionCall = {
                            name: toolCall.function.name,
                            arguments: undefined,
                            status: "pending",
                            argumentsString: toolCall.function.arguments || ""
                          }
                          currentFunctionCalls.push(functionCall)
                          console.log("Added tool call:", functionCall)
                        }
                        // Or if this is arguments continuation
                        else if (toolCall.function.arguments) {
                          console.log("Tool call arguments delta:", toolCall.function.arguments)
                          const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                          if (lastFunctionCall) {
                            if (!lastFunctionCall.argumentsString) {
                              lastFunctionCall.argumentsString = ""
                            }
                            lastFunctionCall.argumentsString += toolCall.function.arguments
                            console.log("Accumulated tool arguments:", lastFunctionCall.argumentsString)

                            // Try to parse arguments if they look complete
                            if (lastFunctionCall.argumentsString.includes("}")) {
                              try {
                                const parsed = JSON.parse(lastFunctionCall.argumentsString)
                                lastFunctionCall.arguments = parsed
                                lastFunctionCall.status = "completed"
                                console.log("Parsed tool arguments:", parsed)
                              } catch (e) {
                                console.log("Tool arguments not yet complete or invalid JSON:", e)
                              }
                            }
                          }
                        }
                      }
                    }
                  }
                  // Handle content/text in delta
                  else if (chunk.delta.content) {
                    console.log("Content delta:", chunk.delta.content)
                    currentContent += chunk.delta.content
                  }

                  // Handle finish reason
                  if (chunk.delta.finish_reason) {
                    console.log("Finish reason:", chunk.delta.finish_reason)
                    // Mark any pending function calls as completed
                    currentFunctionCalls.forEach(fc => {
                      if (fc.status === "pending" && fc.argumentsString) {
                        try {
                          fc.arguments = JSON.parse(fc.argumentsString)
                          fc.status = "completed"
                          console.log("Completed function call on finish:", fc)
                        } catch (e) {
                          fc.arguments = { raw: fc.argumentsString }
                          fc.status = "error"
                          console.log("Error parsing function call on finish:", fc, e)
                        }
                      }
                    })
                  }
                }
                // Handle Realtime API format (this is what you're actually getting!)
                else if (chunk.type === "response.output_item.added" && chunk.item?.type === "function_call") {
                  console.log("Function call started (Realtime API):", chunk.item.name)
                  const functionCall: FunctionCall = {
                    name: chunk.item.name || "unknown",
                    arguments: undefined,
                    status: "pending",
                    argumentsString: ""
                  }
                  currentFunctionCalls.push(functionCall)
                }
                // Handle function call arguments streaming (Realtime API)
                else if (chunk.type === "response.function_call_arguments.delta") {
                  console.log("Function args delta (Realtime API):", chunk.delta)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall) {
                    if (!lastFunctionCall.argumentsString) {
                      lastFunctionCall.argumentsString = ""
                    }
                    lastFunctionCall.argumentsString += chunk.delta || ""
                    console.log("Accumulated arguments (Realtime API):", lastFunctionCall.argumentsString)
                  }
                }
                // Handle function call arguments completion (Realtime API)
                else if (chunk.type === "response.function_call_arguments.done") {
                  console.log("Function args done (Realtime API):", chunk.arguments)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall) {
                    try {
                      lastFunctionCall.arguments = JSON.parse(chunk.arguments || "{}")
                      lastFunctionCall.status = "completed"
                      console.log("Parsed function arguments (Realtime API):", lastFunctionCall.arguments)
                    } catch (e) {
                      lastFunctionCall.arguments = { raw: chunk.arguments }
                      lastFunctionCall.status = "error"
                      console.log("Error parsing function arguments (Realtime API):", e)
                    }
                  }
                }
                // Handle function call completion (Realtime API)
                else if (chunk.type === "response.output_item.done" && chunk.item?.type === "function_call") {
                  console.log("Function call done (Realtime API):", chunk.item.status)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall) {
                    lastFunctionCall.status = chunk.item.status === "completed" ? "completed" : "error"
                  }
                }
                // Handle tool call completion with results (new format)
                else if (chunk.type === "response.output_item.done" && chunk.item?.type?.includes("_call")) {
                  console.log("Tool call done with results (new format):", chunk.item)

                  // Find existing function call by ID, or by name/type if ID not available
                  let functionCall = currentFunctionCalls.find(fc =>
                    fc.id === chunk.item.id ||
                    (fc.name === chunk.item.type) ||
                    (fc.name.includes(chunk.item.type.replace('_call', '')) ||
                     chunk.item.type.includes(fc.name))
                  )

                  if (functionCall) {
                    // Update existing function call
                    functionCall.arguments = chunk.item.inputs || functionCall.arguments
                    functionCall.status = chunk.item.status === "completed" ? "completed" : "error"
                    functionCall.id = chunk.item.id
                    functionCall.type = chunk.item.type

                    // Set the results
                    if (chunk.item.results) {
                      functionCall.result = chunk.item.results
                    }
                  } else {
                    // Only create new if we really can't find an existing one
                    console.log("Creating new function call for:", chunk.item.type)
                    functionCall = {
                      name: chunk.item.type || "unknown",
                      arguments: chunk.item.inputs || {},
                      status: "completed" as const,
                      id: chunk.item.id,
                      type: chunk.item.type,
                      result: chunk.item.results
                    }
                    currentFunctionCalls.push(functionCall)
                  }
                }
                // Handle function call output item added (new format)
                else if (chunk.type === "response.output_item.added" && chunk.item?.type?.includes("_call")) {
                  console.log("Tool call started (new format):", chunk.item)
                  const functionCall = {
                    name: chunk.item.type || "unknown",
                    arguments: chunk.item.inputs || {},
                    status: "pending" as const,
                    id: chunk.item.id,
                    type: chunk.item.type
                  }
                  currentFunctionCalls.push(functionCall)
                }
                // Handle function call results
                else if (chunk.type === "response.function_call.result" || chunk.type === "function_call_result") {
                  console.log("Function call result:", chunk.result || chunk)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall) {
                    lastFunctionCall.result = chunk.result || chunk.output || chunk.response
                    lastFunctionCall.status = "completed"
                  }
                }
                // Handle tool call results
                else if (chunk.type === "response.tool_call.result" || chunk.type === "tool_call_result") {
                  console.log("Tool call result:", chunk.result || chunk)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall) {
                    lastFunctionCall.result = chunk.result || chunk.output || chunk.response
                    lastFunctionCall.status = "completed"
                  }
                }
                // Handle generic results that might be in different formats
                else if ((chunk.type && chunk.type.includes("result")) || chunk.result) {
                  console.log("Generic result:", chunk)
                  const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]
                  if (lastFunctionCall && !lastFunctionCall.result) {
                    lastFunctionCall.result = chunk.result || chunk.output || chunk.response || chunk
                    lastFunctionCall.status = "completed"
                  }
                }
                // Handle text output streaming (Realtime API)
                else if (chunk.type === "response.output_text.delta") {
                  console.log("Text delta (Realtime API):", chunk.delta)
                  currentContent += chunk.delta || ""
                }
                // Log unhandled chunks
                else if (chunk.type !== null && chunk.object !== "response.chunk") {
                  console.log("Unhandled chunk format:", chunk)
                }

                // Update streaming message
                setStreamingMessage({
                  content: currentContent,
                  functionCalls: [...currentFunctionCalls],
                  timestamp: new Date()
                })
              } catch (parseError) {
                console.warn("Failed to parse chunk:", line, parseError)
              }
            }
          }
        }
      } finally {
        reader.releaseLock()
      }

      // Finalize the message
      const finalMessage: Message = {
        role: "assistant",
        content: currentContent,
        functionCalls: currentFunctionCalls,
        timestamp: new Date()
      }

      setMessages(prev => [...prev, finalMessage])
      setStreamingMessage(null)

      // Store the response ID for the next request for this endpoint
      if (newResponseId) {
        setPreviousResponseIds(prev => ({
          ...prev,
          [endpoint]: newResponseId
        }))
      }
    } catch (error) {
      console.error("SSE Stream error:", error)
      setStreamingMessage(null)
      const
errorMessage: Message = {
        role: "assistant",
        content: "Sorry, I couldn't connect to the chat service. Please try again.",
        timestamp: new Date()
      }
      setMessages(prev => [...prev, errorMessage])
    }
  }

  // Submit the current input, either streaming (asyncMode) or as a one-shot
  // JSON request. Loading state is cleared in both paths.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    if (!input.trim() || loading) return

    const userMessage: Message = {
      role: "user",
      content: input.trim(),
      timestamp: new Date()
    }

    setMessages(prev => [...prev, userMessage])
    setInput("")
    setLoading(true)

    if (asyncMode) {
      await handleSSEStream(userMessage)
    } else {
      // Original non-streaming logic
      try {
        const apiEndpoint = endpoint === "chat" ? "/api/chat" : "/api/langflow"
        const requestBody: RequestBody = {
          prompt: userMessage.content
        }

        // Add previous_response_id if we have one for this endpoint
        const currentResponseId = previousResponseIds[endpoint]
        if (currentResponseId) {
          requestBody.previous_response_id = currentResponseId
        }

        const response = await fetch(apiEndpoint, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
          },
          body: JSON.stringify(requestBody),
        })

        const result = await response.json()

        if (response.ok) {
          const assistantMessage: Message = {
            role: "assistant",
            content: result.response,
            timestamp: new Date()
          }
          setMessages(prev => [...prev, assistantMessage])

          // Store the response ID if present for this endpoint
          if (result.response_id) {
            setPreviousResponseIds(prev => ({
              ...prev,
              [endpoint]: result.response_id
            }))
          }
        } else {
          console.error("Chat failed:", result.error)
          const errorMessage: Message = {
            role: "assistant",
            content: "Sorry, I encountered an error. Please try again.",
            timestamp: new Date()
          }
          setMessages(prev => [...prev, errorMessage])
        }
      } catch (error) {
        console.error("Chat error:", error)
        const errorMessage: Message = {
          role: "assistant",
          content: "Sorry, I couldn't connect to the chat service. Please try again.",
          timestamp: new Date()
        }
        setMessages(prev => [...prev, errorMessage])
      }
    }

    setLoading(false)
  }

  // Expand or collapse one function-call details panel by its synthetic ID
  // ("<messageIndex>-<index>", or "streaming-<index>" for the live message).
  const toggleFunctionCall = (functionCallId: string) => {
    setExpandedFunctionCalls(prev => {
      const newSet = new Set(prev)
      if (newSet.has(functionCallId)) {
        newSet.delete(functionCallId)
      } else {
        newSet.add(functionCallId)
      }
      return newSet
    })
  }

  // Render the collapsible function-call panels for one message.
  const renderFunctionCalls = (functionCalls: FunctionCall[], messageIndex?: number) => {
    if (!functionCalls || functionCalls.length === 0) return null

    return (
// NOTE(review): The JSX from here to the end of the file is corrupted — the
// element open/close tags appear to have been stripped from the source (only
// text children, attribute fragments, stray `>`/`/>` and expression braces
// remain, e.g. `toggleFunctionCall(functionCallId)} >`). Do not attempt to
// edit this section by hand; restore it from version control.
{functionCalls.map((fc, index) => { const functionCallId = `${messageIndex || 'streaming'}-${index}` const isExpanded = expandedFunctionCalls.has(functionCallId) return (
toggleFunctionCall(functionCallId)} > Function Call: {fc.name}
{fc.status}
{isExpanded ? ( ) : ( )}
{isExpanded && (
{/* Show arguments - either completed or streaming */} {(fc.arguments || fc.argumentsString) && (
Arguments:
                        {fc.arguments 
                          ? JSON.stringify(fc.arguments, null, 2)
                          : fc.argumentsString || "..."
                        }
                      
)} {fc.result && (
Result: {Array.isArray(fc.result) ? (
{fc.result.map((result, idx) => (
{result.data?.file_path && (
📄 {result.data.file_path || "Unknown file"}
)} {result.data?.text && (
{result.data.text.length > 300 ? result.data.text.substring(0, 300) + "..." : result.data.text }
)} {result.text_key && (
Key: {result.text_key}
)}
))}
Found {fc.result.length} result{fc.result.length !== 1 ? 's' : ''}
) : (
                          {JSON.stringify(fc.result, null, 2)}
                        
)}
)}
)}
) })}
) } return (
// NOTE(review): ChatPage's main render (messages area, drag-and-drop overlay,
// input form) and ProtectedChatPage's returned JSX (`return ( ) }` below is
// empty) are likewise missing their element tags — restore from version control.

Chat Assistant

Ask questions about your documents and get AI-powered answers

Chat
{/* Async Mode Toggle */}
{/* Endpoint Toggle */}
Chat with AI about your indexed documents using {endpoint === "chat" ? "Chat" : "Langflow"} endpoint {asyncMode ? " with real-time streaming" : ""}
{/* Messages Area */}
{messages.length === 0 && !streamingMessage ? (
{isDragOver ? ( <>

Drop your document here

I'll process it and add it to our conversation context

) : isUploading ? ( <>

Processing your document...

This may take a few moments

) : ( <>

Start a conversation by asking a question!

I can help you find information in your documents.

💡 Tip: Drag & drop a document here to add context

)}
) : ( <> {messages.map((message, index) => (
{message.role === "user" && (
User

{message.content}

)} {message.role === "assistant" && (
AI gpt-4.1
Finished {message.timestamp.toLocaleTimeString()}
{renderFunctionCalls(message.functionCalls || [], index)}

{message.content}

)}
))} {/* Streaming Message Display */} {streamingMessage && (
AI gpt-4.1
Streaming... {streamingMessage.timestamp.toLocaleTimeString()}
{renderFunctionCalls(streamingMessage.functionCalls, messages.length)}

{streamingMessage.content}

)} {loading && !asyncMode && (
AI gpt-4.1
Thinking...
)}
)} {/* Drag overlay for existing messages */} {isDragOver && messages.length > 0 && (

Drop document to add context

)}
{/* Input Area */}
setInput(e.target.value)} placeholder="Ask a question about your documents..." disabled={loading} className="flex-1 min-w-0" />
) } export default function ProtectedChatPage() { return ( ) }