diff --git a/Dockerfile.langflow b/Dockerfile.langflow
index bdae1f70..5343585a 100644
--- a/Dockerfile.langflow
+++ b/Dockerfile.langflow
@@ -1,4 +1,4 @@
-FROM langflowai/langflow-nightly:1.6.3.dev1
+FROM langflowai/langflow-nightly:1.7.0.dev5
 
 EXPOSE 7860
 
diff --git a/docker-compose-cpu.yml b/docker-compose-cpu.yml
index cfe082f1..b0585897 100644
--- a/docker-compose-cpu.yml
+++ b/docker-compose-cpu.yml
@@ -96,7 +96,7 @@ services:
   langflow:
     volumes:
       - ./flows:/app/flows:U,z
-    image: langflowai/langflow-nightly:${LANGFLOW_VERSION:-1.7.0.dev5}
+    image: phact/openrag-langflow:${LANGFLOW_VERSION:-latest}
     # build:
     #   context: .
     #   dockerfile: Dockerfile.langflow
diff --git a/frontend/app/api/queries/useGetNudgesQuery.ts b/frontend/app/api/queries/useGetNudgesQuery.ts
index c2668f61..45ef61e7 100644
--- a/frontend/app/api/queries/useGetNudgesQuery.ts
+++ b/frontend/app/api/queries/useGetNudgesQuery.ts
@@ -76,6 +76,11 @@ export const useGetNudgesQuery = (
     {
       queryKey: ["nudges", chatId, filters, limit, scoreThreshold],
       queryFn: getNudges,
+      refetchInterval: (query) => {
+        // If data is empty, refetch every 5 seconds
+        const data = query.state.data;
+        return Array.isArray(data) && data.length === 0 ? 5000 : false;
+      },
       ...options,
     },
     queryClient,
diff --git a/frontend/app/chat/page.tsx b/frontend/app/chat/page.tsx
index d63fc92a..a7362e4d 100644
--- a/frontend/app/chat/page.tsx
+++ b/frontend/app/chat/page.tsx
@@ -782,7 +782,11 @@ function ChatPage() {
       timestamp: new Date(),
     };
 
-    setMessages((prev) => [...prev, userMessage]);
+    if (messages.length === 1) {
+      setMessages([userMessage]);
+    } else {
+      setMessages((prev) => [...prev, userMessage]);
+    }
     setInput("");
     setLoading(true);
     setIsFilterHighlighted(false);
diff --git a/frontend/app/onboarding/_components/animated-provider-steps.tsx b/frontend/app/onboarding/_components/animated-provider-steps.tsx
index 79188ba0..4a1b0e47 100644
--- a/frontend/app/onboarding/_components/animated-provider-steps.tsx
+++ b/frontend/app/onboarding/_components/animated-provider-steps.tsx
@@ -5,210 +5,211 @@ import { CheckIcon, XIcon } from "lucide-react";
 import { useEffect, useState } from "react";
 import AnimatedProcessingIcon from "@/components/icons/animated-processing-icon";
 import {
   Accordion,
   AccordionContent,
   AccordionItem,
   AccordionTrigger,
 } from "@/components/ui/accordion";
+import { ONBOARDING_CARD_STEPS_KEY } from "@/lib/constants";
 import { cn } from "@/lib/utils";
 
 export function AnimatedProviderSteps({
   currentStep,
   isCompleted,
   setCurrentStep,
   steps,
-  storageKey = "provider-steps",
+  storageKey = ONBOARDING_CARD_STEPS_KEY,
   processingStartTime,
   hasError = false,
 }: {
   currentStep: number;
   isCompleted: boolean;
   setCurrentStep: (step: number) => void;
   steps: string[];
   storageKey?: string;
   processingStartTime?: number | null;
   hasError?: boolean;
 }) {
   const [startTime, setStartTime] = useState<number | null>(null);
   const [elapsedTime, setElapsedTime] = useState(0);
 
   // Initialize start time from prop or local storage
   useEffect(() => {
-    const storedElapsedTime = localStorage.getItem(`${storageKey}-elapsed`);
+    const storedElapsedTime = localStorage.getItem(storageKey);
 
     if (isCompleted && storedElapsedTime) {
       // If completed, use stored elapsed time
       setElapsedTime(parseFloat(storedElapsedTime));
     } else if (processingStartTime) {
       // Use the start time passed from parent (when user clicked Complete)
       setStartTime(processingStartTime);
     }
   }, [storageKey, isCompleted, processingStartTime]);
 
   // Progress through steps
   useEffect(() => {
     if (currentStep < steps.length - 1 && !isCompleted) {
       const interval = setInterval(() => {
         setCurrentStep(currentStep + 1);
       }, 1500);
       return () => clearInterval(interval);
     }
   }, [currentStep, setCurrentStep, steps, isCompleted]);
 
   // Calculate and store elapsed time when completed
   useEffect(() => {
     if (isCompleted && startTime) {
       const elapsed = Date.now() - startTime;
       setElapsedTime(elapsed);
-      localStorage.setItem(`${storageKey}-elapsed`, elapsed.toString());
+      localStorage.setItem(storageKey, elapsed.toString());
     }
   }, [isCompleted, startTime, storageKey]);
 
   const isDone = currentStep >= steps.length && !isCompleted && !hasError;
 
   return (
 [remainder of hunk not recoverable: the returned JSX (an accordion showing "Thinking" / "Done" / "Error", the current step label from steps[currentStep], and, once completed, `Initialized in ${(elapsedTime / 1000).toFixed(1)} seconds` above the steps.map list) was re-indented; its markup was stripped from this copy of the diff]
   );
 }
diff --git a/frontend/app/onboarding/_components/onboarding-card.tsx b/frontend/app/onboarding/_components/onboarding-card.tsx
index d79259cf..34f3680c 100644
--- a/frontend/app/onboarding/_components/onboarding-card.tsx
+++ b/frontend/app/onboarding/_components/onboarding-card.tsx
@@ -6,8 +6,8 @@ import { Info, X } from "lucide-react";
 import { useEffect, useState } from "react";
 import { toast } from "sonner";
 import {
-  type OnboardingVariables,
-  useOnboardingMutation,
+  type OnboardingVariables,
+  useOnboardingMutation,
 } from "@/app/api/mutations/useOnboardingMutation";
 import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
 import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery";
@@ -20,10 +20,11 @@ import OpenAILogo from "@/components/icons/openai-logo";
 import { Button } from "@/components/ui/button";
 import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
 import {
-  Tooltip,
-  TooltipContent,
-  TooltipTrigger,
+  Tooltip,
+  TooltipContent,
+  TooltipTrigger,
 } from "@/components/ui/tooltip";
+import { ONBOARDING_CARD_STEPS_KEY } from "@/lib/constants";
 import { cn } from "@/lib/utils";
 import { AnimatedProviderSteps } from "./animated-provider-steps";
 import { AnthropicOnboarding } from "./anthropic-onboarding";
@@ -33,506 +34,507 @@ import { OpenAIOnboarding } from "./openai-onboarding";
 import { TabTrigger } from "./tab-trigger";
 
 [remainder of hunk not recoverable in full: the OnboardingCardProps interface, the STEP_LIST / EMBEDDING_STEP_LIST constants, and the OnboardingCard component body (provider tabs for Anthropic, OpenAI, IBM watsonx.ai, and Ollama; the isProviderAlreadyConfigured check; settings state; task polling; useOnboardingMutation success/error handling; handleComplete; and the footer states "Loading models...", "docling-serve must be running to continue", "Please fill in all required fields") appear in this copy only as a whitespace-level re-indentation, and the JSX markup was stripped]
 
 export default OnboardingCard;
diff --git a/frontend/app/onboarding/_components/onboarding-content.tsx b/frontend/app/onboarding/_components/onboarding-content.tsx
index 2167ec0f..3e5428ba 100644
--- a/frontend/app/onboarding/_components/onboarding-content.tsx
+++ b/frontend/app/onboarding/_components/onboarding-content.tsx
@@ -1,6 +1,6 @@
 "use client";
 
-import { useEffect, useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import { StickToBottom } from "use-stick-to-bottom";
 import { AssistantMessage } from "@/app/chat/_components/assistant-message";
 import Nudges from "@/app/chat/_components/nudges";
@@ -8,26 +8,79 @@ import { UserMessage } from "@/app/chat/_components/user-message";
 import type { Message } from "@/app/chat/_types/types";
 import OnboardingCard from "@/app/onboarding/_components/onboarding-card";
 import { useChatStreaming } from "@/hooks/useChatStreaming";
+import {
+  ONBOARDING_ASSISTANT_MESSAGE_KEY,
+  ONBOARDING_SELECTED_NUDGE_KEY,
+} from "@/lib/constants";
 import { OnboardingStep } from "./onboarding-step";
 import OnboardingUpload from "./onboarding-upload";
 
 export function OnboardingContent({
   handleStepComplete,
+  handleStepBack,
   currentStep,
 }: {
   handleStepComplete: () => void;
+  handleStepBack: () => void;
   currentStep: number;
 }) {
+  const parseFailedRef = useRef(false);
   const [responseId, setResponseId] = useState<string | null>(null);
-  const [selectedNudge, setSelectedNudge] = useState("");
+  const [selectedNudge, setSelectedNudge] = useState(() => {
+    // Retrieve selected nudge from localStorage on mount
+    if (typeof window === "undefined") return "";
+    return localStorage.getItem(ONBOARDING_SELECTED_NUDGE_KEY) || "";
+  });
   const [assistantMessage, setAssistantMessage] = useState<Message | null>(
-    null,
+    () => {
+      // Retrieve assistant message from localStorage on mount
+      if (typeof window === "undefined") return null;
+      const savedMessage = localStorage.getItem(ONBOARDING_ASSISTANT_MESSAGE_KEY);
+      if (savedMessage) {
+        try {
+          const parsed = JSON.parse(savedMessage);
+          // Convert timestamp string back to Date object
+          return {
+            ...parsed,
+            timestamp: new Date(parsed.timestamp),
+          };
+        } catch (error) {
+          console.error("Failed to parse saved assistant message:", error);
+          parseFailedRef.current = true;
+          // Clear corrupted data - will go back a step in useEffect
+          if (typeof window !== "undefined") {
+            localStorage.removeItem(ONBOARDING_ASSISTANT_MESSAGE_KEY);
+            localStorage.removeItem(ONBOARDING_SELECTED_NUDGE_KEY);
+          }
+          return null;
+        }
+      }
+      return null;
+    },
   );
 
+  // Handle parse errors by going back a step
+  useEffect(() => {
+    if (parseFailedRef.current && currentStep >= 2) {
+      handleStepBack();
+    }
+  }, [handleStepBack, currentStep]);
+
   const { streamingMessage, isLoading, sendMessage } = useChatStreaming({
     onComplete: (message, newResponseId) => {
       setAssistantMessage(message);
+      // Save assistant message to localStorage when complete
+      if (typeof window !== "undefined") {
+        try {
+          localStorage.setItem(
+            ONBOARDING_ASSISTANT_MESSAGE_KEY,
+            JSON.stringify(message),
+          );
+        } catch (error) {
+          console.error("Failed to save assistant message to localStorage:", error);
+        }
+      }
       if (newResponseId) {
         setResponseId(newResponseId);
       }
@@ -47,7 +100,15 @@ export function OnboardingContent({
 
   const handleNudgeClick = async (nudge: string) => {
     setSelectedNudge(nudge);
+    // Save selected nudge to localStorage
+    if (typeof window !== "undefined") {
+      localStorage.setItem(ONBOARDING_SELECTED_NUDGE_KEY, nudge);
+    }
     setAssistantMessage(null);
+    // Clear saved assistant message when starting a new conversation
+    if (typeof window !== "undefined") {
+      localStorage.removeItem(ONBOARDING_ASSISTANT_MESSAGE_KEY);
+    }
     setTimeout(async () => {
       await sendMessage({
         prompt: nudge,
diff --git a/frontend/app/onboarding/_components/onboarding-upload.tsx b/frontend/app/onboarding/_components/onboarding-upload.tsx
index 70d487bd..60fb676e 100644
--- a/frontend/app/onboarding/_components/onboarding-upload.tsx
+++ b/frontend/app/onboarding/_components/onboarding-upload.tsx
@@ -4,154 +4,156 @@ import { useGetNudgesQuery } from "@/app/api/queries/useGetNudgesQuery";
 import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery";
 import { AnimatedProviderSteps } from "@/app/onboarding/_components/animated-provider-steps";
 import { Button } from "@/components/ui/button";
+import { ONBOARDING_UPLOAD_STEPS_KEY } from "@/lib/constants";
 import { uploadFile } from "@/lib/upload-utils";
 
 [remainder of hunk not recoverable in full: the OnboardingUpload component (its STEP_LIST of "Uploading your document", "Generating embeddings", "Ingesting document", "Processing your document"; task polling via useGetTasksQuery; nudge refetching; performUpload / handleFileChange; and the AnimatedProviderSteps view) appears in this copy only as a whitespace-level re-indentation, and its JSX markup was stripped]
 
 export default OnboardingUpload;
diff --git a/frontend/app/onboarding/_components/progress-bar.tsx b/frontend/app/onboarding/_components/progress-bar.tsx
index ce79c322..618483ab 100644
--- a/frontend/app/onboarding/_components/progress-bar.tsx
+++ b/frontend/app/onboarding/_components/progress-bar.tsx
@@ -32,7 +32,7 @@ export function ProgressBar({
-      {currentStep > 0 && onSkip && (
+      {currentStep > 1 && onSkip && (
 [surrounding context of this hunk, plus the diff header and earlier hunks of the file containing the Navigation component, were stripped; the recoverable Navigation changes follow]
-        )}
         {/* Show regular conversations */}
-        {conversations.length === 0 && !placeholderConversation ? (
+        {conversations.length === 0 && !isConversationsLoading ? (
             No conversations yet
@@ -469,7 +396,7 @@ export function Navigation({
               key={conversation.response_id}
               type="button"
               className={`w-full px-3 h-11 rounded-lg group relative text-left ${
-                loading
+                loading || isConversationsLoading
                   ? "opacity-50 cursor-not-allowed"
                   : "hover:bg-accent cursor-pointer"
               } ${
 [hunk header not recoverable]
                   : ""
               }`}
               onClick={() => {
-                if (loading) return;
+                if (loading || isConversationsLoading) return;
                 loadConversation(conversation);
                 refreshConversations();
               }}
-              disabled={loading}
+              disabled={loading || isConversationsLoading}
             >
@@ -493,7 +420,7 @@ export function Navigation({
 [hunk content not recoverable: JSX markup stripped]
@@ -543,51 +470,6 @@
         )}
 
-        {/* Conversation Knowledge Section - appears right after last conversation
 [about 45 removed lines: the commented-out markup for a "Conversation knowledge" list rendering conversationDocs with each doc.filename and a "No documents yet" empty state]
-        */}
 
@@ -595,7 +477,7 @@ export function Navigation({
-          {newConversationFiles?.length === 0 ? (
+          {(newConversationFiles?.length ?? 0) === 0 ? (
             No documents yet
diff --git a/frontend/components/ui/sonner.tsx b/frontend/components/ui/sonner.tsx
index 549cf841..dc043507 100644
--- a/frontend/components/ui/sonner.tsx
+++ b/frontend/components/ui/sonner.tsx
@@ -16,11 +16,11 @@ const Toaster = ({ ...props }: ToasterProps) => {
         classNames: {
           toast:
             "group toast group-[.toaster]:bg-background group-[.toaster]:text-foreground group-[.toaster]:border-border group-[.toaster]:shadow-lg",
-          description: "group-[.toast]:text-muted-foreground",
+          description: "!text-muted-foreground",
           actionButton:
-            "group-[.toast]:bg-primary group-[.toast]:text-primary-foreground",
+            "!bg-primary !text-primary-foreground",
           cancelButton:
-            "group-[.toast]:bg-muted group-[.toast]:text-muted-foreground",
+            "!bg-muted !text-muted-foreground",
         },
       }}
       {...props}
diff --git a/frontend/lib/constants.ts b/frontend/lib/constants.ts
index e4870fa8..6368ed0e 100644
--- a/frontend/lib/constants.ts
+++ b/frontend/lib/constants.ts
@@ -28,12 +28,16 @@ export const UI_CONSTANTS = {
 export const ANIMATION_DURATION = 0.4;
 export const SIDEBAR_WIDTH = 280;
 export const HEADER_HEIGHT = 54;
-export const TOTAL_ONBOARDING_STEPS = 4;
+export const TOTAL_ONBOARDING_STEPS = 5;
 
 /**
  * Local Storage Keys
 */
 export const ONBOARDING_STEP_KEY = "onboarding_current_step";
+export const ONBOARDING_ASSISTANT_MESSAGE_KEY = "onboarding_assistant_message";
+export const ONBOARDING_SELECTED_NUDGE_KEY = "onboarding_selected_nudge";
+export const ONBOARDING_CARD_STEPS_KEY = "onboarding_card_steps";
+export const ONBOARDING_UPLOAD_STEPS_KEY = "onboarding_upload_steps";
 
 export const FILES_REGEX =
   /(?<=I'm uploading a document called ['"])[^'"]+\.[^.]+(?=['"]\. Here is its content:)/;
diff --git a/pyproject.toml b/pyproject.toml
index 566572c4..934d2d8e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "openrag"
-version = "0.1.34"
+version = "0.1.35"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.13"
diff --git a/uv.lock b/uv.lock
index 5dfc22b2..cb8cd34e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 3
+revision = 2
 requires-python = ">=3.13"
 resolution-markers = [
     "platform_machine == 'x86_64' and sys_platform == 'linux'",
@@ -2352,7 +2352,7 @@ wheels = [
 
 [[package]]
 name = "openrag"
-version = "0.1.33"
+version = "0.1.35"
 source = { editable = "." }
 dependencies = [
     { name = "agentd" },