- {isLoadingModels && loadingStatus.length > 0 ? (
- {loadingStatus[currentStatusIndex]}
- ) : isMarkdown ? (
+ {isMarkdown ? (
) : (
- <>
- {displayedText}
- {!showChildren && !isCompleted && (
- )}
- {reserveSpaceForThinking &&
- }
- </>
+ {displayedText}
+ {!showChildren && !isCompleted && (
+ )}
)}
{children && (
@@ -178,7 +115,7 @@ export function OnboardingStep({
exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.3, delay: 0.3, ease: "easeOut" }}
>
- {children}
+ {children}
)}
diff --git a/frontend/src/app/onboarding/components/ibm-onboarding.tsx b/frontend/src/app/onboarding/components/ibm-onboarding.tsx
index cd638025..272ab1b2 100644
--- a/frontend/src/app/onboarding/components/ibm-onboarding.tsx
+++ b/frontend/src/app/onboarding/components/ibm-onboarding.tsx
@@ -15,13 +15,11 @@ export function IBMOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
- setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
- setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com");
const [apiKey, setApiKey] = useState("");
@@ -91,6 +89,10 @@ export function IBMOnboarding({
setSampleDataset(dataset);
};
+ useEffect(() => {
+ setIsLoadingModels?.(isLoadingModels);
+ }, [isLoadingModels, setIsLoadingModels]);
+
// Update settings when values change
useUpdateSettings(
"watsonx",
@@ -104,18 +106,6 @@ export function IBMOnboarding({
setSettings,
);
- // Notify parent about loading state
- useEffect(() => {
- setIsLoadingModels?.(isLoadingModels);
-
- // Set detailed loading status
- if (isLoadingModels) {
- const status = ["Connecting to IBM watsonx.ai", "Fetching language models", "Fetching embedding models"];
- setLoadingStatus?.(status);
- } else {
- setLoadingStatus?.([]);
- }
- }, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
diff --git a/frontend/src/app/onboarding/components/ollama-onboarding.tsx b/frontend/src/app/onboarding/components/ollama-onboarding.tsx
index 82d86d83..b085fa95 100644
--- a/frontend/src/app/onboarding/components/ollama-onboarding.tsx
+++ b/frontend/src/app/onboarding/components/ollama-onboarding.tsx
@@ -14,13 +14,11 @@ export function OllamaOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
- setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
- setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
const [showConnecting, setShowConnecting] = useState(false);
@@ -74,20 +72,7 @@ export function OllamaOnboarding({
},
setSettings,
);
-
- // Notify parent about loading state
- useEffect(() => {
- setIsLoadingModels?.(isLoadingModels);
-
- // Set detailed loading status
- if (isLoadingModels) {
- const status = ["Connecting to Ollama", "Fetching language models", "Fetching embedding models"];
- setLoadingStatus?.(status);
- } else {
- setLoadingStatus?.([]);
- }
- }, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
-
+
// Check validation state based on models query
const hasConnectionError = debouncedEndpoint && modelsError;
const hasNoModels =
diff --git a/frontend/src/app/onboarding/components/onboarding-card.tsx b/frontend/src/app/onboarding/components/onboarding-card.tsx
index 4414b601..a1c8b79f 100644
--- a/frontend/src/app/onboarding/components/onboarding-card.tsx
+++ b/frontend/src/app/onboarding/components/onboarding-card.tsx
@@ -44,8 +44,6 @@ const TOTAL_PROVIDER_STEPS = STEP_LIST.length;
const OnboardingCard = ({
onComplete,
isCompleted = false,
- setIsLoadingModels: setIsLoadingModelsParent,
- setLoadingStatus: setLoadingStatusParent,
}: OnboardingCardProps) => {
const { isHealthy: isDoclingHealthy } = useDoclingHealth();
@@ -55,40 +53,14 @@ const OnboardingCard = ({
const [isLoadingModels, setIsLoadingModels] = useState<boolean>(false);
- const [loadingStatus, setLoadingStatus] = useState<string[]>([]);
+ const [loadingStep, setLoadingStep] = useState(0);
- const [currentStatusIndex, setCurrentStatusIndex] = useState(0);
-
- // Pass loading state to parent
+ // Reset loading step when models start loading
useEffect(() => {
- setIsLoadingModelsParent?.(isLoadingModels);
- }, [isLoadingModels, setIsLoadingModelsParent]);
-
- useEffect(() => {
- setLoadingStatusParent?.(loadingStatus);
- }, [loadingStatus, setLoadingStatusParent]);
-
- // Cycle through loading status messages once
- useEffect(() => {
- if (!isLoadingModels || loadingStatus.length === 0) {
- setCurrentStatusIndex(0);
- return;
+ if (isLoadingModels) {
+ setLoadingStep(0);
}
-
- const interval = setInterval(() => {
- setCurrentStatusIndex((prev) => {
- const nextIndex = prev + 1;
- // Stop at the last message
- if (nextIndex >= loadingStatus.length - 1) {
- clearInterval(interval);
- return loadingStatus.length - 1;
- }
- return nextIndex;
- });
- }, 1500); // Change status every 1.5 seconds
-
- return () => clearInterval(interval);
- }, [isLoadingModels, loadingStatus]);
+ }, [isLoadingModels]);
const handleSetModelProvider = (provider: string) => {
setModelProvider(provider);
@@ -273,13 +245,36 @@ const OnboardingCard = ({
Ollama
+
+ {isLoadingModels && (
+ )}
+
@@ -288,7 +283,6 @@ const OnboardingCard = ({
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
- setLoadingStatus={setLoadingStatus}
/>
@@ -297,11 +291,12 @@ const OnboardingCard = ({
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
- setLoadingStatus={setLoadingStatus}
/>
+
+
diff --git a/frontend/src/app/onboarding/components/openai-onboarding.tsx b/frontend/src/app/onboarding/components/openai-onboarding.tsx
index 01646ad9..2997cea4 100644
--- a/frontend/src/app/onboarding/components/openai-onboarding.tsx
+++ b/frontend/src/app/onboarding/components/openai-onboarding.tsx
@@ -15,13 +15,11 @@ export function OpenAIOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
- setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
- setLoadingStatus?: (status: string[]) => void;
}) {
const [apiKey, setApiKey] = useState("");
const [getFromEnv, setGetFromEnv] = useState(true);
@@ -62,6 +60,10 @@ export function OpenAIOnboarding({
setEmbeddingModel("");
};
+ useEffect(() => {
+ setIsLoadingModels?.(isLoadingModels);
+ }, [isLoadingModels, setIsLoadingModels]);
+
// Update settings when values change
useUpdateSettings(
"openai",
@@ -72,19 +74,7 @@ export function OpenAIOnboarding({
},
setSettings,
);
-
- // Notify parent about loading state
- useEffect(() => {
- setIsLoadingModels?.(isLoadingModels);
-
- // Set detailed loading status
- if (isLoadingModels) {
- const status = ["Connecting to OpenAI", "Fetching language models", "Fetching embedding models"];
- setLoadingStatus?.(status);
- } else {
- setLoadingStatus?.([]);
- }
- }, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
+
return (
<>
From 511d309c7e2c9b1c74c3d3492c090bb360800e15 Mon Sep 17 00:00:00 2001
From: phact
Date: Mon, 27 Oct 2025 17:33:03 -0400
Subject: [PATCH 6/6] v0.1.25
---
pyproject.toml | 2 +-
uv.lock | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index ea8e4a03..93e68c9d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "openrag"
-version = "0.1.24"
+version = "0.1.25"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
diff --git a/uv.lock b/uv.lock
index 3b095861..e4f7578b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
version = 1
-revision = 2
+revision = 3
requires-python = ">=3.13"
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
@@ -2352,7 +2352,7 @@ wheels = [
[[package]]
name = "openrag"
-version = "0.1.24"
+version = "0.1.25"
source = { editable = "." }
dependencies = [
{ name = "agentd" },