fix: remove provider loading (#314)

* Remove provider loading

* Add loading indicator to the steps
Author: Lucas Oliveira (committed via GitHub)
Date: 2025-10-27 18:21:33 -03:00
Parent: d06d011cd3
Commit: 93ba498d4b
Signature: GPG key ID B5690EEEBB952194 (no known key found for this signature in database)

6 changed files with 53 additions and 163 deletions
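In short, the per-provider loadingStatus strings and the inline "Thinking" ticker in OnboardingStep are removed; each provider form now reports only a boolean to OnboardingCard, which renders one shared AnimatedProviderSteps indicator while models load. A minimal sketch of the resulting wiring, using the prop and component names visible in the diff below (the import path for AnimatedProviderSteps and the stand-in models query are assumptions for illustration, not taken from this commit):

import { useEffect, useState } from "react";
// Path assumed for illustration; the diff only shows the component name.
import { AnimatedProviderSteps } from "@/components/ui/animated-provider-steps";

// Each provider form (OpenAI, watsonx, Ollama) now surfaces only a boolean.
function ProviderForm({
  setIsLoadingModels,
}: {
  setIsLoadingModels?: (loading: boolean) => void;
}) {
  const [isLoadingModels] = useState(false); // stand-in for the provider's models query
  useEffect(() => {
    setIsLoadingModels?.(isLoadingModels); // no per-provider status strings anymore
  }, [isLoadingModels, setIsLoadingModels]);
  return null;
}

// OnboardingCard owns the shared step indicator instead of forwarding status text upward.
function CardLoadingIndicator({ isLoadingModels }: { isLoadingModels: boolean }) {
  const [loadingStep, setLoadingStep] = useState(0);
  if (!isLoadingModels) return null;
  return (
    <AnimatedProviderSteps
      currentStep={loadingStep}
      setCurrentStep={setLoadingStep}
      isCompleted={false}
      steps={[
        "Connecting to the provider",
        "Fetching language models",
        "Fetching embedding models",
      ]}
      storageKey="model-loading-steps"
    />
  );
}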

View file

@@ -24,8 +24,6 @@ export function OnboardingContent({
const [assistantMessage, setAssistantMessage] = useState<Message | null>(
null,
);
const [isLoadingModels, setIsLoadingModels] = useState<boolean>(false);
const [loadingStatus, setLoadingStatus] = useState<string[]>([]);
const { streamingMessage, isLoading, sendMessage } = useChatStreaming({
onComplete: (message, newResponseId) => {
@@ -82,17 +80,12 @@
isCompleted={currentStep > 0}
showCompleted={true}
text="Let's get started by setting up your model provider."
isLoadingModels={isLoadingModels}
loadingStatus={loadingStatus}
reserveSpaceForThinking={true}
>
<OnboardingCard
onComplete={() => {
handleStepComplete();
}}
isCompleted={currentStep > 0}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</OnboardingStep>

View file

@@ -3,7 +3,6 @@ import { type ReactNode, useEffect, useState } from "react";
import { Message } from "@/app/chat/components/message";
import DogIcon from "@/components/logo/dog-icon";
import { MarkdownRenderer } from "@/components/markdown-renderer";
import AnimatedProcessingIcon from "@/components/ui/animated-processing-icon";
import { cn } from "@/lib/utils";
interface OnboardingStepProps {
@@ -15,9 +14,6 @@
icon?: ReactNode;
isMarkdown?: boolean;
hideIcon?: boolean;
isLoadingModels?: boolean;
loadingStatus?: string[];
reserveSpaceForThinking?: boolean;
}
export function OnboardingStep({
@@ -29,35 +25,9 @@
icon,
isMarkdown = false,
hideIcon = false,
isLoadingModels = false,
loadingStatus = [],
reserveSpaceForThinking = false,
}: OnboardingStepProps) {
const [displayedText, setDisplayedText] = useState("");
const [showChildren, setShowChildren] = useState(false);
const [currentStatusIndex, setCurrentStatusIndex] = useState<number>(0);
// Cycle through loading status messages once
useEffect(() => {
if (!isLoadingModels || loadingStatus.length === 0) {
setCurrentStatusIndex(0);
return;
}
const interval = setInterval(() => {
setCurrentStatusIndex((prev) => {
const nextIndex = prev + 1;
// Stop at the last message
if (nextIndex >= loadingStatus.length - 1) {
clearInterval(interval);
return loadingStatus.length - 1;
}
return nextIndex;
});
}, 1500); // Change status every 1.5 seconds
return () => clearInterval(interval);
}, [isLoadingModels, loadingStatus]);
useEffect(() => {
if (!isVisible) {
@@ -115,37 +85,7 @@ export function OnboardingStep({
}
>
<div>
{isLoadingModels && loadingStatus.length > 0 ? (
<div className="flex flex-col gap-2 py-1.5">
<div className="flex items-center gap-2">
<div className="relative w-3.5 h-2.5">
<AnimatedProcessingIcon className="text-current shrink-0 absolute inset-0" />
</div>
<span className="text-mmd font-medium text-muted-foreground">
Thinking
</span>
</div>
<div className="overflow-hidden">
<div className="flex items-center gap-5 overflow-y-hidden relative h-6">
<div className="w-px h-6 bg-border" />
<div className="relative h-5 w-full">
<AnimatePresence mode="sync" initial={false}>
<motion.span
key={currentStatusIndex}
initial={{ y: 24, opacity: 0 }}
animate={{ y: 0, opacity: 1 }}
exit={{ y: -24, opacity: 0 }}
transition={{ duration: 0.3, ease: "easeInOut" }}
className="text-mmd font-medium text-primary absolute left-0"
>
{loadingStatus[currentStatusIndex]}
</motion.span>
</AnimatePresence>
</div>
</div>
</div>
</div>
) : isMarkdown ? (
{isMarkdown ? (
<MarkdownRenderer
className={cn(
isCompleted ? "text-placeholder-foreground" : "text-foreground",
@@ -154,19 +94,16 @@ export function OnboardingStep({
chatMessage={text}
/>
) : (
<>
<p
className={`text-foreground text-sm py-1.5 transition-colors duration-300 ${
isCompleted ? "text-placeholder-foreground" : ""
}`}
>
{displayedText}
{!showChildren && !isCompleted && (
<span className="inline-block w-1 h-3.5 bg-primary ml-1 animate-pulse" />
)}
</p>
{reserveSpaceForThinking && <div className="h-8" />}
</>
<p
className={`text-foreground text-sm py-1.5 transition-colors duration-300 ${
isCompleted ? "text-placeholder-foreground" : ""
}`}
>
{displayedText}
{!showChildren && !isCompleted && (
<span className="inline-block w-1 h-3.5 bg-primary ml-1 animate-pulse" />
)}
</p>
)}
{children && (
<AnimatePresence>
@@ -178,7 +115,7 @@ export function OnboardingStep({
exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.3, delay: 0.3, ease: "easeOut" }}
>
<div className="pt-2">{children}</div>
<div className="pt-4">{children}</div>
</motion.div>
)}
</AnimatePresence>

View file

@@ -15,13 +15,11 @@ export function IBMOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com");
const [apiKey, setApiKey] = useState("");
@@ -91,6 +89,10 @@ export function IBMOnboarding({
setSampleDataset(dataset);
};
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
}, [isLoadingModels, setIsLoadingModels]);
// Update settings when values change
useUpdateSettings(
"watsonx",
@@ -104,18 +106,6 @@ export function IBMOnboarding({
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to IBM watsonx.ai", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
<div className="space-y-4">

View file

@@ -14,13 +14,11 @@ export function OllamaOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
const [showConnecting, setShowConnecting] = useState(false);
@@ -74,20 +72,7 @@ export function OllamaOnboarding({
},
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to Ollama", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
// Check validation state based on models query
const hasConnectionError = debouncedEndpoint && modelsError;
const hasNoModels =

View file

@@ -44,8 +44,6 @@ const TOTAL_PROVIDER_STEPS = STEP_LIST.length;
const OnboardingCard = ({
onComplete,
isCompleted = false,
setIsLoadingModels: setIsLoadingModelsParent,
setLoadingStatus: setLoadingStatusParent,
}: OnboardingCardProps) => {
const { isHealthy: isDoclingHealthy } = useDoclingHealth();
@@ -55,40 +53,14 @@ const OnboardingCard = ({
const [isLoadingModels, setIsLoadingModels] = useState<boolean>(false);
const [loadingStatus, setLoadingStatus] = useState<string[]>([]);
const [loadingStep, setLoadingStep] = useState<number>(0);
const [currentStatusIndex, setCurrentStatusIndex] = useState<number>(0);
// Pass loading state to parent
// Reset loading step when models start loading
useEffect(() => {
setIsLoadingModelsParent?.(isLoadingModels);
}, [isLoadingModels, setIsLoadingModelsParent]);
useEffect(() => {
setLoadingStatusParent?.(loadingStatus);
}, [loadingStatus, setLoadingStatusParent]);
// Cycle through loading status messages once
useEffect(() => {
if (!isLoadingModels || loadingStatus.length === 0) {
setCurrentStatusIndex(0);
return;
if (isLoadingModels) {
setLoadingStep(0);
}
const interval = setInterval(() => {
setCurrentStatusIndex((prev) => {
const nextIndex = prev + 1;
// Stop at the last message
if (nextIndex >= loadingStatus.length - 1) {
clearInterval(interval);
return loadingStatus.length - 1;
}
return nextIndex;
});
}, 1500); // Change status every 1.5 seconds
return () => clearInterval(interval);
}, [isLoadingModels, loadingStatus]);
}, [isLoadingModels]);
const handleSetModelProvider = (provider: string) => {
setModelProvider(provider);
@@ -273,13 +245,36 @@ const OnboardingCard = ({
Ollama
</TabsTrigger>
</TabsList>
<AnimatePresence>
{isLoadingModels && (
<motion.div
initial={{ opacity: 0, height: 0 }}
animate={{ opacity: 1, height: "auto" }}
exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.1, ease: "easeInOut" }}
className="overflow-hidden"
>
<div className="py-3">
<AnimatedProviderSteps
currentStep={loadingStep}
isCompleted={false}
setCurrentStep={setLoadingStep}
steps={[
"Connecting to the provider",
"Fetching language models",
"Fetching embedding models",
]}
storageKey="model-loading-steps"
/></div>
</motion.div>
)}
</AnimatePresence>
<TabsContent value="openai">
<OpenAIOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
<TabsContent value="watsonx">
@@ -288,7 +283,6 @@ const OnboardingCard = ({
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
<TabsContent value="ollama">
@@ -297,11 +291,12 @@ const OnboardingCard = ({
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
</Tabs>
<Tooltip>
<TooltipTrigger asChild>
<div>

View file

@@ -15,13 +15,11 @@ export function OpenAIOnboarding({
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [apiKey, setApiKey] = useState("");
const [getFromEnv, setGetFromEnv] = useState(true);
@@ -62,6 +60,10 @@ export function OpenAIOnboarding({
setEmbeddingModel("");
};
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
}, [isLoadingModels, setIsLoadingModels]);
// Update settings when values change
useUpdateSettings(
"openai",
@@ -72,19 +74,7 @@ export function OpenAIOnboarding({
},
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to OpenAI", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
<div className="space-y-5">