Merge pull request #289 from langflow-ai/fix/provider-design

fix: adds new provider design, fixes issues with auth mode
This commit is contained in:
Mike Fortman 2025-10-23 11:47:18 -05:00 committed by GitHub
commit 883877b4e7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 343 additions and 359 deletions

View file

@@ -14,7 +14,7 @@ const AccordionItem = React.forwardRef<
>(({ className, ...props }, ref) => ( >(({ className, ...props }, ref) => (
<AccordionPrimitive.Item <AccordionPrimitive.Item
ref={ref} ref={ref}
className={cn("border rounded-md", className)} className={cn("border rounded-xl", className)}
{...props} {...props}
/> />
)); ));

View file

@@ -13,7 +13,7 @@ const TabsList = React.forwardRef<
<TabsPrimitive.List <TabsPrimitive.List
ref={ref} ref={ref}
className={cn( className={cn(
"inline-flex h-12 gap-3 items-center justify-center p-0 text-muted-foreground w-full", "inline-flex h-fit gap-3 items-center justify-center p-0 text-muted-foreground w-full",
className, className,
)} )}
{...props} {...props}
@@ -28,7 +28,7 @@ const TabsTrigger = React.forwardRef<
<TabsPrimitive.Trigger <TabsPrimitive.Trigger
ref={ref} ref={ref}
className={cn( className={cn(
"inline-flex w-full h-full border border-border gap-1.5 items-center justify-center whitespace-nowrap rounded-lg px-3 py-1.5 text-sm font-medium transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:border-accent-pink-foreground data-[state=active]:text-foreground", "flex flex-col items-start justify-between p-5 gap-4 w-full h-full border border-border whitespace-nowrap rounded-lg text-sm font-medium transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:border-muted-foreground data-[state=active]:text-foreground",
className, className,
)} )}
{...props} {...props}

View file

@@ -75,20 +75,6 @@ export function AdvancedOnboarding({
/> />
</LabelWrapper> </LabelWrapper>
)} )}
{(hasLanguageModels || hasEmbeddingModels) && !updatedOnboarding && <Separator />}
{!updatedOnboarding && (
<LabelWrapper
label="Sample dataset"
description="Load sample data to chat with immediately."
id="sample-dataset"
flex
>
<Switch
checked={sampleDataset}
onCheckedChange={setSampleDataset}
/>
</LabelWrapper>
)}
</AccordionContent> </AccordionContent>
</AccordionItem> </AccordionItem>
</Accordion> </Accordion>

View file

@@ -7,154 +7,143 @@ import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutat import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutat
import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery"; import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery";
import { useModelSelection } from "../hooks/useModelSelection"; import { useModelSelection } from "../hooks/useModelSelection";
import { useUpdateSettings } from "../hooks/useUpdateSettings"; import { useUpdateSettings } from "../hooks/useUpdateSettings";
import { AdvancedOnboarding } from "./advanced";
import { ModelSelector } from "./model-selector"; import { ModelSelector } from "./model-selector";
export function OllamaOnboarding({ export function OllamaOnboarding({
setSettings, setSettings,
sampleDataset, sampleDataset,
setSampleDataset, setSampleDataset,
}: { }: {
setSettings: (settings: OnboardingVariables) => void; setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean; sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void; setSampleDataset: (dataset: boolean) => void;
}) { }) {
const [endpoint, setEndpoint] = useState(`http://localhost:11434`); const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
const [showConnecting, setShowConnecting] = useState(false); const [showConnecting, setShowConnecting] = useState(false);
const debouncedEndpoint = useDebouncedValue(endpoint, 500); const debouncedEndpoint = useDebouncedValue(endpoint, 500);
// Fetch models from API when endpoint is provided (debounced) // Fetch models from API when endpoint is provided (debounced)
const { const {
data: modelsData, data: modelsData,
isLoading: isLoadingModels, isLoading: isLoadingModels,
error: modelsError, error: modelsError,
} = useGetOllamaModelsQuery( } = useGetOllamaModelsQuery(
debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined, debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined,
); );
// Use custom hook for model selection logic // Use custom hook for model selection logic
const { const {
languageModel, languageModel,
embeddingModel, embeddingModel,
setLanguageModel, setLanguageModel,
setEmbeddingModel, setEmbeddingModel,
languageModels, languageModels,
embeddingModels, embeddingModels,
} = useModelSelection(modelsData); } = useModelSelection(modelsData);
// Handle delayed display of connecting state // Handle delayed display of connecting state
useEffect(() => { useEffect(() => {
let timeoutId: NodeJS.Timeout; let timeoutId: NodeJS.Timeout;
if (debouncedEndpoint && isLoadingModels) { if (debouncedEndpoint && isLoadingModels) {
timeoutId = setTimeout(() => { timeoutId = setTimeout(() => {
setShowConnecting(true); setShowConnecting(true);
}, 500); }, 500);
} else { } else {
setShowConnecting(false); setShowConnecting(false);
} }
return () => { return () => {
if (timeoutId) { if (timeoutId) {
clearTimeout(timeoutId); clearTimeout(timeoutId);
} }
}; };
}, [debouncedEndpoint, isLoadingModels]); }, [debouncedEndpoint, isLoadingModels]);
const handleSampleDatasetChange = (dataset: boolean) => { // Update settings when values change
setSampleDataset(dataset); useUpdateSettings(
}; "ollama",
{
endpoint,
languageModel,
embeddingModel,
},
setSettings,
);
// Update settings when values change // Check validation state based on models query
useUpdateSettings( const hasConnectionError = debouncedEndpoint && modelsError;
"ollama", const hasNoModels =
{ modelsData &&
endpoint, !modelsData.language_models?.length &&
languageModel, !modelsData.embedding_models?.length;
embeddingModel,
},
setSettings,
);
// Check validation state based on models query return (
const hasConnectionError = debouncedEndpoint && modelsError; <div className="space-y-4">
const hasNoModels = <div className="space-y-1">
modelsData && <LabelInput
!modelsData.language_models?.length && label="Ollama Base URL"
!modelsData.embedding_models?.length; helperText="Base URL of your Ollama server"
id="api-endpoint"
return ( required
<> placeholder="http://localhost:11434"
<div className="space-y-4"> value={endpoint}
<div className="space-y-1"> onChange={(e) => setEndpoint(e.target.value)}
<LabelInput />
label="Ollama Base URL" {showConnecting && (
helperText="Base URL of your Ollama server" <p className="text-mmd text-muted-foreground">
id="api-endpoint" Connecting to Ollama server...
required </p>
placeholder="http://localhost:11434" )}
value={endpoint} {hasConnectionError && (
onChange={(e) => setEndpoint(e.target.value)} <p className="text-mmd text-accent-amber-foreground">
/> Cant reach Ollama at {debouncedEndpoint}. Update the base URL or
{showConnecting && ( start the server.
<p className="text-mmd text-muted-foreground"> </p>
Connecting to Ollama server... )}
</p> {hasNoModels && (
)} <p className="text-mmd text-accent-amber-foreground">
{hasConnectionError && ( No models found. Install embedding and agent models on your Ollama
<p className="text-mmd text-accent-amber-foreground"> server.
Can't reach Ollama at {debouncedEndpoint}. Update the base URL or </p>
start the server. )}
</p> </div>
)} <LabelWrapper
{hasNoModels && ( label="Embedding model"
<p className="text-mmd text-accent-amber-foreground"> helperText="Model used for knowledge ingest and retrieval"
No models found. Install embedding and agent models on your Ollama id="embedding-model"
server. required={true}
</p> >
)} <ModelSelector
</div> options={embeddingModels}
<LabelWrapper icon={<OllamaLogo className="w-4 h-4" />}
label="Embedding model" noOptionsPlaceholder={
helperText="Model used for knowledge ingest and retrieval" isLoadingModels
id="embedding-model" ? "Loading models..."
required={true} : "No embedding models detected. Install an embedding model to continue."
> }
<ModelSelector value={embeddingModel}
options={embeddingModels} onValueChange={setEmbeddingModel}
icon={<OllamaLogo className="w-4 h-4" />} />
noOptionsPlaceholder={ </LabelWrapper>
isLoadingModels <LabelWrapper
? "Loading models..." label="Language model"
: "No embedding models detected. Install an embedding model to continue." helperText="Model used for chat"
} id="embedding-model"
value={embeddingModel} required={true}
onValueChange={setEmbeddingModel} >
/> <ModelSelector
</LabelWrapper> options={languageModels}
<LabelWrapper icon={<OllamaLogo className="w-4 h-4" />}
label="Language model" noOptionsPlaceholder={
helperText="Model used for chat" isLoadingModels
id="embedding-model" ? "Loading models..."
required={true} : "No language models detected. Install a language model to continue."
> }
<ModelSelector value={languageModel}
options={languageModels} onValueChange={setLanguageModel}
icon={<OllamaLogo className="w-4 h-4" />} />
noOptionsPlaceholder={ </LabelWrapper>
isLoadingModels </div>
? "Loading models..." );
: "No language models detected. Install a language model to continue."
}
value={languageModel}
onValueChange={setLanguageModel}
/>
</LabelWrapper>
</div>
<AdvancedOnboarding
sampleDataset={sampleDataset}
setSampleDataset={handleSampleDatasetChange}
/>
</>
);
} }

View file

@@ -4,8 +4,8 @@ import { AnimatePresence, motion } from "framer-motion";
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { import {
type OnboardingVariables, type OnboardingVariables,
useOnboardingMutation, useOnboardingMutation,
} from "@/app/api/mutations/useOnboardingMutation"; } from "@/app/api/mutations/useOnboardingMutation";
import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery"; import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery";
import { useDoclingHealth } from "@/components/docling-health-banner"; import { useDoclingHealth } from "@/components/docling-health-banner";
@@ -14,24 +14,25 @@ import OllamaLogo from "@/components/logo/ollama-logo";
import OpenAILogo from "@/components/logo/openai-logo"; import OpenAILogo from "@/components/logo/openai-logo";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { import {
Card, Card,
CardContent, CardContent,
CardFooter, CardFooter,
CardHeader, CardHeader,
} from "@/components/ui/card"; } from "@/components/ui/card";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { import {
Tooltip, Tooltip,
TooltipContent, TooltipContent,
TooltipTrigger, TooltipTrigger,
} from "@/components/ui/tooltip"; } from "@/components/ui/tooltip";
import { cn } from "@/lib/utils";
import { AnimatedProviderSteps } from "./animated-provider-steps"; import { AnimatedProviderSteps } from "./animated-provider-steps";
import { IBMOnboarding } from "./ibm-onboarding"; import { IBMOnboarding } from "./ibm-onboarding";
import { OllamaOnboarding } from "./ollama-onboarding"; import { OllamaOnboarding } from "./ollama-onboarding";
import { OpenAIOnboarding } from "./openai-onboarding"; import { OpenAIOnboarding } from "./openai-onboarding";
interface OnboardingCardProps { interface OnboardingCardProps {
onComplete: () => void; onComplete: () => void;
} }
@@ -45,220 +46,226 @@ const STEP_LIST = [ const STEP_LIST = [
const TOTAL_PROVIDER_STEPS = STEP_LIST.length; const TOTAL_PROVIDER_STEPS = STEP_LIST.length;
const OnboardingCard = ({ onComplete }: OnboardingCardProps) => { const OnboardingCard = ({ onComplete }: OnboardingCardProps) => {
const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true"; const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true";
const { isHealthy: isDoclingHealthy } = useDoclingHealth(); const { isHealthy: isDoclingHealthy } = useDoclingHealth();
const [modelProvider, setModelProvider] = useState<string>("openai"); const [modelProvider, setModelProvider] = useState<string>("openai");
const [sampleDataset, setSampleDataset] = useState<boolean>(true); const [sampleDataset, setSampleDataset] = useState<boolean>(true);
const handleSetModelProvider = (provider: string) => { const handleSetModelProvider = (provider: string) => {
setModelProvider(provider); setModelProvider(provider);
setSettings({ setSettings({
model_provider: provider, model_provider: provider,
embedding_model: "", embedding_model: "",
llm_model: "", llm_model: "",
}); });
}; };
const [settings, setSettings] = useState<OnboardingVariables>({ const [settings, setSettings] = useState<OnboardingVariables>({
model_provider: modelProvider, model_provider: modelProvider,
embedding_model: "", embedding_model: "",
llm_model: "", llm_model: "",
}); });
const [currentStep, setCurrentStep] = useState<number | null>(null); const [currentStep, setCurrentStep] = useState<number | null>(null);
// Query tasks to track completion // Query tasks to track completion
const { data: tasks } = useGetTasksQuery({ const { data: tasks } = useGetTasksQuery({
enabled: currentStep !== null, // Only poll when onboarding has started enabled: currentStep !== null, // Only poll when onboarding has started
refetchInterval: currentStep !== null ? 1000 : false, // Poll every 1 second during onboarding refetchInterval: currentStep !== null ? 1000 : false, // Poll every 1 second during onboarding
}); });
// Monitor tasks and call onComplete when all tasks are done // Monitor tasks and call onComplete when all tasks are done
useEffect(() => { useEffect(() => {
if (currentStep === null || !tasks) { if (currentStep === null || !tasks) {
return; return;
} }
// Check if there are any active tasks (pending, running, or processing) // Check if there are any active tasks (pending, running, or processing)
const activeTasks = tasks.find( const activeTasks = tasks.find(
(task) => (task) =>
task.status === "pending" || task.status === "pending" ||
task.status === "running" || task.status === "running" ||
task.status === "processing", task.status === "processing",
); );
// If no active tasks and we've started onboarding, complete it // If no active tasks and we've started onboarding, complete it
if ( if (
(!activeTasks || (activeTasks.processed_files ?? 0) > 0) && (!activeTasks || (activeTasks.processed_files ?? 0) > 0) &&
tasks.length > 0 tasks.length > 0
) { ) {
// Set to final step to show "Done" // Set to final step to show "Done"
setCurrentStep(TOTAL_PROVIDER_STEPS); setCurrentStep(TOTAL_PROVIDER_STEPS);
// Wait a bit before completing // Wait a bit before completing
setTimeout(() => { setTimeout(() => {
onComplete(); onComplete();
}, 1000); }, 1000);
} }
}, [tasks, currentStep, onComplete]); }, [tasks, currentStep, onComplete]);
// Mutations // Mutations
const onboardingMutation = useOnboardingMutation({ const onboardingMutation = useOnboardingMutation({
onSuccess: (data) => { onSuccess: (data) => {
console.log("Onboarding completed successfully", data); console.log("Onboarding completed successfully", data);
setCurrentStep(0); setCurrentStep(0);
}, },
onError: (error) => { onError: (error) => {
toast.error("Failed to complete onboarding", { toast.error("Failed to complete onboarding", {
description: error.message, description: error.message,
}); });
}, },
}); });
const handleComplete = () => { const handleComplete = () => {
if ( if (
!settings.model_provider || !settings.model_provider ||
!settings.llm_model || !settings.llm_model ||
!settings.embedding_model !settings.embedding_model
) { ) {
toast.error("Please complete all required fields"); toast.error("Please complete all required fields");
return; return;
} }
// Prepare onboarding data // Prepare onboarding data
const onboardingData: OnboardingVariables = { const onboardingData: OnboardingVariables = {
model_provider: settings.model_provider, model_provider: settings.model_provider,
llm_model: settings.llm_model, llm_model: settings.llm_model,
embedding_model: settings.embedding_model, embedding_model: settings.embedding_model,
sample_data: sampleDataset, sample_data: sampleDataset,
}; };
// Add API key if available // Add API key if available
if (settings.api_key) { if (settings.api_key) {
onboardingData.api_key = settings.api_key; onboardingData.api_key = settings.api_key;
} }
// Add endpoint if available // Add endpoint if available
if (settings.endpoint) { if (settings.endpoint) {
onboardingData.endpoint = settings.endpoint; onboardingData.endpoint = settings.endpoint;
} }
// Add project_id if available // Add project_id if available
if (settings.project_id) { if (settings.project_id) {
onboardingData.project_id = settings.project_id; onboardingData.project_id = settings.project_id;
} }
onboardingMutation.mutate(onboardingData); onboardingMutation.mutate(onboardingData);
setCurrentStep(0); setCurrentStep(0);
}; };
const isComplete = const isComplete =
!!settings.llm_model && !!settings.embedding_model && isDoclingHealthy; !!settings.llm_model && !!settings.embedding_model && isDoclingHealthy;
return ( return (
<AnimatePresence mode="wait"> <AnimatePresence mode="wait">
{currentStep === null ? ( {currentStep === null ? (
<motion.div <motion.div
key="onboarding-form" key="onboarding-form"
initial={{ opacity: 1, y: 0 }} initial={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: -24 }} exit={{ opacity: 0, y: -24 }}
transition={{ duration: 0.4, ease: "easeInOut" }} transition={{ duration: 0.4, ease: "easeInOut" }}
> >
<Card <div className={`w-full max-w-[600px] flex flex-col gap-6`}>
className={`w-full max-w-[600px] ${ <Tabs
updatedOnboarding ? "border-none" : "" defaultValue={modelProvider}
}`} onValueChange={handleSetModelProvider}
> >
<Tabs <TabsList className="mb-4">
defaultValue={modelProvider} <TabsTrigger
onValueChange={handleSetModelProvider} value="openai"
> >
<CardHeader className={`${updatedOnboarding ? "px-0" : ""}`}> <div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "openai" ? "bg-white" : "bg-muted")}>
<TabsList> <OpenAILogo className={cn("w-4 h-4 shrink-0", modelProvider === "openai" ? "text-black" : "text-muted-foreground")} />
<TabsTrigger value="openai"> </div>
<OpenAILogo className="w-4 h-4" /> OpenAI
OpenAI </TabsTrigger>
</TabsTrigger> <TabsTrigger
<TabsTrigger value="watsonx"> value="watsonx"
<IBMLogo className="w-4 h-4" /> >
IBM watsonx.ai <div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "watsonx" ? "bg-[#1063FE]" : "bg-muted")}>
</TabsTrigger> <IBMLogo className={cn("w-4 h-4 shrink-0", modelProvider === "watsonx" ? "text-white" : "text-muted-foreground")} />
<TabsTrigger value="ollama"> </div>
<OllamaLogo className="w-4 h-4" /> IBM watsonx.ai
Ollama </TabsTrigger>
</TabsTrigger> <TabsTrigger
</TabsList> value="ollama"
</CardHeader> >
<CardContent className={`${updatedOnboarding ? "px-0" : ""}`}> <div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "ollama" ? "bg-white" : "bg-muted")}>
<TabsContent value="openai"> <OllamaLogo
<OpenAIOnboarding className={cn(
setSettings={setSettings} "w-4 h-4 shrink-0",
sampleDataset={sampleDataset} modelProvider === "ollama" ? "text-black" : "text-muted-foreground",
setSampleDataset={setSampleDataset} )}
/> />
</TabsContent> </div>
<TabsContent value="watsonx"> Ollama
<IBMOnboarding </TabsTrigger>
setSettings={setSettings} </TabsList>
sampleDataset={sampleDataset} <TabsContent value="openai">
setSampleDataset={setSampleDataset} <OpenAIOnboarding
/> setSettings={setSettings}
</TabsContent> sampleDataset={sampleDataset}
<TabsContent value="ollama"> setSampleDataset={setSampleDataset}
<OllamaOnboarding />
setSettings={setSettings} </TabsContent>
sampleDataset={sampleDataset} <TabsContent value="watsonx">
setSampleDataset={setSampleDataset} <IBMOnboarding
/> setSettings={setSettings}
</TabsContent> sampleDataset={sampleDataset}
</CardContent> setSampleDataset={setSampleDataset}
</Tabs> />
<CardFooter </TabsContent>
className={`flex ${updatedOnboarding ? "px-0" : "justify-end"}`} <TabsContent value="ollama">
> <OllamaOnboarding
<Tooltip> setSettings={setSettings}
<TooltipTrigger asChild> sampleDataset={sampleDataset}
<div> setSampleDataset={setSampleDataset}
<Button />
size="sm" </TabsContent>
onClick={handleComplete} </Tabs>
disabled={!isComplete}
loading={onboardingMutation.isPending} <Tooltip>
> <TooltipTrigger asChild>
<span className="select-none">Complete</span> <div>
</Button> <Button
</div> size="sm"
</TooltipTrigger> onClick={handleComplete}
{!isComplete && ( disabled={!isComplete}
<TooltipContent> loading={onboardingMutation.isPending}
{!!settings.llm_model && >
!!settings.embedding_model && <span className="select-none">Complete</span>
!isDoclingHealthy </Button>
? "docling-serve must be running to continue" </div>
: "Please fill in all required fields"} </TooltipTrigger>
</TooltipContent> {!isComplete && (
)} <TooltipContent>
</Tooltip> {!!settings.llm_model &&
</CardFooter> !!settings.embedding_model &&
</Card> !isDoclingHealthy
</motion.div> ? "docling-serve must be running to continue"
) : ( : "Please fill in all required fields"}
<motion.div </TooltipContent>
key="provider-steps" )}
initial={{ opacity: 0, y: 24 }} </Tooltip>
animate={{ opacity: 1, y: 0 }} </div>
transition={{ duration: 0.4, ease: "easeInOut" }} </motion.div>
> ) : (
<AnimatedProviderSteps <motion.div
key="provider-steps"
initial={{ opacity: 0, y: 24 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
>
<AnimatedProviderSteps
currentStep={currentStep} currentStep={currentStep}
setCurrentStep={setCurrentStep} setCurrentStep={setCurrentStep}
steps={STEP_LIST} steps={STEP_LIST}
/> />
</motion.div> </motion.div>
)} )}
</AnimatePresence> </AnimatePresence>
); );
}; };
export default OnboardingCard; export default OnboardingCard;

View file

@@ -28,7 +28,7 @@ export function ChatRenderer({
settings, settings,
children, children,
}: { }: {
settings: Settings; settings: Settings | undefined;
children: React.ReactNode; children: React.ReactNode;
}) { }) {
const pathname = usePathname(); const pathname = usePathname();
@@ -51,7 +51,8 @@ export function ChatRenderer({
if (typeof window === "undefined") return false; if (typeof window === "undefined") return false;
const savedStep = localStorage.getItem(ONBOARDING_STEP_KEY); const savedStep = localStorage.getItem(ONBOARDING_STEP_KEY);
// Show layout if settings.edited is true and if no onboarding step is saved // Show layout if settings.edited is true and if no onboarding step is saved
return !!settings?.edited && savedStep === null; const isEdited = settings?.edited ?? true;
return isEdited ? savedStep === null : false;
}); });
// Only fetch conversations on chat page // Only fetch conversations on chat page

View file

@@ -35,9 +35,10 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
const isUnhealthy = health?.status === "unhealthy" || isError; const isUnhealthy = health?.status === "unhealthy" || isError;
const isBannerVisible = !isHealthLoading && isUnhealthy; const isBannerVisible = !isHealthLoading && isUnhealthy;
const isSettingsLoadingOrError = isSettingsLoading || !settings;
// Show loading state when backend isn't ready // Show loading state when backend isn't ready
if (isLoading || isSettingsLoading || !settings) { if (isLoading || (isSettingsLoadingOrError && (isNoAuthMode || isAuthenticated))) {
return ( return (
<div className="min-h-screen flex items-center justify-center bg-background"> <div className="min-h-screen flex items-center justify-center bg-background">
<div className="flex flex-col items-center gap-4"> <div className="flex flex-col items-center gap-4">