Changed Ollama onboarding to remove the advanced section

This commit is contained in:
Lucas Oliveira 2025-10-22 17:46:44 -03:00
parent 1e8be3b075
commit 988ab0096d

View file

@ -7,154 +7,143 @@ import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutat
import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery"; import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery";
import { useModelSelection } from "../hooks/useModelSelection"; import { useModelSelection } from "../hooks/useModelSelection";
import { useUpdateSettings } from "../hooks/useUpdateSettings"; import { useUpdateSettings } from "../hooks/useUpdateSettings";
import { AdvancedOnboarding } from "./advanced";
import { ModelSelector } from "./model-selector"; import { ModelSelector } from "./model-selector";
/**
 * Onboarding step for configuring an Ollama provider.
 *
 * Lets the user point at an Ollama server, verifies connectivity by
 * fetching the server's model list (debounced), and has them pick an
 * embedding model and a language model. Selected values are pushed up
 * via `useUpdateSettings` whenever they change.
 *
 * NOTE(review): `sampleDataset` / `setSampleDataset` are no longer read
 * here after the advanced section was removed; they are kept in the
 * props so existing callers keep compiling — confirm whether they can
 * be dropped from the call sites.
 */
export function OllamaOnboarding({
  setSettings,
  sampleDataset,
  setSampleDataset,
}: {
  setSettings: (settings: OnboardingVariables) => void;
  sampleDataset: boolean;
  setSampleDataset: (dataset: boolean) => void;
}) {
  const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
  const [showConnecting, setShowConnecting] = useState(false);
  // Debounce so we don't hammer the server while the user is typing.
  const debouncedEndpoint = useDebouncedValue(endpoint, 500);

  // Fetch models from API when endpoint is provided (debounced)
  const {
    data: modelsData,
    isLoading: isLoadingModels,
    error: modelsError,
  } = useGetOllamaModelsQuery(
    debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined,
  );

  // Use custom hook for model selection logic
  const {
    languageModel,
    embeddingModel,
    setLanguageModel,
    setEmbeddingModel,
    languageModels,
    embeddingModels,
  } = useModelSelection(modelsData);

  // Handle delayed display of connecting state: only show the
  // "Connecting..." hint if loading lasts longer than 500ms, to avoid
  // a flash on fast responses.
  useEffect(() => {
    // `| undefined` so the cleanup can read it before assignment
    // under strict definite-assignment checking.
    let timeoutId: NodeJS.Timeout | undefined;
    if (debouncedEndpoint && isLoadingModels) {
      timeoutId = setTimeout(() => {
        setShowConnecting(true);
      }, 500);
    } else {
      setShowConnecting(false);
    }
    return () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
    };
  }, [debouncedEndpoint, isLoadingModels]);

  // Update settings when values change
  useUpdateSettings(
    "ollama",
    {
      endpoint,
      languageModel,
      embeddingModel,
    },
    setSettings,
  );

  // Check validation state based on models query
  const hasConnectionError = debouncedEndpoint && modelsError;
  const hasNoModels =
    modelsData &&
    !modelsData.language_models?.length &&
    !modelsData.embedding_models?.length;

  return (
    <div className="space-y-4">
      <div className="space-y-1">
        <LabelInput
          label="Ollama Base URL"
          helperText="Base URL of your Ollama server"
          id="api-endpoint"
          required
          placeholder="http://localhost:11434"
          value={endpoint}
          onChange={(e) => setEndpoint(e.target.value)}
        />
        {showConnecting && (
          <p className="text-mmd text-muted-foreground">
            Connecting to Ollama server...
          </p>
        )}
        {hasConnectionError && (
          <p className="text-mmd text-accent-amber-foreground">
            Can&apos;t reach Ollama at {debouncedEndpoint}. Update the base URL
            or start the server.
          </p>
        )}
        {hasNoModels && (
          <p className="text-mmd text-accent-amber-foreground">
            No models found. Install embedding and agent models on your Ollama
            server.
          </p>
        )}
      </div>
      <LabelWrapper
        label="Embedding model"
        helperText="Model used for knowledge ingest and retrieval"
        id="embedding-model"
        required={true}
      >
        <ModelSelector
          options={embeddingModels}
          icon={<OllamaLogo className="w-4 h-4" />}
          noOptionsPlaceholder={
            isLoadingModels
              ? "Loading models..."
              : "No embedding models detected. Install an embedding model to continue."
          }
          value={embeddingModel}
          onValueChange={setEmbeddingModel}
        />
      </LabelWrapper>
      {/* id fixed: was a duplicate of the embedding-model wrapper above,
          which breaks label association and DOM id uniqueness. */}
      <LabelWrapper
        label="Language model"
        helperText="Model used for chat"
        id="language-model"
        required={true}
      >
        <ModelSelector
          options={languageModels}
          icon={<OllamaLogo className="w-4 h-4" />}
          noOptionsPlaceholder={
            isLoadingModels
              ? "Loading models..."
              : "No language models detected. Install a language model to continue."
          }
          value={languageModel}
          onValueChange={setLanguageModel}
        />
      </LabelWrapper>
    </div>
  );
}