Lucas Oliveira 2025-12-03 17:27:08 -03:00
parent 97c9fdb2c6
commit b20da9ab58
2 changed files with 309 additions and 309 deletions


@ -11,170 +11,170 @@ import { useUpdateSettings } from "../_hooks/useUpdateSettings";
import { ModelSelector } from "./model-selector";
export function OllamaOnboarding({
  setSettings,
  sampleDataset,
  setSampleDataset,
  setIsLoadingModels,
  isEmbedding = false,
  alreadyConfigured = false,
  existingEndpoint,
}: {
  setSettings: Dispatch<SetStateAction<OnboardingVariables>>;
  sampleDataset: boolean;
  setSampleDataset: (dataset: boolean) => void;
  setIsLoadingModels?: (isLoading: boolean) => void;
  isEmbedding?: boolean;
  alreadyConfigured?: boolean;
  existingEndpoint?: string;
}) {
  const [endpoint, setEndpoint] = useState(
    alreadyConfigured
      ? undefined
      : existingEndpoint || `http://localhost:11434`,
  );
  const [showConnecting, setShowConnecting] = useState(false);
  const debouncedEndpoint = useDebouncedValue(endpoint, 500);

  // Fetch models from API when endpoint is provided (debounced)
  const {
    data: modelsData,
    isLoading: isLoadingModels,
    error: modelsError,
  } = useGetOllamaModelsQuery(
    debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined,
    { enabled: !!debouncedEndpoint || alreadyConfigured },
  );

  // Use custom hook for model selection logic
  const {
    languageModel,
    embeddingModel,
    setLanguageModel,
    setEmbeddingModel,
    languageModels,
    embeddingModels,
  } = useModelSelection(modelsData, isEmbedding);

  // Handle delayed display of connecting state
  useEffect(() => {
    let timeoutId: NodeJS.Timeout;

    if (debouncedEndpoint && isLoadingModels) {
      timeoutId = setTimeout(() => {
        setIsLoadingModels?.(true);
        setShowConnecting(true);
      }, 500);
    } else {
      setShowConnecting(false);
      setIsLoadingModels?.(false);
    }

    return () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
    };
  }, [debouncedEndpoint, isLoadingModels, setIsLoadingModels]);

  // Update settings when values change
  useUpdateSettings(
    "ollama",
    {
      endpoint,
      languageModel,
      embeddingModel,
    },
    setSettings,
    isEmbedding,
  );

  // Check validation state based on models query
  const hasConnectionError = debouncedEndpoint && modelsError;
  const hasNoModels =
    modelsData &&
    !modelsData.language_models?.length &&
    !modelsData.embedding_models?.length;

  return (
    <div className="space-y-4">
      <div className="space-y-1">
        <LabelInput
          label="Ollama Base URL"
          helperText="Base URL of your Ollama server"
          id="api-endpoint"
          required
          placeholder={
            alreadyConfigured
              ? "http://••••••••••••••••••••"
              : "http://localhost:11434"
          }
          value={endpoint}
          onChange={(e) => setEndpoint(e.target.value)}
          disabled={alreadyConfigured}
        />
        {alreadyConfigured && (
          <p className="text-mmd text-muted-foreground">
            Reusing endpoint from model provider selection.
          </p>
        )}
        {showConnecting && (
          <p className="text-mmd text-muted-foreground">
            Connecting to Ollama server...
          </p>
        )}
        {hasConnectionError && (
          <p className="text-mmd text-accent-amber-foreground">
            Can't reach Ollama at {debouncedEndpoint}. Update the base URL or
            start the server.
          </p>
        )}
        {hasNoModels && (
          <p className="text-mmd text-accent-amber-foreground">
            No models found. Install embedding and agent models on your Ollama
            server.
          </p>
        )}
      </div>
      {isEmbedding && setEmbeddingModel && (
        <LabelWrapper
          label="Embedding model"
          helperText="Model used for knowledge ingest and retrieval"
          id="embedding-model"
          required={true}
        >
          <ModelSelector
            options={embeddingModels}
            icon={<OllamaLogo className="w-4 h-4" />}
            noOptionsPlaceholder={
              isLoadingModels
                ? "Loading models..."
                : "No embedding models detected. Install an embedding model to continue."
            }
            value={embeddingModel}
            onValueChange={setEmbeddingModel}
          />
        </LabelWrapper>
      )}
      {!isEmbedding && setLanguageModel && (
        <LabelWrapper
          label="Language model"
          helperText="Model used for chat"
          id="embedding-model"
          required={true}
        >
          <ModelSelector
            options={languageModels}
            icon={<OllamaLogo className="w-4 h-4" />}
            noOptionsPlaceholder={
              isLoadingModels
                ? "Loading models..."
                : "No language models detected. Install a language model to continue."
            }
            value={languageModel}
            onValueChange={setLanguageModel}
          />
        </LabelWrapper>
      )}
    </div>
  );
}
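
Both components debounce user input before validating it against the provider. The `useDebouncedValue` hook itself comes from "@/lib/debounce" and is not part of this diff; a minimal sketch of what such a hook could look like, offered only for context and assuming a plain setTimeout-based implementation:

import { useEffect, useState } from "react";

// Hypothetical sketch, not the repo's actual implementation:
// re-emits `value` only after it has stopped changing for `delayMs`.
export function useDebouncedValue<T>(value: T, delayMs: number): T {
  const [debounced, setDebounced] = useState(value);

  useEffect(() => {
    // Restart the timer whenever `value` or `delayMs` changes, so only the
    // last value within the window is published to consumers.
    const id = setTimeout(() => setDebounced(value), delayMs);
    return () => clearTimeout(id);
  }, [value, delayMs]);

  return debounced;
}

With a 500 ms delay, as used above, the Ollama endpoint query and the OpenAI key validation only fire once the user pauses typing.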


@ -5,9 +5,9 @@ import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper";
import { Switch } from "@/components/ui/switch";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import { useDebouncedValue } from "@/lib/debounce";
import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation";
@ -17,161 +17,161 @@ import { useUpdateSettings } from "../_hooks/useUpdateSettings";
import { AdvancedOnboarding } from "./advanced";
export function OpenAIOnboarding({
  setSettings,
  sampleDataset,
  setSampleDataset,
  setIsLoadingModels,
  isEmbedding = false,
  hasEnvApiKey = false,
  alreadyConfigured = false,
}: {
  setSettings: Dispatch<SetStateAction<OnboardingVariables>>;
  sampleDataset: boolean;
  setSampleDataset: (dataset: boolean) => void;
  setIsLoadingModels?: (isLoading: boolean) => void;
  isEmbedding?: boolean;
  hasEnvApiKey?: boolean;
  alreadyConfigured?: boolean;
}) {
  const [apiKey, setApiKey] = useState("");
  const [getFromEnv, setGetFromEnv] = useState(
    hasEnvApiKey && !alreadyConfigured,
  );
  const debouncedApiKey = useDebouncedValue(apiKey, 500);

  // Fetch models from API when API key is provided
  const {
    data: modelsData,
    isLoading: isLoadingModels,
    error: modelsError,
  } = useGetOpenAIModelsQuery(
    getFromEnv
      ? { apiKey: "" }
      : debouncedApiKey
        ? { apiKey: debouncedApiKey }
        : undefined,
    {
      // Only validate when the user opts in (env) or provides a key.
      // If a key was previously configured, let the user decide to reuse or replace it
      // without triggering an immediate validation error.
      enabled: debouncedApiKey !== "" || getFromEnv || alreadyConfigured,
    },
  );

  // Use custom hook for model selection logic
  const {
    languageModel,
    embeddingModel,
    setLanguageModel,
    setEmbeddingModel,
    languageModels,
    embeddingModels,
  } = useModelSelection(modelsData, isEmbedding);

  const handleSampleDatasetChange = (dataset: boolean) => {
    setSampleDataset(dataset);
  };

  const handleGetFromEnvChange = (fromEnv: boolean) => {
    setGetFromEnv(fromEnv);
    if (fromEnv) {
      setApiKey("");
    }
    setEmbeddingModel?.("");
    setLanguageModel?.("");
  };

  useEffect(() => {
    setIsLoadingModels?.(isLoadingModels);
  }, [isLoadingModels, setIsLoadingModels]);

  // Update settings when values change
  useUpdateSettings(
    "openai",
    {
      apiKey,
      languageModel,
      embeddingModel,
    },
    setSettings,
    isEmbedding,
  );

  return (
    <>
      <div className="space-y-5">
        {!alreadyConfigured && (
          <LabelWrapper
            label="Use environment OpenAI API key"
            id="get-api-key"
            description="Reuse the key from your environment config. Turn off to enter a different key."
            flex
          >
            <Tooltip>
              <TooltipTrigger asChild>
                <div>
                  <Switch
                    checked={getFromEnv}
                    onCheckedChange={handleGetFromEnvChange}
                    disabled={!hasEnvApiKey}
                  />
                </div>
              </TooltipTrigger>
              {!hasEnvApiKey && (
                <TooltipContent>
                  OpenAI API key not detected in the environment.
                </TooltipContent>
              )}
            </Tooltip>
          </LabelWrapper>
        )}
        {(!getFromEnv || alreadyConfigured) && (
          <div className="space-y-1">
            <LabelInput
              label="OpenAI API key"
              helperText="The API key for your OpenAI account."
              className={modelsError ? "!border-destructive" : ""}
              id="api-key"
              type="password"
              required
              placeholder={
                alreadyConfigured
                  ? "sk-•••••••••••••••••••••••••••••••••••••••••"
                  : "sk-..."
              }
              value={apiKey}
              onChange={(e) => setApiKey(e.target.value)}
              // Even if a key exists, allow replacing it to avoid getting stuck on stale creds.
              disabled={false}
            />
            {alreadyConfigured && (
              <p className="text-mmd text-muted-foreground">
                Existing OpenAI key detected. You can reuse it or enter a new
                one.
              </p>
            )}
            {isLoadingModels && (
              <p className="text-mmd text-muted-foreground">
                Validating API key...
              </p>
            )}
            {modelsError && (
              <p className="text-mmd text-destructive">
                Invalid OpenAI API key. Verify or replace the key.
              </p>
            )}
          </div>
        )}
      </div>
      <AdvancedOnboarding
        icon={<OpenAILogo className="w-4 h-4" />}
        languageModels={languageModels}
        embeddingModels={embeddingModels}
        languageModel={languageModel}
        embeddingModel={embeddingModel}
        sampleDataset={sampleDataset}
        setLanguageModel={setLanguageModel}
        setSampleDataset={handleSampleDatasetChange}
        setEmbeddingModel={setEmbeddingModel}
      />
    </>
  );
}
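
For context, a parent onboarding step could wire this component roughly as follows. This is a hypothetical sketch: the wrapper name, the import path for the component, and the placeholder initial OnboardingVariables value are assumptions, not part of this commit.

import { useState } from "react";
import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation";
import { OpenAIOnboarding } from "./openai"; // assumed path to the component above

// Hypothetical wrapper: owns the onboarding state that OpenAIOnboarding writes into.
export function OnboardingProviderStep({ hasEnvApiKey }: { hasEnvApiKey: boolean }) {
  const [settings, setSettings] = useState<OnboardingVariables>(
    {} as OnboardingVariables, // placeholder; the real initial shape comes from the mutation types
  );
  const [sampleDataset, setSampleDataset] = useState(false);
  const [isLoadingModels, setIsLoadingModels] = useState(false);

  return (
    <div className="space-y-6">
      <OpenAIOnboarding
        setSettings={setSettings}
        sampleDataset={sampleDataset}
        setSampleDataset={setSampleDataset}
        setIsLoadingModels={setIsLoadingModels}
        hasEnvApiKey={hasEnvApiKey}
      />
      {/* Keep the next step disabled while the API key is still being validated. */}
      <button type="button" disabled={isLoadingModels} onClick={() => console.log(settings)}>
        Continue
      </button>
    </div>
  );
}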