Feat/278 Edit current model provider settings (#307)

* update the settings update API to allow changing the model provider config (example call sketched below)

* use react hook form

* make settings page small width

* re-use the onboarding forms instead of rolling custom ones

* issue

* remove test

* make custom forms with react-hook-form

* replace the updateFlow mutation with updateSettings

* show all the model providers

* revert changes to onboarding forms

* disabled state styles for providers

* break model selectors into their own file

* use existing selector component, use settings endpoint instead of onboarding, clean up form styles

* revert changes to openai onboarding

* small form changes
Cole Goldsmith 2025-10-31 13:22:51 -05:00 committed by GitHub
parent bf8af00e06
commit 2d31c4b9b0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 1362 additions and 147 deletions
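For orientation before the per-file diffs: a minimal sketch of how the new settings mutation added in this commit can be called to switch the active model provider. The hook, import path, and request field names come from the diff below; the wrapping component, button, and example values are illustrative assumptions only.

import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation";

// Hypothetical caller: the hook and the request fields exist in this commit,
// while this component and the example values are placeholders.
export function SwitchToOllamaExample() {
  const updateSettings = useUpdateSettingsMutation({
    onSuccess: () => console.log("Provider settings saved"),
    onError: (error) => console.error(error.message),
  });

  return (
    <button
      disabled={updateSettings.isPending}
      onClick={() =>
        updateSettings.mutate({
          model_provider: "ollama", // provider key accepted by POST /api/settings
          endpoint: "http://localhost:11434", // assumed local Ollama base URL
          llm_model: "llama3.1", // illustrative model ids
          embedding_model: "nomic-embed-text",
        })
      }
    >
      Use Ollama
    </button>
  );
}

Submitting this payload hits the same POST /api/settings handler extended in the backend diff, which persists the provider config and pushes the OLLAMA_BASE_URL global variable to Langflow.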

View file

@@ -42,6 +42,7 @@
"next-themes": "^0.4.6",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-hook-form": "^7.65.0",
"react-icons": "^5.5.0",
"react-markdown": "^10.1.0",
"react-syntax-highlighter": "^15.6.1",
@@ -8348,6 +8349,21 @@
"react": "^19.1.1"
}
},
"node_modules/react-hook-form": {
"version": "7.65.0",
"resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.65.0.tgz",
"integrity": "sha512-xtOzDz063WcXvGWaHgLNrNzlsdFgtUWcb32E6WFaGTd7kPZG3EeDusjdZfUsPwKCKVXy1ZlntifaHZ4l8pAsmw==",
"engines": {
"node": ">=18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/react-hook-form"
},
"peerDependencies": {
"react": "^16.8.0 || ^17 || ^18 || ^19"
}
},
"node_modules/react-icons": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz",

View file

@@ -43,6 +43,7 @@
"next-themes": "^0.4.6",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-hook-form": "^7.65.0",
"react-icons": "^5.5.0",
"react-markdown": "^10.1.0",
"react-syntax-highlighter": "^15.6.1",

View file

@@ -1,61 +0,0 @@
import {
type UseMutationOptions,
useMutation,
useQueryClient,
} from "@tanstack/react-query";
interface UpdateFlowSettingVariables {
llm_model?: string;
system_prompt?: string;
embedding_model?: string;
table_structure?: boolean;
ocr?: boolean;
picture_descriptions?: boolean;
chunk_size?: number;
chunk_overlap?: number;
}
interface UpdateFlowSettingResponse {
message: string;
}
export const useUpdateFlowSettingMutation = (
options?: Omit<
UseMutationOptions<
UpdateFlowSettingResponse,
Error,
UpdateFlowSettingVariables
>,
"mutationFn"
>,
) => {
const queryClient = useQueryClient();
async function updateFlowSetting(
variables: UpdateFlowSettingVariables,
): Promise<UpdateFlowSettingResponse> {
const response = await fetch("/api/settings", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(variables),
});
if (!response.ok) {
const error = await response.json();
throw new Error(error.error || "Failed to update settings");
}
return response.json();
}
return useMutation({
mutationFn: updateFlowSetting,
onSettled: () => {
// Invalidate settings query to refetch updated data
queryClient.invalidateQueries({ queryKey: ["settings"] });
},
...options,
});
};

View file

@@ -0,0 +1,72 @@
import {
type UseMutationOptions,
useMutation,
useQueryClient,
} from "@tanstack/react-query";
import type { Settings } from "../queries/useGetSettingsQuery";
export interface UpdateSettingsRequest {
// Agent settings
llm_model?: string;
system_prompt?: string;
// Knowledge settings
chunk_size?: number;
chunk_overlap?: number;
table_structure?: boolean;
ocr?: boolean;
picture_descriptions?: boolean;
embedding_model?: string;
// Provider settings
model_provider?: string;
api_key?: string;
endpoint?: string;
project_id?: string;
}
export interface UpdateSettingsResponse {
message: string;
settings: Settings;
}
export const useUpdateSettingsMutation = (
options?: Omit<
UseMutationOptions<UpdateSettingsResponse, Error, UpdateSettingsRequest>,
"mutationFn"
>
) => {
const queryClient = useQueryClient();
async function updateSettings(
variables: UpdateSettingsRequest
): Promise<UpdateSettingsResponse> {
const response = await fetch("/api/settings", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(variables),
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.error || "Failed to update settings");
}
return response.json();
}
return useMutation({
mutationFn: updateSettings,
onSuccess: (...args) => {
queryClient.invalidateQueries({
queryKey: ["settings"],
refetchType: "all"
});
options?.onSuccess?.(...args);
},
onError: options?.onError,
onSettled: options?.onSettled,
});
};

View file

@@ -53,8 +53,6 @@ export const useGetOpenAIModelsQuery = (
{
queryKey: ["models", "openai", params],
queryFn: getOpenAIModels,
retry: 2,
enabled: !!params?.apiKey,
staleTime: 0, // Always fetch fresh data
gcTime: 0, // Don't cache results
...options,
@@ -89,7 +87,6 @@ export const useGetOllamaModelsQuery = (
{
queryKey: ["models", "ollama", params],
queryFn: getOllamaModels,
retry: 2,
staleTime: 0, // Always fetch fresh data
gcTime: 0, // Don't cache results
...options,
@@ -130,8 +127,6 @@ export const useGetIBMModelsQuery = (
{
queryKey: ["models", "ibm", params],
queryFn: getIBMModels,
retry: 2,
enabled: !!params?.endpoint && !!params?.apiKey && !!params?.projectId, // Only run if all required params are provided
staleTime: 0, // Always fetch fresh data
gcTime: 0, // Don't cache results
...options,

View file

@@ -26,6 +26,9 @@ export interface Settings {
edited?: boolean;
provider?: {
model_provider?: string;
// Note: api_key is never returned by the backend for security reasons
endpoint?: string;
project_id?: string;
};
knowledge?: KnowledgeSettings;
agent?: AgentSettings;

View file

@@ -67,13 +67,14 @@ export function IBMOnboarding({
isLoading: isLoadingModels,
error: modelsError,
} = useGetIBMModelsQuery(
debouncedEndpoint && debouncedApiKey && debouncedProjectId
? {
endpoint: debouncedEndpoint,
apiKey: debouncedApiKey,
projectId: debouncedProjectId,
}
: undefined,
{
endpoint: debouncedEndpoint,
apiKey: debouncedApiKey,
projectId: debouncedProjectId,
},
{
enabled: !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId,
}
);
// Use custom hook for model selection logic

View file

@@ -26,6 +26,7 @@ export function ModelSelector({
searchPlaceholder = "Search model...",
noOptionsPlaceholder = "No models available",
custom = false,
hasError = false,
}: {
options: {
value: string;
@@ -39,6 +40,7 @@
noOptionsPlaceholder?: string;
custom?: boolean;
onValueChange: (value: string) => void;
hasError?: boolean;
}) {
const [open, setOpen] = useState(false);
const [searchValue, setSearchValue] = useState("");
@@ -57,7 +59,7 @@
role="combobox"
disabled={options.length === 0}
aria-expanded={open}
className="w-full gap-2 justify-between font-normal text-sm"
className={cn("w-full gap-2 justify-between font-normal text-sm", hasError && "!border-destructive")}
>
{value ? (
<div className="flex items-center gap-2">

View file

@@ -0,0 +1,156 @@
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { Button } from "@/components/ui/button";
import { Card, CardHeader, CardTitle, CardContent } from "@/components/ui/card";
import { ModelProvider } from "../helpers/model-helpers";
import OpenAILogo from "@/components/logo/openai-logo";
import IBMLogo from "@/components/logo/ibm-logo";
import OllamaLogo from "@/components/logo/ollama-logo";
import { useAuth } from "@/contexts/auth-context";
import { ReactNode, useState } from "react";
import OpenAISettingsDialog from "./openai-settings-dialog";
import OllamaSettingsDialog from "./ollama-settings-dialog";
import WatsonxSettingsDialog from "./watsonx-settings-dialog";
import { cn } from "@/lib/utils";
import Link from "next/link";
export const ModelProviders = () => {
const { isAuthenticated, isNoAuthMode } = useAuth();
const { data: settings = {} } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const [dialogOpen, setDialogOpen] = useState<ModelProvider | undefined>();
const modelProvidersMap: Record<
ModelProvider,
{
name: string;
logo: (props: React.SVGProps<SVGSVGElement>) => ReactNode;
logoColor: string;
logoBgColor: string;
}
> = {
openai: {
name: "OpenAI",
logo: OpenAILogo,
logoColor: "text-black",
logoBgColor: "bg-white",
},
ollama: {
name: "Ollama",
logo: OllamaLogo,
logoColor: "text-black",
logoBgColor: "bg-white",
},
watsonx: {
name: "IBM watsonx.ai",
logo: IBMLogo,
logoColor: "text-white",
logoBgColor: "bg-[#1063FE]",
},
};
const currentProviderKey =
(settings.provider?.model_provider as ModelProvider) || "openai";
// Get all provider keys with active provider first
const allProviderKeys: ModelProvider[] = ["openai", "ollama", "watsonx"];
const sortedProviderKeys = [
currentProviderKey,
...allProviderKeys.filter((key) => key !== currentProviderKey),
];
return (
<>
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-3">
{sortedProviderKeys.map((providerKey) => {
const {
name,
logo: Logo,
logoColor,
logoBgColor,
} = modelProvidersMap[providerKey];
const isActive = providerKey === currentProviderKey;
return (
<Card
key={providerKey}
className={cn(
"relative flex flex-col",
!isActive && "text-muted-foreground"
)}
>
<CardHeader>
<div className="flex flex-col items-start justify-between">
<div className="flex flex-col gap-3">
<div className="mb-1">
<div
className={cn(
"w-8 h-8 rounded flex items-center justify-center border",
isActive ? logoBgColor : "bg-muted"
)}
>
{
<Logo
className={
isActive ? logoColor : "text-muted-foreground"
}
/>
}
</div>
</div>
<CardTitle className="flex flex-row items-center gap-2">
{name}
{isActive && (
<div className="h-2 w-2 bg-accent-emerald-foreground rounded-full" />
)}
</CardTitle>
</div>
</div>
</CardHeader>
<CardContent className="flex-1 flex flex-col justify-end space-y-4">
{isActive ? (
<Button
variant="outline"
onClick={() => setDialogOpen(providerKey)}
>
Edit Setup
</Button>
) : (
<p>
See{" "}
<Link
href="https://docs.openr.ag/install/#application-onboarding"
className="text-accent-purple-foreground"
target="_blank"
rel="noopener noreferrer"
>
Application onboarding docs
</Link>{" "}
for configuration details.
</p>
)}
</CardContent>
</Card>
);
})}
</div>
<OpenAISettingsDialog
open={dialogOpen === "openai"}
setOpen={() => setDialogOpen(undefined)}
/>
<OllamaSettingsDialog
open={dialogOpen === "ollama"}
setOpen={() => setDialogOpen(undefined)}
/>
<WatsonxSettingsDialog
open={dialogOpen === "watsonx"}
setOpen={() => setDialogOpen(undefined)}
/>
</>
);
};
export default ModelProviders;

View file

@@ -0,0 +1,122 @@
import { Controller, useFormContext } from "react-hook-form";
import { LabelWrapper } from "@/components/label-wrapper";
import { ReactNode, useEffect } from "react";
import { ModelOption } from "@/app/api/queries/useGetModelsQuery";
import { ModelSelector } from "@/app/onboarding/components/model-selector";
interface ModelSelectorsProps {
languageModels: ModelOption[];
embeddingModels: ModelOption[];
isLoadingModels: boolean;
logo: ReactNode;
languageModelName?: string;
embeddingModelName?: string;
}
export function ModelSelectors({
languageModels,
embeddingModels,
isLoadingModels,
logo,
languageModelName = "llmModel",
embeddingModelName = "embeddingModel",
}: ModelSelectorsProps) {
const {
control,
watch,
formState: { errors },
setValue,
} = useFormContext<Record<string, any>>();
const llmModel = watch(languageModelName);
const embeddingModel = watch(embeddingModelName);
const defaultLlmModel =
languageModels.find((model) => model.default)?.value ||
languageModels[0]?.value;
const defaultEmbeddingModel =
embeddingModels.find((model) => model.default)?.value ||
embeddingModels[0]?.value;
useEffect(() => {
if (defaultLlmModel && !llmModel) {
setValue(languageModelName, defaultLlmModel, { shouldValidate: true });
}
if (defaultEmbeddingModel && !embeddingModel) {
setValue(embeddingModelName, defaultEmbeddingModel, {
shouldValidate: true,
});
}
}, [defaultLlmModel, defaultEmbeddingModel, setValue]);
return (
<>
<div className="space-y-2">
<LabelWrapper
label="Embedding model"
helperText="Model used for knowledge ingest and retrieval"
id="embedding-model"
required={true}
>
<Controller
control={control}
name={embeddingModelName}
rules={{ required: "Embedding model is required" }}
render={({ field }) => (
<ModelSelector
options={embeddingModels}
icon={logo}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No embedding models detected"
}
placeholder="Select an embedding model"
value={field.value}
onValueChange={field.onChange}
/>
)}
/>
</LabelWrapper>
{embeddingModels.length > 0 && errors[embeddingModelName] && (
<p className="text-sm text-destructive">
{errors[embeddingModelName]?.message as string}
</p>
)}
</div>
<div className="space-y-2">
<LabelWrapper
label="Language model"
helperText="Model used for chat"
id="language-model"
required={true}
>
<Controller
control={control}
name={languageModelName}
rules={{ required: "Language model is required" }}
render={({ field }) => (
<ModelSelector
options={languageModels}
icon={logo}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No language models detected"
}
placeholder="Select a language model"
value={field.value}
onValueChange={field.onChange}
/>
)}
/>
</LabelWrapper>
{languageModels.length > 0 && errors[languageModelName] && (
<p className="text-sm text-destructive">
{errors[languageModelName]?.message as string}
</p>
)}
</div>
</>
);
}

View file

@@ -0,0 +1,105 @@
import OllamaLogo from "@/components/logo/ollama-logo";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { FormProvider, useForm } from "react-hook-form";
import { toast } from "sonner";
import {
OllamaSettingsForm,
type OllamaSettingsFormData,
} from "./ollama-settings-form";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { useAuth } from "@/contexts/auth-context";
import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation";
const OllamaSettingsDialog = ({
open,
setOpen,
}: {
open: boolean;
setOpen: (open: boolean) => void;
}) => {
const { isAuthenticated, isNoAuthMode } = useAuth();
const { data: settings = {} } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const isOllamaConfigured = settings.provider?.model_provider === "ollama";
const methods = useForm<OllamaSettingsFormData>({
mode: "onSubmit",
defaultValues: {
endpoint: isOllamaConfigured
? settings.provider?.endpoint
: "http://localhost:11434",
llmModel: isOllamaConfigured ? settings.agent?.llm_model : "",
embeddingModel: isOllamaConfigured
? settings.knowledge?.embedding_model
: "",
},
});
const { handleSubmit } = methods;
const settingsMutation = useUpdateSettingsMutation({
onSuccess: () => {
toast.success("Ollama settings updated successfully");
setOpen(false);
},
onError: (error) => {
toast.error("Failed to update Ollama settings", {
description: error.message,
});
},
});
const onSubmit = (data: OllamaSettingsFormData) => {
settingsMutation.mutate({
endpoint: data.endpoint,
model_provider: "ollama",
llm_model: data.llmModel,
embedding_model: data.embeddingModel,
});
};
return (
<Dialog open={open} onOpenChange={setOpen}>
<DialogContent className="max-w-2xl">
<FormProvider {...methods}>
<form onSubmit={handleSubmit(onSubmit)} className="grid gap-4">
<DialogHeader className="mb-2">
<DialogTitle className="flex items-center gap-3">
<div className="w-8 h-8 rounded flex items-center justify-center bg-white border">
<OllamaLogo className="text-black" />
</div>
Ollama Setup
</DialogTitle>
</DialogHeader>
<OllamaSettingsForm />
<DialogFooter className="mt-4">
<Button
variant="outline"
type="button"
onClick={() => setOpen(false)}
>
Cancel
</Button>
<Button type="submit" disabled={settingsMutation.isPending}>
{settingsMutation.isPending ? "Saving..." : "Save"}
</Button>
</DialogFooter>
</form>
</FormProvider>
</DialogContent>
</Dialog>
);
};
export default OllamaSettingsDialog;

View file

@@ -0,0 +1,81 @@
import { useFormContext } from "react-hook-form";
import { LabelWrapper } from "@/components/label-wrapper";
import { Input } from "@/components/ui/input";
import { useGetOllamaModelsQuery } from "@/app/api/queries/useGetModelsQuery";
import { useDebouncedValue } from "@/lib/debounce";
import OllamaLogo from "@/components/logo/ollama-logo";
import { ModelSelectors } from "./model-selectors";
export interface OllamaSettingsFormData {
endpoint: string;
llmModel: string;
embeddingModel: string;
}
export function OllamaSettingsForm() {
const {
register,
watch,
formState: { errors, isDirty },
} = useFormContext<OllamaSettingsFormData>();
const endpoint = watch("endpoint");
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
const {
data: modelsData,
isLoading: isLoadingModels,
error: modelsError,
} = useGetOllamaModelsQuery(
{
endpoint: debouncedEndpoint,
},
{
enabled: isDirty && !!debouncedEndpoint,
}
);
const languageModels = modelsData?.language_models || [];
const embeddingModels = modelsData?.embedding_models || [];
const endpointError = modelsError
? "Connection failed. Check your Ollama server URL."
: errors.endpoint?.message;
return (
<div className="space-y-4">
<div className="space-y-2">
<LabelWrapper
label="Ollama Base URL"
helperText="Base URL of your Ollama server"
required
id="endpoint"
>
<Input
{...register("endpoint", {
required: "Ollama base URL is required",
})}
className={endpointError ? "!border-destructive" : ""}
id="endpoint"
type="text"
placeholder="http://localhost:11434"
/>
</LabelWrapper>
{endpointError && (
<p className="text-sm text-destructive">{endpointError}</p>
)}
{isLoadingModels && (
<p className="text-sm text-muted-foreground">
Validating connection...
</p>
)}
</div>
<ModelSelectors
languageModels={languageModels}
embeddingModels={embeddingModels}
isLoadingModels={isLoadingModels}
logo={<OllamaLogo className="w-4 h-4" />}
/>
</div>
);
}

View file

@@ -0,0 +1,116 @@
import OpenAILogo from "@/components/logo/openai-logo";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { FormProvider, useForm } from "react-hook-form";
import { toast } from "sonner";
import {
OpenAISettingsForm,
type OpenAISettingsFormData,
} from "./openai-settings-form";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { useAuth } from "@/contexts/auth-context";
import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation";
const OpenAISettingsDialog = ({
open,
setOpen,
}: {
open: boolean;
setOpen: (open: boolean) => void;
}) => {
const { isAuthenticated, isNoAuthMode } = useAuth();
const { data: settings = {} } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const isOpenAIConfigured = settings.provider?.model_provider === "openai";
const methods = useForm<OpenAISettingsFormData>({
mode: "onSubmit",
defaultValues: {
apiKey: "",
llmModel: isOpenAIConfigured ? settings.agent?.llm_model : "",
embeddingModel: isOpenAIConfigured
? settings.knowledge?.embedding_model
: "",
},
});
const { handleSubmit } = methods;
const settingsMutation = useUpdateSettingsMutation({
onSuccess: () => {
toast.success("OpenAI settings updated successfully");
setOpen(false);
},
onError: (error) => {
toast.error("Failed to update OpenAI settings", {
description: error.message,
});
},
});
const onSubmit = (data: OpenAISettingsFormData) => {
const payload: {
api_key?: string;
model_provider: string;
llm_model: string;
embedding_model: string;
} = {
model_provider: "openai",
llm_model: data.llmModel,
embedding_model: data.embeddingModel,
};
// Only include api_key if a value was entered
if (data.apiKey) {
payload.api_key = data.apiKey;
}
// Submit the update
settingsMutation.mutate(payload);
};
return (
<Dialog open={open} onOpenChange={setOpen}>
<DialogContent className="max-w-2xl">
<FormProvider {...methods}>
<form onSubmit={handleSubmit(onSubmit)} className="grid gap-4">
<DialogHeader className="mb-2">
<DialogTitle className="flex items-center gap-3">
<div className="w-8 h-8 rounded flex items-center justify-center bg-white border">
<OpenAILogo className="text-black" />
</div>
OpenAI Setup
</DialogTitle>
</DialogHeader>
<OpenAISettingsForm isCurrentProvider={isOpenAIConfigured} />
<DialogFooter>
<Button
variant="outline"
type="button"
onClick={() => setOpen(false)}
>
Cancel
</Button>
<Button type="submit" disabled={settingsMutation.isPending}>
{settingsMutation.isPending ? "Saving..." : "Save"}
</Button>
</DialogFooter>
</form>
</FormProvider>
</DialogContent>
</Dialog>
);
};
export default OpenAISettingsDialog;

View file

@@ -0,0 +1,129 @@
import { useEffect, useState } from "react";
import { useFormContext } from "react-hook-form";
import { LabelWrapper } from "@/components/label-wrapper";
import { Input } from "@/components/ui/input";
import { Switch } from "@/components/ui/switch";
import { useGetOpenAIModelsQuery } from "@/app/api/queries/useGetModelsQuery";
import { useDebouncedValue } from "@/lib/debounce";
import { AnimatedConditional } from "@/components/animated-conditional";
import OpenAILogo from "@/components/logo/openai-logo";
import { ModelSelectors } from "./model-selectors";
export interface OpenAISettingsFormData {
apiKey: string;
llmModel: string;
embeddingModel: string;
}
export function OpenAISettingsForm({
isCurrentProvider = false,
}: {
isCurrentProvider: boolean;
}) {
const [useExistingKey, setUseExistingKey] = useState(true);
const {
register,
watch,
setValue,
clearErrors,
formState: { errors },
} = useFormContext<OpenAISettingsFormData>();
const apiKey = watch("apiKey");
const debouncedApiKey = useDebouncedValue(apiKey, 500);
// Handle switch change
const handleUseExistingKeyChange = (checked: boolean) => {
setUseExistingKey(checked);
if (checked) {
// Clear the API key field when using existing key
setValue("apiKey", "");
}
};
// Clear form errors when useExistingKey changes
useEffect(() => {
clearErrors("apiKey");
}, [useExistingKey, clearErrors]);
const shouldFetchModels = isCurrentProvider
? useExistingKey
? true
: !!debouncedApiKey
: !!debouncedApiKey;
const {
data: modelsData,
isLoading: isLoadingModels,
error: modelsError,
} = useGetOpenAIModelsQuery(
{
apiKey: useExistingKey ? "" : debouncedApiKey,
},
{
enabled: shouldFetchModels,
}
);
const languageModels = modelsData?.language_models || [];
const embeddingModels = modelsData?.embedding_models || [];
const apiKeyError = modelsError
? "Invalid OpenAI API key. Verify or replace the key."
: errors.apiKey?.message;
return (
<div className="space-y-4">
<div className="space-y-2">
{isCurrentProvider && (
<LabelWrapper
label="Use existing OpenAI API key"
id="use-existing-key"
description="Reuse the key from your environment config. Turn off to enter a different key."
flex
>
<Switch
checked={useExistingKey}
onCheckedChange={handleUseExistingKeyChange}
/>
</LabelWrapper>
)}
<AnimatedConditional
isOpen={!useExistingKey}
duration={0.2}
vertical
className={!useExistingKey ? "!mt-4" : "!mt-0"}
>
<LabelWrapper
label="OpenAI API key"
helperText="The API key for your OpenAI account"
required
id="api-key"
>
<Input
{...register("apiKey", {
required: !useExistingKey ? "API key is required" : false,
})}
className={apiKeyError ? "!border-destructive" : ""}
id="api-key"
type="password"
placeholder="sk-..."
/>
</LabelWrapper>
</AnimatedConditional>
{apiKeyError && (
<p className="text-sm text-destructive">{apiKeyError}</p>
)}
{isLoadingModels && (
<p className="text-sm text-muted-foreground">Validating API key...</p>
)}
</div>
<ModelSelectors
languageModels={languageModels}
embeddingModels={embeddingModels}
isLoadingModels={isLoadingModels}
logo={<OpenAILogo className="w-4 h-4" />}
/>
</div>
);
}

View file

@@ -0,0 +1,124 @@
import IBMLogo from "@/components/logo/ibm-logo";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { FormProvider, useForm } from "react-hook-form";
import { toast } from "sonner";
import {
WatsonxSettingsForm,
type WatsonxSettingsFormData,
} from "./watsonx-settings-form";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { useAuth } from "@/contexts/auth-context";
import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation";
const WatsonxSettingsDialog = ({
open,
setOpen,
}: {
open: boolean;
setOpen: (open: boolean) => void;
}) => {
const { isAuthenticated, isNoAuthMode } = useAuth();
const { data: settings = {} } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const isWatsonxConfigured = settings.provider?.model_provider === "watsonx";
const methods = useForm<WatsonxSettingsFormData>({
mode: "onSubmit",
defaultValues: {
endpoint: isWatsonxConfigured
? settings.provider?.endpoint
: "https://us-south.ml.cloud.ibm.com",
apiKey: "",
projectId: isWatsonxConfigured ? settings.provider?.project_id : "",
llmModel: isWatsonxConfigured ? settings.agent?.llm_model : "",
embeddingModel: isWatsonxConfigured
? settings.knowledge?.embedding_model
: "",
},
});
const { handleSubmit } = methods;
const settingsMutation = useUpdateSettingsMutation({
onSuccess: () => {
toast.success("watsonx settings updated successfully");
setOpen(false);
},
onError: (error) => {
toast.error("Failed to update watsonx settings", {
description: error.message,
});
},
});
const onSubmit = (data: WatsonxSettingsFormData) => {
const payload: {
endpoint: string;
api_key?: string;
project_id: string;
model_provider: string;
llm_model: string;
embedding_model: string;
} = {
endpoint: data.endpoint,
project_id: data.projectId,
model_provider: "watsonx",
llm_model: data.llmModel,
embedding_model: data.embeddingModel,
};
// Only include api_key if a value was entered
if (data.apiKey) {
payload.api_key = data.apiKey;
}
// Submit the update
settingsMutation.mutate(payload);
};
return (
<Dialog open={open} onOpenChange={setOpen}>
<DialogContent autoFocus={false} className="max-w-2xl">
<FormProvider {...methods}>
<form onSubmit={handleSubmit(onSubmit)} className="grid gap-4">
<DialogHeader className="mb-2">
<DialogTitle className="flex items-center gap-3">
<div className="w-8 h-8 rounded flex items-center justify-center bg-white border">
<IBMLogo className="text-black" />
</div>
IBM watsonx.ai Setup
</DialogTitle>
</DialogHeader>
<WatsonxSettingsForm isCurrentProvider={isWatsonxConfigured} />
<DialogFooter>
<Button
variant="outline"
type="button"
onClick={() => setOpen(false)}
>
Cancel
</Button>
<Button type="submit" disabled={settingsMutation.isPending}>
{settingsMutation.isPending ? "Saving..." : "Save"}
</Button>
</DialogFooter>
</form>
</FormProvider>
</DialogContent>
</Dialog>
);
};
export default WatsonxSettingsDialog;

View file

@@ -0,0 +1,229 @@
import { useEffect, useState } from "react";
import { useFormContext, Controller } from "react-hook-form";
import { LabelWrapper } from "@/components/label-wrapper";
import { Input } from "@/components/ui/input";
import { Switch } from "@/components/ui/switch";
import { useGetIBMModelsQuery } from "@/app/api/queries/useGetModelsQuery";
import { useDebouncedValue } from "@/lib/debounce";
import { AnimatedConditional } from "@/components/animated-conditional";
import IBMLogo from "@/components/logo/ibm-logo";
import { ModelSelectors } from "./model-selectors";
import { ModelSelector } from "@/app/onboarding/components/model-selector";
export interface WatsonxSettingsFormData {
endpoint: string;
apiKey: string;
projectId: string;
llmModel: string;
embeddingModel: string;
}
const endpointOptions = [
{
value: "https://us-south.ml.cloud.ibm.com",
label: "https://us-south.ml.cloud.ibm.com",
},
{
value: "https://eu-de.ml.cloud.ibm.com",
label: "https://eu-de.ml.cloud.ibm.com",
},
{
value: "https://eu-gb.ml.cloud.ibm.com",
label: "https://eu-gb.ml.cloud.ibm.com",
},
{
value: "https://au-syd.ml.cloud.ibm.com",
label: "https://au-syd.ml.cloud.ibm.com",
},
{
value: "https://jp-tok.ml.cloud.ibm.com",
label: "https://jp-tok.ml.cloud.ibm.com",
},
{
value: "https://ca-tor.ml.cloud.ibm.com",
label: "https://ca-tor.ml.cloud.ibm.com",
},
];
export function WatsonxSettingsForm({
isCurrentProvider = false,
}: {
isCurrentProvider: boolean;
}) {
const [useExistingKey, setUseExistingKey] = useState(true);
const {
control,
register,
watch,
setValue,
clearErrors,
formState: { errors },
} = useFormContext<WatsonxSettingsFormData>();
const endpoint = watch("endpoint");
const apiKey = watch("apiKey");
const projectId = watch("projectId");
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
const debouncedApiKey = useDebouncedValue(apiKey, 500);
const debouncedProjectId = useDebouncedValue(projectId, 500);
// Handle switch change
const handleUseExistingKeyChange = (checked: boolean) => {
setUseExistingKey(checked);
if (checked) {
// Clear the API key field when using existing key
setValue("apiKey", "");
}
};
// Clear form errors when useExistingKey changes
useEffect(() => {
clearErrors("apiKey");
}, [useExistingKey, clearErrors]);
const shouldFetchModels = isCurrentProvider
? useExistingKey
? !!debouncedEndpoint && !!debouncedProjectId
: !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId
: !!debouncedEndpoint && !!debouncedProjectId && !!debouncedApiKey;
const {
data: modelsData,
isLoading: isLoadingModels,
error: modelsError,
} = useGetIBMModelsQuery(
{
endpoint: debouncedEndpoint,
apiKey: useExistingKey ? "" : debouncedApiKey,
projectId: debouncedProjectId,
},
{
enabled: shouldFetchModels,
}
);
const languageModels = modelsData?.language_models || [];
const embeddingModels = modelsData?.embedding_models || [];
return (
<div className="space-y-4">
<div className="space-y-2">
<LabelWrapper
label="watsonx.ai API Endpoint"
helperText="Base URL of the API"
id="api-endpoint"
required
>
<Controller
control={control}
name="endpoint"
rules={{ required: "API endpoint is required" }}
render={({ field }) => (
<ModelSelector
options={endpointOptions.map((option) => ({
value: option.value,
label: option.label,
}))}
value={field.value}
custom
onValueChange={field.onChange}
searchPlaceholder="Search endpoint..."
noOptionsPlaceholder="No endpoints available"
placeholder="Select endpoint..."
hasError={!!errors.endpoint || !!modelsError}
/>
)}
/>
</LabelWrapper>
{errors.endpoint && (
<p className="text-sm text-destructive">{errors.endpoint.message}</p>
)}
</div>
<div className="space-y-2">
<LabelWrapper
label="watsonx Project ID"
helperText="Project ID for the model"
required
id="project-id"
>
<Input
{...register("projectId", {
required: "Project ID is required",
})}
className={
errors.projectId || modelsError ? "!border-destructive" : ""
}
id="project-id"
type="text"
placeholder="your-project-id"
/>
</LabelWrapper>
{errors.projectId && (
<p className="text-sm text-destructive">{errors.projectId.message}</p>
)}
</div>
<div className={useExistingKey ? "space-y-3" : "space-y-2"}>
{isCurrentProvider && (
<LabelWrapper
label="Use existing watsonx API key"
id="use-existing-key"
description="Reuse the key from your environment config. Turn off to enter a different key."
flex
>
<Switch
checked={useExistingKey}
onCheckedChange={handleUseExistingKeyChange}
/>
</LabelWrapper>
)}
<AnimatedConditional
isOpen={!useExistingKey}
duration={0.2}
vertical
className={!useExistingKey ? "!mt-4" : "!mt-0"}
>
<LabelWrapper
label="watsonx API key"
helperText="API key to access watsonx.ai"
required
id="api-key"
>
<Input
{...register("apiKey", {
required: !useExistingKey ? "API key is required" : false,
})}
className={
errors.apiKey || modelsError ? "!border-destructive" : ""
}
id="api-key"
type="password"
placeholder="your-api-key"
/>
</LabelWrapper>
{errors.apiKey && (
<p className="text-sm text-destructive mt-2">
{errors.apiKey.message}
</p>
)}
</AnimatedConditional>
{isLoadingModels && (
<p className="text-sm text-muted-foreground">
Validating configuration...
</p>
)}
{modelsError && (
<p className="text-sm text-destructive">
Connection failed. Check your configuration.
</p>
)}
</div>
<ModelSelectors
languageModels={languageModels}
embeddingModels={embeddingModels}
isLoadingModels={isLoadingModels}
logo={<IBMLogo className="w-4 h-4 text-[#1063FE]" />}
/>
</div>
);
}

View file

@@ -4,7 +4,6 @@ import { ArrowUpRight, Loader2, Minus, PlugZap, Plus } from "lucide-react";
import Link from "next/link";
import { useRouter, useSearchParams } from "next/navigation";
import { Suspense, useCallback, useEffect, useState } from "react";
import { useUpdateFlowSettingMutation } from "@/app/api/mutations/useUpdateFlowSettingMutation";
import {
useGetIBMModelsQuery,
useGetOllamaModelsQuery,
@@ -53,6 +52,8 @@ import { ModelSelectItems } from "./helpers/model-select-item";
import GoogleDriveIcon from "./icons/google-drive-icon";
import OneDriveIcon from "./icons/one-drive-icon";
import SharePointIcon from "./icons/share-point-icon";
import ModelProviders from "./components/model-providers";
import { useUpdateSettingsMutation } from "../api/mutations/useUpdateSettingsMutation";
const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS;
@@ -138,7 +139,9 @@ function KnowledgeSourcesPage() {
// Fetch available models based on provider
const { data: openaiModelsData } = useGetOpenAIModelsQuery(
undefined, // Let backend use stored API key from configuration
{
apiKey: ""
},
{
enabled:
(isAuthenticated || isNoAuthMode) && currentProvider === "openai",
@@ -146,7 +149,9 @@ function KnowledgeSourcesPage() {
);
const { data: ollamaModelsData } = useGetOllamaModelsQuery(
undefined, // No params for now, could be extended later
{
endpoint: settings.provider?.endpoint,
},
{
enabled:
(isAuthenticated || isNoAuthMode) && currentProvider === "ollama",
@@ -154,7 +159,11 @@ function KnowledgeSourcesPage() {
);
const { data: ibmModelsData } = useGetIBMModelsQuery(
undefined, // No params for now, could be extended later
{
endpoint: settings.provider?.endpoint,
apiKey: "",
projectId: settings.provider?.project_id,
},
{
enabled:
(isAuthenticated || isNoAuthMode) && currentProvider === "watsonx",
@@ -172,7 +181,7 @@ function KnowledgeSourcesPage() {
: openaiModelsData; // fallback to openai
// Mutations
const updateFlowSettingMutation = useUpdateFlowSettingMutation({
const updateSettingsMutation = useUpdateSettingsMutation({
onSuccess: () => {
console.log("Setting updated successfully");
},
@@ -183,8 +192,8 @@ function KnowledgeSourcesPage() {
// Debounced update function
const debouncedUpdate = useDebounce(
(variables: Parameters<typeof updateFlowSettingMutation.mutate>[0]) => {
updateFlowSettingMutation.mutate(variables);
(variables: Parameters<typeof updateSettingsMutation.mutate>[0]) => {
updateSettingsMutation.mutate(variables);
},
500
);
@@ -230,20 +239,20 @@ function KnowledgeSourcesPage() {
// Update model selection immediately
const handleModelChange = (newModel: string) => {
updateFlowSettingMutation.mutate({ llm_model: newModel });
updateSettingsMutation.mutate({ llm_model: newModel });
};
// Update system prompt with save button
const handleSystemPromptSave = () => {
updateFlowSettingMutation.mutate({ system_prompt: systemPrompt });
updateSettingsMutation.mutate({ system_prompt: systemPrompt });
};
// Update embedding model selection immediately
const handleEmbeddingModelChange = (newModel: string) => {
updateFlowSettingMutation.mutate({ embedding_model: newModel });
updateSettingsMutation.mutate({ embedding_model: newModel });
};
const isEmbeddingModelSelectDisabled = updateFlowSettingMutation.isPending;
const isEmbeddingModelSelectDisabled = updateSettingsMutation.isPending;
// Update chunk size setting with debounce
const handleChunkSizeChange = (value: string) => {
@@ -262,17 +271,17 @@ function KnowledgeSourcesPage() {
// Update docling settings
const handleTableStructureChange = (checked: boolean) => {
setTableStructure(checked);
updateFlowSettingMutation.mutate({ table_structure: checked });
updateSettingsMutation.mutate({ table_structure: checked });
};
const handleOcrChange = (checked: boolean) => {
setOcr(checked);
updateFlowSettingMutation.mutate({ ocr: checked });
updateSettingsMutation.mutate({ ocr: checked });
};
const handlePictureDescriptionsChange = (checked: boolean) => {
setPictureDescriptions(checked);
updateFlowSettingMutation.mutate({ picture_descriptions: checked });
updateSettingsMutation.mutate({ picture_descriptions: checked });
};
// Helper function to get connector icon
@@ -715,7 +724,7 @@ function KnowledgeSourcesPage() {
<div
className={`w-8 h-8 ${
connector ? "bg-white" : "bg-muted grayscale"
} rounded flex items-center justify-center`}
} rounded flex items-center justify-center border`}
>
{connector.icon}
</div>
@@ -738,6 +747,7 @@ function KnowledgeSourcesPage() {
{connector?.status === "connected" ? (
<>
<Button
variant="outline"
onClick={() => navigateToKnowledgePage(connector)}
disabled={isSyncing === connector.id}
className="w-full cursor-pointer"
@@ -804,6 +814,17 @@ function KnowledgeSourcesPage() {
})}
</div>
</div>
{/* Model Providers Section */}
<div className="space-y-6">
<div>
<h2 className="text-lg font-semibold tracking-tight mb-2">
Model Providers
</h2>
</div>
<ModelProviders />
</div>
{/* Agent Behavior Section */}
<Card>
<CardHeader>
@@ -872,7 +893,8 @@ function KnowledgeSourcesPage() {
</div>
</div>
<CardDescription>
This Agent retrieves from your knowledge and generates chat responses. Edit in Langflow for full control.
This Agent retrieves from your knowledge and generates chat
responses. Edit in Langflow for full control.
</CardDescription>
</CardHeader>
<CardContent>
@@ -928,14 +950,14 @@ function KnowledgeSourcesPage() {
<Button
onClick={handleSystemPromptSave}
disabled={
updateFlowSettingMutation.isPending ||
updateSettingsMutation.isPending ||
systemPrompt.length > MAX_SYSTEM_PROMPT_CHARS
}
className="min-w-[120px]"
size="sm"
variant="outline"
>
{updateFlowSettingMutation.isPending ? (
{updateSettingsMutation.isPending ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Saving...
@@ -953,9 +975,7 @@ function KnowledgeSourcesPage() {
<Card>
<CardHeader>
<div className="flex items-center justify-between mb-3">
<CardTitle className="text-lg">
Knowledge Ingest
</CardTitle>
<CardTitle className="text-lg">Knowledge Ingest</CardTitle>
<div className="flex gap-2">
<ConfirmationDialog
trigger={
@@ -1019,7 +1039,8 @@ function KnowledgeSourcesPage() {
</div>
</div>
<CardDescription>
Configure how files are ingested and stored for retrieval. Edit in Langflow for full control.
Configure how files are ingested and stored for retrieval. Edit in
Langflow for full control.
</CardDescription>
</CardHeader>
<CardContent>

View file

@@ -2,52 +2,54 @@ import { motion } from "framer-motion";
import { ANIMATION_DURATION } from "@/lib/constants";
export const AnimatedConditional = ({
children,
isOpen,
className,
slide = false,
delay,
vertical = false,
children,
isOpen,
className,
slide = false,
delay,
duration,
vertical = false,
}: {
children: React.ReactNode;
isOpen: boolean;
className?: string;
delay?: number;
vertical?: boolean;
slide?: boolean;
children: React.ReactNode;
isOpen: boolean;
className?: string;
delay?: number;
duration?: number;
vertical?: boolean;
slide?: boolean;
}) => {
const animationProperty = slide
? vertical
? "translateY"
: "translateX"
: vertical
? "height"
: "width";
const animationValue = isOpen
? slide
? "0px"
: "auto"
: slide
? "-100%"
: "0px";
const animationProperty = slide
? vertical
? "translateY"
: "translateX"
: vertical
? "height"
: "width";
const animationValue = isOpen
? slide
? "0px"
: "auto"
: slide
? "-100%"
: "0px";
return (
<motion.div
initial={{ [animationProperty]: animationValue }}
animate={{ [animationProperty]: animationValue }}
exit={{ [animationProperty]: 0 }}
transition={{
duration: ANIMATION_DURATION,
ease: "easeOut",
delay: delay,
}}
style={{
overflow: "hidden",
whiteSpace: vertical ? "normal" : "nowrap",
}}
className={className}
>
{children}
</motion.div>
);
return (
<motion.div
initial={{ [animationProperty]: animationValue }}
animate={{ [animationProperty]: animationValue }}
exit={{ [animationProperty]: 0 }}
transition={{
duration: duration ?? ANIMATION_DURATION,
ease: "easeOut",
delay: delay,
}}
style={{
overflow: "hidden",
whiteSpace: vertical ? "normal" : "nowrap",
}}
className={className}
>
{children}
</motion.div>
);
};

View file

@@ -63,6 +63,8 @@ async def get_settings(request, session_manager):
# OpenRAG configuration
"provider": {
"model_provider": provider_config.model_provider,
"endpoint": provider_config.endpoint if provider_config.endpoint else None,
"project_id": provider_config.project_id if provider_config.project_id else None,
# Note: API key is not exposed for security
},
"knowledge": {
@@ -183,6 +185,10 @@ async def update_settings(request, session_manager):
"ocr",
"picture_descriptions",
"embedding_model",
"model_provider",
"api_key",
"endpoint",
"project_id",
}
# Check for invalid fields
@@ -396,22 +402,117 @@ async def update_settings(request, session_manager):
# Don't fail the entire settings update if flow update fails
# The config will still be saved
# Update provider settings
if "model_provider" in body:
if (
not isinstance(body["model_provider"], str)
or not body["model_provider"].strip()
):
return JSONResponse(
{"error": "model_provider must be a non-empty string"},
status_code=400,
)
current_config.provider.model_provider = body["model_provider"].strip()
config_updated = True
if "api_key" in body:
if not isinstance(body["api_key"], str):
return JSONResponse(
{"error": "api_key must be a string"}, status_code=400
)
# Only update if non-empty string (empty string means keep current value)
if body["api_key"].strip():
current_config.provider.api_key = body["api_key"]
config_updated = True
if "endpoint" in body:
if not isinstance(body["endpoint"], str) or not body["endpoint"].strip():
return JSONResponse(
{"error": "endpoint must be a non-empty string"}, status_code=400
)
current_config.provider.endpoint = body["endpoint"].strip()
config_updated = True
if "project_id" in body:
if (
not isinstance(body["project_id"], str)
or not body["project_id"].strip()
):
return JSONResponse(
{"error": "project_id must be a non-empty string"}, status_code=400
)
current_config.provider.project_id = body["project_id"].strip()
config_updated = True
if not config_updated:
return JSONResponse(
{"error": "No valid fields provided for update"}, status_code=400
)
# Save the updated configuration
if config_manager.save_config_file(current_config):
logger.info(
"Configuration updated successfully", updated_fields=list(body.keys())
)
return JSONResponse({"message": "Configuration updated successfully"})
else:
if not config_manager.save_config_file(current_config):
return JSONResponse(
{"error": "Failed to save configuration"}, status_code=500
)
# Update Langflow global variables if provider settings changed
if any(key in body for key in ["model_provider", "api_key", "endpoint", "project_id"]):
try:
provider = current_config.provider.model_provider.lower() if current_config.provider.model_provider else "openai"
# Set API key for IBM/Watson providers
if (provider == "watsonx") and "api_key" in body:
api_key = body["api_key"]
await clients._create_langflow_global_variable(
"WATSONX_API_KEY", api_key, modify=True
)
logger.info("Set WATSONX_API_KEY global variable in Langflow")
# Set project ID for IBM/Watson providers
if (provider == "watsonx") and "project_id" in body:
project_id = body["project_id"]
await clients._create_langflow_global_variable(
"WATSONX_PROJECT_ID", project_id, modify=True
)
logger.info("Set WATSONX_PROJECT_ID global variable in Langflow")
# Set API key for OpenAI provider
if provider == "openai" and "api_key" in body:
api_key = body["api_key"]
await clients._create_langflow_global_variable(
"OPENAI_API_KEY", api_key, modify=True
)
logger.info("Set OPENAI_API_KEY global variable in Langflow")
# Set base URL for Ollama provider
if provider == "ollama" and "endpoint" in body:
endpoint = transform_localhost_url(body["endpoint"])
await clients._create_langflow_global_variable(
"OLLAMA_BASE_URL", endpoint, modify=True
)
logger.info("Set OLLAMA_BASE_URL global variable in Langflow")
# Update model values across flows if provider changed
if "model_provider" in body:
flows_service = _get_flows_service()
await flows_service.change_langflow_model_value(
provider,
current_config.knowledge.embedding_model,
current_config.agent.llm_model,
current_config.provider.endpoint,
)
logger.info(f"Successfully updated Langflow flows for provider {provider}")
except Exception as e:
logger.error(f"Failed to update Langflow settings: {str(e)}")
# Don't fail the entire settings update if Langflow update fails
# The config was still saved
logger.info(
"Configuration updated successfully", updated_fields=list(body.keys())
)
return JSONResponse({"message": "Configuration updated successfully"})
except Exception as e:
logger.error("Failed to update settings", error=str(e))
return JSONResponse(