From 2d31c4b9b069e722cdb469066a50331265505098 Mon Sep 17 00:00:00 2001 From: Cole Goldsmith Date: Fri, 31 Oct 2025 13:22:51 -0500 Subject: [PATCH] Feat/278 Edit current model provider settings (#307) * update settings update api to allow changing model provider config * use react hook form * make settings page small width * re-use the onboarding forms instead of rolling a custom one * issue * remove test * make custom forms with react-hook-form * replace the updateFlow mutation with updateSettings * show all the model providers * revert changes to onboarding forms * disabled state styles for providers * break model selectors into their own file * use existing selector component, use settings endpoint instead of onboarding, clean up form styles * revert changes to openai onboarding * small form changes --- frontend/package-lock.json | 16 ++ frontend/package.json | 1 + .../mutations/useUpdateFlowSettingMutation.ts | 61 ----- .../mutations/useUpdateSettingsMutation.ts | 72 ++++++ .../src/app/api/queries/useGetModelsQuery.ts | 5 - .../app/api/queries/useGetSettingsQuery.ts | 3 + .../onboarding/components/ibm-onboarding.tsx | 15 +- .../onboarding/components/model-selector.tsx | 4 +- .../settings/components/model-providers.tsx | 156 ++++++++++++ .../settings/components/model-selectors.tsx | 122 ++++++++++ .../components/ollama-settings-dialog.tsx | 105 ++++++++ .../components/ollama-settings-form.tsx | 81 +++++++ .../components/openai-settings-dialog.tsx | 116 +++++++++ .../components/openai-settings-form.tsx | 129 ++++++++++ .../components/watsonx-settings-dialog.tsx | 124 ++++++++++ .../components/watsonx-settings-form.tsx | 229 ++++++++++++++++++ frontend/src/app/settings/page.tsx | 65 +++-- .../src/components/animated-conditional.tsx | 92 +++---- src/api/settings.py | 113 ++++++++- 19 files changed, 1362 insertions(+), 147 deletions(-) delete mode 100644 frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts create mode 100644 frontend/src/app/api/mutations/useUpdateSettingsMutation.ts create mode 100644 frontend/src/app/settings/components/model-providers.tsx create mode 100644 frontend/src/app/settings/components/model-selectors.tsx create mode 100644 frontend/src/app/settings/components/ollama-settings-dialog.tsx create mode 100644 frontend/src/app/settings/components/ollama-settings-form.tsx create mode 100644 frontend/src/app/settings/components/openai-settings-dialog.tsx create mode 100644 frontend/src/app/settings/components/openai-settings-form.tsx create mode 100644 frontend/src/app/settings/components/watsonx-settings-dialog.tsx create mode 100644 frontend/src/app/settings/components/watsonx-settings-form.tsx diff --git a/frontend/package-lock.json b/frontend/package-lock.json index c724fde9..d890e136 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -42,6 +42,7 @@ "next-themes": "^0.4.6", "react": "^19.0.0", "react-dom": "^19.0.0", + "react-hook-form": "^7.65.0", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", "react-syntax-highlighter": "^15.6.1", @@ -8348,6 +8349,21 @@ "react": "^19.1.1" } }, + "node_modules/react-hook-form": { + "version": "7.65.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.65.0.tgz", + "integrity": "sha512-xtOzDz063WcXvGWaHgLNrNzlsdFgtUWcb32E6WFaGTd7kPZG3EeDusjdZfUsPwKCKVXy1ZlntifaHZ4l8pAsmw==", + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/react-hook-form" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17 
|| ^18 || ^19" + } + }, "node_modules/react-icons": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index fd6fc0cb..517e36da 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -43,6 +43,7 @@ "next-themes": "^0.4.6", "react": "^19.0.0", "react-dom": "^19.0.0", + "react-hook-form": "^7.65.0", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", "react-syntax-highlighter": "^15.6.1", diff --git a/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts b/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts deleted file mode 100644 index e789af48..00000000 --- a/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { - type UseMutationOptions, - useMutation, - useQueryClient, -} from "@tanstack/react-query"; - -interface UpdateFlowSettingVariables { - llm_model?: string; - system_prompt?: string; - embedding_model?: string; - table_structure?: boolean; - ocr?: boolean; - picture_descriptions?: boolean; - chunk_size?: number; - chunk_overlap?: number; -} - -interface UpdateFlowSettingResponse { - message: string; -} - -export const useUpdateFlowSettingMutation = ( - options?: Omit< - UseMutationOptions< - UpdateFlowSettingResponse, - Error, - UpdateFlowSettingVariables - >, - "mutationFn" - >, -) => { - const queryClient = useQueryClient(); - - async function updateFlowSetting( - variables: UpdateFlowSettingVariables, - ): Promise { - const response = await fetch("/api/settings", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(variables), - }); - - if (!response.ok) { - const error = await response.json(); - throw new Error(error.error || "Failed to update settings"); - } - - return response.json(); - } - - return useMutation({ - mutationFn: updateFlowSetting, - onSettled: () => { - // Invalidate settings query to refetch updated data - queryClient.invalidateQueries({ queryKey: ["settings"] }); - }, - ...options, - }); -}; diff --git a/frontend/src/app/api/mutations/useUpdateSettingsMutation.ts b/frontend/src/app/api/mutations/useUpdateSettingsMutation.ts new file mode 100644 index 00000000..6f75f4cb --- /dev/null +++ b/frontend/src/app/api/mutations/useUpdateSettingsMutation.ts @@ -0,0 +1,72 @@ +import { + type UseMutationOptions, + useMutation, + useQueryClient, +} from "@tanstack/react-query"; +import type { Settings } from "../queries/useGetSettingsQuery"; + +export interface UpdateSettingsRequest { + // Agent settings + llm_model?: string; + system_prompt?: string; + + // Knowledge settings + chunk_size?: number; + chunk_overlap?: number; + table_structure?: boolean; + ocr?: boolean; + picture_descriptions?: boolean; + embedding_model?: string; + + // Provider settings + model_provider?: string; + api_key?: string; + endpoint?: string; + project_id?: string; +} + +export interface UpdateSettingsResponse { + message: string; + settings: Settings; +} + +export const useUpdateSettingsMutation = ( + options?: Omit< + UseMutationOptions, + "mutationFn" + > +) => { + const queryClient = useQueryClient(); + + async function updateSettings( + variables: UpdateSettingsRequest + ): Promise { + const response = await fetch("/api/settings", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(variables), + }); + + if (!response.ok) { + const errorData = await response.json().catch(() => ({})); + throw new 
Error(errorData.error || "Failed to update settings"); + } + + return response.json(); + } + + return useMutation({ + mutationFn: updateSettings, + onSuccess: (...args) => { + queryClient.invalidateQueries({ + queryKey: ["settings"], + refetchType: "all" + }); + options?.onSuccess?.(...args); + }, + onError: options?.onError, + onSettled: options?.onSettled, + }); +}; diff --git a/frontend/src/app/api/queries/useGetModelsQuery.ts b/frontend/src/app/api/queries/useGetModelsQuery.ts index 3a5eb77e..bd175c86 100644 --- a/frontend/src/app/api/queries/useGetModelsQuery.ts +++ b/frontend/src/app/api/queries/useGetModelsQuery.ts @@ -53,8 +53,6 @@ export const useGetOpenAIModelsQuery = ( { queryKey: ["models", "openai", params], queryFn: getOpenAIModels, - retry: 2, - enabled: !!params?.apiKey, staleTime: 0, // Always fetch fresh data gcTime: 0, // Don't cache results ...options, @@ -89,7 +87,6 @@ export const useGetOllamaModelsQuery = ( { queryKey: ["models", "ollama", params], queryFn: getOllamaModels, - retry: 2, staleTime: 0, // Always fetch fresh data gcTime: 0, // Don't cache results ...options, @@ -130,8 +127,6 @@ export const useGetIBMModelsQuery = ( { queryKey: ["models", "ibm", params], queryFn: getIBMModels, - retry: 2, - enabled: !!params?.endpoint && !!params?.apiKey && !!params?.projectId, // Only run if all required params are provided staleTime: 0, // Always fetch fresh data gcTime: 0, // Don't cache results ...options, diff --git a/frontend/src/app/api/queries/useGetSettingsQuery.ts b/frontend/src/app/api/queries/useGetSettingsQuery.ts index 0f090299..2e21d22d 100644 --- a/frontend/src/app/api/queries/useGetSettingsQuery.ts +++ b/frontend/src/app/api/queries/useGetSettingsQuery.ts @@ -26,6 +26,9 @@ export interface Settings { edited?: boolean; provider?: { model_provider?: string; + // Note: api_key is never returned by the backend for security reasons + endpoint?: string; + project_id?: string; }; knowledge?: KnowledgeSettings; agent?: AgentSettings; diff --git a/frontend/src/app/onboarding/components/ibm-onboarding.tsx b/frontend/src/app/onboarding/components/ibm-onboarding.tsx index 272ab1b2..a4aec862 100644 --- a/frontend/src/app/onboarding/components/ibm-onboarding.tsx +++ b/frontend/src/app/onboarding/components/ibm-onboarding.tsx @@ -67,13 +67,14 @@ export function IBMOnboarding({ isLoading: isLoadingModels, error: modelsError, } = useGetIBMModelsQuery( - debouncedEndpoint && debouncedApiKey && debouncedProjectId - ? 
{ - endpoint: debouncedEndpoint, - apiKey: debouncedApiKey, - projectId: debouncedProjectId, - } - : undefined, + { + endpoint: debouncedEndpoint, + apiKey: debouncedApiKey, + projectId: debouncedProjectId, + }, + { + enabled: !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId, + } ); // Use custom hook for model selection logic diff --git a/frontend/src/app/onboarding/components/model-selector.tsx b/frontend/src/app/onboarding/components/model-selector.tsx index c5f9454c..e6f1c072 100644 --- a/frontend/src/app/onboarding/components/model-selector.tsx +++ b/frontend/src/app/onboarding/components/model-selector.tsx @@ -26,6 +26,7 @@ export function ModelSelector({ searchPlaceholder = "Search model...", noOptionsPlaceholder = "No models available", custom = false, + hasError = false, }: { options: { value: string; @@ -39,6 +40,7 @@ export function ModelSelector({ noOptionsPlaceholder?: string; custom?: boolean; onValueChange: (value: string) => void; + hasError?: boolean; }) { const [open, setOpen] = useState(false); const [searchValue, setSearchValue] = useState(""); @@ -57,7 +59,7 @@ export function ModelSelector({ role="combobox" disabled={options.length === 0} aria-expanded={open} - className="w-full gap-2 justify-between font-normal text-sm" + className={cn("w-full gap-2 justify-between font-normal text-sm", hasError && "!border-destructive")} > {value ? (
diff --git a/frontend/src/app/settings/components/model-providers.tsx b/frontend/src/app/settings/components/model-providers.tsx new file mode 100644 index 00000000..c6231d8d --- /dev/null +++ b/frontend/src/app/settings/components/model-providers.tsx @@ -0,0 +1,156 @@ +import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; +import { Button } from "@/components/ui/button"; +import { Card, CardHeader, CardTitle, CardContent } from "@/components/ui/card"; +import { ModelProvider } from "../helpers/model-helpers"; +import OpenAILogo from "@/components/logo/openai-logo"; +import IBMLogo from "@/components/logo/ibm-logo"; +import OllamaLogo from "@/components/logo/ollama-logo"; +import { useAuth } from "@/contexts/auth-context"; +import { ReactNode, useState } from "react"; + +import OpenAISettingsDialog from "./openai-settings-dialog"; +import OllamaSettingsDialog from "./ollama-settings-dialog"; +import WatsonxSettingsDialog from "./watsonx-settings-dialog"; +import { cn } from "@/lib/utils"; +import Link from "next/link"; + +export const ModelProviders = () => { + const { isAuthenticated, isNoAuthMode } = useAuth(); + + const { data: settings = {} } = useGetSettingsQuery({ + enabled: isAuthenticated || isNoAuthMode, + }); + + const [dialogOpen, setDialogOpen] = useState(); + + const modelProvidersMap: Record< + ModelProvider, + { + name: string; + logo: (props: React.SVGProps) => ReactNode; + logoColor: string; + logoBgColor: string; + } + > = { + openai: { + name: "OpenAI", + logo: OpenAILogo, + logoColor: "text-black", + logoBgColor: "bg-white", + }, + ollama: { + name: "Ollama", + logo: OllamaLogo, + logoColor: "text-black", + logoBgColor: "bg-white", + }, + watsonx: { + name: "IBM watsonx.ai", + logo: IBMLogo, + logoColor: "text-white", + logoBgColor: "bg-[#1063FE]", + }, + }; + + const currentProviderKey = + (settings.provider?.model_provider as ModelProvider) || "openai"; + + // Get all provider keys with active provider first + const allProviderKeys: ModelProvider[] = ["openai", "ollama", "watsonx"]; + const sortedProviderKeys = [ + currentProviderKey, + ...allProviderKeys.filter((key) => key !== currentProviderKey), + ]; + + return ( + <> +
+ {sortedProviderKeys.map((providerKey) => { + const { + name, + logo: Logo, + logoColor, + logoBgColor, + } = modelProvidersMap[providerKey]; + const isActive = providerKey === currentProviderKey; + + return ( + + +
+
+
+
+ { + + } +
+
+ + {name} + {isActive && ( +
+ )} + +
+
+ + + {isActive ? ( + + ) : ( +

+ See{" "} + + Application onboarding docs + {" "} + for configuration detail. +

+ )} +
+ + ); + })} +
+ setDialogOpen(undefined)} + /> + setDialogOpen(undefined)} + /> + setDialogOpen(undefined)} + /> + + ); +}; + +export default ModelProviders; diff --git a/frontend/src/app/settings/components/model-selectors.tsx b/frontend/src/app/settings/components/model-selectors.tsx new file mode 100644 index 00000000..5c3304a9 --- /dev/null +++ b/frontend/src/app/settings/components/model-selectors.tsx @@ -0,0 +1,122 @@ +import { Controller, useFormContext } from "react-hook-form"; +import { LabelWrapper } from "@/components/label-wrapper"; +import { ReactNode, useEffect } from "react"; +import { ModelOption } from "@/app/api/queries/useGetModelsQuery"; +import { ModelSelector } from "@/app/onboarding/components/model-selector"; + +interface ModelSelectorsProps { + languageModels: ModelOption[]; + embeddingModels: ModelOption[]; + isLoadingModels: boolean; + logo: ReactNode; + languageModelName?: string; + embeddingModelName?: string; +} + +export function ModelSelectors({ + languageModels, + embeddingModels, + isLoadingModels, + logo, + languageModelName = "llmModel", + embeddingModelName = "embeddingModel", +}: ModelSelectorsProps) { + const { + control, + watch, + formState: { errors }, + setValue, + } = useFormContext>(); + + const llmModel = watch(languageModelName); + const embeddingModel = watch(embeddingModelName); + + const defaultLlmModel = + languageModels.find((model) => model.default)?.value || + languageModels[0]?.value; + const defaultEmbeddingModel = + embeddingModels.find((model) => model.default)?.value || + embeddingModels[0]?.value; + + useEffect(() => { + if (defaultLlmModel && !llmModel) { + setValue(languageModelName, defaultLlmModel, { shouldValidate: true }); + } + if (defaultEmbeddingModel && !embeddingModel) { + setValue(embeddingModelName, defaultEmbeddingModel, { + shouldValidate: true, + }); + } + }, [defaultLlmModel, defaultEmbeddingModel, setValue]); + + return ( + <> +
+ + ( + + )} + /> + + {embeddingModels.length > 0 && errors[embeddingModelName] && ( +

+ {errors[embeddingModelName]?.message as string} +

+ )} +
+
+ + ( + + )} + /> + + {languageModels.length > 0 && errors[languageModelName] && ( +

+ {errors[languageModelName]?.message as string} +

+ )} +
+ + ); +} diff --git a/frontend/src/app/settings/components/ollama-settings-dialog.tsx b/frontend/src/app/settings/components/ollama-settings-dialog.tsx new file mode 100644 index 00000000..daf77f34 --- /dev/null +++ b/frontend/src/app/settings/components/ollama-settings-dialog.tsx @@ -0,0 +1,105 @@ +import OllamaLogo from "@/components/logo/ollama-logo"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { FormProvider, useForm } from "react-hook-form"; +import { toast } from "sonner"; +import { + OllamaSettingsForm, + type OllamaSettingsFormData, +} from "./ollama-settings-form"; +import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; +import { useAuth } from "@/contexts/auth-context"; +import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation"; + +const OllamaSettingsDialog = ({ + open, + setOpen, +}: { + open: boolean; + setOpen: (open: boolean) => void; +}) => { + const { isAuthenticated, isNoAuthMode } = useAuth(); + + const { data: settings = {} } = useGetSettingsQuery({ + enabled: isAuthenticated || isNoAuthMode, + }); + + const isOllamaConfigured = settings.provider?.model_provider === "ollama"; + + const methods = useForm({ + mode: "onSubmit", + defaultValues: { + endpoint: isOllamaConfigured + ? settings.provider?.endpoint + : "http://localhost:11434", + llmModel: isOllamaConfigured ? settings.agent?.llm_model : "", + embeddingModel: isOllamaConfigured + ? settings.knowledge?.embedding_model + : "", + }, + }); + + const { handleSubmit } = methods; + + const settingsMutation = useUpdateSettingsMutation({ + onSuccess: () => { + toast.success("Ollama settings updated successfully"); + setOpen(false); + }, + onError: (error) => { + toast.error("Failed to update Ollama settings", { + description: error.message, + }); + }, + }); + + const onSubmit = (data: OllamaSettingsFormData) => { + settingsMutation.mutate({ + endpoint: data.endpoint, + model_provider: "ollama", + llm_model: data.llmModel, + embedding_model: data.embeddingModel, + }); + }; + + return ( + + + +
+ + +
+ +
+ Ollama Setup +
+
+ + + + + + + +
+
+
+ ); +}; + +export default OllamaSettingsDialog; diff --git a/frontend/src/app/settings/components/ollama-settings-form.tsx b/frontend/src/app/settings/components/ollama-settings-form.tsx new file mode 100644 index 00000000..c8100c59 --- /dev/null +++ b/frontend/src/app/settings/components/ollama-settings-form.tsx @@ -0,0 +1,81 @@ +import { useFormContext } from "react-hook-form"; +import { LabelWrapper } from "@/components/label-wrapper"; +import { Input } from "@/components/ui/input"; +import { useGetOllamaModelsQuery } from "@/app/api/queries/useGetModelsQuery"; +import { useDebouncedValue } from "@/lib/debounce"; +import OllamaLogo from "@/components/logo/ollama-logo"; +import { ModelSelectors } from "./model-selectors"; + +export interface OllamaSettingsFormData { + endpoint: string; + llmModel: string; + embeddingModel: string; +} + +export function OllamaSettingsForm() { + const { + register, + watch, + formState: { errors, isDirty }, + } = useFormContext(); + + const endpoint = watch("endpoint"); + const debouncedEndpoint = useDebouncedValue(endpoint, 500); + + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetOllamaModelsQuery( + { + endpoint: debouncedEndpoint, + }, + { + enabled: isDirty && !!debouncedEndpoint, + } + ); + + const languageModels = modelsData?.language_models || []; + const embeddingModels = modelsData?.embedding_models || []; + + const endpointError = modelsError + ? "Connection failed. Check your Ollama server URL." + : errors.endpoint?.message; + + return ( +
+
+ + + + {endpointError && ( +

{endpointError}

+ )} + {isLoadingModels && ( +

+ Validating connection... +

+ )} +
+ } + /> +
+ ); +} diff --git a/frontend/src/app/settings/components/openai-settings-dialog.tsx b/frontend/src/app/settings/components/openai-settings-dialog.tsx new file mode 100644 index 00000000..221f643e --- /dev/null +++ b/frontend/src/app/settings/components/openai-settings-dialog.tsx @@ -0,0 +1,116 @@ +import OpenAILogo from "@/components/logo/openai-logo"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { FormProvider, useForm } from "react-hook-form"; +import { toast } from "sonner"; +import { + OpenAISettingsForm, + type OpenAISettingsFormData, +} from "./openai-settings-form"; +import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; +import { useAuth } from "@/contexts/auth-context"; +import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation"; + +const OpenAISettingsDialog = ({ + open, + setOpen, +}: { + open: boolean; + setOpen: (open: boolean) => void; +}) => { + const { isAuthenticated, isNoAuthMode } = useAuth(); + + const { data: settings = {} } = useGetSettingsQuery({ + enabled: isAuthenticated || isNoAuthMode, + }); + + const isOpenAIConfigured = settings.provider?.model_provider === "openai"; + + const methods = useForm({ + mode: "onSubmit", + defaultValues: { + apiKey: "", + llmModel: isOpenAIConfigured ? settings.agent?.llm_model : "", + embeddingModel: isOpenAIConfigured + ? settings.knowledge?.embedding_model + : "", + }, + }); + + const { handleSubmit } = methods; + + const settingsMutation = useUpdateSettingsMutation({ + onSuccess: () => { + toast.success("OpenAI settings updated successfully"); + setOpen(false); + }, + onError: (error) => { + toast.error("Failed to update OpenAI settings", { + description: error.message, + }); + }, + }); + + const onSubmit = (data: OpenAISettingsFormData) => { + const payload: { + api_key?: string; + model_provider: string; + llm_model: string; + embedding_model: string; + } = { + model_provider: "openai", + llm_model: data.llmModel, + embedding_model: data.embeddingModel, + }; + + // Only include api_key if a value was entered + if (data.apiKey) { + payload.api_key = data.apiKey; + } + + // Submit the update + settingsMutation.mutate(payload); + }; + + return ( + + + +
+ + +
+ +
+ OpenAI Setup +
+
+ + + + + + + + +
+
+
+ ); +}; + +export default OpenAISettingsDialog; diff --git a/frontend/src/app/settings/components/openai-settings-form.tsx b/frontend/src/app/settings/components/openai-settings-form.tsx new file mode 100644 index 00000000..7e5e5a4c --- /dev/null +++ b/frontend/src/app/settings/components/openai-settings-form.tsx @@ -0,0 +1,129 @@ +import { useEffect, useState } from "react"; +import { useFormContext } from "react-hook-form"; +import { LabelWrapper } from "@/components/label-wrapper"; +import { Input } from "@/components/ui/input"; +import { Switch } from "@/components/ui/switch"; +import { useGetOpenAIModelsQuery } from "@/app/api/queries/useGetModelsQuery"; +import { useDebouncedValue } from "@/lib/debounce"; +import { AnimatedConditional } from "@/components/animated-conditional"; +import OpenAILogo from "@/components/logo/openai-logo"; +import { ModelSelectors } from "./model-selectors"; + +export interface OpenAISettingsFormData { + apiKey: string; + llmModel: string; + embeddingModel: string; +} + +export function OpenAISettingsForm({ + isCurrentProvider = false, +}: { + isCurrentProvider: boolean; +}) { + const [useExistingKey, setUseExistingKey] = useState(true); + const { + register, + watch, + setValue, + clearErrors, + formState: { errors }, + } = useFormContext(); + + const apiKey = watch("apiKey"); + const debouncedApiKey = useDebouncedValue(apiKey, 500); + + // Handle switch change + const handleUseExistingKeyChange = (checked: boolean) => { + setUseExistingKey(checked); + if (checked) { + // Clear the API key field when using existing key + setValue("apiKey", ""); + } + }; + + // Clear form errors when useExistingKey changes + useEffect(() => { + clearErrors("apiKey"); + }, [useExistingKey, clearErrors]); + + const shouldFetchModels = isCurrentProvider + ? useExistingKey + ? true + : !!debouncedApiKey + : !!debouncedApiKey; + + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetOpenAIModelsQuery( + { + apiKey: useExistingKey ? "" : debouncedApiKey, + }, + { + enabled: shouldFetchModels, + } + ); + + const languageModels = modelsData?.language_models || []; + const embeddingModels = modelsData?.embedding_models || []; + + const apiKeyError = modelsError + ? "Invalid OpenAI API key. Verify or replace the key." + : errors.apiKey?.message; + + return ( +
+
+ {isCurrentProvider && ( + + + + )} + + + + + + {apiKeyError && ( +

{apiKeyError}

+ )} + {isLoadingModels && ( +

Validating API key...

+ )} +
+ } + /> +
+ ); +} diff --git a/frontend/src/app/settings/components/watsonx-settings-dialog.tsx b/frontend/src/app/settings/components/watsonx-settings-dialog.tsx new file mode 100644 index 00000000..73b727c2 --- /dev/null +++ b/frontend/src/app/settings/components/watsonx-settings-dialog.tsx @@ -0,0 +1,124 @@ +import IBMLogo from "@/components/logo/ibm-logo"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { FormProvider, useForm } from "react-hook-form"; +import { toast } from "sonner"; +import { + WatsonxSettingsForm, + type WatsonxSettingsFormData, +} from "./watsonx-settings-form"; +import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; +import { useAuth } from "@/contexts/auth-context"; +import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation"; + +const WatsonxSettingsDialog = ({ + open, + setOpen, +}: { + open: boolean; + setOpen: (open: boolean) => void; +}) => { + const { isAuthenticated, isNoAuthMode } = useAuth(); + + const { data: settings = {} } = useGetSettingsQuery({ + enabled: isAuthenticated || isNoAuthMode, + }); + + const isWatsonxConfigured = settings.provider?.model_provider === "watsonx"; + + const methods = useForm({ + mode: "onSubmit", + defaultValues: { + endpoint: isWatsonxConfigured + ? settings.provider?.endpoint + : "https://us-south.ml.cloud.ibm.com", + apiKey: "", + projectId: isWatsonxConfigured ? settings.provider?.project_id : "", + llmModel: isWatsonxConfigured ? settings.agent?.llm_model : "", + embeddingModel: isWatsonxConfigured + ? settings.knowledge?.embedding_model + : "", + }, + }); + + const { handleSubmit } = methods; + + const settingsMutation = useUpdateSettingsMutation({ + onSuccess: () => { + toast.success("watsonx settings updated successfully"); + setOpen(false); + }, + onError: (error) => { + toast.error("Failed to update watsonx settings", { + description: error.message, + }); + }, + }); + + const onSubmit = (data: WatsonxSettingsFormData) => { + const payload: { + endpoint: string; + api_key?: string; + project_id: string; + model_provider: string; + llm_model: string; + embedding_model: string; + } = { + endpoint: data.endpoint, + project_id: data.projectId, + model_provider: "watsonx", + llm_model: data.llmModel, + embedding_model: data.embeddingModel, + }; + + // Only include api_key if a value was entered + if (data.apiKey) { + payload.api_key = data.apiKey; + } + + // Submit the update + settingsMutation.mutate(payload); + }; + + return ( + + + +
+ + +
+ +
+ IBM watsonx.ai Setup +
+
+ + + + + + + + +
+
+
+ ); +}; + +export default WatsonxSettingsDialog; diff --git a/frontend/src/app/settings/components/watsonx-settings-form.tsx b/frontend/src/app/settings/components/watsonx-settings-form.tsx new file mode 100644 index 00000000..f74d297d --- /dev/null +++ b/frontend/src/app/settings/components/watsonx-settings-form.tsx @@ -0,0 +1,229 @@ +import { useEffect, useState } from "react"; +import { useFormContext, Controller } from "react-hook-form"; +import { LabelWrapper } from "@/components/label-wrapper"; +import { Input } from "@/components/ui/input"; +import { Switch } from "@/components/ui/switch"; +import { useGetIBMModelsQuery } from "@/app/api/queries/useGetModelsQuery"; +import { useDebouncedValue } from "@/lib/debounce"; +import { AnimatedConditional } from "@/components/animated-conditional"; +import IBMLogo from "@/components/logo/ibm-logo"; +import { ModelSelectors } from "./model-selectors"; +import { ModelSelector } from "@/app/onboarding/components/model-selector"; + +export interface WatsonxSettingsFormData { + endpoint: string; + apiKey: string; + projectId: string; + llmModel: string; + embeddingModel: string; +} + +const endpointOptions = [ + { + value: "https://us-south.ml.cloud.ibm.com", + label: "https://us-south.ml.cloud.ibm.com", + }, + { + value: "https://eu-de.ml.cloud.ibm.com", + label: "https://eu-de.ml.cloud.ibm.com", + }, + { + value: "https://eu-gb.ml.cloud.ibm.com", + label: "https://eu-gb.ml.cloud.ibm.com", + }, + { + value: "https://au-syd.ml.cloud.ibm.com", + label: "https://au-syd.ml.cloud.ibm.com", + }, + { + value: "https://jp-tok.ml.cloud.ibm.com", + label: "https://jp-tok.ml.cloud.ibm.com", + }, + { + value: "https://ca-tor.ml.cloud.ibm.com", + label: "https://ca-tor.ml.cloud.ibm.com", + }, +]; + +export function WatsonxSettingsForm({ + isCurrentProvider = false, +}: { + isCurrentProvider: boolean; +}) { + const [useExistingKey, setUseExistingKey] = useState(true); + const { + control, + register, + watch, + setValue, + clearErrors, + formState: { errors }, + } = useFormContext(); + + const endpoint = watch("endpoint"); + const apiKey = watch("apiKey"); + const projectId = watch("projectId"); + + const debouncedEndpoint = useDebouncedValue(endpoint, 500); + const debouncedApiKey = useDebouncedValue(apiKey, 500); + const debouncedProjectId = useDebouncedValue(projectId, 500); + + // Handle switch change + const handleUseExistingKeyChange = (checked: boolean) => { + setUseExistingKey(checked); + if (checked) { + // Clear the API key field when using existing key + setValue("apiKey", ""); + } + }; + + // Clear form errors when useExistingKey changes + useEffect(() => { + clearErrors("apiKey"); + }, [useExistingKey, clearErrors]); + + const shouldFetchModels = isCurrentProvider + ? useExistingKey + ? !!debouncedEndpoint && !!debouncedProjectId + : !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId + : !!debouncedEndpoint && !!debouncedProjectId && !!debouncedApiKey; + + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetIBMModelsQuery( + { + endpoint: debouncedEndpoint, + apiKey: useExistingKey ? "" : debouncedApiKey, + projectId: debouncedProjectId, + }, + { + enabled: shouldFetchModels, + } + ); + + const languageModels = modelsData?.language_models || []; + const embeddingModels = modelsData?.embedding_models || []; + + return ( +
+
+ + ( + ({ + value: option.value, + label: option.label, + }))} + value={field.value} + custom + onValueChange={field.onChange} + searchPlaceholder="Search endpoint..." + noOptionsPlaceholder="No endpoints available" + placeholder="Select endpoint..." + hasError={!!errors.endpoint || !!modelsError} + /> + )} + /> + + {errors.endpoint && ( +

{errors.endpoint.message}

+ )} +
+
+ + + + {errors.projectId && ( +

{errors.projectId.message}

+ )} +
+
+ {isCurrentProvider && ( + + + + )} + + + + + {errors.apiKey && ( +

+ {errors.apiKey.message} +

+ )} +
+ {isLoadingModels && ( +

+ Validating configuration... +

+ )} + {modelsError && ( +

+ Connection failed. Check your configuration. +

+ )} +
+ } + /> +
+ ); +} diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx index 6143c626..830e0316 100644 --- a/frontend/src/app/settings/page.tsx +++ b/frontend/src/app/settings/page.tsx @@ -4,7 +4,6 @@ import { ArrowUpRight, Loader2, Minus, PlugZap, Plus } from "lucide-react"; import Link from "next/link"; import { useRouter, useSearchParams } from "next/navigation"; import { Suspense, useCallback, useEffect, useState } from "react"; -import { useUpdateFlowSettingMutation } from "@/app/api/mutations/useUpdateFlowSettingMutation"; import { useGetIBMModelsQuery, useGetOllamaModelsQuery, @@ -53,6 +52,8 @@ import { ModelSelectItems } from "./helpers/model-select-item"; import GoogleDriveIcon from "./icons/google-drive-icon"; import OneDriveIcon from "./icons/one-drive-icon"; import SharePointIcon from "./icons/share-point-icon"; +import ModelProviders from "./components/model-providers"; +import { useUpdateSettingsMutation } from "../api/mutations/useUpdateSettingsMutation"; const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS; @@ -138,7 +139,9 @@ function KnowledgeSourcesPage() { // Fetch available models based on provider const { data: openaiModelsData } = useGetOpenAIModelsQuery( - undefined, // Let backend use stored API key from configuration + { + apiKey: "" + }, { enabled: (isAuthenticated || isNoAuthMode) && currentProvider === "openai", @@ -146,7 +149,9 @@ function KnowledgeSourcesPage() { ); const { data: ollamaModelsData } = useGetOllamaModelsQuery( - undefined, // No params for now, could be extended later + { + endpoint: settings.provider?.endpoint, + }, { enabled: (isAuthenticated || isNoAuthMode) && currentProvider === "ollama", @@ -154,7 +159,11 @@ function KnowledgeSourcesPage() { ); const { data: ibmModelsData } = useGetIBMModelsQuery( - undefined, // No params for now, could be extended later + { + endpoint: settings.provider?.endpoint, + apiKey: "", + projectId: settings.provider?.project_id, + }, { enabled: (isAuthenticated || isNoAuthMode) && currentProvider === "watsonx", @@ -172,7 +181,7 @@ function KnowledgeSourcesPage() { : openaiModelsData; // fallback to openai // Mutations - const updateFlowSettingMutation = useUpdateFlowSettingMutation({ + const updateSettingsMutation = useUpdateSettingsMutation({ onSuccess: () => { console.log("Setting updated successfully"); }, @@ -183,8 +192,8 @@ function KnowledgeSourcesPage() { // Debounced update function const debouncedUpdate = useDebounce( - (variables: Parameters[0]) => { - updateFlowSettingMutation.mutate(variables); + (variables: Parameters[0]) => { + updateSettingsMutation.mutate(variables); }, 500 ); @@ -230,20 +239,20 @@ function KnowledgeSourcesPage() { // Update model selection immediately const handleModelChange = (newModel: string) => { - updateFlowSettingMutation.mutate({ llm_model: newModel }); + updateSettingsMutation.mutate({ llm_model: newModel }); }; // Update system prompt with save button const handleSystemPromptSave = () => { - updateFlowSettingMutation.mutate({ system_prompt: systemPrompt }); + updateSettingsMutation.mutate({ system_prompt: systemPrompt }); }; // Update embedding model selection immediately const handleEmbeddingModelChange = (newModel: string) => { - updateFlowSettingMutation.mutate({ embedding_model: newModel }); + updateSettingsMutation.mutate({ embedding_model: newModel }); }; - const isEmbeddingModelSelectDisabled = updateFlowSettingMutation.isPending; + const isEmbeddingModelSelectDisabled = updateSettingsMutation.isPending; // Update chunk size setting with 
debounce const handleChunkSizeChange = (value: string) => { @@ -262,17 +271,17 @@ function KnowledgeSourcesPage() { // Update docling settings const handleTableStructureChange = (checked: boolean) => { setTableStructure(checked); - updateFlowSettingMutation.mutate({ table_structure: checked }); + updateSettingsMutation.mutate({ table_structure: checked }); }; const handleOcrChange = (checked: boolean) => { setOcr(checked); - updateFlowSettingMutation.mutate({ ocr: checked }); + updateSettingsMutation.mutate({ ocr: checked }); }; const handlePictureDescriptionsChange = (checked: boolean) => { setPictureDescriptions(checked); - updateFlowSettingMutation.mutate({ picture_descriptions: checked }); + updateSettingsMutation.mutate({ picture_descriptions: checked }); }; // Helper function to get connector icon @@ -715,7 +724,7 @@ function KnowledgeSourcesPage() {
{connector.icon}
@@ -738,6 +747,7 @@ function KnowledgeSourcesPage() { {connector?.status === "connected" ? ( <>
+ + {/* Model Providers Section */} +
+
+

+ Model Providers +

+
+ +
+ {/* Agent Behavior Section */} @@ -872,7 +893,8 @@ function KnowledgeSourcesPage() { - This Agent retrieves from your knowledge and generates chat responses. Edit in Langflow for full control. + This Agent retrieves from your knowledge and generates chat + responses. Edit in Langflow for full control. @@ -928,14 +950,14 @@ function KnowledgeSourcesPage() {