diff --git a/frontend/src/app/onboarding/components/model-selector.tsx b/frontend/src/app/onboarding/components/model-selector.tsx
index 9fe2d982..2948a2fd 100644
--- a/frontend/src/app/onboarding/components/model-selector.tsx
+++ b/frontend/src/app/onboarding/components/model-selector.tsx
@@ -17,8 +17,22 @@ import {
 } from "@/components/ui/popover";
 import { cn } from "@/lib/utils";
 
+export type ModelOption = {
+  value: string;
+  label: string;
+  default?: boolean;
+  provider?: string;
+};
+
+export type GroupedModelOption = {
+  group: string;
+  options: ModelOption[];
+  icon?: React.ReactNode;
+};
+
 export function ModelSelector({
   options,
+  groupedOptions,
   value = "",
   onValueChange,
   icon,
@@ -28,33 +42,40 @@ export function ModelSelector({
   custom = false,
   hasError = false,
 }: {
-  options: {
-    value: string;
-    label: string;
-    default?: boolean;
-  }[];
+  options?: ModelOption[];
+  groupedOptions?: GroupedModelOption[];
   value: string;
   icon?: React.ReactNode;
   placeholder?: string;
   searchPlaceholder?: string;
   noOptionsPlaceholder?: string;
   custom?: boolean;
-  onValueChange: (value: string) => void;
+  onValueChange: (value: string, provider?: string) => void;
   hasError?: boolean;
 }) {
   const [open, setOpen] = useState(false);
   const [searchValue, setSearchValue] = useState("");
 
+  // Flatten grouped options or use regular options
+  const allOptions =
+    groupedOptions?.flatMap((group) => group.options) || options || [];
+
+  // Find the group icon for the selected value
+  const selectedOptionGroup = groupedOptions?.find((group) =>
+    group.options.some((opt) => opt.value === value)
+  );
+  const selectedIcon = selectedOptionGroup?.icon || icon;
+
   useEffect(() => {
     if (
       value &&
       value !== "" &&
-      !options.find((option) => option.value === value) &&
+      !allOptions.find((option) => option.value === value) &&
      !custom
     ) {
       onValueChange("");
     }
-  }, [options, value, custom, onValueChange]);
+  }, [allOptions, value, custom, onValueChange]);
 
   return (
@@ -63,7 +84,7 @@ export function ModelSelector({
    [trigger markup for this hunk stripped during extraction]
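A minimal usage sketch (editor-added, not part of the diff) of the new groupedOptions prop, assuming hypothetical provider/model data and a hypothetical wrapper component name; it mirrors the selector's own flatMap lookup to forward the owning provider through onValueChange:

import {
  ModelSelector,
  type GroupedModelOption,
} from "@/app/onboarding/components/model-selector"; // assumed path alias

// Hypothetical example data; real groups would come from the providers/models queries.
const grouped: GroupedModelOption[] = [
  {
    group: "OpenAI",
    options: [
      { value: "gpt-4o-mini", label: "GPT-4o mini", provider: "openai", default: true },
    ],
  },
  {
    group: "Ollama",
    options: [{ value: "llama3.1", label: "Llama 3.1", provider: "ollama" }],
  },
];

export function ProviderModelPicker({
  value,
  onChange,
}: {
  value: string;
  onChange: (model: string, provider?: string) => void;
}) {
  return (
    <ModelSelector
      groupedOptions={grouped}
      value={value}
      onValueChange={(model) => {
        // Resolve the provider from the flattened options, as the selector does internally.
        const provider = grouped
          .flatMap((g) => g.options)
          .find((o) => o.value === model)?.provider;
        onChange(model, provider);
      }}
    />
  );
}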
+    [added return block of AnthropicSettingsDialog — markup stripped during extraction: dialog chrome titled "Anthropic Setup", with an error region rendered when settingsMutation.isError showing {settingsMutation.error?.message}]
+ ); }; export default AnthropicSettingsDialog; diff --git a/frontend/src/app/settings/components/ollama-settings-dialog.tsx b/frontend/src/app/settings/components/ollama-settings-dialog.tsx index e68f93fd..8c9e5fa3 100644 --- a/frontend/src/app/settings/components/ollama-settings-dialog.tsx +++ b/frontend/src/app/settings/components/ollama-settings-dialog.tsx @@ -7,6 +7,7 @@ import { DialogHeader, DialogTitle, } from "@/components/ui/dialog"; +import { useState } from "react"; import { FormProvider, useForm } from "react-hook-form"; import { toast } from "sonner"; import { @@ -20,7 +21,6 @@ import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettings import { useQueryClient } from "@tanstack/react-query"; import type { ProviderHealthResponse } from "@/app/api/queries/useProviderHealthQuery"; import { AnimatePresence, motion } from "motion/react"; -import { useDebouncedValue } from "@/lib/debounce"; const OllamaSettingsDialog = ({ open, @@ -31,6 +31,8 @@ const OllamaSettingsDialog = ({ }) => { const { isAuthenticated, isNoAuthMode } = useAuth(); const queryClient = useQueryClient(); + const [isValidating, setIsValidating] = useState(false); + const [validationError, setValidationError] = useState(null); const { data: settings = {} } = useGetSettingsQuery({ enabled: isAuthenticated || isNoAuthMode, @@ -47,24 +49,18 @@ const OllamaSettingsDialog = ({ }, }); - const { handleSubmit, watch, formState } = methods; + const { handleSubmit, watch } = methods; const endpoint = watch("endpoint"); - const debouncedEndpoint = useDebouncedValue(endpoint, 500); - const { - isLoading: isLoadingModels, - error: modelsError, - } = useGetOllamaModelsQuery( + const { refetch: validateCredentials } = useGetOllamaModelsQuery( { - endpoint: debouncedEndpoint, + endpoint: endpoint, }, { - enabled: formState.isDirty && !!debouncedEndpoint && open, + enabled: false, } ); - const hasValidationError = !!modelsError || !!formState.errors.endpoint; - const settingsMutation = useUpdateSettingsMutation({ onSuccess: () => { // Update provider health cache to healthy since backend validated the setup @@ -75,12 +71,27 @@ const OllamaSettingsDialog = ({ }; queryClient.setQueryData(["provider", "health"], healthData); - toast.success("Ollama endpoint saved. Configure models in the Settings page."); + toast.success( + "Ollama endpoint saved. Configure models in the Settings page." 
+ ); setOpen(false); }, }); - const onSubmit = (data: OllamaSettingsFormData) => { + const onSubmit = async (data: OllamaSettingsFormData) => { + // Clear any previous validation errors + setValidationError(null); + + // Validate endpoint by fetching models + setIsValidating(true); + const result = await validateCredentials(); + setIsValidating(false); + + if (result.isError) { + setValidationError(result.error); + return; + } + settingsMutation.mutate({ ollama_endpoint: data.endpoint, }); @@ -101,8 +112,8 @@ const OllamaSettingsDialog = ({ @@ -129,9 +140,13 @@ const OllamaSettingsDialog = ({ diff --git a/frontend/src/app/settings/components/openai-settings-dialog.tsx b/frontend/src/app/settings/components/openai-settings-dialog.tsx index 4d1cdd88..1f432503 100644 --- a/frontend/src/app/settings/components/openai-settings-dialog.tsx +++ b/frontend/src/app/settings/components/openai-settings-dialog.tsx @@ -1,5 +1,6 @@ import { useQueryClient } from "@tanstack/react-query"; import { AnimatePresence, motion } from "motion/react"; +import { useState } from "react"; import { FormProvider, useForm } from "react-hook-form"; import { toast } from "sonner"; import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettingsMutation"; @@ -8,134 +9,150 @@ import type { ProviderHealthResponse } from "@/app/api/queries/useProviderHealth import OpenAILogo from "@/components/logo/openai-logo"; import { Button } from "@/components/ui/button"; import { - Dialog, - DialogContent, - DialogFooter, - DialogHeader, - DialogTitle, + Dialog, + DialogContent, + DialogFooter, + DialogHeader, + DialogTitle, } from "@/components/ui/dialog"; -import { useDebouncedValue } from "@/lib/debounce"; import { - OpenAISettingsForm, - type OpenAISettingsFormData, + OpenAISettingsForm, + type OpenAISettingsFormData, } from "./openai-settings-form"; const OpenAISettingsDialog = ({ - open, - setOpen, + open, + setOpen, }: { - open: boolean; - setOpen: (open: boolean) => void; + open: boolean; + setOpen: (open: boolean) => void; }) => { - const queryClient = useQueryClient(); + const queryClient = useQueryClient(); + const [isValidating, setIsValidating] = useState(false); + const [validationError, setValidationError] = useState(null); - const methods = useForm({ - mode: "onSubmit", - defaultValues: { - apiKey: "", - }, - }); + const methods = useForm({ + mode: "onSubmit", + defaultValues: { + apiKey: "", + }, + }); - const { handleSubmit, watch, formState } = methods; - const apiKey = watch("apiKey"); - const debouncedApiKey = useDebouncedValue(apiKey, 500); + const { handleSubmit, watch } = methods; + const apiKey = watch("apiKey"); - const { - isLoading: isLoadingModels, - error: modelsError, - } = useGetOpenAIModelsQuery( - { - apiKey: debouncedApiKey, - }, - { - enabled: !!debouncedApiKey && open, - } - ); + const { refetch: validateCredentials } = useGetOpenAIModelsQuery( + { + apiKey: apiKey, + }, + { + enabled: false, + } + ); - const hasValidationError = !!modelsError || !!formState.errors.apiKey; + const settingsMutation = useUpdateSettingsMutation({ + onSuccess: () => { + // Update provider health cache to healthy since backend validated the setup + const healthData: ProviderHealthResponse = { + status: "healthy", + message: "Provider is configured and working correctly", + provider: "openai", + }; + queryClient.setQueryData(["provider", "health"], healthData); - const settingsMutation = useUpdateSettingsMutation({ - onSuccess: () => { - // Update provider health cache to healthy since backend validated the 
setup
-      const healthData: ProviderHealthResponse = {
-        status: "healthy",
-        message: "Provider is configured and working correctly",
-        provider: "openai",
-      };
-      queryClient.setQueryData(["provider", "health"], healthData);
+      toast.success(
+        "OpenAI credentials saved. Configure models in the Settings page."
+      );
+      setOpen(false);
+    },
+  });
-      toast.success("OpenAI credentials saved. Configure models in the Settings page.");
-      setOpen(false);
-    },
-  });
+  const onSubmit = async (data: OpenAISettingsFormData) => {
+    // Clear any previous validation errors
+    setValidationError(null);
-  const onSubmit = (data: OpenAISettingsFormData) => {
-    const payload: {
-      openai_api_key?: string;
-    } = {};
+    // Only validate if a new API key was entered
+    if (data.apiKey) {
+      setIsValidating(true);
+      const result = await validateCredentials();
+      setIsValidating(false);
-    // Only include api_key if a value was entered
-    if (data.apiKey) {
-      payload.openai_api_key = data.apiKey;
-    }
+      if (result.isError) {
+        setValidationError(result.error);
+        return;
+      }
+    }
-    // Submit the update
-    settingsMutation.mutate(payload);
-  };
+    const payload: {
+      openai_api_key?: string;
+    } = {};
-  return (
-    [removed return block — markup stripped during extraction: dialog titled "OpenAI Setup"]
+    // Only include api_key if a value was entered
+    if (data.apiKey) {
+      payload.openai_api_key = data.apiKey;
+    }
+
+    // Submit the update
+    settingsMutation.mutate(payload);
+  };
-    [removed return block, continued — markup stripped during extraction: error region rendered when settingsMutation.isError showing {settingsMutation.error?.message}]
-  );
+  return (
+    [added return block — markup stripped during extraction: dialog titled "OpenAI Setup", with an error region rendered when settingsMutation.isError showing {settingsMutation.error?.message}]
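A minimal sketch (editor-added, not part of the diff) of the validate-on-submit pattern these dialogs now share: a models query is created with enabled: false and its refetch() is awaited inside onSubmit, replacing the previous debounced background validation. The hook name and types below are hypothetical:

import { useCallback, useState } from "react";

type ValidationResult = { isError: boolean; error: Error | null };

export function useValidatedSubmit(
  validate: () => Promise<ValidationResult>,
  save: () => void
) {
  const [isValidating, setIsValidating] = useState(false);
  const [validationError, setValidationError] = useState<Error | null>(null);

  const submit = useCallback(async () => {
    // Clear any stale error, then block the save on a one-shot credential check.
    setValidationError(null);
    setIsValidating(true);
    const result = await validate(); // e.g. the refetch() of a models query created with enabled: false
    setIsValidating(false);

    if (result.isError) {
      // Surface the provider error instead of persisting bad credentials.
      setValidationError(result.error);
      return;
    }
    save(); // e.g. settingsMutation.mutate(payload)
  }, [validate, save]);

  return { submit, isValidating, validationError };
}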
+ ); }; export default OpenAISettingsDialog; diff --git a/frontend/src/app/settings/components/watsonx-settings-dialog.tsx b/frontend/src/app/settings/components/watsonx-settings-dialog.tsx index 355c83f5..332377f1 100644 --- a/frontend/src/app/settings/components/watsonx-settings-dialog.tsx +++ b/frontend/src/app/settings/components/watsonx-settings-dialog.tsx @@ -7,6 +7,7 @@ import { DialogHeader, DialogTitle, } from "@/components/ui/dialog"; +import { useState } from "react"; import { FormProvider, useForm } from "react-hook-form"; import { toast } from "sonner"; import { @@ -18,7 +19,6 @@ import { useUpdateSettingsMutation } from "@/app/api/mutations/useUpdateSettings import { useQueryClient } from "@tanstack/react-query"; import type { ProviderHealthResponse } from "@/app/api/queries/useProviderHealthQuery"; import { AnimatePresence, motion } from "motion/react"; -import { useDebouncedValue } from "@/lib/debounce"; const WatsonxSettingsDialog = ({ open, @@ -28,6 +28,8 @@ const WatsonxSettingsDialog = ({ setOpen: (open: boolean) => void; }) => { const queryClient = useQueryClient(); + const [isValidating, setIsValidating] = useState(false); + const [validationError, setValidationError] = useState(null); const methods = useForm({ mode: "onSubmit", @@ -38,31 +40,22 @@ const WatsonxSettingsDialog = ({ }, }); - const { handleSubmit, watch, formState } = methods; + const { handleSubmit, watch } = methods; const endpoint = watch("endpoint"); const apiKey = watch("apiKey"); const projectId = watch("projectId"); - const debouncedEndpoint = useDebouncedValue(endpoint, 500); - const debouncedApiKey = useDebouncedValue(apiKey, 500); - const debouncedProjectId = useDebouncedValue(projectId, 500); - - const { - isLoading: isLoadingModels, - error: modelsError, - } = useGetIBMModelsQuery( + const { refetch: validateCredentials } = useGetIBMModelsQuery( { - endpoint: debouncedEndpoint, - apiKey: debouncedApiKey, - projectId: debouncedProjectId, + endpoint: endpoint, + apiKey: apiKey, + projectId: projectId, }, { - enabled: !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId && open, + enabled: false, } ); - const hasValidationError = !!modelsError || !!formState.errors.endpoint || !!formState.errors.apiKey || !!formState.errors.projectId; - const settingsMutation = useUpdateSettingsMutation({ onSuccess: () => { // Update provider health cache to healthy since backend validated the setup @@ -72,12 +65,27 @@ const WatsonxSettingsDialog = ({ provider: "watsonx", }; queryClient.setQueryData(["provider", "health"], healthData); - toast.success("watsonx credentials saved. Configure models in the Settings page."); + toast.success( + "watsonx credentials saved. Configure models in the Settings page." 
+ ); setOpen(false); }, }); - const onSubmit = (data: WatsonxSettingsFormData) => { + const onSubmit = async (data: WatsonxSettingsFormData) => { + // Clear any previous validation errors + setValidationError(null); + + // Validate credentials by fetching models + setIsValidating(true); + const result = await validateCredentials(); + setIsValidating(false); + + if (result.isError) { + setValidationError(result.error); + return; + } + const payload: { watsonx_endpoint: string; watsonx_api_key?: string; @@ -111,10 +119,10 @@ const WatsonxSettingsDialog = ({ - + {settingsMutation.isError && ( diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx index ce26b3bb..560d95f0 100644 --- a/frontend/src/app/settings/page.tsx +++ b/frontend/src/app/settings/page.tsx @@ -5,18 +5,23 @@ import Link from "next/link"; import { useRouter, useSearchParams } from "next/navigation"; import { Suspense, useCallback, useEffect, useState } from "react"; import { toast } from "sonner"; -import { useGetOpenAIModelsQuery, useGetAnthropicModelsQuery, useGetOllamaModelsQuery, useGetIBMModelsQuery } from "@/app/api/queries/useGetModelsQuery"; +import { + useGetOpenAIModelsQuery, + useGetAnthropicModelsQuery, + useGetOllamaModelsQuery, + useGetIBMModelsQuery, +} from "@/app/api/queries/useGetModelsQuery"; import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; import { ConfirmationDialog } from "@/components/confirmation-dialog"; import { LabelWrapper } from "@/components/label-wrapper"; import { ProtectedRoute } from "@/components/protected-route"; import { Button } from "@/components/ui/button"; import { - Card, - CardContent, - CardDescription, - CardHeader, - CardTitle, + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, } from "@/components/ui/card"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; @@ -25,9 +30,9 @@ import { Textarea } from "@/components/ui/textarea"; import { useAuth } from "@/contexts/auth-context"; import { useTask } from "@/contexts/task-context"; import { - DEFAULT_AGENT_SETTINGS, - DEFAULT_KNOWLEDGE_SETTINGS, - UI_CONSTANTS, + DEFAULT_AGENT_SETTINGS, + DEFAULT_KNOWLEDGE_SETTINGS, + UI_CONSTANTS, } from "@/lib/constants"; import { useDebounce } from "@/lib/debounce"; import { useUpdateSettingsMutation } from "../api/mutations/useUpdateSettingsMutation"; @@ -41,1293 +46,1266 @@ import SharePointIcon from "./icons/share-point-icon"; const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS; interface GoogleDriveFile { - id: string; - name: string; - mimeType: string; - webViewLink?: string; - iconLink?: string; + id: string; + name: string; + mimeType: string; + webViewLink?: string; + iconLink?: string; } interface OneDriveFile { - id: string; - name: string; - mimeType?: string; - webUrl?: string; - driveItem?: { - file?: { mimeType: string }; - folder?: unknown; - }; + id: string; + name: string; + mimeType?: string; + webUrl?: string; + driveItem?: { + file?: { mimeType: string }; + folder?: unknown; + }; } interface Connector { - id: string; - name: string; - description: string; - icon: React.ReactNode; - status: "not_connected" | "connecting" | "connected" | "error"; - type: string; - connectionId?: string; - access_token?: string; - selectedFiles?: GoogleDriveFile[] | OneDriveFile[]; - available?: boolean; + id: string; + name: string; + description: string; + icon: React.ReactNode; + status: "not_connected" | "connecting" | "connected" | "error"; + type: string; + connectionId?: 
string; + access_token?: string; + selectedFiles?: GoogleDriveFile[] | OneDriveFile[]; + available?: boolean; } interface SyncResult { - processed?: number; - added?: number; - errors?: number; - skipped?: number; - total?: number; + processed?: number; + added?: number; + errors?: number; + skipped?: number; + total?: number; } interface Connection { - connection_id: string; - is_active: boolean; - created_at: string; - last_sync?: string; + connection_id: string; + is_active: boolean; + created_at: string; + last_sync?: string; } function KnowledgeSourcesPage() { - const { isAuthenticated, isNoAuthMode } = useAuth(); - const { addTask, tasks } = useTask(); - const searchParams = useSearchParams(); - const router = useRouter(); - - // Connectors state - const [connectors, setConnectors] = useState([]); - const [isConnecting, setIsConnecting] = useState(null); - const [isSyncing, setIsSyncing] = useState(null); - const [syncResults, setSyncResults] = useState<{ - [key: string]: SyncResult | null; - }>({}); - const [maxFiles, setMaxFiles] = useState(10); - const [syncAllFiles, setSyncAllFiles] = useState(false); - - // Only keep systemPrompt state since it needs manual save button - const [systemPrompt, setSystemPrompt] = useState(""); - const [chunkSize, setChunkSize] = useState(1024); - const [chunkOverlap, setChunkOverlap] = useState(50); - const [tableStructure, setTableStructure] = useState(true); - const [ocr, setOcr] = useState(false); - const [pictureDescriptions, setPictureDescriptions] = - useState(false); - - // Fetch settings using React Query - const { data: settings = {} } = useGetSettingsQuery({ - enabled: isAuthenticated || isNoAuthMode, - }); - - // Get the current providers from settings - const currentLlmProvider = (settings.agent?.llm_provider || - "openai") as ModelProvider; - const currentEmbeddingProvider = (settings.knowledge?.embedding_provider || - "openai") as ModelProvider; - - // State for selected providers (for changing provider on the fly) - const [selectedLlmProvider, setSelectedLlmProvider] = useState(currentLlmProvider); - const [selectedEmbeddingProvider, setSelectedEmbeddingProvider] = useState(currentEmbeddingProvider); - - // Sync state with settings when they change - useEffect(() => { - if (settings.agent?.llm_provider) { - setSelectedLlmProvider(settings.agent.llm_provider as ModelProvider); - } - }, [settings.agent?.llm_provider]); - - useEffect(() => { - if (settings.knowledge?.embedding_provider) { - setSelectedEmbeddingProvider(settings.knowledge.embedding_provider as ModelProvider); - } - }, [settings.knowledge?.embedding_provider]); - - // Fetch models for each provider - const { data: openaiModels, isLoading: openaiLoading } = useGetOpenAIModelsQuery( - { apiKey: "" }, - { enabled: settings?.providers?.openai?.configured === true } - ); - - const { data: anthropicModels, isLoading: anthropicLoading } = useGetAnthropicModelsQuery( - { apiKey: "" }, - { enabled: settings?.providers?.anthropic?.configured === true } - ); - - const { data: ollamaModels, isLoading: ollamaLoading } = useGetOllamaModelsQuery( - { endpoint: settings?.providers?.ollama?.endpoint }, - { enabled: settings?.providers?.ollama?.configured === true && !!settings?.providers?.ollama?.endpoint } - ); - - const { data: watsonxModels, isLoading: watsonxLoading } = useGetIBMModelsQuery( - { - endpoint: settings?.providers?.watsonx?.endpoint, - apiKey: "", - projectId: settings?.providers?.watsonx?.project_id, - }, - { - enabled: settings?.providers?.watsonx?.configured === true 
&& - !!settings?.providers?.watsonx?.endpoint && - !!settings?.providers?.watsonx?.project_id - } - ); - - // Get models for selected LLM provider - const getModelsForProvider = (provider: ModelProvider) => { - switch (provider) { - case "openai": - return { data: openaiModels, isLoading: openaiLoading }; - case "anthropic": - return { data: anthropicModels, isLoading: anthropicLoading }; - case "ollama": - return { data: ollamaModels, isLoading: ollamaLoading }; - case "watsonx": - return { data: watsonxModels, isLoading: watsonxLoading }; - default: - return { data: undefined, isLoading: false }; - } - }; - - const llmModelsQuery = getModelsForProvider(selectedLlmProvider); - const embeddingModelsQuery = getModelsForProvider(selectedEmbeddingProvider); - - // Filter provider options to only show configured ones - const configuredLlmProviders = [ - { value: "openai", label: "OpenAI", default: selectedLlmProvider === "openai" }, - { value: "anthropic", label: "Anthropic", default: selectedLlmProvider === "anthropic" }, - { value: "ollama", label: "Ollama", default: selectedLlmProvider === "ollama" }, - { value: "watsonx", label: "IBM watsonx.ai", default: selectedLlmProvider === "watsonx" }, - ].filter((option) => settings.providers?.[option.value as ModelProvider]?.configured === true); - - const configuredEmbeddingProviders = [ - { value: "openai", label: "OpenAI", default: selectedEmbeddingProvider === "openai" }, - { value: "ollama", label: "Ollama", default: selectedEmbeddingProvider === "ollama" }, - { value: "watsonx", label: "IBM watsonx.ai", default: selectedEmbeddingProvider === "watsonx" }, - ].filter((option) => settings.providers?.[option.value as ModelProvider]?.configured === true); - - // Mutations - const updateSettingsMutation = useUpdateSettingsMutation({ - onSuccess: () => { - toast.success("Settings updated successfully"); - }, - onError: (error) => { - toast.error("Failed to update settings", { - description: error.message, - }); - }, - }); - - // Debounced update function - const debouncedUpdate = useDebounce( - (variables: Parameters[0]) => { - updateSettingsMutation.mutate(variables); - }, - 500, - ); - - // Sync system prompt state with settings data - useEffect(() => { - if (settings.agent?.system_prompt) { - setSystemPrompt(settings.agent.system_prompt); - } - }, [settings.agent?.system_prompt]); - - // Sync chunk size and overlap state with settings data - useEffect(() => { - if (settings.knowledge?.chunk_size) { - setChunkSize(settings.knowledge.chunk_size); - } - }, [settings.knowledge?.chunk_size]); - - useEffect(() => { - if (settings.knowledge?.chunk_overlap) { - setChunkOverlap(settings.knowledge.chunk_overlap); - } - }, [settings.knowledge?.chunk_overlap]); - - // Sync docling settings with settings data - useEffect(() => { - if (settings.knowledge?.table_structure !== undefined) { - setTableStructure(settings.knowledge.table_structure); - } - }, [settings.knowledge?.table_structure]); - - useEffect(() => { - if (settings.knowledge?.ocr !== undefined) { - setOcr(settings.knowledge.ocr); - } - }, [settings.knowledge?.ocr]); - - useEffect(() => { - if (settings.knowledge?.picture_descriptions !== undefined) { - setPictureDescriptions(settings.knowledge.picture_descriptions); - } - }, [settings.knowledge?.picture_descriptions]); - - // Update model selection immediately - const handleModelChange = (newModel: string) => { - if (newModel) updateSettingsMutation.mutate({ llm_model: newModel }); - }; - - // Update LLM provider selection - const 
handleLlmProviderChange = (newProvider: string) => { - setSelectedLlmProvider(newProvider as ModelProvider); - - // Get models for the new provider - const modelsForProvider = getModelsForProvider(newProvider as ModelProvider); - const models = modelsForProvider.data?.language_models; - - // If models are available, select the first one along with the provider - if (models && models.length > 0 && models[0].value) { - updateSettingsMutation.mutate({ - llm_provider: newProvider, - llm_model: models[0].value - }); - } else { - // If models aren't loaded yet, just update the provider - updateSettingsMutation.mutate({ llm_provider: newProvider }); - } - }; - - // Update system prompt with save button - const handleSystemPromptSave = () => { - updateSettingsMutation.mutate({ system_prompt: systemPrompt }); - }; - - // Update embedding model selection immediately - const handleEmbeddingModelChange = (newModel: string) => { - if (newModel) updateSettingsMutation.mutate({ embedding_model: newModel }); - }; - - // Update embedding provider selection - const handleEmbeddingProviderChange = (newProvider: string) => { - setSelectedEmbeddingProvider(newProvider as ModelProvider); - - // Get models for the new provider - const modelsForProvider = getModelsForProvider(newProvider as ModelProvider); - const models = modelsForProvider.data?.embedding_models; - - // If models are available, select the first one along with the provider - if (models && models.length > 0 && models[0].value) { - updateSettingsMutation.mutate({ - embedding_provider: newProvider, - embedding_model: models[0].value - }); - } else { - // If models aren't loaded yet, just update the provider - updateSettingsMutation.mutate({ embedding_provider: newProvider }); - } - }; - - const isEmbeddingModelSelectDisabled = updateSettingsMutation.isPending; - - // Update chunk size setting with debounce - const handleChunkSizeChange = (value: string) => { - const numValue = Math.max(0, parseInt(value) || 0); - setChunkSize(numValue); - debouncedUpdate({ chunk_size: numValue }); - }; - - // Update chunk overlap setting with debounce - const handleChunkOverlapChange = (value: string) => { - const numValue = Math.max(0, parseInt(value) || 0); - setChunkOverlap(numValue); - debouncedUpdate({ chunk_overlap: numValue }); - }; - - // Update docling settings - const handleTableStructureChange = (checked: boolean) => { - setTableStructure(checked); - updateSettingsMutation.mutate({ table_structure: checked }); - }; - - const handleOcrChange = (checked: boolean) => { - setOcr(checked); - updateSettingsMutation.mutate({ ocr: checked }); - }; - - const handlePictureDescriptionsChange = (checked: boolean) => { - setPictureDescriptions(checked); - updateSettingsMutation.mutate({ picture_descriptions: checked }); - }; - - // Helper function to get connector icon - const getConnectorIcon = useCallback((iconName: string) => { - const iconMap: { [key: string]: React.ReactElement } = { - "google-drive": , - sharepoint: , - onedrive: , - }; - return ( - iconMap[iconName] || ( -
- ? -
- ) - ); - }, []); - - // Connector functions - const checkConnectorStatuses = useCallback(async () => { - try { - // Fetch available connectors from backend - const connectorsResponse = await fetch("/api/connectors"); - if (!connectorsResponse.ok) { - throw new Error("Failed to load connectors"); - } - - const connectorsResult = await connectorsResponse.json(); - const connectorTypes = Object.keys(connectorsResult.connectors); - - // Initialize connectors list with metadata from backend - const initialConnectors = connectorTypes - .filter((type) => connectorsResult.connectors[type].available) // Only show available connectors - .map((type) => ({ - id: type, - name: connectorsResult.connectors[type].name, - description: connectorsResult.connectors[type].description, - icon: getConnectorIcon(connectorsResult.connectors[type].icon), - status: "not_connected" as const, - type: type, - available: connectorsResult.connectors[type].available, - })); - - setConnectors(initialConnectors); - - // Check status for each connector type - - for (const connectorType of connectorTypes) { - const response = await fetch(`/api/connectors/${connectorType}/status`); - if (response.ok) { - const data = await response.json(); - const connections = data.connections || []; - const activeConnection = connections.find( - (conn: Connection) => conn.is_active, - ); - const isConnected = activeConnection !== undefined; - - setConnectors((prev) => - prev.map((c) => - c.type === connectorType - ? { - ...c, - status: isConnected ? "connected" : "not_connected", - connectionId: activeConnection?.connection_id, - } - : c, - ), - ); - } - } - } catch (error) { - console.error("Failed to check connector statuses:", error); - } - }, [getConnectorIcon]); - - const handleConnect = async (connector: Connector) => { - setIsConnecting(connector.id); - setSyncResults((prev) => ({ ...prev, [connector.id]: null })); - - try { - // Use the shared auth callback URL, same as connectors page - const redirectUri = `${window.location.origin}/auth/callback`; - - const response = await fetch("/api/auth/init", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - connector_type: connector.type, - purpose: "data_source", - name: `${connector.name} Connection`, - redirect_uri: redirectUri, - }), - }); - - if (response.ok) { - const result = await response.json(); - - if (result.oauth_config) { - localStorage.setItem("connecting_connector_id", result.connection_id); - localStorage.setItem("connecting_connector_type", connector.type); - - const authUrl = - `${result.oauth_config.authorization_endpoint}?` + - `client_id=${result.oauth_config.client_id}&` + - `response_type=code&` + - `scope=${result.oauth_config.scopes.join(" ")}&` + - `redirect_uri=${encodeURIComponent( - result.oauth_config.redirect_uri, - )}&` + - `access_type=offline&` + - `prompt=consent&` + - `state=${result.connection_id}`; - - window.location.href = authUrl; - } - } else { - console.error("Failed to initiate connection"); - setIsConnecting(null); - } - } catch (error) { - console.error("Connection error:", error); - setIsConnecting(null); - } - }; - - // const handleSync = async (connector: Connector) => { - // if (!connector.connectionId) return; - - // setIsSyncing(connector.id); - // setSyncResults(prev => ({ ...prev, [connector.id]: null })); - - // try { - // const syncBody: { - // connection_id: string; - // max_files?: number; - // selected_files?: string[]; - // } = { - // connection_id: connector.connectionId, - // 
max_files: syncAllFiles ? 0 : maxFiles || undefined, - // }; - - // // Note: File selection is now handled via the cloud connectors dialog - - // const response = await fetch(`/api/connectors/${connector.type}/sync`, { - // method: "POST", - // headers: { - // "Content-Type": "application/json", - // }, - // body: JSON.stringify(syncBody), - // }); - - // const result = await response.json(); - - // if (response.status === 201) { - // const taskId = result.task_id; - // if (taskId) { - // addTask(taskId); - // setSyncResults(prev => ({ - // ...prev, - // [connector.id]: { - // processed: 0, - // total: result.total_files || 0, - // }, - // })); - // } - // } else if (response.ok) { - // setSyncResults(prev => ({ ...prev, [connector.id]: result })); - // // Note: Stats will auto-refresh via task completion watcher for async syncs - // } else { - // console.error("Sync failed:", result.error); - // } - // } catch (error) { - // console.error("Sync error:", error); - // } finally { - // setIsSyncing(null); - // } - // }; - - const getStatusBadge = (status: Connector["status"]) => { - switch (status) { - case "connected": - return ( -
- ); - case "connecting": - return ( -
- ); - case "error": - return ( -
- ); - default: - return
; - } - }; - - const navigateToKnowledgePage = (connector: Connector) => { - const provider = connector.type.replace(/-/g, "_"); - router.push(`/upload/${provider}`); - }; - - // Check connector status on mount and when returning from OAuth - useEffect(() => { - if (isAuthenticated) { - checkConnectorStatuses(); - } - - if (searchParams.get("oauth_success") === "true") { - const url = new URL(window.location.href); - url.searchParams.delete("oauth_success"); - window.history.replaceState({}, "", url.toString()); - } - }, [searchParams, isAuthenticated, checkConnectorStatuses]); - - // Track previous tasks to detect new completions - const [prevTasks, setPrevTasks] = useState([]); - - // Watch for task completions and refresh stats - useEffect(() => { - // Find newly completed tasks by comparing with previous state - const newlyCompletedTasks = tasks.filter((task) => { - const wasCompleted = - prevTasks.find((prev) => prev.task_id === task.task_id)?.status === - "completed"; - return task.status === "completed" && !wasCompleted; - }); - - if (newlyCompletedTasks.length > 0) { - // Task completed - could refresh data here if needed - const timeoutId = setTimeout(() => { - // Stats refresh removed - }, 1000); - - // Update previous tasks state - setPrevTasks(tasks); - - return () => clearTimeout(timeoutId); - } else { - // Always update previous tasks state - setPrevTasks(tasks); - } - }, [tasks, prevTasks]); - - const handleEditInLangflow = ( - flowType: "chat" | "ingest", - closeDialog: () => void, - ) => { - // Select the appropriate flow ID and edit URL based on flow type - const targetFlowId = - flowType === "ingest" ? settings.ingest_flow_id : settings.flow_id; - const editUrl = - flowType === "ingest" - ? settings.langflow_ingest_edit_url - : settings.langflow_edit_url; - - const derivedFromWindow = - typeof window !== "undefined" - ? `${window.location.protocol}//${window.location.hostname}:7860` - : ""; - const base = ( - settings.langflow_public_url || - derivedFromWindow || - "http://localhost:7860" - ).replace(/\/$/, ""); - const computed = targetFlowId ? 
`${base}/flow/${targetFlowId}` : base; - - const url = editUrl || computed; - - window.open(url, "_blank"); - closeDialog(); // Close immediately after opening Langflow - }; - - const handleRestoreRetrievalFlow = (closeDialog: () => void) => { - fetch(`/api/reset-flow/retrieval`, { - method: "POST", - }) - .then((response) => { - if (response.ok) { - return response.json(); - } - throw new Error(`HTTP ${response.status}: ${response.statusText}`); - }) - .then(() => { - // Only reset form values if the API call was successful - setSystemPrompt(DEFAULT_AGENT_SETTINGS.system_prompt); - // Trigger model update to default model - handleModelChange(DEFAULT_AGENT_SETTINGS.llm_model); - closeDialog(); // Close after successful completion - }) - .catch((error) => { - console.error("Error restoring retrieval flow:", error); - closeDialog(); // Close even on error (could show error toast instead) - }); - }; - - const handleRestoreIngestFlow = (closeDialog: () => void) => { - fetch(`/api/reset-flow/ingest`, { - method: "POST", - }) - .then((response) => { - if (response.ok) { - return response.json(); - } - throw new Error(`HTTP ${response.status}: ${response.statusText}`); - }) - .then(() => { - // Only reset form values if the API call was successful - setChunkSize(DEFAULT_KNOWLEDGE_SETTINGS.chunk_size); - setChunkOverlap(DEFAULT_KNOWLEDGE_SETTINGS.chunk_overlap); - setTableStructure(false); - setOcr(false); - setPictureDescriptions(false); - closeDialog(); // Close after successful completion - }) - .catch((error) => { - console.error("Error restoring ingest flow:", error); - closeDialog(); // Close even on error (could show error toast instead) - }); - }; - - return ( -
- {/* Connectors Section */} -
-
-

- Cloud Connectors -

-
- - {/* Conditional Sync Settings or No-Auth Message */} - { - isNoAuthMode ? ( - - - - Cloud connectors require authentication - - - Add the Google OAuth variables below to your .env{" "} - then restart the OpenRAG containers. - - - -
-
-
- - 27 - - # Google OAuth -
-
- - 28 - - # Create credentials here: -
-
- - 29 - - - # https://console.cloud.google.com/apis/credentials - -
-
-
- 30 - GOOGLE_OAUTH_CLIENT_ID= -
-
- 31 - GOOGLE_OAUTH_CLIENT_SECRET= -
-
-
-
- ) : null - //
- //
- //

Sync Settings

- //

- // Configure how many files to sync when manually triggering a sync - //

- //
- //
- //
- // { - // setSyncAllFiles(!!checked); - // if (checked) { - // setMaxFiles(0); - // } else { - // setMaxFiles(10); - // } - // }} - // /> - // - //
- // - //
- // setMaxFiles(parseInt(e.target.value) || 10)} - // disabled={syncAllFiles} - // className="w-16 min-w-16 max-w-16 flex-shrink-0 disabled:opacity-50 disabled:cursor-not-allowed" - // min="1" - // max="100" - // title={ - // syncAllFiles - // ? "Disabled when 'Sync all files' is checked" - // : "Leave blank or set to 0 for unlimited" - // } - // /> - //
- //
- //
- } - {/* Connectors Grid */} -
- {connectors.map((connector) => { - return ( - - -
-
-
-
- {connector.icon} -
-
- - {connector.name} - {connector && getStatusBadge(connector.status)} - - - {connector?.description - ? `${connector.name} is configured.` - : connector.description} - -
-
-
- - {connector?.available ? ( -
- {connector?.status === "connected" ? ( - <> - - {syncResults[connector.id] && ( -
-
- Processed:{" "} - {syncResults[connector.id]?.processed || 0} -
-
- Added: {syncResults[connector.id]?.added || 0} -
- {syncResults[connector.id]?.errors && ( -
- Errors: {syncResults[connector.id]?.errors} -
- )} -
- )} - - ) : ( - - )} -
- ) : ( -
-

- See our{" "} - - Cloud Connectors installation guide - {" "} - for more detail. -

-
- )} -
-
- ); - })} -
-
- - {/* Model Providers Section */} -
-
-

- Model Providers -

-
- -
- - {/* Agent Behavior Section */} - - -
- Agent -
- - Restore flow - - } - title="Restore default Agent flow" - description="This restores defaults and discards all custom settings and overrides. This can’t be undone." - confirmText="Restore" - variant="destructive" - onConfirm={handleRestoreRetrievalFlow} - /> - - - Langflow icon - - - - - Edit in Langflow - - } - title="Edit Agent flow in Langflow" - description={ - <> -

- You're entering Langflow. You can edit the{" "} - Agent flow and other underlying flows. Manual - changes to components, wiring, or I/O can break this - experience. -

-

You can restore this flow from Settings.

- - } - confirmText="Proceed" - confirmIcon={} - onConfirm={(closeDialog) => - handleEditInLangflow("chat", closeDialog) - } - variant="warning" - /> -
-
- - This Agent retrieves from your knowledge and generates chat - responses. Edit in Langflow for full control. - -
- -
-
- - - -
-
- - - -
-
- -