"use client"; import { ArrowUpRight, Copy, Key, Loader2, Minus, PlugZap, Plus, Trash2 } from "lucide-react"; import Link from "next/link"; import { useRouter, useSearchParams } from "next/navigation"; import { Suspense, useCallback, useEffect, useState } from "react"; import { toast } from "sonner"; import { useGetAnthropicModelsQuery, useGetIBMModelsQuery, useGetOllamaModelsQuery, useGetOpenAIModelsQuery, } from "@/app/api/queries/useGetModelsQuery"; import { useGetApiKeysQuery } from "@/app/api/queries/useGetApiKeysQuery"; import { useCreateApiKeyMutation } from "@/app/api/mutations/useCreateApiKeyMutation"; import { useRevokeApiKeyMutation } from "@/app/api/mutations/useRevokeApiKeyMutation"; import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery"; import { ConfirmationDialog } from "@/components/confirmation-dialog"; import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, } from "@/components/ui/dialog"; import { LabelWrapper } from "@/components/label-wrapper"; import { ProtectedRoute } from "@/components/protected-route"; import { Button } from "@/components/ui/button"; import { Card, CardContent, CardDescription, CardHeader, CardTitle, } from "@/components/ui/card"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { Switch } from "@/components/ui/switch"; import { Textarea } from "@/components/ui/textarea"; import { useAuth } from "@/contexts/auth-context"; import { useTask } from "@/contexts/task-context"; import { DEFAULT_AGENT_SETTINGS, DEFAULT_KNOWLEDGE_SETTINGS, UI_CONSTANTS, } from "@/lib/constants"; import { useDebounce } from "@/lib/debounce"; import { cn } from "@/lib/utils"; import GoogleDriveIcon from "../../components/icons/google-drive-logo"; import OneDriveIcon from "../../components/icons/one-drive-logo"; import SharePointIcon from "../../components/icons/share-point-logo"; import { useUpdateSettingsMutation } from "../api/mutations/useUpdateSettingsMutation"; import { ModelSelector } from "../onboarding/_components/model-selector"; import ModelProviders from "./_components/model-providers"; import { getModelLogo, type ModelProvider } from "./_helpers/model-helpers"; const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS; interface GoogleDriveFile { id: string; name: string; mimeType: string; webViewLink?: string; iconLink?: string; } interface OneDriveFile { id: string; name: string; mimeType?: string; webUrl?: string; driveItem?: { file?: { mimeType: string }; folder?: unknown; }; } interface Connector { id: string; name: string; description: string; icon: React.ReactNode; status: "not_connected" | "connecting" | "connected" | "error"; type: string; connectionId?: string; access_token?: string; selectedFiles?: GoogleDriveFile[] | OneDriveFile[]; available?: boolean; } interface SyncResult { processed?: number; added?: number; errors?: number; skipped?: number; total?: number; } interface Connection { connection_id: string; is_active: boolean; created_at: string; last_sync?: string; } function KnowledgeSourcesPage() { const { isAuthenticated, isNoAuthMode } = useAuth(); const { addTask, tasks } = useTask(); const searchParams = useSearchParams(); const router = useRouter(); // Check if we should auto-open the LLM model selector const focusLlmModel = searchParams.get("focusLlmModel") === "true"; // Use a trigger state that changes each time we detect the query param const [openLlmSelector, setOpenLlmSelector] = useState(false); // Connectors state const [connectors, 
function KnowledgeSourcesPage() {
  const { isAuthenticated, isNoAuthMode } = useAuth();
  const { addTask, tasks } = useTask();
  const searchParams = useSearchParams();
  const router = useRouter();

  // Check if we should auto-open the LLM model selector
  const focusLlmModel = searchParams.get("focusLlmModel") === "true";
  // Use a trigger state that changes each time we detect the query param
  const [openLlmSelector, setOpenLlmSelector] = useState(false);

  // Connectors state
  const [connectors, setConnectors] = useState<Connector[]>([]);
  const [isConnecting, setIsConnecting] = useState<string | null>(null);
  const [isSyncing, setIsSyncing] = useState<string | null>(null);
  const [syncResults, setSyncResults] = useState<{
    [key: string]: SyncResult | null;
  }>({});
  const [maxFiles, setMaxFiles] = useState(10);
  const [syncAllFiles, setSyncAllFiles] = useState(false);

  // Only keep systemPrompt state since it needs manual save button
  const [systemPrompt, setSystemPrompt] = useState("");
  const [chunkSize, setChunkSize] = useState(1024);
  const [chunkOverlap, setChunkOverlap] = useState(50);
  const [tableStructure, setTableStructure] = useState(true);
  const [ocr, setOcr] = useState(false);
  const [pictureDescriptions, setPictureDescriptions] = useState(false);

  // API Keys state
  const [createKeyDialogOpen, setCreateKeyDialogOpen] = useState(false);
  const [newKeyName, setNewKeyName] = useState("");
  const [newlyCreatedKey, setNewlyCreatedKey] = useState<string | null>(null);
  const [showKeyDialogOpen, setShowKeyDialogOpen] = useState(false);

  // Fetch settings using React Query
  const { data: settings = {} } = useGetSettingsQuery({
    enabled: isAuthenticated || isNoAuthMode,
  });

  // Fetch API keys
  const { data: apiKeysData, isLoading: apiKeysLoading } = useGetApiKeysQuery({
    enabled: isAuthenticated || isNoAuthMode,
  });

  // API key mutations
  const createApiKeyMutation = useCreateApiKeyMutation({
    onSuccess: (data) => {
      setNewlyCreatedKey(data.api_key);
      setCreateKeyDialogOpen(false);
      setShowKeyDialogOpen(true);
      setNewKeyName("");
      toast.success("API key created");
    },
    onError: (error) => {
      toast.error("Failed to create API key", { description: error.message });
    },
  });

  const revokeApiKeyMutation = useRevokeApiKeyMutation({
    onSuccess: () => {
      toast.success("API key revoked");
    },
    onError: (error) => {
      toast.error("Failed to revoke API key", { description: error.message });
    },
  });

  // Fetch models for each provider
  const { data: openaiModels, isLoading: openaiLoading } =
    useGetOpenAIModelsQuery(
      { apiKey: "" },
      { enabled: settings?.providers?.openai?.configured === true },
    );
  const { data: anthropicModels, isLoading: anthropicLoading } =
    useGetAnthropicModelsQuery(
      { apiKey: "" },
      { enabled: settings?.providers?.anthropic?.configured === true },
    );
  const { data: ollamaModels, isLoading: ollamaLoading } =
    useGetOllamaModelsQuery(
      { endpoint: settings?.providers?.ollama?.endpoint },
      {
        enabled:
          settings?.providers?.ollama?.configured === true &&
          !!settings?.providers?.ollama?.endpoint,
      },
    );
  const { data: watsonxModels, isLoading: watsonxLoading } =
    useGetIBMModelsQuery(
      {
        endpoint: settings?.providers?.watsonx?.endpoint,
        apiKey: "",
        projectId: settings?.providers?.watsonx?.project_id,
      },
      {
        enabled:
          settings?.providers?.watsonx?.configured === true &&
          !!settings?.providers?.watsonx?.endpoint &&
          !!settings?.providers?.watsonx?.project_id,
      },
    );
  // Build grouped LLM model options from all configured providers
  const groupedLlmModels = [
    {
      group: "OpenAI",
      provider: "openai",
      icon: getModelLogo("", "openai"),
      models: openaiModels?.language_models || [],
      configured: settings.providers?.openai?.configured === true,
    },
    {
      group: "Anthropic",
      provider: "anthropic",
      icon: getModelLogo("", "anthropic"),
      models: anthropicModels?.language_models || [],
      configured: settings.providers?.anthropic?.configured === true,
    },
    {
      group: "Ollama",
      provider: "ollama",
      icon: getModelLogo("", "ollama"),
      models: ollamaModels?.language_models || [],
      configured: settings.providers?.ollama?.configured === true,
    },
    {
      group: "IBM watsonx.ai",
      provider: "watsonx",
      icon: getModelLogo("", "watsonx"),
      models: watsonxModels?.language_models || [],
      configured: settings.providers?.watsonx?.configured === true,
    },
  ]
    .filter((provider) => provider.configured)
    .map((provider) => ({
      group: provider.group,
      icon: provider.icon,
      options: provider.models.map((model) => ({
        ...model,
        provider: provider.provider,
      })),
    }));

  // Build grouped embedding model options from all configured providers (excluding Anthropic)
  const groupedEmbeddingModels = [
    {
      group: "OpenAI",
      provider: "openai",
      icon: getModelLogo("", "openai"),
      models: openaiModels?.embedding_models || [],
      configured: settings.providers?.openai?.configured === true,
    },
    {
      group: "Ollama",
      provider: "ollama",
      icon: getModelLogo("", "ollama"),
      models: ollamaModels?.embedding_models || [],
      configured: settings.providers?.ollama?.configured === true,
    },
    {
      group: "IBM watsonx.ai",
      provider: "watsonx",
      icon: getModelLogo("", "watsonx"),
      models: watsonxModels?.embedding_models || [],
      configured: settings.providers?.watsonx?.configured === true,
    },
  ]
    .filter((provider) => provider.configured)
    .map((provider) => ({
      group: provider.group,
      icon: provider.icon,
      options: provider.models.map((model) => ({
        ...model,
        provider: provider.provider,
      })),
    }));

  const isLoadingAnyLlmModels =
    openaiLoading || anthropicLoading || ollamaLoading || watsonxLoading;
  const isLoadingAnyEmbeddingModels =
    openaiLoading || ollamaLoading || watsonxLoading;

  // Mutations
  const updateSettingsMutation = useUpdateSettingsMutation({
    onSuccess: () => {
      toast.success("Settings updated successfully");
    },
    onError: (error) => {
      toast.error("Failed to update settings", {
        description: error.message,
      });
    },
  });

  // Debounced update function
  const debouncedUpdate = useDebounce(
    (variables: Parameters<typeof updateSettingsMutation.mutate>[0]) => {
      updateSettingsMutation.mutate(variables);
    },
    500,
  );
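  // The debounced updater coalesces rapid edits: typing "1", "10", "102", "1024"
  // into the chunk-size field calls debouncedUpdate four times but should issue a
  // single settings mutation roughly 500 ms after the last keystroke (assuming the
  // usual trailing-edge semantics of useDebounce). Toggles that must persist
  // immediately call updateSettingsMutation.mutate directly instead.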
  // Sync system prompt state with settings data
  useEffect(() => {
    if (settings.agent?.system_prompt) {
      setSystemPrompt(settings.agent.system_prompt);
    }
  }, [settings.agent?.system_prompt]);

  // Sync chunk size and overlap state with settings data
  useEffect(() => {
    if (settings.knowledge?.chunk_size) {
      setChunkSize(settings.knowledge.chunk_size);
    }
  }, [settings.knowledge?.chunk_size]);

  useEffect(() => {
    if (settings.knowledge?.chunk_overlap) {
      setChunkOverlap(settings.knowledge.chunk_overlap);
    }
  }, [settings.knowledge?.chunk_overlap]);

  // Sync docling settings with settings data
  useEffect(() => {
    if (settings.knowledge?.table_structure !== undefined) {
      setTableStructure(settings.knowledge.table_structure);
    }
  }, [settings.knowledge?.table_structure]);

  useEffect(() => {
    if (settings.knowledge?.ocr !== undefined) {
      setOcr(settings.knowledge.ocr);
    }
  }, [settings.knowledge?.ocr]);

  useEffect(() => {
    if (settings.knowledge?.picture_descriptions !== undefined) {
      setPictureDescriptions(settings.knowledge.picture_descriptions);
    }
  }, [settings.knowledge?.picture_descriptions]);

  // Handle auto-focus on LLM model selector when coming from provider setup
  useEffect(() => {
    if (focusLlmModel) {
      // Trigger the selector to open
      setOpenLlmSelector(true);

      // Scroll to the agent card
      const agentCard = document.getElementById("agent-card");
      if (agentCard) {
        agentCard.scrollIntoView({ behavior: "smooth", block: "start" });
      }

      // Clear the query parameter
      const newSearchParams = new URLSearchParams(searchParams.toString());
      newSearchParams.delete("focusLlmModel");
      router.replace(`/settings?${newSearchParams.toString()}`, {
        scroll: false,
      });

      // Reset the trigger after a brief delay so it can be triggered again
      setTimeout(() => setOpenLlmSelector(false), 100);
    }
  }, [focusLlmModel, searchParams, router]);

  // Update model selection immediately (also updates provider)
  const handleModelChange = (newModel: string, provider?: string) => {
    if (newModel && provider) {
      updateSettingsMutation.mutate({
        llm_model: newModel,
        llm_provider: provider,
      });
    } else if (newModel) {
      updateSettingsMutation.mutate({ llm_model: newModel });
    }
  };

  // Update system prompt with save button
  const handleSystemPromptSave = () => {
    updateSettingsMutation.mutate({ system_prompt: systemPrompt });
  };

  // Update embedding model selection immediately (also updates provider)
  const handleEmbeddingModelChange = (newModel: string, provider?: string) => {
    if (newModel && provider) {
      updateSettingsMutation.mutate({
        embedding_model: newModel,
        embedding_provider: provider,
      });
    } else if (newModel) {
      updateSettingsMutation.mutate({ embedding_model: newModel });
    }
  };

  // Update chunk size setting with debounce
  const handleChunkSizeChange = (value: string) => {
    const numValue = Math.max(0, parseInt(value) || 0);
    setChunkSize(numValue);
    debouncedUpdate({ chunk_size: numValue });
  };

  // Update chunk overlap setting with debounce
  const handleChunkOverlapChange = (value: string) => {
    const numValue = Math.max(0, parseInt(value) || 0);
    setChunkOverlap(numValue);
    debouncedUpdate({ chunk_overlap: numValue });
  };

  // Update docling settings
  const handleTableStructureChange = (checked: boolean) => {
    setTableStructure(checked);
    updateSettingsMutation.mutate({ table_structure: checked });
  };

  const handleOcrChange = (checked: boolean) => {
    setOcr(checked);
    updateSettingsMutation.mutate({ ocr: checked });
  };

  const handlePictureDescriptionsChange = (checked: boolean) => {
    setPictureDescriptions(checked);
    updateSettingsMutation.mutate({ picture_descriptions: checked });
  };

  // API Keys handlers
  const handleCreateApiKey = () => {
    if (!newKeyName.trim()) {
      toast.error("Please enter a name for the API key");
      return;
    }
    createApiKeyMutation.mutate({ name: newKeyName.trim() });
  };

  const handleRevokeApiKey = (keyId: string) => {
    revokeApiKeyMutation.mutate({ key_id: keyId });
  };

  const handleCopyApiKey = async () => {
    if (newlyCreatedKey) {
      await navigator.clipboard.writeText(newlyCreatedKey);
      toast.success("API key copied to clipboard");
    }
  };

  const formatDate = (dateString: string | null) => {
    if (!dateString) return "Never";
    const date = new Date(dateString);
    return date.toLocaleDateString(undefined, {
      year: "numeric",
      month: "short",
      day: "numeric",
    });
  };
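  // Usage sketch (assumption, not defined in this file): a key surfaced by the
  // show-key dialog would typically be attached to programmatic requests against
  // the OpenRAG API. The endpoint and header scheme below are illustrative only —
  // confirm against the API documentation.
  //
  //   await fetch("/api/chat", {                         // hypothetical endpoint
  //     headers: { Authorization: `Bearer ${apiKey}` },  // assumed auth scheme
  //   });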
  // Helper function to get connector icon
  const getConnectorIcon = useCallback((iconName: string) => {
    const iconMap: { [key: string]: React.ReactElement } = {
      "google-drive": <GoogleDriveIcon />,
      sharepoint: <SharePointIcon />,
      onedrive: <OneDriveIcon />,
    };
    return (
      iconMap[iconName] || (
        // Fallback when the backend reports an icon name we don't recognize
        <span>?</span>
      )
    );
  }, []);

  // Connector functions
  const checkConnectorStatuses = useCallback(async () => {
    try {
      // Fetch available connectors from backend
      const connectorsResponse = await fetch("/api/connectors");
      if (!connectorsResponse.ok) {
        throw new Error("Failed to load connectors");
      }
      const connectorsResult = await connectorsResponse.json();
      const connectorTypes = Object.keys(connectorsResult.connectors);

      // Initialize connectors list with metadata from backend
      const initialConnectors = connectorTypes
        // .filter((type) => connectorsResult.connectors[type].available) // Only show available connectors
        .map((type) => ({
          id: type,
          name: connectorsResult.connectors[type].name,
          description: connectorsResult.connectors[type].description,
          icon: getConnectorIcon(connectorsResult.connectors[type].icon),
          status: "not_connected" as const,
          type: type,
          available: connectorsResult.connectors[type].available,
        }));
      setConnectors(initialConnectors);

      // Check status for each connector type
      for (const connectorType of connectorTypes) {
        const response = await fetch(`/api/connectors/${connectorType}/status`);
        if (response.ok) {
          const data = await response.json();
          const connections = data.connections || [];
          const activeConnection = connections.find(
            (conn: Connection) => conn.is_active,
          );
          const isConnected = activeConnection !== undefined;
          setConnectors((prev) =>
            prev.map((c) =>
              c.type === connectorType
                ? {
                    ...c,
                    status: isConnected ? "connected" : "not_connected",
                    connectionId: activeConnection?.connection_id,
                  }
                : c,
            ),
          );
        }
      }
    } catch (error) {
      console.error("Failed to check connector statuses:", error);
    }
  }, [getConnectorIcon]);

  const handleConnect = async (connector: Connector) => {
    setIsConnecting(connector.id);
    setSyncResults((prev) => ({ ...prev, [connector.id]: null }));

    try {
      // Use the shared auth callback URL, same as connectors page
      const redirectUri = `${window.location.origin}/auth/callback`;

      const response = await fetch("/api/auth/init", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          connector_type: connector.type,
          purpose: "data_source",
          name: `${connector.name} Connection`,
          redirect_uri: redirectUri,
        }),
      });

      if (response.ok) {
        const result = await response.json();
        if (result.oauth_config) {
          localStorage.setItem("connecting_connector_id", result.connection_id);
          localStorage.setItem("connecting_connector_type", connector.type);

          const authUrl =
            `${result.oauth_config.authorization_endpoint}?` +
            `client_id=${result.oauth_config.client_id}&` +
            `response_type=code&` +
            `scope=${result.oauth_config.scopes.join(" ")}&` +
            `redirect_uri=${encodeURIComponent(
              result.oauth_config.redirect_uri,
            )}&` +
            `access_type=offline&` +
            `prompt=consent&` +
            `state=${result.connection_id}`;

          window.location.href = authUrl;
        }
      } else {
        console.error("Failed to initiate connection");
        setIsConnecting(null);
      }
    } catch (error) {
      console.error("Connection error:", error);
      setIsConnecting(null);
    }
  };
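  // OAuth round trip (sketch): handleConnect above asks the backend to initialize a
  // connection, stashes the pending connection id/type in localStorage, and redirects
  // the browser to the provider's consent screen with `state=<connection_id>`. The
  // /auth/callback page (not shown in this file) is assumed to finish the code
  // exchange and return the user with `?oauth_success=true`, which the mount effect
  // below strips from the URL before re-checking connector statuses.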
  // const handleSync = async (connector: Connector) => {
  //   if (!connector.connectionId) return;

  //   setIsSyncing(connector.id);
  //   setSyncResults(prev => ({ ...prev, [connector.id]: null }));

  //   try {
  //     const syncBody: {
  //       connection_id: string;
  //       max_files?: number;
  //       selected_files?: string[];
  //     } = {
  //       connection_id: connector.connectionId,
  //       max_files: syncAllFiles ? 0 : maxFiles || undefined,
  //     };

  //     // Note: File selection is now handled via the cloud connectors dialog

  //     const response = await fetch(`/api/connectors/${connector.type}/sync`, {
  //       method: "POST",
  //       headers: {
  //         "Content-Type": "application/json",
  //       },
  //       body: JSON.stringify(syncBody),
  //     });

  //     const result = await response.json();

  //     if (response.status === 201) {
  //       const taskId = result.task_id;
  //       if (taskId) {
  //         addTask(taskId);
  //         setSyncResults(prev => ({
  //           ...prev,
  //           [connector.id]: {
  //             processed: 0,
  //             total: result.total_files || 0,
  //           },
  //         }));
  //       }
  //     } else if (response.ok) {
  //       setSyncResults(prev => ({ ...prev, [connector.id]: result }));
  //       // Note: Stats will auto-refresh via task completion watcher for async syncs
  //     } else {
  //       console.error("Sync failed:", result.error);
  //     }
  //   } catch (error) {
  //     console.error("Sync error:", error);
  //   } finally {
  //     setIsSyncing(null);
  //   }
  // };

  const navigateToKnowledgePage = (connector: Connector) => {
    const provider = connector.type.replace(/-/g, "_");
    router.push(`/upload/${provider}`);
  };

  // Check connector status on mount and when returning from OAuth
  useEffect(() => {
    if (isAuthenticated) {
      checkConnectorStatuses();
    }
    if (searchParams.get("oauth_success") === "true") {
      const url = new URL(window.location.href);
      url.searchParams.delete("oauth_success");
      window.history.replaceState({}, "", url.toString());
    }
  }, [searchParams, isAuthenticated, checkConnectorStatuses]);

  // Track previous tasks to detect new completions
  const [prevTasks, setPrevTasks] = useState<typeof tasks>([]);

  // Watch for task completions and refresh stats
  useEffect(() => {
    // Find newly completed tasks by comparing with previous state
    const newlyCompletedTasks = tasks.filter((task) => {
      const wasCompleted =
        prevTasks.find((prev) => prev.task_id === task.task_id)?.status ===
        "completed";
      return task.status === "completed" && !wasCompleted;
    });

    if (newlyCompletedTasks.length > 0) {
      // Task completed - could refresh data here if needed
      const timeoutId = setTimeout(() => {
        // Stats refresh removed
      }, 1000);

      // Update previous tasks state
      setPrevTasks(tasks);

      return () => clearTimeout(timeoutId);
    } else {
      // Always update previous tasks state
      setPrevTasks(tasks);
    }
  }, [tasks, prevTasks]);
  const handleEditInLangflow = (
    flowType: "chat" | "ingest",
    closeDialog: () => void,
  ) => {
    // Select the appropriate flow ID and edit URL based on flow type
    const targetFlowId =
      flowType === "ingest" ? settings.ingest_flow_id : settings.flow_id;
    const editUrl =
      flowType === "ingest"
        ? settings.langflow_ingest_edit_url
        : settings.langflow_edit_url;

    const derivedFromWindow =
      typeof window !== "undefined"
        ? `${window.location.protocol}//${window.location.hostname}:7860`
        : "";
    const base = (
      settings.langflow_public_url ||
      derivedFromWindow ||
      "http://localhost:7860"
    ).replace(/\/$/, "");
    const computed = targetFlowId ? `${base}/flow/${targetFlowId}` : base;
    const url = editUrl || computed;

    window.open(url, "_blank");
    closeDialog(); // Close immediately after opening Langflow
  };

  const handleRestoreRetrievalFlow = (closeDialog: () => void) => {
    fetch(`/api/reset-flow/retrieval`, {
      method: "POST",
    })
      .then((response) => {
        if (response.ok) {
          return response.json();
        }
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      })
      .then(() => {
        // Flow restoration is complete - backend already updated flow with current provider/model
        // Just reset the UI form value for system prompt
        setSystemPrompt(DEFAULT_AGENT_SETTINGS.system_prompt);
        closeDialog(); // Close after successful completion
      })
      .catch((error) => {
        console.error("Error restoring retrieval flow:", error);
        closeDialog(); // Close even on error (could show error toast instead)
      });
  };

  const handleRestoreIngestFlow = (closeDialog: () => void) => {
    fetch(`/api/reset-flow/ingest`, {
      method: "POST",
    })
      .then((response) => {
        if (response.ok) {
          return response.json();
        }
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      })
      .then(() => {
        // Only reset form values if the API call was successful
        setChunkSize(DEFAULT_KNOWLEDGE_SETTINGS.chunk_size);
        setChunkOverlap(DEFAULT_KNOWLEDGE_SETTINGS.chunk_overlap);
        setTableStructure(false);
        setOcr(false);
        setPictureDescriptions(false);
        closeDialog(); // Close after successful completion
      })
      .catch((error) => {
        console.error("Error restoring ingest flow:", error);
        closeDialog(); // Close even on error (could show error toast instead)
      });
  };

  return (
{/* Connectors Section */}

Cloud Connectors

{/* Conditional Sync Settings or No-Auth Message */} { isNoAuthMode ? ( Cloud connectors require authentication Add the Google OAuth variables below to your .env{" "} then restart the OpenRAG containers.
27 # Google OAuth
28 # Create credentials here:
29 # https://console.cloud.google.com/apis/credentials
30 GOOGLE_OAUTH_CLIENT_ID=
31 GOOGLE_OAUTH_CLIENT_SECRET=
) : null //
//
//

Sync Settings

//

// Configure how many files to sync when manually triggering a sync //

//
//
//
// { // setSyncAllFiles(!!checked); // if (checked) { // setMaxFiles(0); // } else { // setMaxFiles(10); // } // }} // /> // //
// //
// setMaxFiles(parseInt(e.target.value) || 10)} // disabled={syncAllFiles} // className="w-16 min-w-16 max-w-16 flex-shrink-0 disabled:opacity-50 disabled:cursor-not-allowed" // min="1" // max="100" // title={ // syncAllFiles // ? "Disabled when 'Sync all files' is checked" // : "Leave blank or set to 0 for unlimited" // } // /> //
//
//
} {/* Connectors Grid */}
{connectors.map((connector) => { return (
{connector.icon}
{connector.name} {connector?.available ? `${connector.name} is configured.` : "Not configured."}
{connector?.available ? (
{connector?.status === "connected" ? ( <> {syncResults[connector.id] && (
Processed:{" "} {syncResults[connector.id]?.processed || 0}
Added: {syncResults[connector.id]?.added || 0}
{syncResults[connector.id]?.errors && (
Errors: {syncResults[connector.id]?.errors}
)}
)} ) : ( )}
) : (

See our{" "} Cloud Connectors installation guide {" "} for more detail.

)}
); })}
{/* Model Providers Section */}

Model Providers

{/* Agent Behavior Section */}
Agent
Restore flow } title="Restore default Agent flow" description="This restores defaults and discards all custom settings and overrides. This can’t be undone." confirmText="Restore" variant="destructive" onConfirm={handleRestoreRetrievalFlow} /> Langflow icon Edit in Langflow } title="Edit Agent flow in Langflow" description={ <>

You're entering Langflow. You can edit the{" "} Agent flow and other underlying flows. Manual changes to components, wiring, or I/O can break this experience.

To enable editing, you need to unlock the flow by clicking on its name and disabling the Lock flow option.

You can restore this flow from Settings.

} confirmText="Proceed" confirmIcon={} onConfirm={(closeDialog) => handleEditInLangflow("chat", closeDialog) } variant="warning" />
This Agent retrieves from your knowledge and generates chat responses. Edit in Langflow for full control.