diff --git a/frontend/app/chat/_components/assistant-message.tsx b/frontend/app/chat/_components/assistant-message.tsx index 04fbae5d..0f24dd8c 100644 --- a/frontend/app/chat/_components/assistant-message.tsx +++ b/frontend/app/chat/_components/assistant-message.tsx @@ -1,95 +1,97 @@ import { GitBranch } from "lucide-react"; import { motion } from "motion/react"; -import DogIcon from "@/components/icons/dog-icon"; +import DogIcon from "@/components/logo/dog-icon"; import { MarkdownRenderer } from "@/components/markdown-renderer"; import { cn } from "@/lib/utils"; -import type { FunctionCall } from "../_types/types"; +import type { FunctionCall } from "../types"; import { FunctionCalls } from "./function-calls"; import { Message } from "./message"; interface AssistantMessageProps { - content: string; - functionCalls?: FunctionCall[]; - messageIndex?: number; - expandedFunctionCalls: Set; - onToggle: (functionCallId: string) => void; - isStreaming?: boolean; - showForkButton?: boolean; - onFork?: (e: React.MouseEvent) => void; - isCompleted?: boolean; - isInactive?: boolean; - animate?: boolean; - delay?: number; + content: string; + functionCalls?: FunctionCall[]; + messageIndex?: number; + expandedFunctionCalls: Set; + onToggle: (functionCallId: string) => void; + isStreaming?: boolean; + showForkButton?: boolean; + onFork?: (e: React.MouseEvent) => void; + isCompleted?: boolean; + isInactive?: boolean; + animate?: boolean; + delay?: number; } export function AssistantMessage({ - content, - functionCalls = [], - messageIndex, - expandedFunctionCalls, - onToggle, - isStreaming = false, - showForkButton = false, - onFork, - isCompleted = false, - isInactive = false, - animate = true, - delay = 0.2, + content, + functionCalls = [], + messageIndex, + expandedFunctionCalls, + onToggle, + isStreaming = false, + showForkButton = false, + onFork, + isCompleted = false, + isInactive = false, + animate = true, + delay = 0.2, }: AssistantMessageProps) { - return ( - - - - - } - actions={ - showForkButton && onFork ? ( - - ) : undefined - } - > - -
- ' - : content - } - /> -
-
-
- ); + return ( + + + + + } + actions={ + showForkButton && onFork ? ( + + ) : undefined + } + > + +
+ ' + : 'Thinking') + : content + } + /> +
+
+
+ ); } diff --git a/frontend/app/onboarding/_components/ibm-onboarding.tsx b/frontend/app/onboarding/_components/ibm-onboarding.tsx index 5e444e54..cd6d95a6 100644 --- a/frontend/app/onboarding/_components/ibm-onboarding.tsx +++ b/frontend/app/onboarding/_components/ibm-onboarding.tsx @@ -1,8 +1,8 @@ import type { Dispatch, SetStateAction } from "react"; import { useEffect, useState } from "react"; -import IBMLogo from "@/components/icons/ibm-logo"; import { LabelInput } from "@/components/label-input"; import { LabelWrapper } from "@/components/label-wrapper"; +import IBMLogo from "@/components/icons/ibm-logo"; import { useDebouncedValue } from "@/lib/debounce"; import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; import { useGetIBMModelsQuery } from "../../api/queries/useGetModelsQuery"; @@ -12,201 +12,199 @@ import { AdvancedOnboarding } from "./advanced"; import { ModelSelector } from "./model-selector"; export function IBMOnboarding({ - isEmbedding = false, - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - alreadyConfigured = false, + isEmbedding = false, + setSettings, + sampleDataset, + setSampleDataset, + setIsLoadingModels, + alreadyConfigured = false, }: { - isEmbedding?: boolean; - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - alreadyConfigured?: boolean; + isEmbedding?: boolean; + setSettings: Dispatch>; + sampleDataset: boolean; + setSampleDataset: (dataset: boolean) => void; + setIsLoadingModels?: (isLoading: boolean) => void; + alreadyConfigured?: boolean; }) { - const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com"); - const [apiKey, setApiKey] = useState(""); - const [projectId, setProjectId] = useState(""); + const [endpoint, setEndpoint] = useState(alreadyConfigured ? 
"" : "https://us-south.ml.cloud.ibm.com"); + const [apiKey, setApiKey] = useState(""); + const [projectId, setProjectId] = useState(""); - const options = [ - { - value: "https://us-south.ml.cloud.ibm.com", - label: "https://us-south.ml.cloud.ibm.com", - default: true, - }, - { - value: "https://eu-de.ml.cloud.ibm.com", - label: "https://eu-de.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://eu-gb.ml.cloud.ibm.com", - label: "https://eu-gb.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://au-syd.ml.cloud.ibm.com", - label: "https://au-syd.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://jp-tok.ml.cloud.ibm.com", - label: "https://jp-tok.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://ca-tor.ml.cloud.ibm.com", - label: "https://ca-tor.ml.cloud.ibm.com", - default: false, - }, - ]; - const debouncedEndpoint = useDebouncedValue(endpoint, 500); - const debouncedApiKey = useDebouncedValue(apiKey, 500); - const debouncedProjectId = useDebouncedValue(projectId, 500); + const options = [ + { + value: "https://us-south.ml.cloud.ibm.com", + label: "https://us-south.ml.cloud.ibm.com", + default: true, + }, + { + value: "https://eu-de.ml.cloud.ibm.com", + label: "https://eu-de.ml.cloud.ibm.com", + default: false, + }, + { + value: "https://eu-gb.ml.cloud.ibm.com", + label: "https://eu-gb.ml.cloud.ibm.com", + default: false, + }, + { + value: "https://au-syd.ml.cloud.ibm.com", + label: "https://au-syd.ml.cloud.ibm.com", + default: false, + }, + { + value: "https://jp-tok.ml.cloud.ibm.com", + label: "https://jp-tok.ml.cloud.ibm.com", + default: false, + }, + { + value: "https://ca-tor.ml.cloud.ibm.com", + label: "https://ca-tor.ml.cloud.ibm.com", + default: false, + }, + ]; + const debouncedEndpoint = useDebouncedValue(endpoint, 500); + const debouncedApiKey = useDebouncedValue(apiKey, 500); + const debouncedProjectId = useDebouncedValue(projectId, 500); - // Fetch models from API when all credentials are provided - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetIBMModelsQuery( - { - endpoint: debouncedEndpoint, - apiKey: debouncedApiKey, - projectId: debouncedProjectId, - }, - { - enabled: !!debouncedEndpoint && !!debouncedApiKey && !!debouncedProjectId, - }, - ); + // Fetch models from API when all credentials are provided + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetIBMModelsQuery( + { + endpoint: debouncedEndpoint ? debouncedEndpoint : undefined, + apiKey: debouncedApiKey ? debouncedApiKey : undefined, + projectId: debouncedProjectId ? 
debouncedProjectId : undefined, + }, + { enabled: !!debouncedEndpoint || !!debouncedApiKey || !!debouncedProjectId || alreadyConfigured }, + ); - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); - const handleSampleDatasetChange = (dataset: boolean) => { - setSampleDataset(dataset); - }; + // Use custom hook for model selection logic + const { + languageModel, + embeddingModel, + setLanguageModel, + setEmbeddingModel, + languageModels, + embeddingModels, + } = useModelSelection(modelsData, isEmbedding); + const handleSampleDatasetChange = (dataset: boolean) => { + setSampleDataset(dataset); + }; - useEffect(() => { - setIsLoadingModels?.(isLoadingModels); - }, [isLoadingModels, setIsLoadingModels]); + useEffect(() => { + setIsLoadingModels?.(isLoadingModels); + }, [isLoadingModels, setIsLoadingModels]); - // Update settings when values change - useUpdateSettings( - "watsonx", - { - endpoint, - apiKey, - projectId, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); + // Update settings when values change + useUpdateSettings( + "watsonx", + { + endpoint, + apiKey, + projectId, + languageModel, + embeddingModel, + }, + setSettings, + isEmbedding, + ); - return ( - <> -
- -
- {} : setEndpoint} - searchPlaceholder="Search endpoint..." - noOptionsPlaceholder={ - alreadyConfigured - ? "https://•••••••••••••••••••••••••••••••••••••••••" - : "No endpoints available" - } - placeholder="Select endpoint..." - /> - {alreadyConfigured && ( -

- Reusing endpoint from model provider selection. -

- )} -
-
+ return ( + <> +
+ +
+ {} : setEndpoint} + searchPlaceholder="Search endpoint..." + noOptionsPlaceholder={ + alreadyConfigured + ? "https://•••••••••••••••••••••••••••••••••••••••••" + : "No endpoints available" + } + placeholder="Select endpoint..." + /> + {alreadyConfigured && ( +

+ Reusing endpoint from model provider selection. +

+ )} +
+
-
- setProjectId(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing project ID from model provider selection. -

- )} -
-
- setApiKey(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing API key from model provider selection. -

- )} -
- {isLoadingModels && ( -

- Validating configuration... -

- )} - {modelsError && ( -

- Connection failed. Check your configuration. -

- )} -
- } - languageModels={languageModels} - embeddingModels={embeddingModels} - languageModel={languageModel} - embeddingModel={embeddingModel} - sampleDataset={sampleDataset} - setLanguageModel={setLanguageModel} - setEmbeddingModel={setEmbeddingModel} - setSampleDataset={handleSampleDatasetChange} - /> - - ); +
+ setProjectId(e.target.value)} + disabled={alreadyConfigured} + /> + {alreadyConfigured && ( +

+ Reusing project ID from model provider selection. +

+ )} +
+
+ setApiKey(e.target.value)} + disabled={alreadyConfigured} + /> + {alreadyConfigured && ( +

+ Reusing API key from model provider selection. +

+ )} +
+ {isLoadingModels && ( +

+ Validating configuration... +

+ )} + {modelsError && ( +

+ Connection failed. Check your configuration. +

+ )} +
+ } + languageModels={languageModels} + embeddingModels={embeddingModels} + languageModel={languageModel} + embeddingModel={embeddingModel} + sampleDataset={sampleDataset} + setLanguageModel={setLanguageModel} + setEmbeddingModel={setEmbeddingModel} + setSampleDataset={handleSampleDatasetChange} + /> + + ); } diff --git a/frontend/app/onboarding/_components/ollama-onboarding.tsx b/frontend/app/onboarding/_components/ollama-onboarding.tsx index 42ec1f79..c43ffa1c 100644 --- a/frontend/app/onboarding/_components/ollama-onboarding.tsx +++ b/frontend/app/onboarding/_components/ollama-onboarding.tsx @@ -1,8 +1,8 @@ import type { Dispatch, SetStateAction } from "react"; import { useEffect, useState } from "react"; -import OllamaLogo from "@/components/icons/ollama-logo"; import { LabelInput } from "@/components/label-input"; import { LabelWrapper } from "@/components/label-wrapper"; +import OllamaLogo from "@/components/icons/ollama-logo"; import { useDebouncedValue } from "@/lib/debounce"; import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery"; @@ -11,163 +11,164 @@ import { useUpdateSettings } from "../_hooks/useUpdateSettings"; import { ModelSelector } from "./model-selector"; export function OllamaOnboarding({ - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - isEmbedding = false, - alreadyConfigured = false, + setSettings, + sampleDataset, + setSampleDataset, + setIsLoadingModels, + isEmbedding = false, + alreadyConfigured = false, }: { - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - isEmbedding?: boolean; - alreadyConfigured?: boolean; + setSettings: Dispatch>; + sampleDataset: boolean; + setSampleDataset: (dataset: boolean) => void; + setIsLoadingModels?: (isLoading: boolean) => void; + isEmbedding?: boolean; + alreadyConfigured?: boolean; }) { - const [endpoint, setEndpoint] = useState(`http://localhost:11434`); - const [showConnecting, setShowConnecting] = useState(false); - const debouncedEndpoint = useDebouncedValue(endpoint, 500); + const [endpoint, setEndpoint] = useState(alreadyConfigured ? undefined : `http://localhost:11434`); + const [showConnecting, setShowConnecting] = useState(false); + const debouncedEndpoint = useDebouncedValue(endpoint, 500); - // Fetch models from API when endpoint is provided (debounced) - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetOllamaModelsQuery( - debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined, - ); + // Fetch models from API when endpoint is provided (debounced) + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetOllamaModelsQuery( + debouncedEndpoint ? 
{ endpoint: debouncedEndpoint } : undefined, + { enabled: !!debouncedEndpoint || alreadyConfigured }, + ); - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); + // Use custom hook for model selection logic + const { + languageModel, + embeddingModel, + setLanguageModel, + setEmbeddingModel, + languageModels, + embeddingModels, + } = useModelSelection(modelsData, isEmbedding); - // Handle delayed display of connecting state - useEffect(() => { - let timeoutId: NodeJS.Timeout; + // Handle delayed display of connecting state + useEffect(() => { + let timeoutId: NodeJS.Timeout; - if (debouncedEndpoint && isLoadingModels) { - timeoutId = setTimeout(() => { - setIsLoadingModels?.(true); - setShowConnecting(true); - }, 500); - } else { - setShowConnecting(false); - setIsLoadingModels?.(false); - } + if (debouncedEndpoint && isLoadingModels) { + timeoutId = setTimeout(() => { + setIsLoadingModels?.(true); + setShowConnecting(true); + }, 500); + } else { + setShowConnecting(false); + setIsLoadingModels?.(false); + } - return () => { - if (timeoutId) { - clearTimeout(timeoutId); - } - }; - }, [debouncedEndpoint, isLoadingModels, setIsLoadingModels]); + return () => { + if (timeoutId) { + clearTimeout(timeoutId); + } + }; + }, [debouncedEndpoint, isLoadingModels, setIsLoadingModels]); - // Update settings when values change - useUpdateSettings( - "ollama", - { - endpoint, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); + // Update settings when values change + useUpdateSettings( + "ollama", + { + endpoint, + languageModel, + embeddingModel, + }, + setSettings, + isEmbedding, + ); - // Check validation state based on models query - const hasConnectionError = debouncedEndpoint && modelsError; - const hasNoModels = - modelsData && - !modelsData.language_models?.length && - !modelsData.embedding_models?.length; + // Check validation state based on models query + const hasConnectionError = debouncedEndpoint && modelsError; + const hasNoModels = + modelsData && + !modelsData.language_models?.length && + !modelsData.embedding_models?.length; - return ( -
-
- setEndpoint(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing endpoint from model provider selection. -

- )} - {showConnecting && ( -

- Connecting to Ollama server... -

- )} - {hasConnectionError && ( -

- Can't reach Ollama at {debouncedEndpoint}. Update the base URL or - start the server. -

- )} - {hasNoModels && ( -

- No models found. Install embedding and agent models on your Ollama - server. -

- )} -
- {isEmbedding && setEmbeddingModel && ( - - } - noOptionsPlaceholder={ - isLoadingModels - ? "Loading models..." - : "No embedding models detected. Install an embedding model to continue." - } - value={embeddingModel} - onValueChange={setEmbeddingModel} - /> - - )} - {!isEmbedding && setLanguageModel && ( - - } - noOptionsPlaceholder={ - isLoadingModels - ? "Loading models..." - : "No language models detected. Install a language model to continue." - } - value={languageModel} - onValueChange={setLanguageModel} - /> - - )} -
- ); + return ( +
+
+ setEndpoint(e.target.value)} + disabled={alreadyConfigured} + /> + {alreadyConfigured && ( +

+ Reusing endpoint from model provider selection. +

+ )} + {showConnecting && ( +

+ Connecting to Ollama server... +

+ )} + {hasConnectionError && ( +

+ Can't reach Ollama at {debouncedEndpoint}. Update the base URL or + start the server. +

+ )} + {hasNoModels && ( +

+ No models found. Install embedding and agent models on your Ollama + server. +

+ )} +
+ {isEmbedding && setEmbeddingModel && ( + + } + noOptionsPlaceholder={ + isLoadingModels + ? "Loading models..." + : "No embedding models detected. Install an embedding model to continue." + } + value={embeddingModel} + onValueChange={setEmbeddingModel} + /> + + )} + {!isEmbedding && setLanguageModel && ( + + } + noOptionsPlaceholder={ + isLoadingModels + ? "Loading models..." + : "No language models detected. Install a language model to continue." + } + value={languageModel} + onValueChange={setLanguageModel} + /> + + )} +
+ ); } diff --git a/frontend/app/onboarding/_components/openai-onboarding.tsx b/frontend/app/onboarding/_components/openai-onboarding.tsx index b7afa6f0..45205fe2 100644 --- a/frontend/app/onboarding/_components/openai-onboarding.tsx +++ b/frontend/app/onboarding/_components/openai-onboarding.tsx @@ -1,13 +1,13 @@ import type { Dispatch, SetStateAction } from "react"; import { useEffect, useState } from "react"; -import OpenAILogo from "@/components/icons/openai-logo"; import { LabelInput } from "@/components/label-input"; import { LabelWrapper } from "@/components/label-wrapper"; +import OpenAILogo from "@/components/icons/openai-logo"; import { Switch } from "@/components/ui/switch"; import { - Tooltip, - TooltipContent, - TooltipTrigger, + Tooltip, + TooltipContent, + TooltipTrigger, } from "@/components/ui/tooltip"; import { useDebouncedValue } from "@/lib/debounce"; import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; @@ -17,152 +17,152 @@ import { useUpdateSettings } from "../_hooks/useUpdateSettings"; import { AdvancedOnboarding } from "./advanced"; export function OpenAIOnboarding({ - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - isEmbedding = false, - hasEnvApiKey = false, - alreadyConfigured = false, + setSettings, + sampleDataset, + setSampleDataset, + setIsLoadingModels, + isEmbedding = false, + hasEnvApiKey = false, + alreadyConfigured = false, }: { - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - isEmbedding?: boolean; - hasEnvApiKey?: boolean; - alreadyConfigured?: boolean; + setSettings: Dispatch>; + sampleDataset: boolean; + setSampleDataset: (dataset: boolean) => void; + setIsLoadingModels?: (isLoading: boolean) => void; + isEmbedding?: boolean; + hasEnvApiKey?: boolean; + alreadyConfigured?: boolean; }) { - const [apiKey, setApiKey] = useState(""); - const [getFromEnv, setGetFromEnv] = useState(hasEnvApiKey); - const debouncedApiKey = useDebouncedValue(apiKey, 500); + const [apiKey, setApiKey] = useState(""); + const [getFromEnv, setGetFromEnv] = useState(hasEnvApiKey && !alreadyConfigured); + const debouncedApiKey = useDebouncedValue(apiKey, 500); - // Fetch models from API when API key is provided - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetOpenAIModelsQuery( - getFromEnv - ? { apiKey: "" } - : debouncedApiKey - ? { apiKey: debouncedApiKey } - : undefined, - { enabled: debouncedApiKey !== "" || getFromEnv }, - ); - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); - const handleSampleDatasetChange = (dataset: boolean) => { - setSampleDataset(dataset); - }; + // Fetch models from API when API key is provided + const { + data: modelsData, + isLoading: isLoadingModels, + error: modelsError, + } = useGetOpenAIModelsQuery( + getFromEnv + ? { apiKey: "" } + : debouncedApiKey + ? 
{ apiKey: debouncedApiKey } + : undefined, + { enabled: debouncedApiKey !== "" || getFromEnv || alreadyConfigured }, + ); + // Use custom hook for model selection logic + const { + languageModel, + embeddingModel, + setLanguageModel, + setEmbeddingModel, + languageModels, + embeddingModels, + } = useModelSelection(modelsData, isEmbedding); + const handleSampleDatasetChange = (dataset: boolean) => { + setSampleDataset(dataset); + }; - const handleGetFromEnvChange = (fromEnv: boolean) => { - setGetFromEnv(fromEnv); - if (fromEnv) { - setApiKey(""); - } - setEmbeddingModel?.(""); - setLanguageModel?.(""); - }; + const handleGetFromEnvChange = (fromEnv: boolean) => { + setGetFromEnv(fromEnv); + if (fromEnv) { + setApiKey(""); + } + setEmbeddingModel?.(""); + setLanguageModel?.(""); + }; - useEffect(() => { - setIsLoadingModels?.(isLoadingModels); - }, [isLoadingModels, setIsLoadingModels]); + useEffect(() => { + setIsLoadingModels?.(isLoadingModels); + }, [isLoadingModels, setIsLoadingModels]); - // Update settings when values change - useUpdateSettings( - "openai", - { - apiKey, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); + // Update settings when values change + useUpdateSettings( + "openai", + { + apiKey, + languageModel, + embeddingModel, + }, + setSettings, + isEmbedding, + ); - return ( - <> -
- {!alreadyConfigured && ( - - - -
- -
-
- {!hasEnvApiKey && ( - - OpenAI API key not detected in the environment. - - )} -
-
- )} - {(!getFromEnv || alreadyConfigured) && ( -
- setApiKey(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing key from model provider selection. -

- )} - {isLoadingModels && ( -

- Validating API key... -

- )} - {modelsError && ( -

- Invalid OpenAI API key. Verify or replace the key. -

- )} -
- )} -
- } - languageModels={languageModels} - embeddingModels={embeddingModels} - languageModel={languageModel} - embeddingModel={embeddingModel} - sampleDataset={sampleDataset} - setLanguageModel={setLanguageModel} - setSampleDataset={handleSampleDatasetChange} - setEmbeddingModel={setEmbeddingModel} - /> - - ); + return ( + <> +
+ {!alreadyConfigured && ( + + + +
+ +
+
+ {!hasEnvApiKey && ( + + OpenAI API key not detected in the environment. + + )} +
+
+ )} + {(!getFromEnv || alreadyConfigured) && ( +
+ setApiKey(e.target.value)} + disabled={alreadyConfigured} + /> + {alreadyConfigured && ( +

+ Reusing key from model provider selection. +

+ )} + {isLoadingModels && ( +

+ Validating API key... +

+ )} + {modelsError && ( +

+ Invalid OpenAI API key. Verify or replace the key. +

+ )} +
+ )} +
+ } + languageModels={languageModels} + embeddingModels={embeddingModels} + languageModel={languageModel} + embeddingModel={embeddingModel} + sampleDataset={sampleDataset} + setLanguageModel={setLanguageModel} + setSampleDataset={handleSampleDatasetChange} + setEmbeddingModel={setEmbeddingModel} + /> + + ); } diff --git a/frontend/src/app/chat/components/assistant-message.tsx b/frontend/src/app/chat/components/assistant-message.tsx deleted file mode 100644 index 0f24dd8c..00000000 --- a/frontend/src/app/chat/components/assistant-message.tsx +++ /dev/null @@ -1,97 +0,0 @@ -import { GitBranch } from "lucide-react"; -import { motion } from "motion/react"; -import DogIcon from "@/components/logo/dog-icon"; -import { MarkdownRenderer } from "@/components/markdown-renderer"; -import { cn } from "@/lib/utils"; -import type { FunctionCall } from "../types"; -import { FunctionCalls } from "./function-calls"; -import { Message } from "./message"; - -interface AssistantMessageProps { - content: string; - functionCalls?: FunctionCall[]; - messageIndex?: number; - expandedFunctionCalls: Set; - onToggle: (functionCallId: string) => void; - isStreaming?: boolean; - showForkButton?: boolean; - onFork?: (e: React.MouseEvent) => void; - isCompleted?: boolean; - isInactive?: boolean; - animate?: boolean; - delay?: number; -} - -export function AssistantMessage({ - content, - functionCalls = [], - messageIndex, - expandedFunctionCalls, - onToggle, - isStreaming = false, - showForkButton = false, - onFork, - isCompleted = false, - isInactive = false, - animate = true, - delay = 0.2, -}: AssistantMessageProps) { - return ( - - - - - } - actions={ - showForkButton && onFork ? ( - - ) : undefined - } - > - -
- ' - : 'Thinking') - : content - } - /> -
-
-
- ); -} diff --git a/frontend/src/app/onboarding/components/ibm-onboarding.tsx b/frontend/src/app/onboarding/components/ibm-onboarding.tsx deleted file mode 100644 index 3bb830b6..00000000 --- a/frontend/src/app/onboarding/components/ibm-onboarding.tsx +++ /dev/null @@ -1,210 +0,0 @@ -import type { Dispatch, SetStateAction } from "react"; -import { useEffect, useState } from "react"; -import { LabelInput } from "@/components/label-input"; -import { LabelWrapper } from "@/components/label-wrapper"; -import IBMLogo from "@/components/logo/ibm-logo"; -import { useDebouncedValue } from "@/lib/debounce"; -import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; -import { useGetIBMModelsQuery } from "../../api/queries/useGetModelsQuery"; -import { useModelSelection } from "../hooks/useModelSelection"; -import { useUpdateSettings } from "../hooks/useUpdateSettings"; -import { AdvancedOnboarding } from "./advanced"; -import { ModelSelector } from "./model-selector"; - -export function IBMOnboarding({ - isEmbedding = false, - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - alreadyConfigured = false, -}: { - isEmbedding?: boolean; - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - alreadyConfigured?: boolean; -}) { - const [endpoint, setEndpoint] = useState(alreadyConfigured ? "" : "https://us-south.ml.cloud.ibm.com"); - const [apiKey, setApiKey] = useState(""); - const [projectId, setProjectId] = useState(""); - - const options = [ - { - value: "https://us-south.ml.cloud.ibm.com", - label: "https://us-south.ml.cloud.ibm.com", - default: true, - }, - { - value: "https://eu-de.ml.cloud.ibm.com", - label: "https://eu-de.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://eu-gb.ml.cloud.ibm.com", - label: "https://eu-gb.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://au-syd.ml.cloud.ibm.com", - label: "https://au-syd.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://jp-tok.ml.cloud.ibm.com", - label: "https://jp-tok.ml.cloud.ibm.com", - default: false, - }, - { - value: "https://ca-tor.ml.cloud.ibm.com", - label: "https://ca-tor.ml.cloud.ibm.com", - default: false, - }, - ]; - const debouncedEndpoint = useDebouncedValue(endpoint, 500); - const debouncedApiKey = useDebouncedValue(apiKey, 500); - const debouncedProjectId = useDebouncedValue(projectId, 500); - - // Fetch models from API when all credentials are provided - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetIBMModelsQuery( - { - endpoint: debouncedEndpoint ? debouncedEndpoint : undefined, - apiKey: debouncedApiKey ? debouncedApiKey : undefined, - projectId: debouncedProjectId ? 
debouncedProjectId : undefined, - }, - { enabled: !!debouncedEndpoint || !!debouncedApiKey || !!debouncedProjectId || alreadyConfigured }, - ); - - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); - const handleSampleDatasetChange = (dataset: boolean) => { - setSampleDataset(dataset); - }; - - useEffect(() => { - setIsLoadingModels?.(isLoadingModels); - }, [isLoadingModels, setIsLoadingModels]); - - // Update settings when values change - useUpdateSettings( - "watsonx", - { - endpoint, - apiKey, - projectId, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); - - return ( - <> -
- -
- {} : setEndpoint} - searchPlaceholder="Search endpoint..." - noOptionsPlaceholder={ - alreadyConfigured - ? "https://•••••••••••••••••••••••••••••••••••••••••" - : "No endpoints available" - } - placeholder="Select endpoint..." - /> - {alreadyConfigured && ( -

- Reusing endpoint from model provider selection. -

- )} -
-
- -
- setProjectId(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing project ID from model provider selection. -

- )} -
-
- setApiKey(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing API key from model provider selection. -

- )} -
- {isLoadingModels && ( -

- Validating configuration... -

- )} - {modelsError && ( -

- Connection failed. Check your configuration. -

- )} -
- } - languageModels={languageModels} - embeddingModels={embeddingModels} - languageModel={languageModel} - embeddingModel={embeddingModel} - sampleDataset={sampleDataset} - setLanguageModel={setLanguageModel} - setEmbeddingModel={setEmbeddingModel} - setSampleDataset={handleSampleDatasetChange} - /> - - ); -} diff --git a/frontend/src/app/onboarding/components/ollama-onboarding.tsx b/frontend/src/app/onboarding/components/ollama-onboarding.tsx deleted file mode 100644 index e85366ba..00000000 --- a/frontend/src/app/onboarding/components/ollama-onboarding.tsx +++ /dev/null @@ -1,174 +0,0 @@ -import type { Dispatch, SetStateAction } from "react"; -import { useEffect, useState } from "react"; -import { LabelInput } from "@/components/label-input"; -import { LabelWrapper } from "@/components/label-wrapper"; -import OllamaLogo from "@/components/logo/ollama-logo"; -import { useDebouncedValue } from "@/lib/debounce"; -import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; -import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery"; -import { useModelSelection } from "../hooks/useModelSelection"; -import { useUpdateSettings } from "../hooks/useUpdateSettings"; -import { ModelSelector } from "./model-selector"; - -export function OllamaOnboarding({ - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - isEmbedding = false, - alreadyConfigured = false, -}: { - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - isEmbedding?: boolean; - alreadyConfigured?: boolean; -}) { - const [endpoint, setEndpoint] = useState(alreadyConfigured ? undefined : `http://localhost:11434`); - const [showConnecting, setShowConnecting] = useState(false); - const debouncedEndpoint = useDebouncedValue(endpoint, 500); - - // Fetch models from API when endpoint is provided (debounced) - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetOllamaModelsQuery( - debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined, - { enabled: !!debouncedEndpoint || alreadyConfigured }, - ); - - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); - - // Handle delayed display of connecting state - useEffect(() => { - let timeoutId: NodeJS.Timeout; - - if (debouncedEndpoint && isLoadingModels) { - timeoutId = setTimeout(() => { - setIsLoadingModels?.(true); - setShowConnecting(true); - }, 500); - } else { - setShowConnecting(false); - setIsLoadingModels?.(false); - } - - return () => { - if (timeoutId) { - clearTimeout(timeoutId); - } - }; - }, [debouncedEndpoint, isLoadingModels, setIsLoadingModels]); - - // Update settings when values change - useUpdateSettings( - "ollama", - { - endpoint, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); - - // Check validation state based on models query - const hasConnectionError = debouncedEndpoint && modelsError; - const hasNoModels = - modelsData && - !modelsData.language_models?.length && - !modelsData.embedding_models?.length; - - return ( -
-
- setEndpoint(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing endpoint from model provider selection. -

- )} - {showConnecting && ( -

- Connecting to Ollama server... -

- )} - {hasConnectionError && ( -

- Can't reach Ollama at {debouncedEndpoint}. Update the base URL or - start the server. -

- )} - {hasNoModels && ( -

- No models found. Install embedding and agent models on your Ollama - server. -

- )} -
- {isEmbedding && setEmbeddingModel && ( - - } - noOptionsPlaceholder={ - isLoadingModels - ? "Loading models..." - : "No embedding models detected. Install an embedding model to continue." - } - value={embeddingModel} - onValueChange={setEmbeddingModel} - /> - - )} - {!isEmbedding && setLanguageModel && ( - - } - noOptionsPlaceholder={ - isLoadingModels - ? "Loading models..." - : "No language models detected. Install a language model to continue." - } - value={languageModel} - onValueChange={setLanguageModel} - /> - - )} -
- ); -} diff --git a/frontend/src/app/onboarding/components/openai-onboarding.tsx b/frontend/src/app/onboarding/components/openai-onboarding.tsx deleted file mode 100644 index 47c427a9..00000000 --- a/frontend/src/app/onboarding/components/openai-onboarding.tsx +++ /dev/null @@ -1,168 +0,0 @@ -import type { Dispatch, SetStateAction } from "react"; -import { useEffect, useState } from "react"; -import { LabelInput } from "@/components/label-input"; -import { LabelWrapper } from "@/components/label-wrapper"; -import OpenAILogo from "@/components/logo/openai-logo"; -import { Switch } from "@/components/ui/switch"; -import { - Tooltip, - TooltipContent, - TooltipTrigger, -} from "@/components/ui/tooltip"; -import { useDebouncedValue } from "@/lib/debounce"; -import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation"; -import { useGetOpenAIModelsQuery } from "../../api/queries/useGetModelsQuery"; -import { useModelSelection } from "../hooks/useModelSelection"; -import { useUpdateSettings } from "../hooks/useUpdateSettings"; -import { AdvancedOnboarding } from "./advanced"; - -export function OpenAIOnboarding({ - setSettings, - sampleDataset, - setSampleDataset, - setIsLoadingModels, - isEmbedding = false, - hasEnvApiKey = false, - alreadyConfigured = false, -}: { - setSettings: Dispatch>; - sampleDataset: boolean; - setSampleDataset: (dataset: boolean) => void; - setIsLoadingModels?: (isLoading: boolean) => void; - isEmbedding?: boolean; - hasEnvApiKey?: boolean; - alreadyConfigured?: boolean; -}) { - const [apiKey, setApiKey] = useState(""); - const [getFromEnv, setGetFromEnv] = useState(hasEnvApiKey && !alreadyConfigured); - const debouncedApiKey = useDebouncedValue(apiKey, 500); - - // Fetch models from API when API key is provided - const { - data: modelsData, - isLoading: isLoadingModels, - error: modelsError, - } = useGetOpenAIModelsQuery( - getFromEnv - ? { apiKey: "" } - : debouncedApiKey - ? { apiKey: debouncedApiKey } - : undefined, - { enabled: debouncedApiKey !== "" || getFromEnv || alreadyConfigured }, - ); - // Use custom hook for model selection logic - const { - languageModel, - embeddingModel, - setLanguageModel, - setEmbeddingModel, - languageModels, - embeddingModels, - } = useModelSelection(modelsData, isEmbedding); - const handleSampleDatasetChange = (dataset: boolean) => { - setSampleDataset(dataset); - }; - - const handleGetFromEnvChange = (fromEnv: boolean) => { - setGetFromEnv(fromEnv); - if (fromEnv) { - setApiKey(""); - } - setEmbeddingModel?.(""); - setLanguageModel?.(""); - }; - - useEffect(() => { - setIsLoadingModels?.(isLoadingModels); - }, [isLoadingModels, setIsLoadingModels]); - - // Update settings when values change - useUpdateSettings( - "openai", - { - apiKey, - languageModel, - embeddingModel, - }, - setSettings, - isEmbedding, - ); - - return ( - <> -
- {!alreadyConfigured && ( - - - -
- -
-
- {!hasEnvApiKey && ( - - OpenAI API key not detected in the environment. - - )} -
-
- )} - {(!getFromEnv || alreadyConfigured) && ( -
- setApiKey(e.target.value)} - disabled={alreadyConfigured} - /> - {alreadyConfigured && ( -

- Reusing key from model provider selection. -

- )} - {isLoadingModels && ( -

- Validating API key... -

- )} - {modelsError && ( -

- Invalid OpenAI API key. Verify or replace the key. -

- )} -
- )} -
- } - languageModels={languageModels} - embeddingModels={embeddingModels} - languageModel={languageModel} - embeddingModel={embeddingModel} - sampleDataset={sampleDataset} - setLanguageModel={setLanguageModel} - setSampleDataset={handleSampleDatasetChange} - setEmbeddingModel={setEmbeddingModel} - /> - - ); -}
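Below is a minimal sketch of the query-gating pattern that recurs in the three onboarding components changed above (IBM, Ollama, OpenAI): the models query is enabled once any debounced credential is present, or when the provider is alreadyConfigured and its credentials are reused upstream. This is illustrative only, not code from this diff; it assumes the useGet*ModelsQuery hooks wrap TanStack Query's useQuery, and fetchModels, Credentials, and useModelsQuery are hypothetical names.

import { useQuery } from "@tanstack/react-query";

interface Credentials {
  endpoint?: string;
  apiKey?: string;
  projectId?: string;
}

// Hypothetical fetcher standing in for the project's models endpoint.
declare function fetchModels(creds: Credentials): Promise<{
  language_models: string[];
  embedding_models: string[];
}>;

export function useModelsQuery(creds: Credentials, alreadyConfigured: boolean) {
  return useQuery({
    queryKey: ["provider-models", creds],
    queryFn: () => fetchModels(creds),
    // Mirrors the gating in the diff: run as soon as any (debounced) credential
    // is filled in, or immediately when the provider was already configured
    // and its credentials are being reused.
    enabled:
      Boolean(creds.endpoint) ||
      Boolean(creds.apiKey) ||
      Boolean(creds.projectId) ||
      alreadyConfigured,
  });
}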