From 6a11aec1959fe7ecde70bb0345e1ca6f35affab2 Mon Sep 17 00:00:00 2001
From: Mike Fortman
Date: Thu, 18 Sep 2025 17:03:51 -0500
Subject: [PATCH] update model selectors

---
 .../app/settings/helpers/model-helpers.tsx    | 87 +++++++++++++++++++
 .../settings/helpers/model-select-item.tsx    | 36 ++++++++
 frontend/src/app/settings/page.tsx            | 73 ++++++++++------
 3 files changed, 170 insertions(+), 26 deletions(-)
 create mode 100644 frontend/src/app/settings/helpers/model-helpers.tsx
 create mode 100644 frontend/src/app/settings/helpers/model-select-item.tsx

diff --git a/frontend/src/app/settings/helpers/model-helpers.tsx b/frontend/src/app/settings/helpers/model-helpers.tsx
new file mode 100644
index 00000000..093b8862
--- /dev/null
+++ b/frontend/src/app/settings/helpers/model-helpers.tsx
@@ -0,0 +1,87 @@
+import OpenAILogo from "@/components/logo/openai-logo";
+import OllamaLogo from "@/components/logo/ollama-logo";
+import IBMLogo from "@/components/logo/ibm-logo";
+
+export type ModelProvider = 'openai' | 'ollama' | 'ibm';
+
+export interface ModelOption {
+  value: string;
+  label: string;
+}
+
+// Helper function to get model logo based on provider or model name
+export function getModelLogo(modelValue: string, provider?: ModelProvider) {
+  // First check by provider
+  if (provider === 'openai') {
+    return <OpenAILogo />;
+  } else if (provider === 'ollama') {
+    return <OllamaLogo />;
+  } else if (provider === 'ibm') {
+    return <IBMLogo />;
+  }
+
+  // Fallback to model name analysis
+  if (modelValue.includes('gpt') || modelValue.includes('text-embedding')) {
+    return <OpenAILogo />;
+  } else if (modelValue.includes('llama') || modelValue.includes('ollama')) {
+    return <OllamaLogo />;
+  } else if (modelValue.includes('granite') || modelValue.includes('slate') || modelValue.includes('ibm')) {
+    return <IBMLogo />;
+  }
+
+  return <OpenAILogo />; // Default to OpenAI logo
+}
+
+// Helper function to get fallback models by provider
+export function getFallbackModels(provider: ModelProvider) {
+  switch (provider) {
+    case 'openai':
+      return {
+        language: [
+          { value: 'gpt-4', label: 'GPT-4' },
+          { value: 'gpt-4-turbo', label: 'GPT-4 Turbo' },
+          { value: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo' },
+        ],
+        embedding: [
+          { value: 'text-embedding-ada-002', label: 'text-embedding-ada-002' },
+          { value: 'text-embedding-3-small', label: 'text-embedding-3-small' },
+          { value: 'text-embedding-3-large', label: 'text-embedding-3-large' },
+        ],
+      };
+    case 'ollama':
+      return {
+        language: [
+          { value: 'llama2', label: 'Llama 2' },
+          { value: 'llama2:13b', label: 'Llama 2 13B' },
+          { value: 'codellama', label: 'Code Llama' },
+        ],
+        embedding: [
+          { value: 'mxbai-embed-large', label: 'MxBai Embed Large' },
+          { value: 'nomic-embed-text', label: 'Nomic Embed Text' },
+        ],
+      };
+    case 'ibm':
+      return {
+        language: [
+          { value: 'meta-llama/llama-3-1-70b-instruct', label: 'Llama 3.1 70B Instruct' },
+          { value: 'ibm/granite-13b-chat-v2', label: 'Granite 13B Chat v2' },
+        ],
+        embedding: [
+          { value: 'ibm/slate-125m-english-rtrvr', label: 'Slate 125M English Retriever' },
+        ],
+      };
+    default:
+      return {
+        language: [
+          { value: 'gpt-4', label: 'GPT-4' },
+          { value: 'gpt-4-turbo', label: 'GPT-4 Turbo' },
+          { value: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo' },
+        ],
+        embedding: [
+          { value: 'text-embedding-ada-002', label: 'text-embedding-ada-002' },
+          { value: 'text-embedding-3-small', label: 'text-embedding-3-small' },
+          { value: 'text-embedding-3-large', label: 'text-embedding-3-large' },
+        ],
+      };
+  }
+}
\ No newline at end of file
diff --git a/frontend/src/app/settings/helpers/model-select-item.tsx b/frontend/src/app/settings/helpers/model-select-item.tsx
new file mode 100644
index 00000000..18240edb
--- /dev/null
+++ b/frontend/src/app/settings/helpers/model-select-item.tsx
@@ -0,0 +1,36 @@
+import { SelectItem } from "@/components/ui/select";
+import { getModelLogo, type ModelProvider, type ModelOption } from "./model-helpers";
+
+interface ModelSelectItemProps {
+  model: ModelOption;
+  provider?: ModelProvider;
+}
+
+export function ModelSelectItem({ model, provider }: ModelSelectItemProps) {
+  return (
+    <SelectItem value={model.value}>
+      <div>
+        {getModelLogo(model.value, provider)}
+        {model.label}
+      </div>
+    </SelectItem>
+  );
+}
+
+interface ModelSelectItemsProps {
+  models?: ModelOption[];
+  fallbackModels: ModelOption[];
+  provider: ModelProvider;
+}
+
+export function ModelSelectItems({ models, fallbackModels, provider }: ModelSelectItemsProps) {
+  const modelsToRender = models || fallbackModels;
+
+  return (
+    <>
+      {modelsToRender.map((model) => (
+        <ModelSelectItem key={model.value} model={model} provider={provider} />
+      ))}
+    </>
+  );
+}
\ No newline at end of file
diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx
index 4a0f139c..0a858f6e 100644
--- a/frontend/src/app/settings/page.tsx
+++ b/frontend/src/app/settings/page.tsx
@@ -5,7 +5,10 @@ import { useSearchParams } from "next/navigation";
 import { Suspense, useCallback, useEffect, useState } from "react";
 import { useUpdateFlowSettingMutation } from "@/app/api/mutations/useUpdateFlowSettingMutation";
 import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
+import { useGetOpenAIModelsQuery, useGetOllamaModelsQuery, useGetIBMModelsQuery } from "@/app/api/queries/useGetModelsQuery";
 import { ConfirmationDialog } from "@/components/confirmation-dialog";
+import { ModelSelectItems } from "./helpers/model-select-item";
+import { getFallbackModels, type ModelProvider } from "./helpers/model-helpers";
 import { ProtectedRoute } from "@/components/protected-route";
 import { Badge } from "@/components/ui/badge";
 import { Button } from "@/components/ui/button";
@@ -22,7 +25,6 @@ import { Label } from "@/components/ui/label";
 import {
   Select,
   SelectContent,
-  SelectItem,
   SelectTrigger,
   SelectValue,
 } from "@/components/ui/select";
@@ -104,6 +106,37 @@ function KnowledgeSourcesPage() {
     enabled: isAuthenticated,
   });
 
+  // Get the current provider from settings
+  const currentProvider = (settings.provider?.model_provider || 'openai') as ModelProvider;
+
+  // Fetch available models based on provider
+  const { data: openaiModelsData } = useGetOpenAIModelsQuery({
+    enabled: isAuthenticated && currentProvider === 'openai',
+  });
+
+  const { data: ollamaModelsData } = useGetOllamaModelsQuery(
+    undefined, // No params for now, could be extended later
+    {
+      enabled: isAuthenticated && currentProvider === 'ollama',
+    }
+  );
+
+  const { data: ibmModelsData } = useGetIBMModelsQuery(
+    undefined, // No params for now, could be extended later
+    {
+      enabled: isAuthenticated && currentProvider === 'ibm',
+    }
+  );
+
+  // Select the appropriate models data based on provider
+  const modelsData = currentProvider === 'openai'
+    ? openaiModelsData
+    : currentProvider === 'ollama'
+      ? ollamaModelsData
+      : currentProvider === 'ibm'
+        ? ibmModelsData
+        : openaiModelsData; // fallback to openai
+
   // Mutations
   const updateFlowSettingMutation = useUpdateFlowSettingMutation({
     onSuccess: () => {
@@ -171,6 +204,7 @@ function KnowledgeSourcesPage() {
     debouncedUpdate({ chunk_overlap: numValue });
   };
 
+  // Helper function to get connector icon
   const getConnectorIcon = useCallback((iconName: string) => {
     const iconMap: { [key: string]: React.ReactElement } = {
@@ -559,21 +593,18 @@ function KnowledgeSourcesPage() {
                     Language Model
@@ -685,7 +716,7 @@ function KnowledgeSourcesPage() {
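
The bodies of the last two hunks are not fully shown above, so the following is only a minimal sketch of how the new ModelSelectItems helper is meant to be consumed from the settings page. The wrapper name LanguageModelSelect, the models prop shape, and the placeholder text are illustrative assumptions, not taken from the patch; only ModelSelectItems, getFallbackModels, ModelProvider, ModelOption, and the "@/components/ui/select" primitives come from the changes above, and the relative import paths assume the file sits next to page.tsx in frontend/src/app/settings.

import { Select, SelectContent, SelectTrigger, SelectValue } from "@/components/ui/select";
import { ModelSelectItems } from "./helpers/model-select-item";
import { getFallbackModels, type ModelOption, type ModelProvider } from "./helpers/model-helpers";

// Hypothetical wrapper, not part of the patch: wires ModelSelectItems into a Select.
// `models` stands in for whatever the provider-specific models query returns; when it
// is undefined, ModelSelectItems falls back to the hardcoded list for the provider.
export function LanguageModelSelect({
  provider,
  models,
}: {
  provider: ModelProvider;
  models?: ModelOption[];
}) {
  return (
    <Select>
      <SelectTrigger>
        <SelectValue placeholder="Select a language model" />
      </SelectTrigger>
      <SelectContent>
        <ModelSelectItems
          models={models}
          fallbackModels={getFallbackModels(provider).language}
          provider={provider}
        />
      </SelectContent>
    </Select>
  );
}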