Move the endpoint transformation into the models service and the onboarding backend instead of the onboarding screen

This commit is contained in:
Lucas Oliveira 2025-09-30 15:50:47 -03:00
parent 622eb422b2
commit d6b100459f
3 changed files with 140 additions and 138 deletions

View file

@ -1,5 +1,4 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from "react";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { LabelInput } from "@/components/label-input"; import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper"; import { LabelWrapper } from "@/components/label-wrapper";
import OllamaLogo from "@/components/logo/ollama-logo"; import OllamaLogo from "@/components/logo/ollama-logo";
@ -20,8 +19,7 @@ export function OllamaOnboarding({
sampleDataset: boolean; sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void; setSampleDataset: (dataset: boolean) => void;
}) { }) {
const { data: settings } = useGetSettingsQuery(); const [endpoint, setEndpoint] = useState(`http://{localhost}:11434`);
const [endpoint, setEndpoint] = useState(`http://${settings?.localhost_url ?? "localhost"}:11434`);
const [showConnecting, setShowConnecting] = useState(false); const [showConnecting, setShowConnecting] = useState(false);
const debouncedEndpoint = useDebouncedValue(endpoint, 500); const debouncedEndpoint = useDebouncedValue(endpoint, 500);

View file

@ -1,6 +1,7 @@
import json import json
import platform import platform
from starlette.responses import JSONResponse from starlette.responses import JSONResponse
from utils.container_utils import transform_localhost_url
from utils.logging_config import get_logger from utils.logging_config import get_logger
from config.settings import ( from config.settings import (
LANGFLOW_URL, LANGFLOW_URL,
@ -441,6 +442,8 @@ async def onboarding(request, flows_service):
{"error": "endpoint must be a non-empty string"}, status_code=400 {"error": "endpoint must be a non-empty string"}, status_code=400
) )
current_config.provider.endpoint = body["endpoint"].strip() current_config.provider.endpoint = body["endpoint"].strip()
if "model_provider" in body and body["model_provider"].strip() == "ollama":
current_config.provider.endpoint = transform_localhost_url(body["endpoint"].strip())
config_updated = True config_updated = True
if "project_id" in body: if "project_id" in body:

View file

@ -1,5 +1,6 @@
import httpx import httpx
from typing import Dict, List from typing import Dict, List
from utils.container_utils import transform_localhost_url
from utils.logging_config import get_logger from utils.logging_config import get_logger
logger = get_logger(__name__) logger = get_logger(__name__)
@ -95,7 +96,7 @@ class ModelsService:
"""Fetch available models from Ollama API with tool calling capabilities for language models""" """Fetch available models from Ollama API with tool calling capabilities for language models"""
try: try:
# Use provided endpoint or default # Use provided endpoint or default
ollama_url = endpoint ollama_url = transform_localhost_url(endpoint)
# API endpoints # API endpoints
tags_url = f"{ollama_url}/api/tags" tags_url = f"{ollama_url}/api/tags"