diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx
index 7f1ca858..91575423 100644
--- a/frontend/src/app/settings/page.tsx
+++ b/frontend/src/app/settings/page.tsx
@@ -35,10 +35,11 @@ import { Textarea } from "@/components/ui/textarea";
 import { useAuth } from "@/contexts/auth-context";
 import { useTask } from "@/contexts/task-context";
 import { useDebounce } from "@/lib/debounce";
+import { DEFAULT_AGENT_SETTINGS, DEFAULT_KNOWLEDGE_SETTINGS, UI_CONSTANTS } from "@/lib/constants";
 import { getFallbackModels, type ModelProvider } from "./helpers/model-helpers";
 import { ModelSelectItems } from "./helpers/model-select-item";
 
-const MAX_SYSTEM_PROMPT_CHARS = 2000;
+const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS;
 
 interface GoogleDriveFile {
   id: string;
@@ -529,8 +530,17 @@ function KnowledgeSourcesPage() {
     fetch(`/api/reset-flow/retrieval`, {
       method: "POST",
     })
-      .then((response) => response.json())
+      .then((response) => {
+        if (response.ok) {
+          return response.json();
+        }
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      })
       .then(() => {
+        // Only reset form values if the API call was successful
+        setSystemPrompt(DEFAULT_AGENT_SETTINGS.system_prompt);
+        // Trigger model update to default model
+        handleModelChange(DEFAULT_AGENT_SETTINGS.llm_model);
         closeDialog(); // Close after successful completion
       })
       .catch((error) => {
@@ -543,8 +553,17 @@ function KnowledgeSourcesPage() {
     fetch(`/api/reset-flow/ingest`, {
       method: "POST",
     })
-      .then((response) => response.json())
+      .then((response) => {
+        if (response.ok) {
+          return response.json();
+        }
+        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
+      })
       .then(() => {
+        // Only reset form values if the API call was successful
+        setChunkSize(DEFAULT_KNOWLEDGE_SETTINGS.chunk_size);
+        setChunkOverlap(DEFAULT_KNOWLEDGE_SETTINGS.chunk_overlap);
+        setProcessingMode(DEFAULT_KNOWLEDGE_SETTINGS.processing_mode);
         closeDialog(); // Close after successful completion
       })
       .catch((error) => {
diff --git a/frontend/src/lib/constants.ts b/frontend/src/lib/constants.ts
new file mode 100644
index 00000000..9c6ea7b0
--- /dev/null
+++ b/frontend/src/lib/constants.ts
@@ -0,0 +1,23 @@
+/**
+ * Default agent settings
+ */
+export const DEFAULT_AGENT_SETTINGS = {
+  llm_model: "gpt-4o-mini",
+  system_prompt: "You are a helpful assistant that can use tools to answer questions and perform tasks."
+} as const;
+
+/**
+ * Default knowledge/ingest settings
+ */
+export const DEFAULT_KNOWLEDGE_SETTINGS = {
+  chunk_size: 1000,
+  chunk_overlap: 200,
+  processing_mode: "standard"
+} as const;
+
+/**
+ * UI Constants
+ */
+export const UI_CONSTANTS = {
+  MAX_SYSTEM_PROMPT_CHARS: 2000,
+} as const;
\ No newline at end of file
diff --git a/src/services/flows_service.py b/src/services/flows_service.py
index 8993025a..0d7a7bc8 100644
--- a/src/services/flows_service.py
+++ b/src/services/flows_service.py
@@ -478,16 +478,14 @@ class FlowsService:
         if not LANGFLOW_CHAT_FLOW_ID:
             raise ValueError("LANGFLOW_CHAT_FLOW_ID is not configured")
         await self._update_flow_field(LANGFLOW_CHAT_FLOW_ID, "model_name", model_name,
-                                      node_display_name="Language Model",
-                                      node_id="LanguageModelComponent-0YME7")
+                                      node_display_name="Language Model")
 
     async def update_chat_flow_system_prompt(self, system_prompt: str):
         """Helper function to update the system prompt in the chat flow"""
         if not LANGFLOW_CHAT_FLOW_ID:
             raise ValueError("LANGFLOW_CHAT_FLOW_ID is not configured")
-        await self._update_flow_field(LANGFLOW_CHAT_FLOW_ID, "system_message", system_prompt,
-                                      node_display_name="Language Model",
-                                      node_id="LanguageModelComponent-0YME7")
+        await self._update_flow_field(LANGFLOW_CHAT_FLOW_ID, "system_prompt", system_prompt,
+                                      node_display_name="Agent")
 
     async def update_flow_docling_preset(self, preset: str, preset_config: dict):
         """Helper function to update docling preset in the ingest flow"""
@@ -503,24 +501,21 @@ class FlowsService:
         if not LANGFLOW_INGEST_FLOW_ID:
             raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
         await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "chunk_size", chunk_size,
-                                      node_display_name="Split Text",
-                                      node_id="SplitText-3ZI5B")
+                                      node_display_name="Split Text")
 
     async def update_ingest_flow_chunk_overlap(self, chunk_overlap: int):
         """Helper function to update chunk overlap in the ingest flow"""
         if not LANGFLOW_INGEST_FLOW_ID:
             raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
         await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "chunk_overlap", chunk_overlap,
-                                      node_display_name="Split Text",
-                                      node_id="SplitText-3ZI5B")
+                                      node_display_name="Split Text")
 
     async def update_ingest_flow_embedding_model(self, embedding_model: str):
         """Helper function to update embedding model in the ingest flow"""
         if not LANGFLOW_INGEST_FLOW_ID:
             raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
         await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "model", embedding_model,
-                                      node_display_name="Embedding Model",
-                                      node_id="EmbeddingModel-eZ6bT")
+                                      node_display_name="Embedding Model")
 
     def _replace_node_in_flow(self, flow_data, old_id, new_node):
         """Replace a node in the flow data"""
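
Note on the flows_service.py change: dropping the hard-coded node_id arguments means _update_flow_field must now locate the target node by node_display_name alone. The helper's body is not part of this diff, so the following is only a minimal sketch of what that lookup could look like, assuming a Langflow-style flow payload (nodes under flow_data["data"]["nodes"], each with data.node.display_name and a template of fields); the function names _find_node_by_display_name and _set_node_field and the payload shape are assumptions, not code from this repo.

from typing import Any, Optional


def _find_node_by_display_name(flow_data: dict[str, Any], display_name: str) -> Optional[dict]:
    """Return the node whose display name matches, or None if absent.

    Assumed payload shape: flow_data["data"]["nodes"] is a list of node
    dicts, each carrying node["data"]["node"]["display_name"].
    """
    matches = [
        node
        for node in flow_data.get("data", {}).get("nodes", [])
        if node.get("data", {}).get("node", {}).get("display_name") == display_name
    ]
    if len(matches) > 1:
        # Display names are only safe lookup keys while they are unique in
        # the flow; fail loudly rather than silently updating the wrong node.
        raise ValueError(f"Multiple nodes named {display_name!r} in flow")
    return matches[0] if matches else None


def _set_node_field(node: dict[str, Any], field_name: str, value: Any) -> None:
    """Write a value into the node's template, e.g. template["chunk_size"]["value"]."""
    template = node["data"]["node"]["template"]
    if field_name not in template:
        raise KeyError(f"Node has no field {field_name!r}")
    template[field_name]["value"] = value

The trade-off this diff makes: auto-generated node IDs such as "SplitText-3ZI5B" change whenever a flow is rebuilt or re-imported, so keying on them breaks silently across flow updates; display names survive re-import, at the cost that they must stay unique and stable within each flow.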