From 141a7da3390d1667f007e5f65bbd0a2fedb4fced Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Wed, 19 Nov 2025 16:18:26 -0300 Subject: [PATCH 1/4] update template when provider updates --- src/services/flows_service.py | 59 +++++++++++++++++++++++++++++------ 1 file changed, 50 insertions(+), 9 deletions(-) diff --git a/src/services/flows_service.py b/src/services/flows_service.py index d527baa0..16f3f5e2 100644 --- a/src/services/flows_service.py +++ b/src/services/flows_service.py @@ -816,7 +816,7 @@ class FlowsService: if not DISABLE_INGEST_WITH_LANGFLOW and embedding_model: embedding_node, _ = self._find_node_in_flow(flow_data, display_name=OPENAI_EMBEDDING_COMPONENT_DISPLAY_NAME) if embedding_node: - if self._update_component_fields( + if await self._update_component_fields( embedding_node, provider, embedding_model, endpoint ): updates_made.append(f"embedding model: {embedding_model}") @@ -825,14 +825,14 @@ class FlowsService: if llm_model: llm_node, _ = self._find_node_in_flow(flow_data, display_name=OPENAI_LLM_COMPONENT_DISPLAY_NAME) if llm_node: - if self._update_component_fields( + if await self._update_component_fields( llm_node, provider, llm_model, endpoint ): updates_made.append(f"llm model: {llm_model}") # Update LLM component (if exists in this flow) agent_node, _ = self._find_node_in_flow(flow_data, display_name=AGENT_COMPONENT_DISPLAY_NAME) if agent_node: - if self._update_component_fields( + if await self._update_component_fields( agent_node, provider, llm_model, endpoint ): updates_made.append(f"agent model: {llm_model}") @@ -865,7 +865,7 @@ class FlowsService: "flow_id": flow_id, } - def _update_component_fields( + async def _update_component_fields( self, component_node, provider: str, @@ -881,13 +881,54 @@ class FlowsService: updated = False provider_name = "IBM watsonx.ai" if provider == "watsonx" else "Ollama" if provider == "ollama" else "Anthropic" if provider == "anthropic" else "OpenAI" - if "agent_llm" in template: - 
template["agent_llm"]["value"] = provider_name - updated = True - if "provider" in template: - template["provider"]["value"] = provider_name + field_name = "provider" if "provider" in template else "agent_llm" + + # Update provider field and call custom_component/update endpoint + if field_name in template: + # First, update the provider value + template[field_name]["value"] = provider_name + + # Call custom_component/update endpoint to get updated template + # Only call if code field exists (custom components should have code) + if "code" in template and "value" in template["code"]: + code_value = template["code"]["value"] + field_value = provider_name + + try: + update_payload = { + "code": code_value, + "template": template, + "field": field_name, + "field_value": field_value, + "tool_mode": False, + } + + response = await clients.langflow_request( + "POST", "/api/v1/custom_component/update", json=update_payload + ) + + if response.status_code == 200: + response_data = response.json() + # Update template with the new template from the response body + if "template" in response_data: + # Update the template in component_node + component_node["data"]["node"]["template"] = response_data["template"] + # Update local template reference + template = response_data["template"] + logger.info(f"Successfully updated template via custom_component/update for provider: {provider_name}") + else: + logger.warning("Response from custom_component/update missing 'template' field") + else: + logger.warning( + f"Failed to call custom_component/update: HTTP {response.status_code} - {response.text}" + ) + except Exception as e: + logger.error(f"Error calling custom_component/update: {str(e)}") + # Continue with manual updates even if API call fails + updated = True + # Update model_name field (common to all providers) if "model" in template: From e9d2e053193e58816d426427d8e9a463d98cd627 Mon Sep 17 00:00:00 2001 From: Lucas Oliveira Date: Wed, 19 Nov 2025 16:18:49 -0300 Subject: [PATCH 2/4] 
updated flows removing ollama base url global variable --- flows/components/ollama_embedding.json | 4 ++-- flows/components/ollama_llm.json | 4 ++-- flows/components/ollama_llm_text.json | 4 ++-- flows/openrag_nudges.json | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/flows/components/ollama_embedding.json b/flows/components/ollama_embedding.json index e8f4789c..2cc1a57f 100644 --- a/flows/components/ollama_embedding.json +++ b/flows/components/ollama_embedding.json @@ -7,14 +7,14 @@ "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, - "load_from_db": true, + "load_from_db": false, "list": false, "list_add_label": "Add More", "required": true, "placeholder": "", "show": true, "name": "base_url", - "value": "OLLAMA_BASE_URL", + "value": "", "display_name": "Ollama Base URL", "advanced": false, "input_types": ["Message"], diff --git a/flows/components/ollama_llm.json b/flows/components/ollama_llm.json index 9dc83439..7505957e 100644 --- a/flows/components/ollama_llm.json +++ b/flows/components/ollama_llm.json @@ -7,14 +7,14 @@ "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, - "load_from_db": true, + "load_from_db": false, "list": false, "list_add_label": "Add More", "required": false, "placeholder": "", "show": true, "name": "base_url", - "value": "OLLAMA_BASE_URL", + "value": "", "display_name": "Base URL", "advanced": false, "input_types": ["Message"], diff --git a/flows/components/ollama_llm_text.json b/flows/components/ollama_llm_text.json index eaf40f2a..8edd9f28 100644 --- a/flows/components/ollama_llm_text.json +++ b/flows/components/ollama_llm_text.json @@ -7,14 +7,14 @@ "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, - "load_from_db": true, + "load_from_db": false, "list": false, "list_add_label": "Add More", "required": false, "placeholder": "", "show": true, "name": "base_url", - "value": "OLLAMA_BASE_URL", + "value": "", "display_name": "Base URL", "advanced": false, 
"input_types": ["Message"], diff --git a/flows/openrag_nudges.json b/flows/openrag_nudges.json index a1fa9bee..5bbfef8d 100644 --- a/flows/openrag_nudges.json +++ b/flows/openrag_nudges.json @@ -2424,7 +2424,7 @@ ], "list": false, "list_add_label": "Add More", - "load_from_db": true, + "load_from_db": false, "name": "ollama_base_url", "placeholder": "", "real_time_refresh": true, @@ -2435,7 +2435,7 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "OLLAMA_BASE_URL" + "value": "" }, "project_id": { "_input_type": "StrInput", From b3b891a40f86b6571f1da9bb0ead10e069113e6d Mon Sep 17 00:00:00 2001 From: Edwin Jose Date: Wed, 19 Nov 2025 14:38:13 -0500 Subject: [PATCH 3/4] Add support for Anthropic, WatsonX, and Ollama APIs Introduced new environment variables for Anthropic, WatsonX, and Ollama API integration in both backend and Langflow services within docker-compose files. Also enabled backend service to build from local Dockerfile instead of using only the image. 
--- docker-compose-cpu.yml | 10 ++++++++++ docker-compose.yml | 18 ++++++++++++++---- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/docker-compose-cpu.yml b/docker-compose-cpu.yml index 2b72db64..cfe082f1 100644 --- a/docker-compose-cpu.yml +++ b/docker-compose-cpu.yml @@ -63,6 +63,11 @@ services: - OPENSEARCH_USERNAME=admin - OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD} - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - WATSONX_API_KEY=${WATSONX_API_KEY} + - WATSONX_ENDPOINT=${WATSONX_ENDPOINT} + - WATSONX_PROJECT_ID=${WATSONX_PROJECT_ID} + - OLLAMA_ENDPOINT=${OLLAMA_ENDPOINT} - GOOGLE_OAUTH_CLIENT_ID=${GOOGLE_OAUTH_CLIENT_ID} - GOOGLE_OAUTH_CLIENT_SECRET=${GOOGLE_OAUTH_CLIENT_SECRET} - MICROSOFT_GRAPH_OAUTH_CLIENT_ID=${MICROSOFT_GRAPH_OAUTH_CLIENT_ID} @@ -101,6 +106,11 @@ services: environment: - LANGFLOW_DEACTIVATE_TRACING=true - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - WATSONX_API_KEY=${WATSONX_API_KEY} + - WATSONX_ENDPOINT=${WATSONX_ENDPOINT} + - WATSONX_PROJECT_ID=${WATSONX_PROJECT_ID} + - OLLAMA_BASE_URL=${OLLAMA_ENDPOINT} - LANGFLOW_LOAD_FLOWS_PATH=/app/flows - LANGFLOW_SECRET_KEY=${LANGFLOW_SECRET_KEY} - JWT=None diff --git a/docker-compose.yml b/docker-compose.yml index f976c261..15e4f2c3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -40,9 +40,9 @@ services: openrag-backend: image: phact/openrag-backend:${OPENRAG_VERSION:-latest} - # build: - # context: . - # dockerfile: Dockerfile.backend + build: + context: . 
+ dockerfile: Dockerfile.backend container_name: openrag-backend depends_on: - langflow @@ -62,6 +62,11 @@ services: - OPENSEARCH_USERNAME=admin - OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD} - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - WATSONX_API_KEY=${WATSONX_API_KEY} + - WATSONX_ENDPOINT=${WATSONX_ENDPOINT} + - WATSONX_PROJECT_ID=${WATSONX_PROJECT_ID} + - OLLAMA_ENDPOINT=${OLLAMA_ENDPOINT} - NVIDIA_DRIVER_CAPABILITIES=compute,utility - NVIDIA_VISIBLE_DEVICES=all - GOOGLE_OAUTH_CLIENT_ID=${GOOGLE_OAUTH_CLIENT_ID} @@ -97,12 +102,17 @@ services: # build: # context: . # dockerfile: Dockerfile.langflow - container_name: langflow + # container_name: langflow ports: - "7860:7860" environment: - LANGFLOW_DEACTIVATE_TRACING=true - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - WATSONX_API_KEY=${WATSONX_API_KEY} + - WATSONX_ENDPOINT=${WATSONX_ENDPOINT} + - WATSONX_PROJECT_ID=${WATSONX_PROJECT_ID} + - OLLAMA_BASE_URL=${OLLAMA_ENDPOINT} - LANGFLOW_LOAD_FLOWS_PATH=/app/flows - LANGFLOW_SECRET_KEY=${LANGFLOW_SECRET_KEY} - JWT=None From f7d4cd1d9ee6d78fdfd5eaaa916b82e87fc0e51e Mon Sep 17 00:00:00 2001 From: Edwin Jose Date: Wed, 19 Nov 2025 14:42:52 -0500 Subject: [PATCH 4/4] Update docker-compose.yml --- docker-compose.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 15e4f2c3..ab23c050 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -40,9 +40,9 @@ services: openrag-backend: image: phact/openrag-backend:${OPENRAG_VERSION:-latest} - build: - context: . - dockerfile: Dockerfile.backend + # build: + # context: . + # dockerfile: Dockerfile.backend container_name: openrag-backend depends_on: - langflow @@ -102,7 +102,7 @@ services: # build: # context: . # dockerfile: Dockerfile.langflow - # container_name: langflow + container_name: langflow ports: - "7860:7860" environment: