From 02ebe8270d0d2f6bd5318349555b4d5593e6f687 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Tue, 23 Sep 2025 14:13:02 -0400
Subject: [PATCH 01/13] initial-content
---
README.md | 2 +-
docs/docs/reference/troubleshooting.mdx | 24 ------
docs/docs/support/troubleshoot.mdx | 107 ++++++++++++++++++++++++
docs/sidebars.js | 6 +-
4 files changed, 111 insertions(+), 28 deletions(-)
delete mode 100644 docs/docs/reference/troubleshooting.mdx
create mode 100644 docs/docs/support/troubleshoot.mdx
diff --git a/README.md b/README.md
index d79011a0..df1d6451 100644
--- a/README.md
+++ b/README.md
@@ -138,7 +138,7 @@ podman machine start
### Common Issues
-See common issues and fixes: [docs/reference/troubleshooting.mdx](docs/docs/reference/troubleshooting.mdx)
+See common issues and fixes: [docs/support/troubleshoot.mdx](docs/docs/support/troubleshoot.mdx)
diff --git a/docs/docs/reference/troubleshooting.mdx b/docs/docs/reference/troubleshooting.mdx
deleted file mode 100644
index c1893ef5..00000000
--- a/docs/docs/reference/troubleshooting.mdx
+++ /dev/null
@@ -1,24 +0,0 @@
----
-title: Troubleshooting
-slug: /reference/troubleshooting
----
-
-# Troubleshooting
-
-## Podman on macOS
-
-If using Podman on macOS, you may need to increase VM memory:
-
-```bash
-podman machine stop
-podman machine rm
-podman machine init --memory 8192 # 8 GB example
-podman machine start
-```
-
-## Common Issues
-
-1. OpenSearch fails to start: Check that `OPENSEARCH_PASSWORD` is set and meets requirements
-2. Langflow connection issues: Verify `LANGFLOW_SUPERUSER` credentials are correct
-3. Out of memory errors: Increase Docker memory allocation or use CPU-only mode
-4. Port conflicts: Ensure ports 3000, 7860, 8000, 9200, 5601 are available
diff --git a/docs/docs/support/troubleshoot.mdx b/docs/docs/support/troubleshoot.mdx
new file mode 100644
index 00000000..bf18e2f7
--- /dev/null
+++ b/docs/docs/support/troubleshoot.mdx
@@ -0,0 +1,107 @@
+---
+title: Troubleshoot
+slug: /support/troubleshoot
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.
+
+## OpenSearch fails to start
+
+Check that `OPENSEARCH_PASSWORD` is set and meets requirements.
+The password must contain at least 8 characters, including at least one uppercase letter, one lowercase letter, one digit, and one special character.
+
+## Langflow connection issues
+
+Verify the `LANGFLOW_SUPERUSER` credentials are correct.
+
+## Memory errors
+
+### Container out of memory errors
+
+Increase Docker memory allocation or use [docker-compose-cpu.yml](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml) to deploy OpenRAG.
+
+### Podman on macOS memory issues
+
+If you're using Podman on macOS, you may need to increase VM memory on your Podman machine.
+This example increases the machine size to 8 GB of RAM, which should be sufficient to run OpenRAG.
+ ```bash
+ podman machine stop
+ podman machine rm
+ podman machine init --memory 8192 # 8 GB example
+ podman machine start
+ ```
+
+## Port conflicts
+
+Ensure ports 3000, 7860, 8000, 9200, 5601 are available.
+
+## Langflow container already exists
+
+If you are running other versions of Langflow containers on your machine, Docker or Podman may report that a Langflow container already exists and refuse to start a new one.
+
+Remove just the problem container, or clean up all containers and start fresh.
+
+To reset your local containers and pull new images, do the following:
+
+1. Stop your containers and completely remove them.
+
+
+
+
+ ```bash
+ # Stop all running containers
+ docker stop $(docker ps -q)
+
+ # Remove all containers (including stopped ones)
+ docker rm --force $(docker ps -aq)
+
+ # Remove all images
+ docker rmi --force $(docker images -q)
+
+ # Remove all volumes
+ docker volume prune --force
+
+ # Remove all networks (except default)
+ docker network prune --force
+
+ # Clean up any leftover data
+ docker system prune --all --force --volumes
+ ```
+
+
+
+
+ ```bash
+ # Stop all running containers
+ podman stop --all
+
+ # Remove all containers (including stopped ones)
+ podman rm --all --force
+
+ # Remove all images
+ podman rmi --all --force
+
+ # Remove all volumes
+ podman volume prune --force
+
+ # Remove all networks (except default)
+ podman network prune --force
+
+ # Clean up any leftover data
+ podman system prune --all --force --volumes
+ ```
+
+
+
+
+2. Restart OpenRAG and upgrade to get the latest images for your containers.
+ ```bash
+ uv run openrag
+ ```
+
+3. In the OpenRAG TUI, click **Status**, and then click **Upgrade**.
+When the **Close** button is active, the upgrade is complete.
+Close the window and open the OpenRAG application.
diff --git a/docs/sidebars.js b/docs/sidebars.js
index 51a4ddc3..02d14334 100644
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -50,12 +50,12 @@ const sidebars = {
},
{
type: "category",
- label: "Reference",
+ label: "Support",
items: [
{
type: "doc",
- id: "reference/troubleshooting",
- label: "Troubleshooting"
+ id: "support/troubleshoot",
+ label: "Troubleshoot"
},
],
},
From 69a85d3e4815b9b19538ab79a5205f04eb255c48 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:36:01 -0400
Subject: [PATCH 02/13] move-docker-to-its-own-page
---
docs/docs/get-started/docker.mdx | 98 ++++++++++++++++++++++++--------
1 file changed, 73 insertions(+), 25 deletions(-)
diff --git a/docs/docs/get-started/docker.mdx b/docs/docs/get-started/docker.mdx
index a394bc69..84f0fca6 100644
--- a/docs/docs/get-started/docker.mdx
+++ b/docs/docs/get-started/docker.mdx
@@ -1,40 +1,88 @@
---
-title: Docker Deployment
+title: Docker deployment
slug: /get-started/docker
---
-# Docker Deployment
+There are two different Docker Compose files.
+They deploy the same applications and containers, but to different environments.
-## Standard Deployment
+- [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml) is an OpenRAG deployment with GPU support for accelerated AI processing.
-```bash
-# Build and start all services
-docker compose build
-docker compose up -d
-```
+- [`docker-compose-cpu.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml) is a CPU-only version of OpenRAG for systems without GPU support. Use this Docker compose file for environments where GPU drivers aren't available.
-## CPU-Only Deployment
+To install OpenRAG with Docker Compose:
-For environments without GPU support:
+1. Clone the OpenRAG repository.
+ ```bash
+ git clone https://github.com/langflow-ai/openrag.git
+ cd openrag
+ ```
-```bash
-docker compose -f docker-compose-cpu.yml up -d
-```
+2. Copy the example `.env` file that is included in the repository root.
+ The example file includes all environment variables with comments to guide you in finding and setting their values.
+ ```bash
+ cp .env.example .env
+ ```
-## Force Rebuild
+ Alternatively, create a new `.env` file in the repository root.
+ ```
+ touch .env
+ ```
-If you need to reset state or rebuild everything:
+3. Set environment variables. The Docker Compose files are populated with values from your `.env`, so the following values are **required** to be set:
+
+ ```bash
+ OPENSEARCH_PASSWORD=your_secure_password
+ OPENAI_API_KEY=your_openai_api_key
+
+ LANGFLOW_SUPERUSER=admin
+ LANGFLOW_SUPERUSER_PASSWORD=your_langflow_password
+ LANGFLOW_SECRET_KEY=your_secret_key
+ ```
+ For more information on configuring OpenRAG with environment variables, see [Environment variables](/configure/configuration).
+ For additional configuration values, including `config.yaml`, see [Configuration](/configure/configuration).
+
+4. Deploy OpenRAG with Docker Compose based on your deployment type.
+
+ For GPU-enabled systems, run the following command:
+ ```bash
+ docker compose up -d
+ ```
+
+ For CPU-only systems, run the following command:
+ ```bash
+ docker compose -f docker-compose-cpu.yml up -d
+ ```
+
+ The OpenRAG Docker Compose file starts five containers:
+ | Container Name | Default Address | Purpose |
+ |---|---|---|
+ | OpenRAG Backend | http://localhost:8000 | FastAPI server and core functionality. |
+ | OpenRAG Frontend | http://localhost:3000 | React web interface for users. |
+ | Langflow | http://localhost:7860 | AI workflow engine and flow management. |
+ | OpenSearch | http://localhost:9200 | Vector database for document storage. |
+ | OpenSearch Dashboards | http://localhost:5601 | Database administration interface. |
+
+5. Verify installation by confirming all services are running.
+
+ ```bash
+ docker compose ps
+ ```
+
+ You can now access the application at:
+
+ - **Frontend**: http://localhost:3000
+ - **Backend API**: http://localhost:8000
+ - **Langflow**: http://localhost:7860
+
+Continue with the [Quickstart](/quickstart).
+
+## Rebuild all Docker containers
+
+If you need to reset state and rebuild all of your containers, run the following command.
+Your OpenSearch and Langflow databases will be lost.
+Documents stored in the `./documents` directory will persist, since the directory is mounted as a volume in the OpenRAG backend container.
```bash
docker compose up --build --force-recreate --remove-orphans
```
-
-## Service URLs
-
-After deployment, services are available at:
-
-- Frontend: http://localhost:3000
-- Backend API: http://localhost:8000
-- Langflow: http://localhost:7860
-- OpenSearch: http://localhost:9200
-- OpenSearch Dashboards: http://localhost:5601
From 030b73c6abc5b0a0ace65367ce5db001e7c53b6b Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:37:01 -0400
Subject: [PATCH 03/13] links
---
docs/docs/get-started/install.mdx | 122 +-----------------------------
1 file changed, 4 insertions(+), 118 deletions(-)
diff --git a/docs/docs/get-started/install.mdx b/docs/docs/get-started/install.mdx
index dcb5c5f1..a9192cf4 100644
--- a/docs/docs/get-started/install.mdx
+++ b/docs/docs/get-started/install.mdx
@@ -10,7 +10,7 @@ OpenRAG can be installed in multiple ways:
* [**Python wheel**](#install-python-wheel): Install the OpenRAG Python wheel and use the [OpenRAG Terminal User Interface (TUI)](/get-started/tui) to install, run, and configure your OpenRAG deployment without running Docker commands.
-* [**Docker Compose**](#install-and-run-docker): Clone the OpenRAG repository and deploy OpenRAG with Docker Compose, including all services and dependencies.
+* [**Docker Compose**](/docker): Clone the OpenRAG repository and deploy OpenRAG with Docker Compose, including all services and dependencies.
## Prerequisites
@@ -79,46 +79,8 @@ For more information on virtual environments, see [uv](https://docs.astral.sh/uv
Command completed successfully
```
-7. To open the OpenRAG application, click **Open App**, press 6, or navigate to `http://localhost:3000`.
- The application opens.
-8. Select your language model and embedding model provider, and complete the required fields.
- **Your provider can only be selected once, and you must use the same provider for your language model and embedding model.**
- The language model can be changed, but the embeddings model cannot be changed.
- To change your provider selection, you must restart OpenRAG and delete the `config.yml` file.
-
-
-
- 9. If you already entered a value for `OPENAI_API_KEY` in the TUI in Step 5, enable **Get API key from environment variable**.
- 10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
- 11. To load 2 sample PDFs, enable **Sample dataset**.
- This is recommended, but not required.
- 12. Click **Complete**.
-
-
-
- 9. Complete the fields for **watsonx.ai API Endpoint**, **IBM API key**, and **IBM Project ID**.
- These values are found in your IBM watsonx deployment.
- 10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
- 11. To load 2 sample PDFs, enable **Sample dataset**.
- This is recommended, but not required.
- 12. Click **Complete**.
-
-
-
- 9. Enter your Ollama server's base URL address.
- The default Ollama server address is `http://localhost:11434`.
- Since OpenRAG is running in a container, you may need to change `localhost` to access services outside of the container. For example, change `http://localhost:11434` to `http://host.docker.internal:11434` to connect to Ollama.
- OpenRAG automatically sends a test connection to your Ollama server to confirm connectivity.
- 10. Select the **Embedding Model** and **Language Model** your Ollama server is running.
- OpenRAG automatically lists the available models from your Ollama server.
- 11. To load 2 sample PDFs, enable **Sample dataset**.
- This is recommended, but not required.
- 12. Click **Complete**.
-
-
-
-
-13. Continue with the [Quickstart](/quickstart).
+7. To open the OpenRAG application, click **Open App** or press 6.
+8. Continue with the [Quickstart](/quickstart).
### Advanced Setup {#advanced-setup}
@@ -138,80 +100,4 @@ The `LANGFLOW_PUBLIC_URL` controls where the Langflow web interface can be acces
The `WEBHOOK_BASE_URL` controls where the endpoint for `/connectors/CONNECTOR_TYPE/webhook` will be available.
This connection enables real-time document synchronization with external services.
-For example, for Google Drive file synchronization the webhook URL is `/connectors/google_drive/webhook`.
-
-## Docker {#install-and-run-docker}
-
-There are two different Docker Compose files.
-They deploy the same applications and containers, but to different environments.
-
-- [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml) is an OpenRAG deployment with GPU support for accelerated AI processing.
-
-- [`docker-compose-cpu.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml) is a CPU-only version of OpenRAG for systems without GPU support. Use this Docker compose file for environments where GPU drivers aren't available.
-
-To install OpenRAG with Docker Compose:
-
-1. Clone the OpenRAG repository.
- ```bash
- git clone https://github.com/langflow-ai/openrag.git
- cd openrag
- ```
-
-2. Copy the example `.env` file that is included in the repository root.
- The example file includes all environment variables with comments to guide you in finding and setting their values.
- ```bash
- cp .env.example .env
- ```
-
- Alternatively, create a new `.env` file in the repository root.
- ```
- touch .env
- ```
-
-3. Set environment variables. The Docker Compose files are populated with values from your `.env`, so the following values are **required** to be set:
-
- ```bash
- OPENSEARCH_PASSWORD=your_secure_password
- OPENAI_API_KEY=your_openai_api_key
-
- LANGFLOW_SUPERUSER=admin
- LANGFLOW_SUPERUSER_PASSWORD=your_langflow_password
- LANGFLOW_SECRET_KEY=your_secret_key
- ```
- For more information on configuring OpenRAG with environment variables, see [Environment variables](/configure/configuration).
- For additional configuration values, including `config.yaml`, see [Configuration](/configure/configuration).
-
-4. Deploy OpenRAG with Docker Compose based on your deployment type.
-
- For GPU-enabled systems, run the following command:
- ```bash
- docker compose up -d
- ```
-
- For CPU-only systems, run the following command:
- ```bash
- docker compose -f docker-compose-cpu.yml up -d
- ```
-
- The OpenRAG Docker Compose file starts five containers:
- | Container Name | Default Address | Purpose |
- |---|---|---|
- | OpenRAG Backend | http://localhost:8000 | FastAPI server and core functionality. |
- | OpenRAG Frontend | http://localhost:3000 | React web interface for users. |
- | Langflow | http://localhost:7860 | AI workflow engine and flow management. |
- | OpenSearch | http://localhost:9200 | Vector database for document storage. |
- | OpenSearch Dashboards | http://localhost:5601 | Database administration interface. |
-
-5. Verify installation by confirming all services are running.
-
- ```bash
- docker compose ps
- ```
-
- You can now access the application at:
-
- - **Frontend**: http://localhost:3000
- - **Backend API**: http://localhost:8000
- - **Langflow**: http://localhost:7860
-
-Continue with the Quickstart.
\ No newline at end of file
+For example, for Google Drive file synchronization the webhook URL is `/connectors/google_drive/webhook`.
\ No newline at end of file
From d88730acb3234ad7d2fe3ca9846c00c93871fc36 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:37:47 -0400
Subject: [PATCH 04/13] link
---
docs/docs/get-started/install.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/docs/get-started/install.mdx b/docs/docs/get-started/install.mdx
index a9192cf4..e78f4df5 100644
--- a/docs/docs/get-started/install.mdx
+++ b/docs/docs/get-started/install.mdx
@@ -10,7 +10,7 @@ OpenRAG can be installed in multiple ways:
* [**Python wheel**](#install-python-wheel): Install the OpenRAG Python wheel and use the [OpenRAG Terminal User Interface (TUI)](/get-started/tui) to install, run, and configure your OpenRAG deployment without running Docker commands.
-* [**Docker Compose**](/docker): Clone the OpenRAG repository and deploy OpenRAG with Docker Compose, including all services and dependencies.
+* [**Docker Compose**](get-started/docker): Clone the OpenRAG repository and deploy OpenRAG with Docker Compose, including all services and dependencies.
## Prerequisites
From a88c6a9ed5dfce9468db21d1a42545d143fbd25c Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 14:44:34 -0400
Subject: [PATCH 05/13] revert-onboarding
---
docs/docs/get-started/install.mdx | 42 +++++++++++++++++++++++++++++--
1 file changed, 40 insertions(+), 2 deletions(-)
diff --git a/docs/docs/get-started/install.mdx b/docs/docs/get-started/install.mdx
index e78f4df5..27cafb44 100644
--- a/docs/docs/get-started/install.mdx
+++ b/docs/docs/get-started/install.mdx
@@ -79,8 +79,46 @@ For more information on virtual environments, see [uv](https://docs.astral.sh/uv
Command completed successfully
```
-7. To open the OpenRAG application, click **Open App** or press 6.
-8. Continue with the [Quickstart](/quickstart).
+7. To open the OpenRAG application, click **Open App**, press 6, or navigate to `http://localhost:3000`.
+ The application opens.
+8. Select your language model and embedding model provider, and complete the required fields.
+ **Your provider can only be selected once, and you must use the same provider for your language model and embedding model.**
+ The language model can be changed, but the embeddings model cannot be changed.
+ To change your provider selection, you must restart OpenRAG and delete the `config.yml` file.
+
+
+
+ 9. If you already entered a value for `OPENAI_API_KEY` in the TUI in Step 5, enable **Get API key from environment variable**.
+ 10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
+ 11. To load 2 sample PDFs, enable **Sample dataset**.
+ This is recommended, but not required.
+ 12. Click **Complete**.
+
+
+
+ 9. Complete the fields for **watsonx.ai API Endpoint**, **IBM API key**, and **IBM Project ID**.
+ These values are found in your IBM watsonx deployment.
+ 10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
+ 11. To load 2 sample PDFs, enable **Sample dataset**.
+ This is recommended, but not required.
+ 12. Click **Complete**.
+
+
+
+ 9. Enter your Ollama server's base URL address.
+ The default Ollama server address is `http://localhost:11434`.
+ Since OpenRAG is running in a container, you may need to change `localhost` to access services outside of the container. For example, change `http://localhost:11434` to `http://host.docker.internal:11434` to connect to Ollama.
+ OpenRAG automatically sends a test connection to your Ollama server to confirm connectivity.
+ 10. Select the **Embedding Model** and **Language Model** your Ollama server is running.
+ OpenRAG automatically lists the available models from your Ollama server.
+ 11. To load 2 sample PDFs, enable **Sample dataset**.
+ This is recommended, but not required.
+ 12. Click **Complete**.
+
+
+
+
+13. Continue with the [Quickstart](/quickstart).
### Advanced Setup {#advanced-setup}
From 2341bf4700b3779bc5402577ebd39192d47e9cd3 Mon Sep 17 00:00:00 2001
From: Mike Fortman
Date: Mon, 29 Sep 2025 16:40:29 -0500
Subject: [PATCH 06/13] Update docling preset options
---
config.yaml | 4 +-
.../mutations/useUpdateFlowSettingMutation.ts | 4 +-
.../app/api/queries/useGetSettingsQuery.ts | 4 +-
frontend/src/app/settings/page.tsx | 165 ++++++++--------
frontend/src/lib/constants.ts | 4 +-
src/api/settings.py | 179 +++++++++++-------
src/config/config_manager.py | 4 +-
7 files changed, 211 insertions(+), 153 deletions(-)
diff --git a/config.yaml b/config.yaml
index 3bafb8bd..cd2e929b 100644
--- a/config.yaml
+++ b/config.yaml
@@ -21,7 +21,9 @@ knowledge:
# Overlap between chunks
chunk_overlap: 200
# Docling preset setting
- doclingPresets: standard
+ ocr: false
+ picture_descriptions: false
+ table_structure: false
# AI agent configuration
agent:
diff --git a/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts b/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts
index 5f196ebd..e789af48 100644
--- a/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts
+++ b/frontend/src/app/api/mutations/useUpdateFlowSettingMutation.ts
@@ -8,7 +8,9 @@ interface UpdateFlowSettingVariables {
llm_model?: string;
system_prompt?: string;
embedding_model?: string;
- doclingPresets?: string;
+ table_structure?: boolean;
+ ocr?: boolean;
+ picture_descriptions?: boolean;
chunk_size?: number;
chunk_overlap?: number;
}
diff --git a/frontend/src/app/api/queries/useGetSettingsQuery.ts b/frontend/src/app/api/queries/useGetSettingsQuery.ts
index cf1b4ec2..d2d5a15d 100644
--- a/frontend/src/app/api/queries/useGetSettingsQuery.ts
+++ b/frontend/src/app/api/queries/useGetSettingsQuery.ts
@@ -13,7 +13,9 @@ export interface KnowledgeSettings {
embedding_model?: string;
chunk_size?: number;
chunk_overlap?: number;
- doclingPresets?: string;
+ table_structure?: boolean;
+ ocr?: boolean;
+ picture_descriptions?: boolean;
}
export interface Settings {
diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx
index a63d91d3..3e6ff092 100644
--- a/frontend/src/app/settings/page.tsx
+++ b/frontend/src/app/settings/page.tsx
@@ -22,9 +22,9 @@ import {
CardTitle,
} from "@/components/ui/card";
import { Checkbox } from "@/components/ui/checkbox";
+import { Switch } from "@/components/ui/switch";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
-import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
import {
Select,
SelectContent,
@@ -39,11 +39,6 @@ import { DEFAULT_AGENT_SETTINGS, DEFAULT_KNOWLEDGE_SETTINGS, UI_CONSTANTS } from
import { getFallbackModels, type ModelProvider } from "./helpers/model-helpers";
import { ModelSelectItems } from "./helpers/model-select-item";
import { LabelWrapper } from "@/components/label-wrapper";
-import {
- Tooltip,
- TooltipContent,
- TooltipTrigger,
-} from "@radix-ui/react-tooltip";
const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS;
@@ -112,7 +107,9 @@ function KnowledgeSourcesPage() {
const [systemPrompt, setSystemPrompt] = useState("");
const [chunkSize, setChunkSize] = useState(1024);
const [chunkOverlap, setChunkOverlap] = useState(50);
- const [processingMode, setProcessingMode] = useState("standard");
+ const [tableStructure, setTableStructure] = useState(false);
+ const [ocr, setOcr] = useState(false);
+ const [pictureDescriptions, setPictureDescriptions] = useState(false);
// Fetch settings using React Query
const { data: settings = {} } = useGetSettingsQuery({
@@ -195,12 +192,24 @@ function KnowledgeSourcesPage() {
}
}, [settings.knowledge?.chunk_overlap]);
- // Sync processing mode with settings data
+ // Sync docling settings with settings data
useEffect(() => {
- if (settings.knowledge?.doclingPresets) {
- setProcessingMode(settings.knowledge.doclingPresets);
+ if (settings.knowledge?.table_structure !== undefined) {
+ setTableStructure(settings.knowledge.table_structure);
}
- }, [settings.knowledge?.doclingPresets]);
+ }, [settings.knowledge?.table_structure]);
+
+ useEffect(() => {
+ if (settings.knowledge?.ocr !== undefined) {
+ setOcr(settings.knowledge.ocr);
+ }
+ }, [settings.knowledge?.ocr]);
+
+ useEffect(() => {
+ if (settings.knowledge?.picture_descriptions !== undefined) {
+ setPictureDescriptions(settings.knowledge.picture_descriptions);
+ }
+ }, [settings.knowledge?.picture_descriptions]);
// Update model selection immediately
const handleModelChange = (newModel: string) => {
@@ -231,11 +240,20 @@ function KnowledgeSourcesPage() {
debouncedUpdate({ chunk_overlap: numValue });
};
- // Update processing mode
- const handleProcessingModeChange = (mode: string) => {
- setProcessingMode(mode);
- // Update the configuration setting (backend will also update the flow automatically)
- debouncedUpdate({ doclingPresets: mode });
+ // Update docling settings
+ const handleTableStructureChange = (checked: boolean) => {
+ setTableStructure(checked);
+ updateFlowSettingMutation.mutate({ table_structure: checked });
+ };
+
+ const handleOcrChange = (checked: boolean) => {
+ setOcr(checked);
+ updateFlowSettingMutation.mutate({ ocr: checked });
+ };
+
+ const handlePictureDescriptionsChange = (checked: boolean) => {
+ setPictureDescriptions(checked);
+ updateFlowSettingMutation.mutate({ picture_descriptions: checked });
};
// Helper function to get connector icon
@@ -569,7 +587,9 @@ function KnowledgeSourcesPage() {
// Only reset form values if the API call was successful
setChunkSize(DEFAULT_KNOWLEDGE_SETTINGS.chunk_size);
setChunkOverlap(DEFAULT_KNOWLEDGE_SETTINGS.chunk_overlap);
- setProcessingMode(DEFAULT_KNOWLEDGE_SETTINGS.processing_mode);
+ setTableStructure(false);
+ setOcr(false);
+ setPictureDescriptions(false);
closeDialog(); // Close after successful completion
})
.catch((error) => {
@@ -1064,75 +1084,60 @@ function KnowledgeSourcesPage() {
-
-
-
-
-
-
-
- Fast ingest for documents with selectable text. Images are
- ignored.
-
+
+
+
+
+ Capture table structure during ingest.
-
-
-
-
-
- Extracts text from images and scanned pages.
-
+
+
+
+
+
+
+ Extracts text from images/PDFs. Ingest is slower when enabled.
-
-
-
-
-
- Extracts text from images and scanned pages. Generates
- short image captions.
-
+
+
+
+
+
+
+ Adds captions for images. Ingest is slower when enabled.
-
-
-
-
-
- Extracts text from layout-aware parsing of text, tables,
- and sections.
-
-
-
-
+
+
diff --git a/frontend/src/lib/constants.ts b/frontend/src/lib/constants.ts
index 9c6ea7b0..8e7770fb 100644
--- a/frontend/src/lib/constants.ts
+++ b/frontend/src/lib/constants.ts
@@ -12,7 +12,9 @@ export const DEFAULT_AGENT_SETTINGS = {
export const DEFAULT_KNOWLEDGE_SETTINGS = {
chunk_size: 1000,
chunk_overlap: 200,
- processing_mode: "standard"
+ table_structure: false,
+ ocr: false,
+ picture_descriptions: false
} as const;
/**
diff --git a/src/api/settings.py b/src/api/settings.py
index c2c7cbd0..a99cce61 100644
--- a/src/api/settings.py
+++ b/src/api/settings.py
@@ -17,35 +17,30 @@ logger = get_logger(__name__)
# Docling preset configurations
-def get_docling_preset_configs():
- """Get docling preset configurations with platform-specific settings"""
+def get_docling_preset_configs(table_structure=False, ocr=False, picture_descriptions=False):
+ """Get docling preset configurations based on toggle settings
+
+ Args:
+ table_structure: Enable table structure parsing (default: False)
+ ocr: Enable OCR for text extraction from images (default: False)
+ picture_descriptions: Enable picture descriptions/captions (default: False)
+ """
is_macos = platform.system() == "Darwin"
- return {
- "standard": {"do_ocr": False},
- "ocr": {"do_ocr": True, "ocr_engine": "ocrmac" if is_macos else "easyocr"},
- "picture_description": {
- "do_ocr": True,
- "ocr_engine": "ocrmac" if is_macos else "easyocr",
- "do_picture_classification": True,
- "do_picture_description": True,
- "picture_description_local": {
- "repo_id": "HuggingFaceTB/SmolVLM-256M-Instruct",
- "prompt": "Describe this image in a few sentences.",
- },
- },
- "VLM": {
- "pipeline": "vlm",
- "vlm_pipeline_model_local": {
- "repo_id": "ds4sd/SmolDocling-256M-preview-mlx-bf16"
- if is_macos
- else "ds4sd/SmolDocling-256M-preview",
- "response_format": "doctags",
- "inference_framework": "mlx",
- },
- },
+ config = {
+ "do_ocr": ocr,
+ "ocr_engine": "ocrmac" if is_macos else "easyocr",
+ "do_table_structure": table_structure,
+ "do_picture_classification": picture_descriptions,
+ "do_picture_description": picture_descriptions,
+ "picture_description_local": {
+ "repo_id": "HuggingFaceTB/SmolVLM-256M-Instruct",
+ "prompt": "Describe this image in a few sentences.",
+ }
}
+ return config
+
async def get_settings(request, session_manager):
"""Get application settings"""
@@ -71,7 +66,9 @@ async def get_settings(request, session_manager):
"embedding_model": knowledge_config.embedding_model,
"chunk_size": knowledge_config.chunk_size,
"chunk_overlap": knowledge_config.chunk_overlap,
- "doclingPresets": knowledge_config.doclingPresets,
+ "table_structure": knowledge_config.table_structure,
+ "ocr": knowledge_config.ocr,
+ "picture_descriptions": knowledge_config.picture_descriptions,
},
"agent": {
"llm_model": agent_config.llm_model,
@@ -178,7 +175,9 @@ async def update_settings(request, session_manager):
"system_prompt",
"chunk_size",
"chunk_overlap",
- "doclingPresets",
+ "table_structure",
+ "ocr",
+ "picture_descriptions",
"embedding_model",
}
@@ -255,32 +254,68 @@ async def update_settings(request, session_manager):
# Don't fail the entire settings update if flow update fails
# The config will still be saved
- if "doclingPresets" in body:
- preset_configs = get_docling_preset_configs()
- valid_presets = list(preset_configs.keys())
- if body["doclingPresets"] not in valid_presets:
+ if "table_structure" in body:
+ if not isinstance(body["table_structure"], bool):
return JSONResponse(
- {
- "error": f"doclingPresets must be one of: {', '.join(valid_presets)}"
- },
- status_code=400,
+ {"error": "table_structure must be a boolean"}, status_code=400
)
- current_config.knowledge.doclingPresets = body["doclingPresets"]
+ current_config.knowledge.table_structure = body["table_structure"]
config_updated = True
- # Also update the flow with the new docling preset
+ # Also update the flow with the new docling settings
try:
flows_service = _get_flows_service()
- await flows_service.update_flow_docling_preset(
- body["doclingPresets"], preset_configs[body["doclingPresets"]]
- )
- logger.info(
- f"Successfully updated docling preset in flow to '{body['doclingPresets']}'"
+ preset_config = get_docling_preset_configs(
+ table_structure=body["table_structure"],
+ ocr=current_config.knowledge.ocr,
+ picture_descriptions=current_config.knowledge.picture_descriptions
)
+ await flows_service.update_flow_docling_preset("custom", preset_config)
+ logger.info(f"Successfully updated table_structure setting in flow")
except Exception as e:
- logger.error(f"Failed to update docling preset in flow: {str(e)}")
- # Don't fail the entire settings update if flow update fails
- # The config will still be saved
+ logger.error(f"Failed to update docling settings in flow: {str(e)}")
+
+ if "ocr" in body:
+ if not isinstance(body["ocr"], bool):
+ return JSONResponse(
+ {"error": "ocr must be a boolean"}, status_code=400
+ )
+ current_config.knowledge.ocr = body["ocr"]
+ config_updated = True
+
+ # Also update the flow with the new docling settings
+ try:
+ flows_service = _get_flows_service()
+ preset_config = get_docling_preset_configs(
+ table_structure=current_config.knowledge.table_structure,
+ ocr=body["ocr"],
+ picture_descriptions=current_config.knowledge.picture_descriptions
+ )
+ await flows_service.update_flow_docling_preset("custom", preset_config)
+ logger.info(f"Successfully updated ocr setting in flow")
+ except Exception as e:
+ logger.error(f"Failed to update docling settings in flow: {str(e)}")
+
+ if "picture_descriptions" in body:
+ if not isinstance(body["picture_descriptions"], bool):
+ return JSONResponse(
+ {"error": "picture_descriptions must be a boolean"}, status_code=400
+ )
+ current_config.knowledge.picture_descriptions = body["picture_descriptions"]
+ config_updated = True
+
+ # Also update the flow with the new docling settings
+ try:
+ flows_service = _get_flows_service()
+ preset_config = get_docling_preset_configs(
+ table_structure=current_config.knowledge.table_structure,
+ ocr=current_config.knowledge.ocr,
+ picture_descriptions=body["picture_descriptions"]
+ )
+ await flows_service.update_flow_docling_preset("custom", preset_config)
+ logger.info(f"Successfully updated picture_descriptions setting in flow")
+ except Exception as e:
+ logger.error(f"Failed to update docling settings in flow: {str(e)}")
if "chunk_size" in body:
if not isinstance(body["chunk_size"], int) or body["chunk_size"] <= 0:
@@ -624,48 +659,56 @@ def _get_flows_service():
async def update_docling_preset(request, session_manager):
- """Update docling preset in the ingest flow"""
+ """Update docling settings in the ingest flow - deprecated endpoint, use /settings instead"""
try:
# Parse request body
body = await request.json()
- # Validate preset parameter
- if "preset" not in body:
- return JSONResponse(
- {"error": "preset parameter is required"}, status_code=400
- )
+ # Support old preset-based API for backwards compatibility
+ if "preset" in body:
+ # Map old presets to new toggle settings
+ preset_map = {
+ "standard": {"table_structure": False, "ocr": False, "picture_descriptions": False},
+ "ocr": {"table_structure": False, "ocr": True, "picture_descriptions": False},
+ "picture_description": {"table_structure": False, "ocr": True, "picture_descriptions": True},
+ "VLM": {"table_structure": False, "ocr": False, "picture_descriptions": False},
+ }
- preset = body["preset"]
- preset_configs = get_docling_preset_configs()
+ preset = body["preset"]
+ if preset not in preset_map:
+ return JSONResponse(
+ {"error": f"Invalid preset '{preset}'. Valid presets: {', '.join(preset_map.keys())}"},
+ status_code=400,
+ )
- if preset not in preset_configs:
- valid_presets = list(preset_configs.keys())
- return JSONResponse(
- {
- "error": f"Invalid preset '{preset}'. Valid presets: {', '.join(valid_presets)}"
- },
- status_code=400,
- )
+ settings = preset_map[preset]
+ else:
+ # Support new toggle-based API
+ settings = {
+ "table_structure": body.get("table_structure", False),
+ "ocr": body.get("ocr", False),
+ "picture_descriptions": body.get("picture_descriptions", False),
+ }
# Get the preset configuration
- preset_config = preset_configs[preset]
+ preset_config = get_docling_preset_configs(**settings)
# Use the helper function to update the flow
flows_service = _get_flows_service()
- await flows_service.update_flow_docling_preset(preset, preset_config)
+ await flows_service.update_flow_docling_preset("custom", preset_config)
- logger.info(f"Successfully updated docling preset to '{preset}' in ingest flow")
+ logger.info(f"Successfully updated docling settings in ingest flow")
return JSONResponse(
{
- "message": f"Successfully updated docling preset to '{preset}'",
- "preset": preset,
+ "message": f"Successfully updated docling settings",
+ "settings": settings,
"preset_config": preset_config,
}
)
except Exception as e:
- logger.error("Failed to update docling preset", error=str(e))
+ logger.error("Failed to update docling settings", error=str(e))
return JSONResponse(
- {"error": f"Failed to update docling preset: {str(e)}"}, status_code=500
+ {"error": f"Failed to update docling settings: {str(e)}"}, status_code=500
)
diff --git a/src/config/config_manager.py b/src/config/config_manager.py
index 0b814470..6e891c5c 100644
--- a/src/config/config_manager.py
+++ b/src/config/config_manager.py
@@ -27,7 +27,9 @@ class KnowledgeConfig:
embedding_model: str = "text-embedding-3-small"
chunk_size: int = 1000
chunk_overlap: int = 200
- doclingPresets: str = "standard"
+ table_structure: bool = False
+ ocr: bool = False
+ picture_descriptions: bool = False
@dataclass
From 70a1d0b19e3b96268d20db64ff5239680c45c40d Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 20:48:14 -0400
Subject: [PATCH 07/13] init-content
---
docs/docs/get-started/tui.mdx | 52 ++++++++++++++++++++++++++++-------
1 file changed, 42 insertions(+), 10 deletions(-)
diff --git a/docs/docs/get-started/tui.mdx b/docs/docs/get-started/tui.mdx
index 2f0a048d..44fe8a66 100644
--- a/docs/docs/get-started/tui.mdx
+++ b/docs/docs/get-started/tui.mdx
@@ -1,22 +1,60 @@
---
-title: Terminal Interface (TUI)
+title: Terminal User Interface (TUI) commands
slug: /get-started/tui
---
# OpenRAG TUI Guide
-The OpenRAG Terminal User Interface (TUI) provides a streamlined way to set up, configure, and monitor your OpenRAG deployment directly from the terminal.
+The OpenRAG Terminal User Interface (TUI) provides a streamlined way to set up, configure, and monitor your OpenRAG deployment directly from the terminal, on any operating system.

-## Launch
+The TUI offers an easier way to use OpenRAG without sacrificing control.
+Instead of starting OpenRAG using Docker commands and manually editing values in the `.env` file, the TUI walks you through the setup. It prompts for variables where required, creates a `.env` file for you, and then starts OpenRAG.
+
+Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs.
+
+## Start the TUI
+
+To start the TUI, run the following commands from the directory where you installed OpenRAG.
+For more information, see [Install OpenRAG](/install).
```bash
uv sync
uv run openrag
```
-## Features
+The TUI Welcome Screen offers basic and advanced setup options.
+For more information on setup values during installation, see [Install OpenRAG](/install).
+
+## Container management
+
+The TUI can deploy, manage, and upgrade your OpenRAG containers.
+
+### Start container services
+
+Click **Start Container Services** to start the OpenRAG containers.
+The TUI automatically detects your container runtime, and then checks whether your machine has compatible GPU support by looking for `CUDA`, `NVIDIA_SMI`, and Docker/Podman runtime support. This check determines which Docker Compose file OpenRAG uses.
+The TUI then pulls the images and deploys the containers with the following command.
+```bash
+docker compose up -d
+```
+If images are missing, the TUI runs `docker compose pull`, then runs `docker compose up -d`.
+
+### Start native services
+
+A "native" service in OpenRAG refers to a service run natively on your machine, and not within a container.
+The `docling-serve` process is a native service in OpenRAG, because it's a document processing service that is run on your local machine, and controlled separately from the containers.
+
+To start or stop `docling-serve` or any other native services, in the TUI main menu, click **Start Native Services** or **Stop Native Services**.
+
+To view the status, port, or PID of a native service, in the TUI main menu, click [Status](#status).
+
+### Status
+
+The **Status** screen
+
+### Diagnostics
### Welcome Screen
- Quick setup options: basic (no auth) or advanced (OAuth)
@@ -56,11 +94,5 @@ uv run openrag
- Q: quit
- Number keys (1-4): quick access to main screens
-## Benefits
-1. Simplified setup without manual file edits
-2. Clear visual feedback and error messages
-3. Integrated monitoring and control
-4. Cross-platform: Linux, macOS, Windows
-5. Fully terminal-based; no browser required
From 4de9a1fe93af55a20b0f9a57eb81fe5160b71c26 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 29 Sep 2025 21:24:18 -0400
Subject: [PATCH 08/13] tui-more-information
---
docs/docs/get-started/tui.mdx | 70 +++++++++++++++++------------------
1 file changed, 33 insertions(+), 37 deletions(-)
diff --git a/docs/docs/get-started/tui.mdx b/docs/docs/get-started/tui.mdx
index 44fe8a66..5ca4e934 100644
--- a/docs/docs/get-started/tui.mdx
+++ b/docs/docs/get-started/tui.mdx
@@ -27,6 +27,17 @@ uv run openrag
The TUI Welcome Screen offers basic and advanced setup options.
For more information on setup values during installation, see [Install OpenRAG](/install).
+## Navigation
+
+The TUI accepts mouse input or keyboard commands.
+
+- Arrow keys: move between options
+- Tab/Shift+Tab: switch fields and buttons
+- Enter: select/confirm
+- Escape: back
+- Q: quit
+- Number keys (1-4): quick access to main screens
+
## Container management
The TUI can deploy, manage, and upgrade your OpenRAG containers.
@@ -52,47 +63,32 @@ To view the status, port, or PID of a native service, in the TUI main menu, clic
### Status
-The **Status** screen
+The **Status** menu displays information on your container deployment.
+Here you can check container health, find your service ports, view logs, and upgrade your containers.
-### Diagnostics
+To view streaming logs, select the container you want to view, and then press `l`.
+To copy your logs, click **Copy to Clipboard**.
-### Welcome Screen
-- Quick setup options: basic (no auth) or advanced (OAuth)
-- Service monitoring: container status at a glance
-- Quick actions: diagnostics, logs, configuration
+To **upgrade** your containers, click **Upgrade**.
+**Upgrade** runs `docker compose pull` and then `docker compose up -d --force-recreate`.
+The first command pulls the latest OpenRAG images.
+The second command recreates the containers with your data persisted.
-### Configuration Screen
-- Environment variables: guided forms for required settings
-- API keys: secure input with validation
-- OAuth setup: Google and Microsoft
-- Document paths: configure ingestion directories
-- Auto-save: generates and updates `.env`
+To **reset** your containers, click **Reset**.
+Reset gives you a completely fresh start.
+Reset deletes all of your data, including OpenSearch data, uploaded documents, and authentication.
+**Reset** runs two commands.
+It first stops and removes all containers, volumes, and local images.
+```
+docker compose down --volumes --remove-orphans --rmi local
+```
-### Service Monitor
-- Container status: real-time state of services
-- Resource usage: CPU, memory, network
-- Service control: start/stop/restart
-- Health checks: health indicators for all components
-
-### Log Viewer
-- Live logs: stream logs across services
-- Filtering: by service (backend, frontend, Langflow, OpenSearch)
-- Levels: DEBUG/INFO/WARNING/ERROR
-- Export: save logs for later analysis
-
-### Diagnostics
-- System checks: Docker/Podman availability and configuration
-- Environment validation: verify required variables
-- Network tests: connectivity between services
-- Performance metrics: system capacity and recommendations
-
-## Navigation
-- Arrow keys: move between options
-- Tab/Shift+Tab: switch fields and buttons
-- Enter: select/confirm
-- Escape: back
-- Q: quit
-- Number keys (1-4): quick access to main screens
+When the first command completes, OpenRAG removes any remaining unused Docker objects with `docker system prune`.
+```
+docker system prune -f
+```
+## Diagnostics
+The **Diagnostics** menu provides health monitoring for your container runtimes and your OpenSearch security configuration.
\ No newline at end of file
From 7d8bc4be211ed708226521c3da72a1c000cebb3a Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Tue, 30 Sep 2025 10:57:05 -0400
Subject: [PATCH 09/13] slug
---
docs/docs/support/troubleshoot.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/docs/support/troubleshoot.mdx b/docs/docs/support/troubleshoot.mdx
index bf18e2f7..57dcb4d3 100644
--- a/docs/docs/support/troubleshoot.mdx
+++ b/docs/docs/support/troubleshoot.mdx
@@ -1,6 +1,6 @@
---
title: Troubleshoot
-slug: /reference/troubleshoot
+slug: /support/troubleshoot
---
import Tabs from '@theme/Tabs';
From 236628dbfd55b01aa213bc85e8b050f8c61600af Mon Sep 17 00:00:00 2001
From: phact
Date: Tue, 30 Sep 2025 11:34:40 -0400
Subject: [PATCH 10/13] langflow responses workflow
---
.../workflows/build-langflow-responses.yml | 61 +++++++++++++++++++
1 file changed, 61 insertions(+)
create mode 100644 .github/workflows/build-langflow-responses.yml
diff --git a/.github/workflows/build-langflow-responses.yml b/.github/workflows/build-langflow-responses.yml
new file mode 100644
index 00000000..46daca9b
--- /dev/null
+++ b/.github/workflows/build-langflow-responses.yml
@@ -0,0 +1,61 @@
+name: Build Langflow Responses Multi-Arch
+
+on:
+ workflow_dispatch:
+
+jobs:
+ build:
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - platform: linux/amd64
+ arch: amd64
+ runs-on: ubuntu-latest
+ - platform: linux/arm64
+ arch: arm64
+ runs-on: [self-hosted, linux, ARM64, langflow-ai-arm64-2]
+
+ runs-on: ${{ matrix.runs-on }}
+
+ steps:
+ - name: Checkout langflow responses branch
+ uses: actions/checkout@v4
+ with:
+ repository: langflow-ai/langflow
+ ref: responses
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Build and push langflow (${{ matrix.arch }})
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ platforms: ${{ matrix.platform }}
+ push: true
+ tags: phact/langflow:responses-${{ matrix.arch }}
+ cache-from: type=gha,scope=langflow-responses-${{ matrix.arch }}
+ cache-to: type=gha,mode=max,scope=langflow-responses-${{ matrix.arch }}
+
+ manifest:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Create and push multi-arch manifest
+ run: |
+ docker buildx imagetools create -t phact/langflow:responses \
+ phact/langflow:responses-amd64 \
+ phact/langflow:responses-arm64
\ No newline at end of file
From 6c52462b2fcd7aef3b691ecbe403161f96ffc892 Mon Sep 17 00:00:00 2001
From: phact
Date: Tue, 30 Sep 2025 11:40:48 -0400
Subject: [PATCH 11/13] get the right branch
---
.github/workflows/build-langflow-responses.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/build-langflow-responses.yml b/.github/workflows/build-langflow-responses.yml
index 46daca9b..8d9264e2 100644
--- a/.github/workflows/build-langflow-responses.yml
+++ b/.github/workflows/build-langflow-responses.yml
@@ -19,11 +19,11 @@ jobs:
runs-on: ${{ matrix.runs-on }}
steps:
- - name: Checkout langflow responses branch
+ - name: Checkout langflow load_flows_autologin_false branch
uses: actions/checkout@v4
with:
repository: langflow-ai/langflow
- ref: responses
+ ref: load_flows_autologin_false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
From bf871b9cd728518aaac51c77aa511de698cde890 Mon Sep 17 00:00:00 2001
From: phact
Date: Tue, 30 Sep 2025 11:46:17 -0400
Subject: [PATCH 12/13] use Dockerfile.langflow
---
.github/workflows/build-langflow-responses.yml | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/build-langflow-responses.yml b/.github/workflows/build-langflow-responses.yml
index 8d9264e2..0f9d3d08 100644
--- a/.github/workflows/build-langflow-responses.yml
+++ b/.github/workflows/build-langflow-responses.yml
@@ -19,11 +19,8 @@ jobs:
runs-on: ${{ matrix.runs-on }}
steps:
- - name: Checkout langflow load_flows_autologin_false branch
+ - name: Checkout
uses: actions/checkout@v4
- with:
- repository: langflow-ai/langflow
- ref: load_flows_autologin_false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -38,6 +35,7 @@ jobs:
uses: docker/build-push-action@v5
with:
context: .
+ file: ./Dockerfile.langflow
platforms: ${{ matrix.platform }}
push: true
tags: phact/langflow:responses-${{ matrix.arch }}
From a3fe879eba59b1ce65f2ab65893a263cd200549a Mon Sep 17 00:00:00 2001
From: Mike Fortman
Date: Tue, 30 Sep 2025 12:20:09 -0500
Subject: [PATCH 13/13] design feedback
---
frontend/components/ui/switch.tsx | 4 ++--
frontend/src/app/settings/page.tsx | 8 ++++----
frontend/src/components/ui/switch.tsx | 29 ---------------------------
3 files changed, 6 insertions(+), 35 deletions(-)
delete mode 100644 frontend/src/components/ui/switch.tsx
diff --git a/frontend/components/ui/switch.tsx b/frontend/components/ui/switch.tsx
index 12187e8f..9d78fcfd 100644
--- a/frontend/components/ui/switch.tsx
+++ b/frontend/components/ui/switch.tsx
@@ -11,7 +11,7 @@ const Switch = React.forwardRef<
>(({ className, ...props }, ref) => (
diff --git a/frontend/src/app/settings/page.tsx b/frontend/src/app/settings/page.tsx
index 3e6ff092..cc439f53 100644
--- a/frontend/src/app/settings/page.tsx
+++ b/frontend/src/app/settings/page.tsx
@@ -1083,8 +1083,8 @@ function KnowledgeSourcesPage() {