diff --git a/Dockerfile.backend b/Dockerfile.backend
index 5d9d84f4..3c94a279 100644
--- a/Dockerfile.backend
+++ b/Dockerfile.backend
@@ -21,7 +21,7 @@ COPY pyproject.toml uv.lock ./
RUN uv sync
# Copy sample document and warmup script for docling
-COPY documents/warmup_ocr.pdf ./
+COPY openrag-documents/warmup_ocr.pdf ./
COPY warm_up_docling.py ./
RUN uv run docling-tools models download
RUN uv run python - <<'PY'
diff --git a/docker-compose-cpu.yml b/docker-compose-cpu.yml
index 61b9a2f6..58e75268 100644
--- a/docker-compose-cpu.yml
+++ b/docker-compose-cpu.yml
@@ -81,9 +81,10 @@ services:
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
volumes:
- - ./openrag-documents:/app/documents:Z
+ - ./openrag-documents:/app/openrag-documents:Z
- ./keys:/app/keys:Z
- ./flows:/app/flows:U,z
+ - ./config:/app/config:Z
openrag-frontend:
image: langflowai/openrag-frontend:${OPENRAG_VERSION:-latest}
diff --git a/docker-compose.yml b/docker-compose.yml
index 2ed199a5..0a284871 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -80,9 +80,10 @@ services:
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
volumes:
- - ./openrag-documents:/app/documents:Z
+ - ./openrag-documents:/app/openrag-documents:Z
- ./keys:/app/keys:Z
- ./flows:/app/flows:U,z
+ - ./config:/app/config:Z
openrag-frontend:
image: langflowai/openrag-frontend:${OPENRAG_VERSION:-latest}
diff --git a/docs/docs/core-components/knowledge.mdx b/docs/docs/core-components/knowledge.mdx
index 63edbaa3..4c7e7acc 100644
--- a/docs/docs/core-components/knowledge.mdx
+++ b/docs/docs/core-components/knowledge.mdx
@@ -29,7 +29,7 @@ To configure the knowledge ingestion pipeline parameters, see [Docling Ingestion
The **Knowledge Ingest** flow uses Langflow's [**File** component](https://docs.langflow.org/components-data#file) to split and embed files loaded from your local machine into the OpenSearch database.
-The default path to your local folder is mounted from the `./openrag-documents` folder in your OpenRAG project directory to the `/app/documents/` directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the **Documents Paths** variable in either the TUI's [Advanced Setup](/install#setup) menu or in the `.env` used by Docker Compose.
+The default path to your local folder is mounted from the `./openrag-documents` folder in your OpenRAG project directory to the `/app/openrag-documents/` directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the **Documents Paths** variable in either the TUI's [Advanced Setup](/install#setup) menu or in the `.env` used by Docker Compose.
To load and process a single file from the mapped location, click **Add Knowledge**, and then click **File**.
The file is loaded into your OpenSearch database, and appears in the Knowledge page.
diff --git a/flows/ingestion_flow.json b/flows/ingestion_flow.json
index 66da194f..63c0a116 100644
--- a/flows/ingestion_flow.json
+++ b/flows/ingestion_flow.json
@@ -5712,7 +5712,7 @@
"endpoint_name": null,
"id": "5488df7c-b93f-4f87-a446-b67028bc0813",
"is_component": false,
- "last_tested_version": "1.7.0.dev21",
+ "last_tested_version": "1.7.0.dev19",
"name": "OpenSearch Ingestion Flow",
"tags": [
"openai",
diff --git a/flows/openrag_agent.json b/flows/openrag_agent.json
index c97f0240..d9ad0b82 100644
--- a/flows/openrag_agent.json
+++ b/flows/openrag_agent.json
@@ -4507,6 +4507,7 @@
"endpoint_name": null,
"id": "1098eea1-6649-4e1d-aed1-b77249fb8dd0",
"is_component": false,
+ "locked": true,
"last_tested_version": "1.7.0.dev21",
"name": "OpenRAG OpenSearch Agent",
"tags": [
diff --git a/flows/openrag_nudges.json b/flows/openrag_nudges.json
index adebbf9e..5fe01022 100644
--- a/flows/openrag_nudges.json
+++ b/flows/openrag_nudges.json
@@ -4088,6 +4088,7 @@
"endpoint_name": null,
"id": "ebc01d31-1976-46ce-a385-b0240327226c",
"is_component": false,
+ "locked": true,
"last_tested_version": "1.7.0.dev21",
"name": "OpenRAG OpenSearch Nudges",
"tags": [
diff --git a/flows/openrag_url_mcp.json b/flows/openrag_url_mcp.json
index 7ed64cab..8bb2e2bb 100644
--- a/flows/openrag_url_mcp.json
+++ b/flows/openrag_url_mcp.json
@@ -6052,6 +6052,7 @@
"endpoint_name": null,
"id": "72c3d17c-2dac-4a73-b48a-6518473d7830",
"is_component": false,
+ "locked": true,
"mcp_enabled": true,
"last_tested_version": "1.7.0.dev21",
"name": "OpenSearch URL Ingestion Flow",
diff --git a/frontend/app/admin/page.tsx b/frontend/app/admin/page.tsx
index 85fcf345..77a7407e 100644
--- a/frontend/app/admin/page.tsx
+++ b/frontend/app/admin/page.tsx
@@ -1,364 +1,364 @@
"use client";
-import { useState, useEffect } from "react";
+import { Cloud, FolderOpen, Loader2, Upload } from "lucide-react";
+import { useEffect, useState } from "react";
+import { ProtectedRoute } from "@/components/protected-route";
import { Button } from "@/components/ui/button";
import {
- Card,
- CardContent,
- CardDescription,
- CardHeader,
- CardTitle,
+ Card,
+ CardContent,
+ CardDescription,
+ CardHeader,
+ CardTitle,
} from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
-import { Upload, FolderOpen, Loader2, Cloud } from "lucide-react";
-import { ProtectedRoute } from "@/components/protected-route";
import { useTask } from "@/contexts/task-context";
function AdminPage() {
- console.log("AdminPage component rendered!");
- const [fileUploadLoading, setFileUploadLoading] = useState(false);
- const [pathUploadLoading, setPathUploadLoading] = useState(false);
- const [selectedFile, setSelectedFile] = useState(null);
- const [folderPath, setFolderPath] = useState("/app/documents/");
- const [bucketUploadLoading, setBucketUploadLoading] = useState(false);
- const [bucketUrl, setBucketUrl] = useState("s3://");
- const [uploadStatus, setUploadStatus] = useState("");
- const [awsEnabled, setAwsEnabled] = useState(false);
- const { addTask } = useTask();
+ console.log("AdminPage component rendered!");
+ const [fileUploadLoading, setFileUploadLoading] = useState(false);
+ const [pathUploadLoading, setPathUploadLoading] = useState(false);
+ const [selectedFile, setSelectedFile] = useState(null);
+ const [folderPath, setFolderPath] = useState("/app/openrag-documents/");
+ const [bucketUploadLoading, setBucketUploadLoading] = useState(false);
+ const [bucketUrl, setBucketUrl] = useState("s3://");
+ const [uploadStatus, setUploadStatus] = useState("");
+ const [awsEnabled, setAwsEnabled] = useState(false);
+ const { addTask } = useTask();
- useEffect(() => {
- console.log("AdminPage useEffect running - checking AWS availability");
- const checkAws = async () => {
- try {
- console.log("Making request to /api/upload_options");
- const res = await fetch("/api/upload_options");
- console.log("Response status:", res.status, "OK:", res.ok);
- if (res.ok) {
- const data = await res.json();
- console.log("Response data:", data);
- setAwsEnabled(Boolean(data.aws));
- }
- } catch (err) {
- console.error("Failed to check AWS availability", err);
- }
- };
- checkAws();
- }, []);
+ useEffect(() => {
+ console.log("AdminPage useEffect running - checking AWS availability");
+ const checkAws = async () => {
+ try {
+ console.log("Making request to /api/upload_options");
+ const res = await fetch("/api/upload_options");
+ console.log("Response status:", res.status, "OK:", res.ok);
+ if (res.ok) {
+ const data = await res.json();
+ console.log("Response data:", data);
+ setAwsEnabled(Boolean(data.aws));
+ }
+ } catch (err) {
+ console.error("Failed to check AWS availability", err);
+ }
+ };
+ checkAws();
+ }, []);
- const handleFileUpload = async (e: React.FormEvent) => {
- e.preventDefault();
- if (!selectedFile) return;
+ const handleFileUpload = async (e: React.FormEvent) => {
+ e.preventDefault();
+ if (!selectedFile) return;
- setFileUploadLoading(true);
- setUploadStatus("");
+ setFileUploadLoading(true);
+ setUploadStatus("");
- try {
- const formData = new FormData();
- formData.append("file", selectedFile);
+ try {
+ const formData = new FormData();
+ formData.append("file", selectedFile);
- const response = await fetch("/api/router/upload_ingest", {
- method: "POST",
- body: formData,
- });
+ const response = await fetch("/api/router/upload_ingest", {
+ method: "POST",
+ body: formData,
+ });
- const result = await response.json();
+ const result = await response.json();
- if (response.ok) {
- setUploadStatus(`File uploaded successfully! ID: ${result.id}`);
- setSelectedFile(null);
- // Reset the file input
- const fileInput = document.getElementById(
- "file-input",
- ) as HTMLInputElement;
- if (fileInput) fileInput.value = "";
- } else {
- setUploadStatus(`Error: ${result.error || "Upload failed"}`);
- }
- } catch (error) {
- setUploadStatus(
- `Error: ${error instanceof Error ? error.message : "Upload failed"}`,
- );
- } finally {
- setFileUploadLoading(false);
- }
- };
+ if (response.ok) {
+ setUploadStatus(`File uploaded successfully! ID: ${result.id}`);
+ setSelectedFile(null);
+ // Reset the file input
+ const fileInput = document.getElementById(
+ "file-input",
+ ) as HTMLInputElement;
+ if (fileInput) fileInput.value = "";
+ } else {
+ setUploadStatus(`Error: ${result.error || "Upload failed"}`);
+ }
+ } catch (error) {
+ setUploadStatus(
+ `Error: ${error instanceof Error ? error.message : "Upload failed"}`,
+ );
+ } finally {
+ setFileUploadLoading(false);
+ }
+ };
- const handleBucketUpload = async (e: React.FormEvent) => {
- e.preventDefault();
- if (!bucketUrl.trim()) return;
+ const handleBucketUpload = async (e: React.FormEvent) => {
+ e.preventDefault();
+ if (!bucketUrl.trim()) return;
- setBucketUploadLoading(true);
- setUploadStatus("");
+ setBucketUploadLoading(true);
+ setUploadStatus("");
- try {
- const response = await fetch("/api/upload_bucket", {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify({ s3_url: bucketUrl }),
- });
+ try {
+ const response = await fetch("/api/upload_bucket", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({ s3_url: bucketUrl }),
+ });
- const result = await response.json();
+ const result = await response.json();
- if (response.status === 201) {
- const taskId = result.task_id || result.id;
- const totalFiles = result.total_files || 0;
+ if (response.status === 201) {
+ const taskId = result.task_id || result.id;
+ const totalFiles = result.total_files || 0;
- if (!taskId) {
- throw new Error("No task ID received from server");
- }
+ if (!taskId) {
+ throw new Error("No task ID received from server");
+ }
- addTask(taskId);
- setUploadStatus(
- `π Processing started for ${totalFiles} files. Check the task notification panel for real-time progress. (Task ID: ${taskId})`,
- );
- setBucketUrl("");
- } else {
- setUploadStatus(`Error: ${result.error || "Bucket processing failed"}`);
- }
- } catch (error) {
- setUploadStatus(
- `Error: ${error instanceof Error ? error.message : "Bucket processing failed"}`,
- );
- } finally {
- setBucketUploadLoading(false);
- }
- };
+ addTask(taskId);
+ setUploadStatus(
+ `π Processing started for ${totalFiles} files. Check the task notification panel for real-time progress. (Task ID: ${taskId})`,
+ );
+ setBucketUrl("");
+ } else {
+ setUploadStatus(`Error: ${result.error || "Bucket processing failed"}`);
+ }
+ } catch (error) {
+ setUploadStatus(
+ `Error: ${error instanceof Error ? error.message : "Bucket processing failed"}`,
+ );
+ } finally {
+ setBucketUploadLoading(false);
+ }
+ };
- const handlePathUpload = async (e: React.FormEvent) => {
- e.preventDefault();
- if (!folderPath.trim()) return;
+ const handlePathUpload = async (e: React.FormEvent) => {
+ e.preventDefault();
+ if (!folderPath.trim()) return;
- setPathUploadLoading(true);
- setUploadStatus("");
+ setPathUploadLoading(true);
+ setUploadStatus("");
- try {
- const response = await fetch("/api/upload_path", {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify({ path: folderPath }),
- });
+ try {
+ const response = await fetch("/api/upload_path", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({ path: folderPath }),
+ });
- const result = await response.json();
+ const result = await response.json();
- if (response.status === 201) {
- // New flow: Got task ID, use centralized tracking
- const taskId = result.task_id || result.id;
- const totalFiles = result.total_files || 0;
+ if (response.status === 201) {
+ // New flow: Got task ID, use centralized tracking
+ const taskId = result.task_id || result.id;
+ const totalFiles = result.total_files || 0;
- if (!taskId) {
- throw new Error("No task ID received from server");
- }
+ if (!taskId) {
+ throw new Error("No task ID received from server");
+ }
- // Add task to centralized tracking
- addTask(taskId);
+ // Add task to centralized tracking
+ addTask(taskId);
- setUploadStatus(
- `π Processing started for ${totalFiles} files. Check the task notification panel for real-time progress. (Task ID: ${taskId})`,
- );
- setFolderPath("");
- setPathUploadLoading(false);
- } else if (response.ok) {
- // Original flow: Direct response with results
- const successful =
- result.results?.filter(
- (r: { status: string }) => r.status === "indexed",
- ).length || 0;
- const total = result.results?.length || 0;
- setUploadStatus(
- `Path processed successfully! ${successful}/${total} files indexed.`,
- );
- setFolderPath("");
- setPathUploadLoading(false);
- } else {
- setUploadStatus(`Error: ${result.error || "Path upload failed"}`);
- setPathUploadLoading(false);
- }
- } catch (error) {
- setUploadStatus(
- `Error: ${error instanceof Error ? error.message : "Path upload failed"}`,
- );
- setPathUploadLoading(false);
- }
- };
+ setUploadStatus(
+ `π Processing started for ${totalFiles} files. Check the task notification panel for real-time progress. (Task ID: ${taskId})`,
+ );
+ setFolderPath("");
+ setPathUploadLoading(false);
+ } else if (response.ok) {
+ // Original flow: Direct response with results
+ const successful =
+ result.results?.filter(
+ (r: { status: string }) => r.status === "indexed",
+ ).length || 0;
+ const total = result.results?.length || 0;
+ setUploadStatus(
+ `Path processed successfully! ${successful}/${total} files indexed.`,
+ );
+ setFolderPath("");
+ setPathUploadLoading(false);
+ } else {
+ setUploadStatus(`Error: ${result.error || "Path upload failed"}`);
+ setPathUploadLoading(false);
+ }
+ } catch (error) {
+ setUploadStatus(
+ `Error: ${error instanceof Error ? error.message : "Path upload failed"}`,
+ );
+ setPathUploadLoading(false);
+ }
+ };
- // Remove the old pollPathTaskStatus function since we're using centralized system
+ // Remove the old pollPathTaskStatus function since we're using centralized system
- return (
-
-
-
Ingest
-
- Upload and manage documents in your database
-
-
+ return (
+
+
+
Ingest
+
+ Upload and manage documents in your database
+
+
- {uploadStatus && (
-
-
-
- {uploadStatus}
-
-
-
- )}
+ {uploadStatus && (
+
+
+
+ {uploadStatus}
+
+
+
+ )}
-
-
-
-
-
- Upload File
-
-
- Upload a single document to be indexed and searchable
-
-
-
-
-
-
+
+
+
+
+
+ Upload File
+
+
+ Upload a single document to be indexed and searchable
+
+
+
+
+
+
-
-
-
-
- Upload Folder
-
-
- Process all documents in a folder path on the server
-
-
-
-
-
-
- {awsEnabled && (
-
-
-
-
- Process Bucket
-
-
- Process all documents from an S3 bucket. AWS credentials must be
- set as environment variables.
-
-
-
-
-
-
- )}
-
-
- );
+
+
+
+
+ Upload Folder
+
+
+ Process all documents in a folder path on the server
+
+
+
+
+
+
+ {awsEnabled && (
+
+
+
+
+ Process Bucket
+
+
+ Process all documents from an S3 bucket. AWS credentials must be
+ set as environment variables.
+
+
+
+
+
+
+ )}
+
- See our{" "}
-
- Cloud Connectors installation guide
- {" "}
- for more detail.
-
-
- )}
-
-
- );
- })}
-
-
-
- {/* Model Providers Section */}
-
-
-
- Model Providers
-
-
-
-
-
- {/* Agent Behavior Section */}
-
-
-
- Agent
-
-
- Restore flow
-
- }
- title="Restore default Agent flow"
- description="This restores defaults and discards all custom settings and overrides. This can't be undone."
- confirmText="Restore"
- variant="destructive"
- onConfirm={handleRestoreRetrievalFlow}
- />
-
-
- Edit in Langflow
-
- }
- title="Edit Agent flow in Langflow"
- description={
- <>
-
- You're entering Langflow. You can edit the{" "}
- Agent flow and other underlying flows. Manual
- changes to components, wiring, or I/O can break this
- experience.
-
-
- Restore flow
-
- }
- title="Restore default Ingest flow"
- description="This restores defaults and discards all custom settings and overrides. This can't be undone."
- confirmText="Restore"
- variant="destructive"
- onConfirm={handleRestoreIngestFlow}
- />
-
-
- Edit in Langflow
-
- }
- title="Edit Ingest flow in Langflow"
- description={
- <>
-
- You're entering Langflow. You can edit the{" "}
- Ingest flow and other underlying flows. Manual
- changes to components, wiring, or I/O can break this
- experience.
-
+ See our{" "}
+
+ Cloud Connectors installation guide
+ {" "}
+ for more detail.
+
+
+ )}
+
+
+ );
+ })}
+
+
+
+ {/* Model Providers Section */}
+
+
+
+ Model Providers
+
+
+
+
+
+ {/* Agent Behavior Section */}
+
+
+
+ Agent
+
+
+ Restore flow
+
+ }
+ title="Restore default Agent flow"
+ description="This restores defaults and discards all custom settings and overrides. This can't be undone."
+ confirmText="Restore"
+ variant="destructive"
+ onConfirm={handleRestoreRetrievalFlow}
+ />
+
+
+ Edit in Langflow
+
+ }
+ title="Edit Agent flow in Langflow"
+ description={
+ <>
+
+ You're entering Langflow. You can edit the{" "}
+ Agent flow and other underlying flows. Manual
+ changes to components, wiring, or I/O can break this
+ experience.
+
+
+ To enable editing, you need to unlock the flow by clicking
+ on its name and disabling the Lock flow option.
+
+
+ Restore flow
+
+ }
+ title="Restore default Ingest flow"
+ description="This restores defaults and discards all custom settings and overrides. This can't be undone."
+ confirmText="Restore"
+ variant="destructive"
+ onConfirm={handleRestoreIngestFlow}
+ />
+
+
+ Edit in Langflow
+
+ }
+ title="Edit Ingest flow in Langflow"
+ description={
+ <>
+
+ You're entering Langflow. You can edit the{" "}
+ Ingest flow and other underlying flows. Manual
+ changes to components, wiring, or I/O can break this
+ experience.
+
+
+ To enable editing, you need to unlock the flow by clicking
+ on its name and disabling the Lock flow option.
+