From f3be92726bd144bf2e477fe30526e2a50456fcf4 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 20 Oct 2025 09:41:05 -0400
Subject: [PATCH 1/9] link-out-to-oauth-docs
---
docs/docs/get-started/install.mdx | 14 +++++++++-----
1 file changed, 9 insertions(+), 5 deletions(-)
diff --git a/docs/docs/get-started/install.mdx b/docs/docs/get-started/install.mdx
index ff9872aa..585bef91 100644
--- a/docs/docs/get-started/install.mdx
+++ b/docs/docs/get-started/install.mdx
@@ -93,8 +93,9 @@ For OAuth setup, use **Advanced Setup**.
1. To install OpenRAG with **Advanced Setup**, click **Advanced Setup** or press 2.
2. Click **Generate Passwords** to generate passwords for OpenSearch and Langflow.
3. Paste your OpenAI API key in the OpenAI API key field.
- 4. Add your client and secret values for Google, Azure, or AWS OAuth.
- These values can be found in your OAuth provider.
+ 4. Add your client ID and client secret values for Google or Microsoft OAuth.
+ These values are available from your OAuth provider.
+ For more information, see the [Google OAuth client](https://developers.google.com/identity/protocols/oauth2) or [Microsoft Graph OAuth client](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth) documentation.
5. The OpenRAG TUI presents redirect URIs for your OAuth app.
These are the URLs your OAuth provider will redirect back to after user sign-in.
Register these redirect values with your OAuth provider as they are presented in the TUI.
@@ -107,8 +108,8 @@ For OAuth setup, use **Advanced Setup**.
Command completed successfully
```
8. To open the OpenRAG application, click **Open App**, press 6, or navigate to `http://localhost:3000`.
- You will be presented with your provider's OAuth sign-in screen, and be redirected to the redirect URI after sign-in.
- Continue with Application Onboarding.
+ You are presented with your provider's OAuth sign-in screen.
+ After sign-in, you are redirected to the redirect URI.
Two additional variables are available for Advanced Setup:
@@ -116,7 +117,10 @@ For OAuth setup, use **Advanced Setup**.
The `WEBHOOK_BASE_URL` controls where the endpoint for `/connectors/CONNECTOR_TYPE/webhook` will be available.
This connection enables real-time document synchronization with external services.
- For example, for Google Drive file synchronization the webhook URL is `/connectors/google_drive/webhook`.
+ Supported webhook endpoints:
+ - Google Drive: `/connectors/google_drive/webhook`
+ - OneDrive: `/connectors/onedrive/webhook`
+ - SharePoint: `/connectors/sharepoint/webhook`
9. Continue with [Application Onboarding](#application-onboarding).
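The `WEBHOOK_BASE_URL` and the connector paths listed in the hunk above combine into full webhook URLs. The following is a minimal illustrative sketch, not OpenRAG's implementation; the helper name and example base URL are hypothetical:

```typescript
// Hypothetical helper showing how WEBHOOK_BASE_URL and a connector type
// combine into the webhook endpoints listed above. Not OpenRAG's code.
type ConnectorType = "google_drive" | "onedrive" | "sharepoint";

function webhookUrl(baseUrl: string, connector: ConnectorType): string {
  // Strip a trailing slash so the path joins cleanly.
  return `${baseUrl.replace(/\/$/, "")}/connectors/${connector}/webhook`;
}

// Example with a placeholder base URL:
// webhookUrl("https://openrag.example.com", "google_drive")
//   -> "https://openrag.example.com/connectors/google_drive/webhook"
console.log(webhookUrl("https://openrag.example.com", "sharepoint"));
```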
From 4fac72c98b75de27f4b88485ee30cb75be575f1f Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 20 Oct 2025 09:41:15 -0400
Subject: [PATCH 2/9] remove-model-restrictions
---
docs/docs/_partial-onboarding.mdx | 8 ++------
1 file changed, 2 insertions(+), 6 deletions(-)
diff --git a/docs/docs/_partial-onboarding.mdx b/docs/docs/_partial-onboarding.mdx
index 44222371..3c5c031d 100644
--- a/docs/docs/_partial-onboarding.mdx
+++ b/docs/docs/_partial-onboarding.mdx
@@ -5,16 +5,12 @@ import TabItem from '@theme/TabItem';
The first time you start OpenRAG, whether using the TUI or a `.env` file, you must complete application onboarding.
-Most values from onboarding can be changed later in the OpenRAG **Settings** page, but there are important restrictions.
-
-The **language model provider** and **embeddings model provider** can only be selected at onboarding, and you must use the same provider for your language model and embedding model.
-To change your provider selection later, you must completely reinstall OpenRAG.
-
-The **language model** can be changed later in **Settings**, but the **embeddings model** cannot be changed later.
+Values from onboarding can be changed later in the OpenRAG **Settings** page.
1. Enable **Get API key from environment variable** to automatically enter your key from the TUI-generated `.env` file.
+ Alternatively, paste an OpenAI API key into the field.
2. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
3. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
From 419543f378afd9a810e6d2440210732d5fcc6fae Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 20 Oct 2025 09:59:29 -0400
Subject: [PATCH 3/9] docs-docker-install-instead-of-deploy
---
docs/docs/get-started/docker.mdx | 4 ++--
docs/docs/get-started/quickstart.mdx | 2 --
docs/sidebars.js | 2 +-
3 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/docs/docs/get-started/docker.mdx b/docs/docs/get-started/docker.mdx
index 366d71de..ebf86554 100644
--- a/docs/docs/get-started/docker.mdx
+++ b/docs/docs/get-started/docker.mdx
@@ -1,12 +1,12 @@
---
-title: Deploy with Docker
+title: Install with Docker
slug: /get-started/docker
---
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
There are two different Docker Compose files.
-They deploy the same applications and containers, but to different environments.
+They deploy the same applications and containers locally, but target different environments.
- [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml) is an OpenRAG deployment with GPU support for accelerated AI processing.
diff --git a/docs/docs/get-started/quickstart.mdx b/docs/docs/get-started/quickstart.mdx
index c2f4b3a5..942433b5 100644
--- a/docs/docs/get-started/quickstart.mdx
+++ b/docs/docs/get-started/quickstart.mdx
@@ -44,8 +44,6 @@ If you aren't getting the results you need, you can further tune the knowledge i
To modify the knowledge ingestion or Agent behavior, click **Settings**.
In this example, you'll try a different LLM to demonstrate how the Agent's response changes.
-You can only change the **Language model**, and not the **Model provider** that you started with in OpenRAG.
-If you're using Ollama, you can use any installed model.
1. To edit the Agent's behavior, click **Edit in Langflow**.
You can more quickly access the **Language Model** and **Agent Instructions** fields in this page, but for illustration purposes, navigate to the Langflow visual builder.
diff --git a/docs/sidebars.js b/docs/sidebars.js
index 9d0c49c8..dd561b33 100644
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -33,7 +33,7 @@ const sidebars = {
{
type: "doc",
id: "get-started/docker",
- label: "Deploy with Docker"
+ label: "Install with Docker"
},
{
type: "doc",
From 675c334a6aa22d8a00ef27feefcb26ec30ee8a48 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 20 Oct 2025 10:05:09 -0400
Subject: [PATCH 4/9] emphasize-local-for-docker-deploy
---
README.md | 8 ++++----
docs/docs/get-started/docker.mdx | 4 ++--
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/README.md b/README.md
index 38beff99..4cc99a71 100644
--- a/README.md
+++ b/README.md
@@ -49,10 +49,10 @@ To launch OpenRAG with the TUI, do the following:
For the full TUI guide, see [TUI](https://docs.openr.ag/get-started/tui).
-## Docker Deployment
+## Docker installation
If you prefer to use Docker to run OpenRAG, the repository includes two Docker Compose `.yml` files.
-They deploy the same applications and containers, but to different environments.
+They deploy the same applications and containers locally, but target different environments.
- [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml) is an OpenRAG deployment for environments with GPU support. GPU support requires an NVIDIA GPU with CUDA support and compatible NVIDIA drivers installed on the OpenRAG host machine.
@@ -60,7 +60,7 @@ They deploy the same applications and containers, but to different environments.
Both Docker deployments depend on `docling serve` to be running on port `5001` on the host machine. This enables [Mac MLX](https://opensource.apple.com/projects/mlx/) support for document processing. Installing OpenRAG with the TUI starts `docling serve` automatically, but for a Docker deployment you must manually start the `docling serve` process.
-To deploy OpenRAG with Docker:
+To install OpenRAG with Docker:
1. Clone the OpenRAG repository.
```bash
@@ -121,7 +121,7 @@ To deploy OpenRAG with Docker:
uv run python scripts/docling_ctl.py stop
```
-For more information, see [Deploy with Docker](https://docs.openr.ag/get-started/docker).
+For more information, see [Install with Docker](https://docs.openr.ag/get-started/docker).
## Troubleshooting
diff --git a/docs/docs/get-started/docker.mdx b/docs/docs/get-started/docker.mdx
index ebf86554..fd098a80 100644
--- a/docs/docs/get-started/docker.mdx
+++ b/docs/docs/get-started/docker.mdx
@@ -23,7 +23,7 @@ Both Docker deployments depend on `docling serve` to be running on port `5001` o
- Create an [OpenAI API key](https://platform.openai.com/api-keys). This key is **required** to start OpenRAG, but you can choose a different model provider during [Application Onboarding](#application-onboarding).
- Optional: GPU support requires an NVIDIA GPU with CUDA support and compatible NVIDIA drivers installed on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment.
-## Deploy OpenRAG with Docker Compose
+## Install OpenRAG with Docker Compose
To install OpenRAG with Docker Compose, do the following:
@@ -82,7 +82,7 @@ The following values are **required** to be set:
PID: 27746
```
-7. Deploy OpenRAG with Docker Compose based on your deployment type.
+7. Deploy OpenRAG locally with Docker Compose based on your deployment type.
For GPU-enabled systems, run the following commands:
```bash
From d3b383d69ca4f427f7a99cf046ad09ad9c25a708 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Mon, 20 Oct 2025 10:06:44 -0400
Subject: [PATCH 5/9] sharepoint-not-s3
---
docs/docs/core-components/knowledge.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/docs/core-components/knowledge.mdx b/docs/docs/core-components/knowledge.mdx
index 9b699a4c..c0a3a5ff 100644
--- a/docs/docs/core-components/knowledge.mdx
+++ b/docs/docs/core-components/knowledge.mdx
@@ -39,7 +39,7 @@ The files are loaded into your OpenSearch database, and appear in the Knowledge
### Ingest files through OAuth connectors {#oauth-ingestion}
-OpenRAG supports Google Drive, OneDrive, and AWS S3 as OAuth connectors for seamless document synchronization.
+OpenRAG supports Google Drive, OneDrive, and SharePoint as OAuth connectors for seamless document synchronization.
OAuth integration allows individual users to connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage files. When a user connects a cloud service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage.
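The connector flow described above follows the standard OAuth 2.0 authorization-code pattern. The sketch below is generic and illustrative only; the endpoint, client ID, redirect URI, and scope values are placeholders, not OpenRAG's configuration:

```typescript
// Generic OAuth 2.0 authorization URL construction, shown only to illustrate
// the redirect-and-consent step described above. All values are placeholders.
function buildAuthorizationUrl(opts: {
  authorizeEndpoint: string; // the provider's authorization endpoint
  clientId: string; // client ID registered with the provider
  redirectUri: string; // must match a redirect URI registered with the provider
  scope: string; // permissions the user is asked to grant
}): string {
  const params = new URLSearchParams({
    response_type: "code",
    client_id: opts.clientId,
    redirect_uri: opts.redirectUri,
    scope: opts.scope,
  });
  return `${opts.authorizeEndpoint}?${params.toString()}`;
}

// After the user signs in and grants access, the provider redirects back to
// the registered redirect URI with an authorization code.
console.log(
  buildAuthorizationUrl({
    authorizeEndpoint: "https://provider.example.com/oauth2/authorize",
    clientId: "YOUR_CLIENT_ID",
    redirectUri: "http://localhost:3000/oauth/callback", // placeholder
    scope: "files.read",
  }),
);
```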
From 1fef4ee1dfd3e4f8907438a73baa1d84290d1dd5 Mon Sep 17 00:00:00 2001
From: Cole Goldsmith
Date: Wed, 22 Oct 2025 15:26:12 -0500
Subject: [PATCH 6/9] Refactor knowledge menu icons and use shadcn dropdown
---
frontend/components/knowledge-dropdown.tsx | 158 ++++++++-----------
frontend/src/app/settings/icons/aws-icon.tsx | 31 ++++
2 files changed, 93 insertions(+), 96 deletions(-)
create mode 100644 frontend/src/app/settings/icons/aws-icon.tsx
diff --git a/frontend/components/knowledge-dropdown.tsx b/frontend/components/knowledge-dropdown.tsx
index 19ddc387..56aa7def 100644
--- a/frontend/components/knowledge-dropdown.tsx
+++ b/frontend/components/knowledge-dropdown.tsx
@@ -4,10 +4,11 @@ import { useQueryClient } from "@tanstack/react-query";
import {
ChevronDown,
Cloud,
+ File,
+ Folder,
FolderOpen,
Loader2,
PlugZap,
- Upload,
} from "lucide-react";
import { useRouter } from "next/navigation";
import { useEffect, useRef, useState } from "react";
@@ -22,18 +23,26 @@ import {
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { useTask } from "@/contexts/task-context";
-import { cn } from "@/lib/utils";
import type { File as SearchFile } from "@/src/app/api/queries/useGetSearchQuery";
+import GoogleDriveIcon from "@/app/settings/icons/google-drive-icon";
+import OneDriveIcon from "@/app/settings/icons/one-drive-icon";
+import SharePointIcon from "@/app/settings/icons/share-point-icon";
+import AwsIcon from "@/app/settings/icons/aws-icon";
export function KnowledgeDropdown() {
const { addTask } = useTask();
const { refetch: refetchTasks } = useGetTasksQuery();
const queryClient = useQueryClient();
const router = useRouter();
- const [isOpen, setIsOpen] = useState(false);
const [showFolderDialog, setShowFolderDialog] = useState(false);
const [showS3Dialog, setShowS3Dialog] = useState(false);
const [showDuplicateDialog, setShowDuplicateDialog] = useState(false);
@@ -55,7 +64,6 @@ export function KnowledgeDropdown() {
};
}>({});
const fileInputRef = useRef(null);
- const dropdownRef = useRef(null);
// Check AWS availability and cloud connectors on mount
useEffect(() => {
@@ -141,24 +149,6 @@ export function KnowledgeDropdown() {
checkAvailability();
}, []);
- // Handle click outside to close dropdown
- useEffect(() => {
- const handleClickOutside = (event: MouseEvent) => {
- if (
- dropdownRef.current &&
- !dropdownRef.current.contains(event.target as Node)
- ) {
- setIsOpen(false);
- }
- };
-
- if (isOpen) {
- document.addEventListener("mousedown", handleClickOutside);
- return () =>
- document.removeEventListener("mousedown", handleClickOutside);
- }
- }, [isOpen]);
-
const handleFileUpload = () => {
fileInputRef.current?.click();
};
@@ -168,8 +158,7 @@ export function KnowledgeDropdown() {
if (files && files.length > 0) {
const file = files[0];
- // Close dropdown immediately after file selection
- setIsOpen(false);
+ // File selection will close dropdown automatically
try {
// Check if filename already exists (using ORIGINAL filename)
@@ -427,13 +416,19 @@ export function KnowledgeDropdown() {
}
};
+ // Icon mapping for cloud connectors
+ const connectorIconMap = {
+ google_drive: GoogleDriveIcon,
+ onedrive: OneDriveIcon,
+ sharepoint: SharePointIcon,
+ };
+
const cloudConnectorItems = Object.entries(cloudConnectors)
.filter(([, info]) => info.available)
.map(([type, info]) => ({
label: info.name,
- icon: PlugZap,
+ icon: connectorIconMap[type as keyof typeof connectorIconMap] || PlugZap,
onClick: async () => {
- setIsOpen(false);
if (info.connected && info.hasToken) {
setIsNavigatingToCloud(true);
try {
@@ -448,36 +443,30 @@ export function KnowledgeDropdown() {
}
},
disabled: !info.connected || !info.hasToken,
- tooltip: !info.connected
- ? `Connect ${info.name} in Settings first`
- : !info.hasToken
- ? `Reconnect ${info.name} - access token required`
- : undefined,
}));
const menuItems = [
{
- label: "Add File",
- icon: Upload,
+ label: "File",
+ icon: File,
onClick: handleFileUpload,
},
{
- label: "Process Folder",
- icon: FolderOpen,
- onClick: () => {
- setIsOpen(false);
- setShowFolderDialog(true);
- },
+ label: "Folder",
+ icon: Folder,
+ onClick: () => setShowFolderDialog(true),
},
...(awsEnabled
? [
{
- label: "Process S3 Bucket",
- icon: Cloud,
- onClick: () => {
- setIsOpen(false);
- setShowS3Dialog(true);
- },
+ label: "Amazon S3",
+ icon: AwsIcon,
+ onClick: () => setShowS3Dialog(true),
},
]
: []),
@@ -490,13 +479,9 @@ export function KnowledgeDropdown() {
return (
<>
-