Merge branch 'main' into feat/knowledge-page-sweep
Commit 01dcdef2fb
32 changed files with 1602 additions and 810 deletions
@@ -33,8 +33,8 @@ RUN uv sync --frozen --no-install-project --no-editable --extra postgresql

# Build frontend
WORKDIR /app/src/frontend
RUN npm ci && \
    npm run build && \
RUN NODE_OPTIONS=--max_old_space_size=4096 npm ci && \
    NODE_OPTIONS=--max_old_space_size=4096 npm run build && \
    mkdir -p /app/src/backend/base/langflow/frontend && \
    cp -r build/* /app/src/backend/base/langflow/frontend/
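For context on the change above, the same frontend build can be reproduced outside the container; the paths and the 4 GB heap value come from the Dockerfile, everything else is assumed:

```bash
# Build the frontend with Node's heap ceiling raised to ~4 GB so
# `npm run build` doesn't run out of memory.
cd src/frontend
export NODE_OPTIONS=--max_old_space_size=4096
npm ci
npm run build
```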
@@ -62,7 +62,7 @@ LANGFLOW_CHAT_FLOW_ID=your_chat_flow_id
LANGFLOW_INGEST_FLOW_ID=your_ingest_flow_id
NUDGES_FLOW_ID=your_nudges_flow_id
```
See extended configuration, including ingestion and optional variables: [docs/configure/configuration.md](docs/docs/configure/configuration.md)
See extended configuration, including ingestion and optional variables: [docs/reference/configuration.md](docs/docs/reference/configuration.md)

### 3. Start OpenRAG

```bash
@@ -1,15 +0,0 @@
# OpenRAG Configuration File
provider:
  model_provider: "openai" # openai, anthropic, azure, etc.
  api_key: "your-api-key" # or use OPENAI_API_KEY env var

knowledge:
  embedding_model: "text-embedding-3-small"
  chunk_size: 1000
  chunk_overlap: 200
  ocr: true
  picture_descriptions: false

agent:
  llm_model: "gpt-4o-mini"
  system_prompt: "You are a helpful AI assistant..."
@@ -74,7 +74,6 @@ services:
      - ./documents:/app/documents:Z
      - ./keys:/app/keys:Z
      - ./flows:/app/flows:Z
      - ./config:/app/config:z

  openrag-frontend:
    image: phact/openrag-frontend:${OPENRAG_VERSION:-latest}
@@ -73,7 +73,6 @@ services:
      - ./documents:/app/documents:Z
      - ./keys:/app/keys:Z
      - ./flows:/app/flows:z
      - ./config:/app/config:z
    gpus: all

  openrag-frontend:
@@ -2,4 +2,4 @@ import Icon from "@site/src/components/icon/icon";

All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.
To modify a flow, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, and click **Edit in Langflow**.
Flows are edited in the same way as in the [Langflow visual editor](https://docs.langflow.org/concepts-overview).
OpenRAG's visual editor is based on the [Langflow visual editor](https://docs.langflow.org/concepts-overview), so you can edit your flows to match your specific use case.
49 docs/docs/_partial-onboarding.mdx (new file)

@@ -0,0 +1,49 @@
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

## Application onboarding

The first time you start OpenRAG, whether using the TUI or a `.env` file, you must complete application onboarding.

Most values from onboarding can be changed later in the OpenRAG **Settings** page, but there are important restrictions.

The **language model provider** and **embeddings model provider** can only be selected at onboarding, and you must use the same provider for your language model and embedding model.
To change your provider selection later, you must completely reinstall OpenRAG.

The **language model** can be changed later in **Settings**, but the **embeddings model** cannot be changed later.

<Tabs groupId="Provider">
<TabItem value="OpenAI" label="OpenAI" default>
1. Enable **Get API key from environment variable** to automatically enter your key from the TUI-generated `.env` file.
2. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
3. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
4. Click **Complete**.
5. Continue with the [Quickstart](/quickstart).

</TabItem>
<TabItem value="IBM watsonx.ai" label="IBM watsonx.ai">
1. Complete the fields for **watsonx.ai API Endpoint**, **IBM API key**, and **IBM Project ID**.
These values are found in your IBM watsonx deployment.
2. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
3. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
4. Click **Complete**.
5. Continue with the [Quickstart](/quickstart).

</TabItem>
<TabItem value="Ollama" label="Ollama">
:::tip
Ollama is not included with OpenRAG. To install Ollama, see the [Ollama documentation](https://docs.ollama.com/).
:::
1. Enter your Ollama server's base URL address.
The default Ollama server address is `http://localhost:11434`.
OpenRAG automatically transforms `localhost` so it can reach services outside of the container, and sends a test connection to your Ollama server to confirm connectivity.
2. Select the **Embedding Model** and **Language Model** your Ollama server is running.
OpenRAG retrieves the available models from your Ollama server.
3. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
4. Click **Complete**.
5. Continue with the [Quickstart](/quickstart).
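If the automatic test connection fails, you can check reachability yourself. A minimal sketch, assuming a default local Ollama install; the `/api/tags` endpoint lists the models the server has available:

```bash
# From the host: confirm Ollama is up and see which models it serves.
curl http://localhost:11434/api/tags

# From inside a container, `localhost` points at the container itself, so use the
# Docker host alias instead (assumes Docker Desktop or a compatible runtime).
curl http://host.docker.internal:11434/api/tags
```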
</TabItem>
</Tabs>
@@ -1,110 +0,0 @@
---
title: Configuration
slug: /configure/configuration
---

import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';

<PartialExternalPreview />

OpenRAG supports multiple configuration methods with the following priority:

1. **Environment Variables** (highest priority)
2. **Configuration File** (`config.yaml`)
3. **Langflow Flow Settings** (runtime override)
4. **Default Values** (fallback)

## Configuration File

Create a `config.yaml` file in the project root to configure OpenRAG:

```yaml
# OpenRAG Configuration File
provider:
  model_provider: "openai" # openai, anthropic, azure, etc.
  api_key: "your-api-key" # or use OPENAI_API_KEY env var

knowledge:
  embedding_model: "text-embedding-3-small"
  chunk_size: 1000
  chunk_overlap: 200
  ocr: true
  picture_descriptions: false

agent:
  llm_model: "gpt-4o-mini"
  system_prompt: "You are a helpful AI assistant..."
```

## Environment Variables

Environment variables will override configuration file settings. You can still use `.env` files:

```bash
cp .env.example .env
```

## Required Variables

| Variable | Description |
| ----------------------------- | ------------------------------------------- |
| `OPENAI_API_KEY` | Your OpenAI API key |
| `OPENSEARCH_PASSWORD` | Password for OpenSearch admin user |
| `LANGFLOW_SUPERUSER` | Langflow admin username |
| `LANGFLOW_SUPERUSER_PASSWORD` | Langflow admin password |
| `LANGFLOW_CHAT_FLOW_ID` | ID of your Langflow chat flow |
| `LANGFLOW_INGEST_FLOW_ID` | ID of your Langflow ingestion flow |
| `NUDGES_FLOW_ID` | ID of your Langflow nudges/suggestions flow |

## Ingestion Configuration

| Variable | Description |
| ------------------------------ | ------------------------------------------------------ |
| `DISABLE_INGEST_WITH_LANGFLOW` | Disable Langflow ingestion pipeline (default: `false`) |

- `false` or unset: Uses Langflow pipeline (upload → ingest → delete)
- `true`: Uses traditional OpenRAG processor for document ingestion

## Optional Variables

| Variable | Description |
| ------------------------------------------------------------------------- | ------------------------------------------------------------------ |
| `LANGFLOW_PUBLIC_URL` | Public URL for Langflow (default: `http://localhost:7860`) |
| `GOOGLE_OAUTH_CLIENT_ID` / `GOOGLE_OAUTH_CLIENT_SECRET` | Google OAuth authentication |
| `MICROSOFT_GRAPH_OAUTH_CLIENT_ID` / `MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET` | Microsoft OAuth |
| `WEBHOOK_BASE_URL` | Base URL for webhook endpoints |
| `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY` | AWS integrations |
| `SESSION_SECRET` | Session management (default: auto-generated, change in production) |
| `LANGFLOW_KEY` | Explicit Langflow API key (auto-generated if not provided) |
| `LANGFLOW_SECRET_KEY` | Secret key for Langflow internal operations |

## OpenRAG Configuration Variables

These environment variables override settings in `config.yaml`:

### Provider Settings

| Variable | Description | Default |
| ------------------ | ---------------------------------------- | -------- |
| `MODEL_PROVIDER` | Model provider (openai, anthropic, etc.) | `openai` |
| `PROVIDER_API_KEY` | API key for the model provider | |
| `OPENAI_API_KEY` | OpenAI API key (backward compatibility) | |

### Knowledge Settings

| Variable | Description | Default |
| ------------------------------ | --------------------------------------- | ------------------------ |
| `EMBEDDING_MODEL` | Embedding model for vector search | `text-embedding-3-small` |
| `CHUNK_SIZE` | Text chunk size for document processing | `1000` |
| `CHUNK_OVERLAP` | Overlap between chunks | `200` |
| `OCR_ENABLED` | Enable OCR for image processing | `true` |
| `PICTURE_DESCRIPTIONS_ENABLED` | Enable picture descriptions | `false` |

### Agent Settings

| Variable | Description | Default |
| --------------- | --------------------------------- | ------------------------ |
| `LLM_MODEL` | Language model for the chat agent | `gpt-4o-mini` |
| `SYSTEM_PROMPT` | System prompt for the agent | Default assistant prompt |

See `.env.example` for a complete list with descriptions, and `docker-compose*.yml` for runtime usage.
@@ -1,5 +1,5 @@
---
title: Agents powered by Langflow
title: Langflow Agents
slug: /agents
---
@@ -13,9 +13,13 @@ import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';

OpenRAG leverages Langflow's Agent component to power the OpenRAG OpenSearch Agent flow.

This flow intelligently chats with your knowledge by embedding your query, comparing it to the vector database embeddings, and generating a response with the LLM.
[Flows](https://docs.langflow.org/concepts-overview) in Langflow are functional representations of application workflows, with multiple [component](https://docs.langflow.org/concepts-components) nodes connected as single steps in a workflow.

The Agent component shines here in its ability to make decisions on not only what query should be sent, but when a query is necessary to solve the problem at hand.
In the OpenRAG OpenSearch Agent flow, components like the Langflow [**Agent** component](https://docs.langflow.org/agents) and [**OpenSearch** component](https://docs.langflow.org/bundles-elastic#opensearch) are connected to intelligently chat with your knowledge by embedding your query, comparing it to the vector database embeddings, and generating a response with the LLM.



The Agent component shines here in its ability to make decisions on not only what query should be sent, but when a query is necessary to solve the problem at hand.

<details closed>
<summary>How do agents work?</summary>
@@ -33,22 +37,32 @@ In an agentic context, tools are functions that the agent can run to perform tas
## Use the OpenRAG OpenSearch Agent flow

If you've chatted with your knowledge in OpenRAG, you've already experienced the OpenRAG OpenSearch Agent chat flow.
To view the flow, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, and then click **Edit in Langflow**.
This flow contains seven components:
To switch OpenRAG over to the [Langflow visual editor](https://docs.langflow.org/concepts-overview) and view the OpenRAG OpenSearch Agent flow, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, and then click **Edit in Langflow**.
This flow contains seven components connected together to chat with your data:

* The Agent component orchestrates the entire flow by deciding when to search the knowledge base, how to formulate search queries, and how to combine retrieved information with the user's question to generate a comprehensive response.
The Agent behaves according to the prompt in the **Agent Instructions** field.
* The Chat Input component is connected to the Agent component's Input port. This allows the flow to be triggered by an incoming prompt from a user or application.
* The OpenSearch component is connected to the Agent component's Tools port. The agent may not use this database for every request; the agent only uses this connection if it decides the knowledge can help respond to the prompt.
* The Language Model component is connected to the Agent component's Language Model port. The agent uses the connected LLM to reason through the request sent through Chat Input.
* The Embedding Model component is connected to the OpenSearch component's Embedding port. This component converts text queries into vector representations that are compared with document embeddings stored in OpenSearch for semantic similarity matching. This gives your Agent's queries context.
* The Text Input component is populated with the global variable `OPENRAG-QUERY-FILTER`.
This filter is the Knowledge filter, and filters which knowledge sources to search through.
* The Agent component's Output port is connected to the Chat Output component, which returns the final response to the user or application.
* The [**Agent** component](https://docs.langflow.org/agents) orchestrates the entire flow by deciding when to search the knowledge base, how to formulate search queries, and how to combine retrieved information with the user's question to generate a comprehensive response.
The **Agent** behaves according to the prompt in the **Agent Instructions** field.
* The [**Chat Input** component](https://docs.langflow.org/components-io) is connected to the Agent component's Input port. This allows the flow to be triggered by an incoming prompt from a user or application.
* The [**OpenSearch** component](https://docs.langflow.org/bundles-elastic#opensearch) is connected to the Agent component's Tools port. The agent may not use this database for every request; the agent only uses this connection if it decides the knowledge can help respond to the prompt.
* The [**Language Model** component](https://docs.langflow.org/components-models) is connected to the Agent component's Language Model port. The agent uses the connected LLM to reason through the request sent through Chat Input.
* The [**Embedding Model** component](https://docs.langflow.org/components-embedding-models) is connected to the OpenSearch component's Embedding port. This component converts text queries into vector representations that are compared with document embeddings stored in OpenSearch for semantic similarity matching. This gives your Agent's queries context.
* The [**Text Input** component](https://docs.langflow.org/components-io) is populated with the global variable `OPENRAG-QUERY-FILTER`.
This filter is the [Knowledge filter](/knowledge#create-knowledge-filters), and filters which knowledge sources to search through.
* The **Agent** component's Output port is connected to the [**Chat Output** component](https://docs.langflow.org/components-io), which returns the final response to the user or application.
<PartialModifyFlows />

For an example of changing out the agent's LLM in OpenRAG, see the [Quickstart](/quickstart#change-components).
For an example of changing out the agent's language model in OpenRAG, see the [Quickstart](/quickstart#change-components).

To restore the flow to its initial state, in OpenRAG, click <Icon name="Settings" aria-hidden="true"/> **Settings**, and then click **Restore Flow**.
OpenRAG warns you that this discards all custom settings. Click **Restore** to restore the flow.

## Additional Langflow functionality

Langflow includes features beyond Agents to help you integrate OpenRAG into your application, and all Langflow features are included in OpenRAG.

* Langflow can serve your flows as an [MCP server](https://docs.langflow.org/mcp-server), or consume other MCP servers as an [MCP client](https://docs.langflow.org/mcp-client). Get started with the [MCP tutorial](https://docs.langflow.org/mcp-tutorial).

* If you don't see the component you need, extend Langflow's functionality by creating [custom Python components](https://docs.langflow.org/components-custom-components).

* Langflow offers component [bundles](https://docs.langflow.org/components-bundle-components) to integrate with many popular vector stores, AI/ML providers, and search APIs.
@@ -46,7 +46,7 @@ If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the

## Use OpenRAG default ingestion instead of Docling serve

If you want to use OpenRAG's built-in pipeline instead of Docling serve, set `DISABLE_INGEST_WITH_LANGFLOW=true` in [Environment variables](/configure/configuration#ingestion-configuration).
If you want to use OpenRAG's built-in pipeline instead of Docling serve, set `DISABLE_INGEST_WITH_LANGFLOW=true` in [Environment variables](/reference/configuration#document-processing).

The built-in pipeline still uses the Docling processor, but uses it directly without the Docling Serve API.
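For reference, the switch is a single `.env` entry; restart the containers after changing it. A minimal sketch, not a complete `.env`:

```bash
# Bypass the Langflow/Docling Serve ingestion pipeline and use OpenRAG's
# built-in Docling processor directly.
DISABLE_INGEST_WITH_LANGFLOW=true
```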
@@ -12,19 +12,9 @@ import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';
<PartialExternalPreview />

OpenRAG uses [OpenSearch](https://docs.opensearch.org/latest/) for its vector-backed knowledge store.
This is a specialized database for storing and retrieving embeddings, which helps your Agent efficiently find relevant information.
OpenSearch provides powerful hybrid search capabilities with enterprise-grade security and multi-tenancy support.

## Explore knowledge

The Knowledge page lists the documents OpenRAG has ingested into the OpenSearch vector database's `documents` index.

To explore your current knowledge, click <Icon name="Library" aria-hidden="true"/> **Knowledge**.
Click on a document to display the chunks derived from splitting the default documents into the vector database.

Documents are processed with the default **Knowledge Ingest** flow, so if you want to split your documents differently, edit the **Knowledge Ingest** flow.

<PartialModifyFlows />

## Ingest knowledge

OpenRAG supports knowledge ingestion through direct file uploads and OAuth connectors.
@@ -33,7 +23,7 @@ OpenRAG supports knowledge ingestion through direct file uploads and OAuth conne

The **Knowledge Ingest** flow uses Langflow's [**File** component](https://docs.langflow.org/components-data#file) to split and embed files loaded from your local machine into the OpenSearch database.

The default path to your local folder is mounted from the `./documents` folder in your OpenRAG project directory to the `/app/documents/` directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the **Documents Paths** variable in either the TUI's [Advanced Setup](/install#advanced-setup) or in the `.env` used by Docker Compose.
The default path to your local folder is mounted from the `./documents` folder in your OpenRAG project directory to the `/app/documents/` directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the **Documents Paths** variable in either the TUI's [Advanced Setup](/install#setup) menu or in the `.env` used by Docker Compose.
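To illustrate the mount, the same file is visible from both sides; the backend service name below is an assumption, so check your Compose file for the actual name:

```bash
# Host side: drop a file into the mounted folder.
cp example.pdf ./documents/

# Container side: the same file appears under /app/documents/.
docker compose exec openrag-backend ls /app/documents/
```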
To load and process a single file from the mapped location, click <Icon name="Plus" aria-hidden="true"/> **Add Knowledge**, and then click **Add File**.
The file is loaded into your OpenSearch database, and appears in the Knowledge page.
@@ -57,7 +47,7 @@ If you wish to use another provider, add the secrets to another provider.
<TabItem value="TUI" label="TUI" default>
1. If OpenRAG is running, stop it with **Status** > **Stop Services**.
2. Click **Advanced Setup**.
3. Add the OAuth provider's client and secret key in the [Advanced Setup](/install#advanced-setup) menu.
3. Add the OAuth provider's client and secret key in the [Advanced Setup](/install#setup) menu.
4. Click **Save Configuration**.
The TUI generates a new `.env` file with your OAuth values.
5. Click **Start Container Services**.
@@ -100,6 +90,17 @@ You can monitor the sync progress in the <Icon name="Bell" aria-hidden="true"/>

Once processing is complete, the synced documents become available in your knowledge base and can be searched through the chat interface or Knowledge page.

## Explore knowledge

The **Knowledge** page lists the documents OpenRAG has ingested into the OpenSearch vector database's `documents` index.

To explore your current knowledge, click <Icon name="Library" aria-hidden="true"/> **Knowledge**.
Click on a document to display the chunks derived from splitting the default documents into the vector database.

Documents are processed with the default **Knowledge Ingest** flow, so if you want to split your documents differently, edit the **Knowledge Ingest** flow.

<PartialModifyFlows />

### Knowledge ingestion settings

To configure the knowledge ingestion pipeline parameters, see [Docling Ingestion](/ingestion).
@@ -139,7 +140,7 @@ A new filter is created with default settings that match everything.

OpenRAG automatically detects and configures the correct vector dimensions for embedding models, ensuring optimal search performance and compatibility.

The complete list of supported models is available at [/src/services/models_service.py](https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py).
The complete list of supported models is available at [`models_service.py` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py).

You can use custom embedding models by specifying them in your configuration.
@@ -147,4 +148,4 @@ If you use an unknown embedding model, OpenRAG will automatically fall back to `

The default embedding dimension is `1536` and the default model is `text-embedding-3-small`.

For models with known vector dimensions, see [/src/config/settings.py](https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py).
For models with known vector dimensions, see [`settings.py` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py).
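For example, selecting a different embedding model is a single `.env` entry; the model name below is illustrative, and unknown models fall back to 1536 dimensions as described above:

```bash
# Embedding model used for vector search; dimensions are detected automatically
# for models on the supported list.
EMBEDDING_MODEL=text-embedding-3-large
```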
@@ -1,8 +1,9 @@
---
title: Docker deployment
title: Deploy with Docker
slug: /get-started/docker
---

import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';

<PartialExternalPreview />
@@ -14,7 +15,18 @@ They deploy the same applications and containers, but to different environments.

- [`docker-compose-cpu.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml) is a CPU-only version of OpenRAG for systems without GPU support. Use this Docker compose file for environments where GPU drivers aren't available.

To install OpenRAG with Docker Compose:
## Prerequisites

- [Python Version 3.10 to 3.13](https://www.python.org/downloads/release/python-3100/)
- [uv](https://docs.astral.sh/uv/getting-started/installation/)
- [Podman](https://podman.io/docs/installation) (recommended) or [Docker](https://docs.docker.com/get-docker/) installed
- [Docker Compose](https://docs.docker.com/compose/install/) installed. If you're using Podman, use [podman-compose](https://docs.podman.io/en/latest/markdown/podman-compose.1.html) or alias Docker compose commands to Podman commands, as shown in the example after this list.
- Create an [OpenAI API key](https://platform.openai.com/api-keys). This key is **required** to start OpenRAG, but you can choose a different model provider during [Application Onboarding](#application-onboarding).
- Optional: GPU support requires an NVIDIA GPU with CUDA support and compatible NVIDIA drivers installed on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment.
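One way to alias the commands is a per-shell alias, so the `docker compose` commands in this guide run unchanged on a Podman-only machine. A sketch, assuming a recent Podman with the `compose` subcommand:

```bash
# Route `docker` invocations to Podman for the current shell session.
alias docker='podman'
docker compose version   # now runs `podman compose`
```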
## Deploy OpenRAG with Docker Compose

To install OpenRAG with Docker Compose, do the following:

1. Clone the OpenRAG repository.
```bash
@@ -22,7 +34,7 @@ To install OpenRAG with Docker Compose:
cd openrag
```

2. Copy the example `.env` file that is included in the repository root.
2. Copy the example `.env` file included in the repository root.
The example file includes all environment variables with comments to guide you in finding and setting their values.
```bash
cp .env.example .env
@@ -33,18 +45,18 @@ To install OpenRAG with Docker Compose:
touch .env
```

3. Set environment variables. The Docker Compose files are populated with values from your `.env`, so the following values are **required** to be set:
3. Set environment variables. The Docker Compose files will be populated with values from your `.env`.
The following values are **required** to be set:

```bash
OPENSEARCH_PASSWORD=your_secure_password
OPENAI_API_KEY=your_openai_api_key

LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER_PASSWORD=your_langflow_password
LANGFLOW_SECRET_KEY=your_secret_key
```
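Optionally, confirm the required values are present before starting the containers. A quick check, assuming your values live in `.env` in the repository root:

```bash
grep -E '^(OPENSEARCH_PASSWORD|OPENAI_API_KEY|LANGFLOW_SUPERUSER|LANGFLOW_SUPERUSER_PASSWORD|LANGFLOW_SECRET_KEY)=' .env
```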
For more information on configuring OpenRAG with environment variables, see [Environment variables](/configure/configuration).
For additional configuration values, including `config.yaml`, see [Configuration](/configure/configuration).

For more information on configuring OpenRAG with environment variables, see [Environment variables](/reference/configuration).

4. Deploy OpenRAG with Docker Compose based on your deployment type.
@@ -79,14 +91,39 @@ To install OpenRAG with Docker Compose:
- **Backend API**: http://localhost:8000
- **Langflow**: http://localhost:7860

Continue with the [Quickstart](/quickstart).
6. Continue with [Application Onboarding](#application-onboarding).

## Rebuild all Docker containers
<PartialOnboarding />

If you need to reset state and rebuild all of your containers, run the following command.
## Container management commands

Manage your OpenRAG containers with the following commands.
These commands are also available in the TUI's [Status menu](/get-started/tui#status).

### Upgrade containers

Upgrade your containers to the latest version while preserving your data.

```bash
docker compose pull
docker compose up -d --force-recreate
```

### Rebuild containers (destructive)

Reset state by rebuilding all of your containers.
Your OpenSearch and Langflow databases will be lost.
Documents stored in the `./documents` directory will persist, since the directory is mounted as a volume in the OpenRAG backend container.

```bash
docker compose up --build --force-recreate --remove-orphans
```

### Remove all containers and data (destructive)

Completely remove your OpenRAG installation and delete all data.
This deletes all of your data, including OpenSearch data, uploaded documents, and authentication.
```bash
docker compose down --volumes --remove-orphans --rmi local
docker system prune -f
```
@@ -5,41 +5,51 @@ slug: /install

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';

<PartialExternalPreview />

OpenRAG can be installed in multiple ways:
[Install the OpenRAG Python wheel](#install-python-wheel), and then run the [OpenRAG Terminal User Interface (TUI)](#setup) to start your OpenRAG deployment with a guided setup process.

* [**Python wheel**](#install-python-wheel): Install the OpenRAG Python wheel and use the [OpenRAG Terminal User Interface (TUI)](/get-started/tui) to install, run, and configure your OpenRAG deployment without running Docker commands.

* [**Docker Compose**](get-started/docker): Clone the OpenRAG repository and deploy OpenRAG with Docker Compose, including all services and dependencies.
If you prefer running Docker commands and manually editing `.env` files, see [Deploy with Docker](/get-started/docker).

## Prerequisites

- [Python Version 3.10 to 3.13](https://www.python.org/downloads/release/python-3100/)
- [uv](https://docs.astral.sh/uv/getting-started/installation/)
- [Docker](https://docs.docker.com/get-docker/) or [Podman](https://podman.io/docs/installation) installed
- [Podman](https://podman.io/docs/installation) (recommended) or [Docker](https://docs.docker.com/get-docker/) installed
- [Docker Compose](https://docs.docker.com/compose/install/) installed. If using Podman, use [podman-compose](https://docs.podman.io/en/latest/markdown/podman-compose.1.html) or alias Docker compose commands to Podman commands.
- For GPU support: (TBD)
- Create an [OpenAI API key](https://platform.openai.com/api-keys). This key is **required** to start OpenRAG, but you can choose a different model provider during [Application Onboarding](#application-onboarding).
- Optional: GPU support requires an NVIDIA GPU with [CUDA](https://docs.nvidia.com/cuda/) support and compatible NVIDIA drivers installed on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment.

## Python wheel {#install-python-wheel}
## Install the OpenRAG Python wheel {#install-python-wheel}

The Python wheel is currently available internally, but will be available on PyPI at launch.
The wheel installs OpenRAG, including the TUI for installing, running, and managing OpenRAG.
For more information on virtual environments, see [uv](https://docs.astral.sh/uv/pip/environments).
:::important
The `.whl` file is currently available as an internal download during public preview, and will be published to PyPI in a future release.
:::

1. Create a new project with a virtual environment using [uv](https://docs.astral.sh/uv/pip/environments).
The OpenRAG wheel installs the Terminal User Interface (TUI) for configuring and running OpenRAG.

1. Create a new project with a virtual environment using `uv init`.

```bash
uv init YOUR_PROJECT_NAME
cd YOUR_PROJECT_NAME
```
2. Add the OpenRAG wheel to your project and install it in the virtual environment.
Replace `PATH/TO/` and `VERSION` with your OpenRAG wheel location and version.

The `(venv)` prompt doesn't change, but `uv` commands will automatically use the project's virtual environment.
For more information on virtual environments, see the [uv documentation](https://docs.astral.sh/uv/pip/environments).

2. Add the local OpenRAG wheel to your project's virtual environment.

```bash
uv add PATH/TO/openrag-VERSION-py3-none-any.whl
```
Replace `PATH/TO/` and `VERSION` with the path and version of your downloaded OpenRAG `.whl` file.

For example, if your `.whl` file is in the `~/Downloads` directory, the command is `uv add ~/Downloads/openrag-0.1.8-py3-none-any.whl`.

3. Ensure all dependencies are installed and updated in your virtual environment.
```bash
uv sync
@@ -50,95 +60,66 @@ For more information on virtual environments, see [uv](https://docs.astral.sh/uv
uv run openrag
```

The OpenRAG TUI opens.
5. Continue with [Set up OpenRAG with the TUI](#setup).

5. To install OpenRAG with Basic Setup, click **Basic Setup** or press <kbd>1</kbd>. Basic Setup does not set up OAuth connections for ingestion from Google Drive, OneDrive, or AWS. For OAuth setup, see [Advanced Setup](#advanced-setup).
The TUI prompts you for the required startup values.
Click **Generate Passwords** to autocomplete fields that contain **Auto-generated Secure Password**, or bring your own passwords.
<details closed>
<summary>Where do I find the required startup values?</summary>

| Variable | Where to Find | Description |
|----------|---------------|-------------|
| `OPENSEARCH_PASSWORD` | Auto-generated secure password | The password for OpenSearch database access. Must be at least 8 characters and must contain at least one uppercase letter, one lowercase letter, one digit, and one special character. |
| `OPENAI_API_KEY` | [OpenAI Platform](https://platform.openai.com/api-keys) | API key from your OpenAI account. |
| `LANGFLOW_SUPERUSER` | User generated | Username for Langflow admin access. For more, see [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-superuser). |
| `LANGFLOW_SUPERUSER_PASSWORD` | Auto-generated secure password | Password for Langflow admin access. For more, see the [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-superuser). |
| `LANGFLOW_SECRET_KEY` | Auto-generated secure key | Secret key for Langflow security. For more, see the [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key). |
| `LANGFLOW_AUTO_LOGIN` | Auto-generated or manual | Auto-login configuration. For more, see the [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login). |
| `LANGFLOW_NEW_USER_IS_ACTIVE` | Langflow | New user activation setting. For more, see the [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-new-user-is-active). |
| `LANGFLOW_ENABLE_SUPERUSER_CLI` | Langflow server | Superuser CLI access setting. For more, see the [Langflow docs](https://docs.langflow.org/api-keys-and-authentication#langflow-enable-superuser-cli). |
| `DOCUMENTS_PATH` | Set your local path | Path to your document storage directory. |

</details>

To complete credentials, click **Save Configuration**.
## Set up OpenRAG with the TUI {#setup}

6. To start OpenRAG with your credentials, click **Start Container Services**.
Startup pulls container images and starts them, so it can take some time.
The operation has completed when the **Close** button is available and the terminal displays:
```bash
Services started successfully
Command completed successfully
```
The TUI creates a `.env` file in your OpenRAG directory root and starts OpenRAG.

7. To open the OpenRAG application, click **Open App**, press <kbd>6</kbd>, or navigate to `http://localhost:3000`.
The application opens.
8. Select your language model and embedding model provider, and complete the required fields.
**Your provider can only be selected once, and you must use the same provider for your language model and embedding model.**
The language model can be changed, but the embeddings model cannot be changed.
To change your provider selection, you must restart OpenRAG and delete the `config.yml` file.
**Basic Setup** generates all of the required values except the OpenAI API key.
**Basic Setup** does not set up OAuth connections for ingestion from Google Drive, OneDrive, or AWS.
For OAuth setup, use **Advanced Setup**.

<Tabs groupId="Embedding provider">
<TabItem value="OpenAI" label="OpenAI" default>
9. If you already entered a value for `OPENAI_API_KEY` in the TUI in Step 5, enable **Get API key from environment variable**.
10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
11. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
12. Click **Complete**.

</TabItem>
<TabItem value="IBM watsonx.ai" label="IBM watsonx.ai">
9. Complete the fields for **watsonx.ai API Endpoint**, **IBM API key**, and **IBM Project ID**.
These values are found in your IBM watsonx deployment.
10. Under **Advanced settings**, select your **Embedding Model** and **Language Model**.
11. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
12. Click **Complete**.

</TabItem>
<TabItem value="Ollama" label="Ollama">
9. Enter your Ollama server's base URL address.
The default Ollama server address is `http://localhost:11434`.
Since OpenRAG is running in a container, you may need to change `localhost` to access services outside of the container. For example, change `http://localhost:11434` to `http://host.docker.internal:11434` to connect to Ollama.
OpenRAG automatically sends a test connection to your Ollama server to confirm connectivity.
10. Select the **Embedding Model** and **Language Model** your Ollama server is running.
OpenRAG automatically lists the available models from your Ollama server.
11. To load 2 sample PDFs, enable **Sample dataset**.
This is recommended, but not required.
12. Click **Complete**.
If the TUI detects OAuth credentials, it enforces the **Advanced Setup** path.
If the TUI detects a `.env` file in the OpenRAG root directory, it will source any variables from the `.env` file.
<Tabs groupId="Setup method">
<TabItem value="Basic setup" label="Basic setup" default>

1. To install OpenRAG with **Basic Setup**, click **Basic Setup** or press <kbd>1</kbd>.
2. Click **Generate Passwords** to generate passwords for OpenSearch and Langflow.
3. Paste your OpenAI API key in the OpenAI API key field.
4. Click **Save Configuration**.
5. To start OpenRAG, click **Start Container Services**.
Startup pulls container images and runs them, so it can take some time.
When startup is complete, the TUI displays the following:
```bash
Services started successfully
Command completed successfully
```
6. To open the OpenRAG application, click **Open App**.
7. Continue with [Application Onboarding](#application-onboarding).
</TabItem>
<TabItem value="Advanced setup" label="Advanced setup">
1. To install OpenRAG with **Advanced Setup**, click **Advanced Setup** or press <kbd>2</kbd>.
2. Click **Generate Passwords** to generate passwords for OpenSearch and Langflow.
3. Paste your OpenAI API key in the OpenAI API key field.
4. Add your client and secret values for Google, Azure, or AWS OAuth.
These values can be found in your OAuth provider.
5. The OpenRAG TUI presents redirect URIs for your OAuth app.
These are the URLs your OAuth provider will redirect back to after user sign-in.
Register these redirect values with your OAuth provider as they are presented in the TUI.
6. Click **Save Configuration**.
7. To start OpenRAG, click **Start Container Services**.
Startup pulls container images and runs them, so it can take some time.
When startup is complete, the TUI displays the following:
```bash
Services started successfully
Command completed successfully
```
8. To open the OpenRAG application, click **Open App**, press <kbd>6</kbd>, or navigate to `http://localhost:3000`.
You will be presented with your provider's OAuth sign-in screen, and be redirected to the redirect URI after sign-in.
Continue with Application Onboarding.

Two additional variables are available for Advanced Setup:

The `LANGFLOW_PUBLIC_URL` controls where the Langflow web interface can be accessed. This is where users interact with their flows in a browser.

The `WEBHOOK_BASE_URL` controls where the endpoint for `/connectors/CONNECTOR_TYPE/webhook` will be available.
This connection enables real-time document synchronization with external services.
For example, for Google Drive file synchronization the webhook URL is `/connectors/google_drive/webhook`.
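For example, with an assumed public base URL, the Google Drive webhook resolves as follows (the domain is a placeholder):

```bash
# .env (placeholder domain):
WEBHOOK_BASE_URL=https://openrag.example.com
# Google Drive change notifications are then delivered to:
#   https://openrag.example.com/connectors/google_drive/webhook
```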
</TabItem>
9. Continue with [Application Onboarding](#application-onboarding).
</TabItem>
</Tabs>

13. Continue with the [Quickstart](/quickstart).

### Advanced Setup {#advanced-setup}

**Advanced Setup** includes the required values from **Basic Setup**, with additional settings for OAuth credentials.
If the OpenRAG TUI detects OAuth credentials, it enforces the Advanced Setup path.
1. Add your client and secret values for Google, Azure, or AWS OAuth.
These values can be found in your OAuth provider.
2. The OpenRAG TUI presents redirect URIs for your OAuth app.
These are the URLs your OAuth provider will redirect back to after user sign-in.
Register these redirect values with your OAuth provider as they are presented in the TUI.
3. To open the OpenRAG application, click **Open App** or press <kbd>6</kbd>.
You will be presented with your provider's OAuth sign-in screen, and be redirected to the redirect URI after sign-in.

Two additional variables are available for Advanced Setup:

The `LANGFLOW_PUBLIC_URL` controls where the Langflow web interface can be accessed. This is where users interact with their flows in a browser.

The `WEBHOOK_BASE_URL` controls where the endpoint for `/connectors/CONNECTOR_TYPE/webhook` will be available.
This connection enables real-time document synchronization with external services.
For example, for Google Drive file synchronization the webhook URL is `/connectors/google_drive/webhook`.
<PartialOnboarding />
@@ -15,40 +15,6 @@ Get started with OpenRAG by loading your knowledge, swapping out your language m
## Prerequisites

- [Install and start OpenRAG](/install)
- Create a [Langflow API key](https://docs.langflow.org/api-keys-and-authentication)
<details>
<summary>Create a Langflow API key</summary>

A Langflow API key is a user-specific token you can use with Langflow.
It is **only** used for sending requests to the Langflow server.
It does **not** grant access to OpenRAG.

To create a Langflow API key, do the following:

1. In Langflow, click your user icon, and then select **Settings**.
2. Click **Langflow API Keys**, and then click <Icon name="Plus" aria-hidden="true"/> **Add New**.
3. Name your key, and then click **Create API Key**.
4. Copy the API key and store it securely.
5. To use your Langflow API key in a request, set a `LANGFLOW_API_KEY` environment variable in your terminal, and then include an `x-api-key` header or query parameter with your request.
For example:

```bash
# Set variable
export LANGFLOW_API_KEY="sk..."

# Send request
curl --request POST \
  --url "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" \
  --header "Content-Type: application/json" \
  --header "x-api-key: $LANGFLOW_API_KEY" \
  --data '{
    "output_type": "chat",
    "input_type": "chat",
    "input_value": "Hello"
  }'
```

</details>

## Find your way around
@@ -99,12 +65,44 @@ You can more quickly access the **Language Model** and **Agent Instructions** fi
## Integrate OpenRAG into your application

To integrate OpenRAG into your application, use the [Langflow API](https://docs.langflow.org/api-reference-api-examples).
Make requests with Python, TypeScript, or any HTTP client to run one of OpenRAG's default flows and get a response, and then modify the flow further to improve results.
Make requests with Python, TypeScript, or any HTTP client to run one of OpenRAG's default flows and get a response, and then modify the flow further to improve results. Langflow provides code snippets to help you get started.

Langflow provides code snippets to help you get started with the Langflow API.

1. To navigate to the OpenRAG OpenSearch Agent flow, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, and then click **Edit in Langflow** in the OpenRAG OpenSearch Agent flow.
2. Click **Share**, and then click **API access**.
1. Create a [Langflow API key](https://docs.langflow.org/api-keys-and-authentication).
<details>
<summary>Create a Langflow API key</summary>

A Langflow API key is a user-specific token you can use with Langflow.
It is **only** used for sending requests to the Langflow server.
It does **not** grant access to OpenRAG.

To create a Langflow API key, do the following:

1. In Langflow, click your user icon, and then select **Settings**.
2. Click **Langflow API Keys**, and then click <Icon name="Plus" aria-hidden="true"/> **Add New**.
3. Name your key, and then click **Create API Key**.
4. Copy the API key and store it securely.
5. To use your Langflow API key in a request, set a `LANGFLOW_API_KEY` environment variable in your terminal, and then include an `x-api-key` header or query parameter with your request.
For example:

```bash
# Set variable
export LANGFLOW_API_KEY="sk..."

# Send request
curl --request POST \
  --url "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" \
  --header "Content-Type: application/json" \
  --header "x-api-key: $LANGFLOW_API_KEY" \
  --data '{
    "output_type": "chat",
    "input_type": "chat",
    "input_value": "Hello"
  }'
```

</details>
2. To navigate to the OpenRAG OpenSearch Agent flow, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, and then click **Edit in Langflow** in the OpenRAG OpenSearch Agent flow.
3. Click **Share**, and then click **API access**.

The default code in the API access pane constructs a request with the Langflow server `url`, `headers`, and a `payload` of request data. The code snippets automatically include the `LANGFLOW_SERVER_ADDRESS` and `FLOW_ID` values for the flow. Replace these values if you're using the code for a different server or flow. The default Langflow server address is http://localhost:7860.
@@ -189,7 +187,7 @@ Langflow provides code snippets to help you get started with the Langflow API.
</TabItem>
</Tabs>

3. Copy the snippet, paste it in a script file, and then run the script to send the request. If you are using the curl snippet, you can run the command directly in your terminal.
4. Copy the snippet, paste it in a script file, and then run the script to send the request. If you are using the curl snippet, you can run the command directly in your terminal.

If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more.
The following is an example of a response from running the **Simple Agent** template flow:
@@ -7,11 +7,10 @@ import PartialExternalPreview from '@site/docs/_partial-external-preview.mdx';

<PartialExternalPreview />

The OpenRAG Terminal User Interface (TUI) provides a streamlined way to set up, configure, and monitor your OpenRAG deployment directly from the terminal, on any operating system.
The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal, on any operating system.



The TUI offers an easier way to use OpenRAG without sacrificing control.
Instead of starting OpenRAG using Docker commands and manually editing values in the `.env` file, the TUI walks you through the setup. It prompts for variables where required, creates a `.env` file for you, and then starts OpenRAG.

Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs.

@@ -19,7 +18,6 @@ Once OpenRAG is running, use the TUI to monitor your application, control your c
## Start the TUI

To start the TUI, run the following commands from the directory where you installed OpenRAG.
For more information, see [Install OpenRAG](/install).

```bash
uv sync
@@ -18,6 +18,8 @@ OpenRAG connects and amplifies three popular, proven open-source projects into o

* [Docling](https://docling-project.github.io/docling/) - Docling simplifies document processing, parsing diverse formats — including advanced PDF understanding — and providing seamless integrations with the gen AI ecosystem.

OpenRAG builds on Langflow's familiar interface while adding OpenSearch for vector storage and Docling for simplified document parsing, with opinionated flows that serve as ready-to-use recipes for ingestion, retrieval, and generation from popular sources like OneDrive, Google Drive, and AWS. And don't fear: every part of the stack is swappable. Write your own custom components in Python, try different language models, and customize your flows to build an agentic RAG system that solves problems.
OpenRAG builds on Langflow's familiar interface while adding OpenSearch for vector storage and Docling for simplified document parsing, with opinionated flows that serve as ready-to-use recipes for ingestion, retrieval, and generation from popular sources like OneDrive, Google Drive, and AWS.

Ready to get started? Install OpenRAG and then run the Quickstart to create a powerful RAG pipeline.
What's more, every part of the stack is swappable. Write your own custom components in Python, try different language models, and customize your flows to build an agentic RAG system.

Ready to get started? [Install OpenRAG](/install) and then run the [Quickstart](/quickstart) to create a powerful RAG pipeline.

162 docs/docs/reference/configuration.mdx (new file)
@@ -0,0 +1,162 @@
---
title: Environment variables
slug: /reference/configuration
---

import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

OpenRAG recognizes [supported environment variables](#supported-environment-variables) from the following sources:

* [Environment variables](#supported-environment-variables) - Values set in the `.env` file.
* [Langflow runtime overrides](#langflow-runtime-overrides) - Values that Langflow components can override at runtime.
* [Default or fallback values](#default-values-and-fallbacks) - Values used when OpenRAG doesn't find a value from the other sources.

## Configure environment variables

Environment variables are set in a `.env` file in the root of your OpenRAG project directory.

For an example `.env` file, see [`.env.example` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/.env.example).

The Docker Compose files are populated with values from your `.env`, so you don't need to edit the Docker Compose files manually.

Environment variables always take precedence over other configuration sources.

### Set environment variables

To set environment variables, do the following:

1. Stop OpenRAG.
2. Set the values in the `.env` file:
```bash
LOG_LEVEL=DEBUG
LOG_FORMAT=json
SERVICE_NAME=openrag-dev
```
3. Start OpenRAG.

Updating provider API keys or provider endpoints in the `.env` file will not take effect after [Application onboarding](/install#application-onboarding). To change these values, you must:

1. Stop OpenRAG.
2. Remove the containers:
```
docker-compose down
```
3. Update the values in your `.env` file.
4. Start OpenRAG containers.
```
docker-compose up -d
```
5. Complete [Application onboarding](/install#application-onboarding) again.

## Supported environment variables

All OpenRAG configuration can be controlled through environment variables.

### AI provider settings

Configure which AI models and providers OpenRAG uses for language processing and embeddings.
For more information, see [Application onboarding](/install#application-onboarding).

| Variable | Default | Description |
|----------|---------|-------------|
| `EMBEDDING_MODEL` | `text-embedding-3-small` | Embedding model for vector search. |
| `LLM_MODEL` | `gpt-4o-mini` | Language model for the chat agent. |
| `MODEL_PROVIDER` | `openai` | Model provider, such as OpenAI or IBM watsonx.ai. |
| `OPENAI_API_KEY` | - | Your OpenAI API key. Required. |
| `PROVIDER_API_KEY` | - | API key for the model provider. |
| `PROVIDER_ENDPOINT` | - | Custom provider endpoint. Only used for IBM or Ollama providers. |
| `PROVIDER_PROJECT_ID` | - | Project ID for providers. Only required for the IBM watsonx.ai provider. |
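As a sketch, an assumed `.env` fragment for a non-OpenAI provider; the provider string and model names are placeholders, and `PROVIDER_ENDPOINT` and `PROVIDER_PROJECT_ID` only apply to the providers noted in the table:

```bash
# Illustrative values only; confirm the exact provider string for your deployment.
MODEL_PROVIDER=ollama
PROVIDER_ENDPOINT=http://host.docker.internal:11434
LLM_MODEL=llama3.1
EMBEDDING_MODEL=nomic-embed-text
```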
||||
### Document processing
|
||||
|
||||
Control how OpenRAG processes and ingests documents into your knowledge base.
|
||||
For more information, see [Ingestion](/ingestion).
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `CHUNK_OVERLAP` | `200` | Overlap between chunks. |
|
||||
| `CHUNK_SIZE` | `1000` | Text chunk size for document processing. |
|
||||
| `DISABLE_INGEST_WITH_LANGFLOW` | `false` | Disable Langflow ingestion pipeline. |
|
||||
| `DOCLING_OCR_ENGINE` | - | OCR engine for document processing. |
|
||||
| `OCR_ENABLED` | `false` | Enable OCR for image processing. |
|
||||
| `OPENRAG_DOCUMENTS_PATHS` | `./documents` | Document paths for ingestion. |
|
||||
| `PICTURE_DESCRIPTIONS_ENABLED` | `false` | Enable picture descriptions. |
|
||||
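As a sketch, the document processing variables combine in `.env` like this; the values shown are illustrative, not recommendations for every corpus.

```bash
# Document processing (example values)
OPENRAG_DOCUMENTS_PATHS=./documents
CHUNK_SIZE=1000
CHUNK_OVERLAP=200
OCR_ENABLED=true
PICTURE_DESCRIPTIONS_ENABLED=false
```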
|
||||
### Langflow settings
|
||||
|
||||
Configure Langflow authentication.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `LANGFLOW_AUTO_LOGIN` | `False` | Enable auto-login for Langflow. |
|
||||
| `LANGFLOW_CHAT_FLOW_ID` | pre-filled | This value is pre-filled. The default value is found in [.env.example](https://github.com/langflow-ai/openrag/blob/main/.env.example). |
|
||||
| `LANGFLOW_ENABLE_SUPERUSER_CLI` | `False` | Enable superuser CLI. |
|
||||
| `LANGFLOW_INGEST_FLOW_ID` | pre-filled | This value is pre-filled. The default value is found in [.env.example](https://github.com/langflow-ai/openrag/blob/main/.env.example). |
|
||||
| `LANGFLOW_KEY` | auto-generated | Explicit Langflow API key. |
|
||||
| `LANGFLOW_NEW_USER_IS_ACTIVE` | `False` | New users are active by default. |
|
||||
| `LANGFLOW_PUBLIC_URL` | `http://localhost:7860` | Public URL for Langflow. |
|
||||
| `LANGFLOW_SECRET_KEY` | - | Secret key for Langflow internal operations. |
|
||||
| `LANGFLOW_SUPERUSER` | - | Langflow admin username. Required. |
|
||||
| `LANGFLOW_SUPERUSER_PASSWORD` | - | Langflow admin password. Required. |
|
||||
| `LANGFLOW_URL` | `http://localhost:7860` | Langflow URL. |
|
||||
| `NUDGES_FLOW_ID` | pre-filled | This value is pre-filled. The default value is found in [.env.example](https://github.com/langflow-ai/openrag/blob/main/.env.example). |
|
||||
| `SYSTEM_PROMPT` | "You are a helpful AI assistant with access to a knowledge base. Answer questions based on the provided context." | System prompt for the Langflow agent. |
|
||||
|
||||
### OAuth provider settings
|
||||
|
||||
Configure OAuth providers and external service integrations.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY` | - | AWS integrations. |
|
||||
| `GOOGLE_OAUTH_CLIENT_ID` / `GOOGLE_OAUTH_CLIENT_SECRET` | - | Google OAuth authentication. |
|
||||
| `MICROSOFT_GRAPH_OAUTH_CLIENT_ID` / `MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET` | - | Microsoft OAuth. |
|
||||
| `WEBHOOK_BASE_URL` | - | Base URL for webhook endpoints. |
|
||||
|
||||
### OpenSearch settings
|
||||
|
||||
Configure OpenSearch database authentication.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `OPENSEARCH_HOST` | `localhost` | OpenSearch host. |
|
||||
| `OPENSEARCH_PASSWORD` | - | Password for OpenSearch admin user. Required. |
|
||||
| `OPENSEARCH_PORT` | `9200` | OpenSearch port. |
|
||||
| `OPENSEARCH_USERNAME` | `admin` | OpenSearch username. |
|
||||
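To confirm the credentials work, you can query the OpenSearch cluster health endpoint with the same values. This sketch assumes the default security plugin with a self-signed certificate, hence `-k`.

```bash
# Check that OpenSearch is reachable with your configured credentials
curl -k -u "$OPENSEARCH_USERNAME:$OPENSEARCH_PASSWORD" \
  "https://$OPENSEARCH_HOST:$OPENSEARCH_PORT/_cluster/health?pretty"
```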
|
||||
### System settings
|
||||
|
||||
Configure general system components, session management, and logging.
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `LANGFLOW_KEY_RETRIES` | `15` | Number of retries for Langflow key generation. |
|
||||
| `LANGFLOW_KEY_RETRY_DELAY` | `2.0` | Delay between retries in seconds. |
|
||||
| `LOG_FORMAT` | - | Log format. Set to `json` for JSON output. |
|
||||
| `LOG_LEVEL` | `INFO` | Logging level (DEBUG, INFO, WARNING, ERROR). |
|
||||
| `MAX_WORKERS` | - | Maximum number of workers for document processing. |
|
||||
| `SERVICE_NAME` | `openrag` | Service name for logging. |
|
||||
| `SESSION_SECRET` | auto-generated | Session management. |
|
||||
|
||||
## Langflow runtime overrides
|
||||
|
||||
Langflow runtime overrides allow you to modify component settings at runtime without changing the base configuration.
|
||||
|
||||
Runtime overrides are implemented through **tweaks** - parameter modifications that are passed to specific Langflow components during flow execution.
|
||||
|
||||
For more information on tweaks, see [Input schema (tweaks)](https://docs.langflow.org/concepts-publish#input-schema).
|
||||
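As a rough sketch of the mechanism, a tweak is a JSON object keyed by component ID and sent with a flow run request. The component ID and parameter below are hypothetical placeholders, and the endpoint follows the Langflow run API rather than anything OpenRAG-specific; check the linked Langflow docs for the exact schema.

```bash
# Hypothetical example: override one component parameter for a single run.
# "OpenAIModel-abc12" and "temperature" are placeholders.
curl -X POST "$LANGFLOW_URL/api/v1/run/$LANGFLOW_CHAT_FLOW_ID" \
  -H "Content-Type: application/json" \
  -H "x-api-key: $LANGFLOW_KEY" \
  -d '{
    "input_value": "What is in my knowledge base?",
    "tweaks": {
      "OpenAIModel-abc12": { "temperature": 0.2 }
    }
  }'
```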
|
||||
## Default values and fallbacks
|
||||
|
||||
When no environment variables or configuration file values are provided, OpenRAG uses default values.
|
||||
These values can be found in the code base at the following locations.
|
||||
|
||||
### OpenRAG configuration defaults
|
||||
|
||||
These values are defined in [`config_manager.py` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/src/config/config_manager.py).
|
||||
|
||||
### System configuration defaults
|
||||
|
||||
These fallback values are defined in [`settings.py` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py).
|
||||
|
|
@ -13,12 +13,12 @@ This page provides troubleshooting advice for issues you might encounter when us
|
|||
|
||||
## OpenSearch fails to start
|
||||
|
||||
Check that `OPENSEARCH_PASSWORD` is set and meets requirements.
|
||||
Check that `OPENSEARCH_PASSWORD` set in [Environment variables](/reference/configuration) meets requirements.
|
||||
The password must be at least 8 characters long and contain at least one uppercase letter, one lowercase letter, one digit, and one special character.
|
||||
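For example, a value such as the following would satisfy those rules (placeholder only; choose your own secret and mind shell quoting of special characters).

```bash
OPENSEARCH_PASSWORD='Op3nRag!Example'
```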
|
||||
## Langflow connection issues
|
||||
|
||||
Verify the `LANGFLOW_SUPERUSER` credentials are correct.
|
||||
Verify the `LANGFLOW_SUPERUSER` credentials set in [Environment variables](/reference/configuration) are correct.
|
||||
|
||||
## Memory errors
|
||||
|
||||
|
|
@ -51,60 +51,61 @@ To reset your local containers and pull new images, do the following:
|
|||
|
||||
1. Stop your containers and completely remove them.
|
||||
|
||||
<Tabs groupId="Container software">
|
||||
<TabItem value="Docker" label="Docker" default>
|
||||
|
||||
```bash
|
||||
# Stop all running containers
|
||||
docker stop $(docker ps -q)
|
||||
<Tabs groupId="Container software">
|
||||
<TabItem value="Podman" label="Podman">
|
||||
|
||||
# Remove all containers (including stopped ones)
|
||||
docker rm --force $(docker ps -aq)
|
||||
```bash
|
||||
# Stop all running containers
|
||||
podman stop --all
|
||||
|
||||
# Remove all containers (including stopped ones)
|
||||
podman rm --all --force
|
||||
|
||||
# Remove all images
|
||||
podman rmi --all --force
|
||||
|
||||
# Remove all volumes
|
||||
podman volume prune --force
|
||||
|
||||
# Remove all networks (except default)
|
||||
podman network prune --force
|
||||
|
||||
# Clean up any leftover data
|
||||
podman system prune --all --force --volumes
|
||||
```
|
||||
|
||||
# Remove all images
|
||||
docker rmi --force $(docker images -q)
|
||||
</TabItem>
|
||||
<TabItem value="Docker" label="Docker" default>
|
||||
|
||||
# Remove all volumes
|
||||
docker volume prune --force
|
||||
```bash
|
||||
# Stop all running containers
|
||||
docker stop $(docker ps -q)
|
||||
|
||||
# Remove all containers (including stopped ones)
|
||||
docker rm --force $(docker ps -aq)
|
||||
|
||||
# Remove all images
|
||||
docker rmi --force $(docker images -q)
|
||||
|
||||
# Remove all volumes
|
||||
docker volume prune --force
|
||||
|
||||
# Remove all networks (except default)
|
||||
docker network prune --force
|
||||
|
||||
# Clean up any leftover data
|
||||
docker system prune --all --force --volumes
|
||||
```
|
||||
|
||||
# Remove all networks (except default)
|
||||
docker network prune --force
|
||||
|
||||
# Clean up any leftover data
|
||||
docker system prune --all --force --volumes
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Podman" label="Podman">
|
||||
|
||||
```bash
|
||||
# Stop all running containers
|
||||
podman stop --all
|
||||
|
||||
# Remove all containers (including stopped ones)
|
||||
podman rm --all --force
|
||||
|
||||
# Remove all images
|
||||
podman rmi --all --force
|
||||
|
||||
# Remove all volumes
|
||||
podman volume prune --force
|
||||
|
||||
# Remove all networks (except default)
|
||||
podman network prune --force
|
||||
|
||||
# Clean up any leftover data
|
||||
podman system prune --all --force --volumes
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
2. Restart OpenRAG and upgrade to get the latest images for your containers.
|
||||
```bash
|
||||
uv sync
|
||||
uv run openrag
|
||||
```
|
||||
|
||||
3. In the OpenRAG TUI, click **Status**, and then click **Upgrade**.
|
||||
When the **Close** button is active, the upgrade is complete.
|
||||
Close the window and open the OpenRAG application.
|
||||
|
|
@ -28,22 +28,22 @@ const sidebars = {
|
|||
{
|
||||
type: "doc",
|
||||
id: "get-started/install",
|
||||
label: "Installation"
|
||||
label: "Install OpenRAG"
|
||||
},
|
||||
{
|
||||
type: "doc",
|
||||
id: "get-started/docker",
|
||||
label: "Deploy with Docker"
|
||||
},
|
||||
{
|
||||
type: "doc",
|
||||
id: "get-started/quickstart",
|
||||
label: "Quickstart"
|
||||
},
|
||||
{
|
||||
type: "doc",
|
||||
id: "get-started/docker",
|
||||
label: "Docker Deployment"
|
||||
},
|
||||
{
|
||||
type: "doc",
|
||||
id: "get-started/tui",
|
||||
label: "Terminal Interface (TUI)"
|
||||
label: "Terminal User Interface (TUI)"
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
@ -70,12 +70,12 @@ const sidebars = {
|
|||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Configuration",
|
||||
label: "Reference",
|
||||
items: [
|
||||
{
|
||||
type: "doc",
|
||||
id: "configure/configuration",
|
||||
label: "Environment Variables"
|
||||
id: "reference/configuration",
|
||||
label: "Environment variables"
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
@ -93,4 +93,4 @@ const sidebars = {
|
|||
],
|
||||
};
|
||||
|
||||
export default sidebars;
|
||||
export default sidebars;
|
||||
|
|
@ -29,6 +29,34 @@
|
|||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
|
||||
/* Tabs Styling */
|
||||
.tabs-container {
|
||||
border: 1px solid var(--ifm-color-emphasis-300);
|
||||
border-radius: var(--ifm-global-radius);
|
||||
padding: 1rem;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.tabs {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.tabs__item {
|
||||
border: none;
|
||||
border-bottom: 1px solid var(--ifm-color-emphasis-200);
|
||||
margin-right: 0rem;
|
||||
padding-bottom: 0.5rem;
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.tabs__item:hover {
|
||||
background-color: var(--ifm-hover-overlay);
|
||||
}
|
||||
|
||||
.tabs__item--active {
|
||||
border-bottom-color: var(--ifm-tabs-color-active);
|
||||
}
|
||||
|
||||
/* GitHub Icon Button */
|
||||
.header-github-link:hover {
|
||||
opacity: 0.6;
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ const buttonVariants = cva(
|
|||
destructive:
|
||||
"bg-destructive text-destructive-foreground hover:bg-destructive/90",
|
||||
outline:
|
||||
"border border-input hover:bg-muted hover:text-accent-foreground disabled:bg-muted disabled:!border-none",
|
||||
"border border-border hover:bg-muted hover:text-accent-foreground disabled:bg-muted disabled:!border-none",
|
||||
primary:
|
||||
"border bg-background text-secondary-foreground hover:bg-muted hover:shadow-sm",
|
||||
warning: "bg-warning text-warning-foreground hover:bg-warning/90",
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ const Card = React.forwardRef<
|
|||
ref={ref}
|
||||
className={cn(
|
||||
"rounded-xl border border-border bg-card text-card-foreground shadow-sm",
|
||||
className,
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
|
|
@ -33,8 +33,8 @@ const CardTitle = React.forwardRef<
|
|||
<h3
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"text-base font-semibold leading-tight tracking-tight",
|
||||
className,
|
||||
"text-base font-semibold leading-tight tracking-tight text-[14px]",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -1,31 +1,33 @@
|
|||
"use client"
|
||||
"use client";
|
||||
|
||||
import * as React from "react"
|
||||
import * as PopoverPrimitive from "@radix-ui/react-popover"
|
||||
import * as PopoverPrimitive from "@radix-ui/react-popover";
|
||||
import * as React from "react";
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const Popover = PopoverPrimitive.Root
|
||||
const Popover = PopoverPrimitive.Root;
|
||||
|
||||
const PopoverTrigger = PopoverPrimitive.Trigger
|
||||
const PopoverTrigger = PopoverPrimitive.Trigger;
|
||||
|
||||
const PopoverAnchor = PopoverPrimitive.Anchor;
|
||||
|
||||
const PopoverContent = React.forwardRef<
|
||||
React.ElementRef<typeof PopoverPrimitive.Content>,
|
||||
React.ComponentPropsWithoutRef<typeof PopoverPrimitive.Content>
|
||||
React.ElementRef<typeof PopoverPrimitive.Content>,
|
||||
React.ComponentPropsWithoutRef<typeof PopoverPrimitive.Content>
|
||||
>(({ className, align = "center", sideOffset = 4, ...props }, ref) => (
|
||||
<PopoverPrimitive.Portal>
|
||||
<PopoverPrimitive.Content
|
||||
ref={ref}
|
||||
align={align}
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
"z-50 w-72 rounded-md border bg-popover p-4 text-popover-foreground shadow-md outline-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
</PopoverPrimitive.Portal>
|
||||
))
|
||||
PopoverContent.displayName = PopoverPrimitive.Content.displayName
|
||||
<PopoverPrimitive.Portal>
|
||||
<PopoverPrimitive.Content
|
||||
ref={ref}
|
||||
align={align}
|
||||
sideOffset={sideOffset}
|
||||
className={cn(
|
||||
"z-50 w-72 rounded-md border bg-popover p-4 text-popover-foreground shadow-md outline-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
</PopoverPrimitive.Portal>
|
||||
));
|
||||
PopoverContent.displayName = PopoverPrimitive.Content.displayName;
|
||||
|
||||
export { Popover, PopoverTrigger, PopoverContent }
|
||||
export { Popover, PopoverTrigger, PopoverAnchor, PopoverContent };
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ const SelectTrigger = React.forwardRef<
|
|||
<SelectPrimitive.Trigger
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"flex h-10 w-full items-center justify-between rounded-md border border-input px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:cursor-not-allowed disabled:bg-muted [&>span]:line-clamp-1",
|
||||
"flex h-10 w-full items-center justify-between rounded-md border border-input px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:cursor-not-allowed disabled:bg-muted [&>span]:line-clamp-1 disabled:border-none",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
|
|
@ -34,7 +34,7 @@ const SelectTrigger = React.forwardRef<
|
|||
{children}
|
||||
<SelectPrimitive.Icon asChild>
|
||||
{props.disabled ? (
|
||||
<LockIcon className="text-base h-5 w-5 opacity-50" />
|
||||
<LockIcon className="text-base h-4 w-4 opacity-50" />
|
||||
) : (
|
||||
<ChevronsUpDown className="text-base h-5 w-5" />
|
||||
)}
|
||||
|
|
|
|||
63
frontend/package-lock.json
generated
|
|
@ -44,6 +44,7 @@
|
|||
"react-icons": "^5.5.0",
|
||||
"react-markdown": "^10.1.0",
|
||||
"react-syntax-highlighter": "^15.6.1",
|
||||
"react-textarea-autosize": "^8.5.9",
|
||||
"rehype-mathjax": "^7.1.0",
|
||||
"rehype-raw": "^7.0.0",
|
||||
"remark-gfm": "^4.0.1",
|
||||
|
|
@ -8473,6 +8474,23 @@
|
|||
"react": ">= 0.14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/react-textarea-autosize": {
|
||||
"version": "8.5.9",
|
||||
"resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz",
|
||||
"integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.20.13",
|
||||
"use-composed-ref": "^1.3.0",
|
||||
"use-latest": "^1.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/read-cache": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
|
||||
|
|
@ -10126,6 +10144,51 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/use-composed-ref": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.4.0.tgz",
|
||||
"integrity": "sha512-djviaxuOOh7wkj0paeO1Q/4wMZ8Zrnag5H6yBvzN7AKKe8beOaED9SF5/ByLqsku8NP4zQqsvM2u3ew/tJK8/w==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/react": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/use-isomorphic-layout-effect": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.1.tgz",
|
||||
"integrity": "sha512-tpZZ+EX0gaghDAiFR37hj5MgY6ZN55kLiPkJsKxBMZ6GZdOSPJXiOzPM984oPYZ5AnehYx5WQp1+ME8I/P/pRA==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/react": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/use-latest": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.3.0.tgz",
|
||||
"integrity": "sha512-mhg3xdm9NaM8q+gLT8KryJPnRFOz1/5XPBhmDEVZK1webPzDjrPk7f/mbpeLqTgB9msytYWANxgALOCJKnLvcQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"use-isomorphic-layout-effect": "^1.1.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/react": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/use-sidecar": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz",
|
||||
|
|
|
|||
|
|
@ -45,6 +45,7 @@
|
|||
"react-icons": "^5.5.0",
|
||||
"react-markdown": "^10.1.0",
|
||||
"react-syntax-highlighter": "^15.6.1",
|
||||
"react-textarea-autosize": "^8.5.9",
|
||||
"rehype-mathjax": "^7.1.0",
|
||||
"rehype-raw": "^7.0.0",
|
||||
"remark-gfm": "^4.0.1",
|
||||
|
|
|
|||
File diff suppressed because it is too large
36
frontend/src/app/settings/icons/google-drive-icon.tsx
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
const GoogleDriveIcon = () => (
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
width="18"
|
||||
height="16"
|
||||
viewBox="0 0 18 16"
|
||||
fill="none"
|
||||
>
|
||||
<path
|
||||
d="M2.03338 13.2368L2.75732 14.4872C2.90774 14.7504 3.12398 14.9573 3.37783 15.1077L5.9633 10.6325H0.792358C0.792358 10.9239 0.867572 11.2154 1.018 11.4786L2.03338 13.2368Z"
|
||||
fill="#0066DA"
|
||||
/>
|
||||
<path
|
||||
d="M9.00005 5.36753L6.41458 0.892312C6.16073 1.04274 5.94449 1.24958 5.79407 1.51283L1.018 9.78633C0.870339 10.0439 0.792555 10.3356 0.792358 10.6325H5.9633L9.00005 5.36753Z"
|
||||
fill="#00AC47"
|
||||
/>
|
||||
<path
|
||||
d="M14.6223 15.1077C14.8761 14.9573 15.0924 14.7504 15.2428 14.4872L15.5436 13.9701L16.9821 11.4786C17.1325 11.2154 17.2077 10.9239 17.2077 10.6325H12.0364L13.1368 12.7949L14.6223 15.1077Z"
|
||||
fill="#EA4335"
|
||||
/>
|
||||
<path
|
||||
d="M9.00005 5.36753L11.5855 0.892313C11.3317 0.741885 11.0402 0.666672 10.7394 0.666672H7.26074C6.95988 0.666672 6.66843 0.751287 6.41458 0.892312L9.00005 5.36753Z"
|
||||
fill="#00832D"
|
||||
/>
|
||||
<path
|
||||
d="M12.0368 10.6325H5.9633L3.37783 15.1077C3.63167 15.2581 3.92313 15.3333 4.22398 15.3333H13.7761C14.077 15.3333 14.3684 15.2487 14.6223 15.1077L12.0368 10.6325Z"
|
||||
fill="#2684FC"
|
||||
/>
|
||||
<path
|
||||
d="M14.5941 5.64958L12.206 1.51283C12.0556 1.24958 11.8394 1.04274 11.5855 0.892313L9.00005 5.36753L12.0368 10.6325L17.1983 10.6325C17.1983 10.341 17.1231 10.0496 16.9727 9.78633L14.5941 5.64958Z"
|
||||
fill="#FFBA00"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export default GoogleDriveIcon;
|
||||
164
frontend/src/app/settings/icons/one-drive-icon.tsx
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
const OneDriveIcon = () => (
|
||||
<svg
|
||||
width="17"
|
||||
height="12"
|
||||
viewBox="0 0 17 12"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<g clip-path="url(#clip0_3016_367)">
|
||||
<path
|
||||
d="M5.2316 2.32803C2.88332 2.3281 1.128 4.25034 0.99585 6.39175C1.07765 6.85315 1.34653 7.7643 1.76759 7.71751C2.29391 7.65902 3.61947 7.71751 4.75008 5.67068C5.57599 4.17546 7.27498 2.328 5.2316 2.32803Z"
|
||||
fill="url(#paint0_radial_3016_367)"
|
||||
/>
|
||||
<path
|
||||
d="M4.68864 3.12741C3.89927 4.37718 2.83674 6.16798 2.47813 6.7315C2.05185 7.40136 0.922937 7.11678 1.01646 6.15663C1.00724 6.23457 1.00016 6.31315 0.995274 6.39226C0.840839 8.89029 2.82143 10.9648 5.28604 10.9648C8.00238 10.9648 14.4806 7.58038 13.825 4.18931C13.134 2.19599 11.1918 0.766266 8.99072 0.766266C6.78965 0.766266 5.37899 2.03436 4.68864 3.12741Z"
|
||||
fill="url(#paint1_radial_3016_367)"
|
||||
/>
|
||||
<path
|
||||
d="M4.68864 3.12741C3.89927 4.37718 2.83674 6.16798 2.47813 6.7315C2.05185 7.40136 0.922937 7.11678 1.01646 6.15663C1.00724 6.23457 1.00016 6.31315 0.995274 6.39226C0.840839 8.89029 2.82143 10.9648 5.28604 10.9648C8.00238 10.9648 14.4806 7.58038 13.825 4.18931C13.134 2.19599 11.1918 0.766266 8.99072 0.766266C6.78965 0.766266 5.37899 2.03436 4.68864 3.12741Z"
|
||||
fill="url(#paint2_radial_3016_367)"
|
||||
fill-opacity="0.4"
|
||||
/>
|
||||
<path
|
||||
d="M4.68864 3.12741C3.89927 4.37718 2.83674 6.16798 2.47813 6.7315C2.05185 7.40136 0.922937 7.11678 1.01646 6.15663C1.00724 6.23457 1.00016 6.31315 0.995274 6.39226C0.840839 8.89029 2.82143 10.9648 5.28604 10.9648C8.00238 10.9648 14.4806 7.58038 13.825 4.18931C13.134 2.19599 11.1918 0.766266 8.99072 0.766266C6.78965 0.766266 5.37899 2.03436 4.68864 3.12741Z"
|
||||
fill="url(#paint3_radial_3016_367)"
|
||||
/>
|
||||
<path
|
||||
d="M4.68864 3.12741C3.89927 4.37718 2.83674 6.16798 2.47813 6.7315C2.05185 7.40136 0.922937 7.11678 1.01646 6.15663C1.00724 6.23457 1.00016 6.31315 0.995274 6.39226C0.840839 8.89029 2.82143 10.9648 5.28604 10.9648C8.00238 10.9648 14.4806 7.58038 13.825 4.18931C13.134 2.19599 11.1918 0.766266 8.99072 0.766266C6.78965 0.766266 5.37899 2.03436 4.68864 3.12741Z"
|
||||
fill="url(#paint4_radial_3016_367)"
|
||||
fill-opacity="0.6"
|
||||
/>
|
||||
<path
|
||||
d="M4.68864 3.12741C3.89927 4.37718 2.83674 6.16798 2.47813 6.7315C2.05185 7.40136 0.922937 7.11678 1.01646 6.15663C1.00724 6.23457 1.00016 6.31315 0.995274 6.39226C0.840839 8.89029 2.82143 10.9648 5.28604 10.9648C8.00238 10.9648 14.4806 7.58038 13.825 4.18931C13.134 2.19599 11.1918 0.766266 8.99072 0.766266C6.78965 0.766266 5.37899 2.03436 4.68864 3.12741Z"
|
||||
fill="url(#paint5_radial_3016_367)"
|
||||
fill-opacity="0.9"
|
||||
/>
|
||||
<path
|
||||
d="M5.24634 10.9659C5.24634 10.9659 11.7322 10.9786 12.8323 10.9786C14.8288 10.9786 16.3467 9.34866 16.3468 7.44669C16.3468 5.54468 14.7983 3.92459 12.8323 3.92459C10.8663 3.92459 9.73412 5.39542 8.88374 7.00089C7.8873 8.88221 6.61615 10.9433 5.24634 10.9659Z"
|
||||
fill="url(#paint6_linear_3016_367)"
|
||||
/>
|
||||
<path
|
||||
d="M5.24634 10.9659C5.24634 10.9659 11.7322 10.9786 12.8323 10.9786C14.8288 10.9786 16.3467 9.34866 16.3468 7.44669C16.3468 5.54468 14.7983 3.92459 12.8323 3.92459C10.8663 3.92459 9.73412 5.39542 8.88374 7.00089C7.8873 8.88221 6.61615 10.9433 5.24634 10.9659Z"
|
||||
fill="url(#paint7_radial_3016_367)"
|
||||
fill-opacity="0.4"
|
||||
/>
|
||||
<path
|
||||
d="M5.24634 10.9659C5.24634 10.9659 11.7322 10.9786 12.8323 10.9786C14.8288 10.9786 16.3467 9.34866 16.3468 7.44669C16.3468 5.54468 14.7983 3.92459 12.8323 3.92459C10.8663 3.92459 9.73412 5.39542 8.88374 7.00089C7.8873 8.88221 6.61615 10.9433 5.24634 10.9659Z"
|
||||
fill="url(#paint8_radial_3016_367)"
|
||||
fill-opacity="0.9"
|
||||
/>
|
||||
</g>
|
||||
<defs>
|
||||
<radialGradient
|
||||
id="paint0_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(1.28709 2.88928) rotate(50.1526) scale(4.84121 8.03004)"
|
||||
>
|
||||
<stop stop-color="#4894FE" />
|
||||
<stop offset="0.695072" stop-color="#0934B3" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint1_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(14.2836 -2.68456) rotate(130.923) scale(20.8177 15.4261)"
|
||||
>
|
||||
<stop offset="0.165327" stop-color="#23C0FE" />
|
||||
<stop offset="0.534" stop-color="#1C91FF" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint2_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(4.42852 3.16495) rotate(-139.986) scale(4.23243 9.68892)"
|
||||
>
|
||||
<stop stop-color="white" />
|
||||
<stop offset="0.660528" stop-color="#ADC0FF" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint3_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(9.03076 8.16737) rotate(-139.764) scale(4.77056 7.24512)"
|
||||
>
|
||||
<stop stop-color="#033ACC" />
|
||||
<stop offset="1" stop-color="#368EFF" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint4_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(4.14837 0.44361) rotate(66.5713) scale(10.4677 11.3005)"
|
||||
>
|
||||
<stop offset="0.592618" stop-color="#3464E3" stop-opacity="0" />
|
||||
<stop offset="1" stop-color="#033ACC" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint5_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(14.1157 -1.59739) rotate(135) scale(15.3977 24.123)"
|
||||
>
|
||||
<stop stop-color="#4BFDE8" />
|
||||
<stop offset="0.543937" stop-color="#4BFDE8" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<linearGradient
|
||||
id="paint6_linear_3016_367"
|
||||
x1="10.8"
|
||||
y1="10.9715"
|
||||
x2="10.8"
|
||||
y2="4.00825"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop stop-color="#0086FF" />
|
||||
<stop offset="0.49" stop-color="#00BBFF" />
|
||||
</linearGradient>
|
||||
<radialGradient
|
||||
id="paint7_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(7.16132 4.75417) rotate(21.6324) scale(6.97728 13.2126)"
|
||||
>
|
||||
<stop stop-color="white" />
|
||||
<stop offset="0.785262" stop-color="white" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint8_radial_3016_367"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(16.1298 3.37785) rotate(139.243) scale(9.56565 9.59808)"
|
||||
>
|
||||
<stop stop-color="#4BFDE8" />
|
||||
<stop offset="0.584724" stop-color="#4BFDE8" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<clipPath id="clip0_3016_367">
|
||||
<rect
|
||||
width="15.6444"
|
||||
height="10.6667"
|
||||
fill="white"
|
||||
transform="translate(0.844482 0.666672)"
|
||||
/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export default OneDriveIcon;
|
||||
211
frontend/src/app/settings/icons/share-point-icon.tsx
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
const SharePointIcon = () => (
|
||||
<svg
|
||||
width="15"
|
||||
height="16"
|
||||
viewBox="0 0 15 16"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<g clip-path="url(#clip0_3016_409)">
|
||||
<path
|
||||
d="M6.1335 9.6C8.78446 9.6 10.9335 7.45096 10.9335 4.8C10.9335 2.14903 8.78446 0 6.1335 0C3.48254 0 1.3335 2.14903 1.3335 4.8C1.3335 7.45096 3.48254 9.6 6.1335 9.6Z"
|
||||
fill="url(#paint0_linear_3016_409)"
|
||||
/>
|
||||
<path
|
||||
d="M6.1335 9.6C8.78446 9.6 10.9335 7.45096 10.9335 4.8C10.9335 2.14903 8.78446 0 6.1335 0C3.48254 0 1.3335 2.14903 1.3335 4.8C1.3335 7.45096 3.48254 9.6 6.1335 9.6Z"
|
||||
fill="url(#paint1_radial_3016_409)"
|
||||
fill-opacity="0.2"
|
||||
/>
|
||||
<path
|
||||
d="M6.1335 9.6C8.78446 9.6 10.9335 7.45096 10.9335 4.8C10.9335 2.14903 8.78446 0 6.1335 0C3.48254 0 1.3335 2.14903 1.3335 4.8C1.3335 7.45096 3.48254 9.6 6.1335 9.6Z"
|
||||
fill="url(#paint2_radial_3016_409)"
|
||||
fill-opacity="0.31"
|
||||
/>
|
||||
<path
|
||||
d="M6.1335 9.6C8.78446 9.6 10.9335 7.45096 10.9335 4.8C10.9335 2.14903 8.78446 0 6.1335 0C3.48254 0 1.3335 2.14903 1.3335 4.8C1.3335 7.45096 3.48254 9.6 6.1335 9.6Z"
|
||||
fill="url(#paint3_radial_3016_409)"
|
||||
fill-opacity="0.7"
|
||||
/>
|
||||
<path
|
||||
d="M10.5117 12.8C12.7209 12.8 14.5117 11.0091 14.5117 8.8C14.5117 6.59088 12.7209 4.8 10.5117 4.8C8.3026 4.8 6.51172 6.59088 6.51172 8.8C6.51172 11.0091 8.3026 12.8 10.5117 12.8Z"
|
||||
fill="url(#paint4_linear_3016_409)"
|
||||
/>
|
||||
<path
|
||||
d="M10.5117 12.8C12.7209 12.8 14.5117 11.0091 14.5117 8.8C14.5117 6.59088 12.7209 4.8 10.5117 4.8C8.3026 4.8 6.51172 6.59088 6.51172 8.8C6.51172 11.0091 8.3026 12.8 10.5117 12.8Z"
|
||||
fill="url(#paint5_radial_3016_409)"
|
||||
fill-opacity="0.5"
|
||||
/>
|
||||
<path
|
||||
d="M10.5117 12.8C12.7209 12.8 14.5117 11.0091 14.5117 8.8C14.5117 6.59088 12.7209 4.8 10.5117 4.8C8.3026 4.8 6.51172 6.59088 6.51172 8.8C6.51172 11.0091 8.3026 12.8 10.5117 12.8Z"
|
||||
fill="url(#paint6_radial_3016_409)"
|
||||
fill-opacity="0.7"
|
||||
/>
|
||||
<path
|
||||
d="M6.7335 16C8.61126 16 10.1335 14.4778 10.1335 12.6C10.1335 10.7222 8.61126 9.2 6.7335 9.2C4.85574 9.2 3.3335 10.7222 3.3335 12.6C3.3335 14.4778 4.85574 16 6.7335 16Z"
|
||||
fill="url(#paint7_linear_3016_409)"
|
||||
/>
|
||||
<path
|
||||
d="M6.7335 16C8.61126 16 10.1335 14.4778 10.1335 12.6C10.1335 10.7222 8.61126 9.2 6.7335 9.2C4.85574 9.2 3.3335 10.7222 3.3335 12.6C3.3335 14.4778 4.85574 16 6.7335 16Z"
|
||||
fill="url(#paint8_linear_3016_409)"
|
||||
fill-opacity="0.32"
|
||||
/>
|
||||
<path
|
||||
d="M5.23354 7.60001H1.43354C0.715575 7.60001 0.133545 8.18204 0.133545 8.90001V12.7C0.133545 13.418 0.715575 14 1.43354 14H5.23354C5.95151 14 6.53354 13.418 6.53354 12.7V8.90001C6.53354 8.18204 5.95151 7.60001 5.23354 7.60001Z"
|
||||
fill="url(#paint9_radial_3016_409)"
|
||||
/>
|
||||
<path
|
||||
d="M5.23354 7.60001H1.43354C0.715575 7.60001 0.133545 8.18204 0.133545 8.90001V12.7C0.133545 13.418 0.715575 14 1.43354 14H5.23354C5.95151 14 6.53354 13.418 6.53354 12.7V8.90001C6.53354 8.18204 5.95151 7.60001 5.23354 7.60001Z"
|
||||
fill="url(#paint10_radial_3016_409)"
|
||||
fill-opacity="0.6"
|
||||
/>
|
||||
<path
|
||||
d="M1.95581 11.8734L2.64917 11.523C2.72733 11.676 2.82929 11.7887 2.95505 11.8611C3.08249 11.9335 3.22185 11.9697 3.37309 11.9697C3.54133 11.9697 3.66965 11.9368 3.75801 11.871C3.84641 11.8036 3.89057 11.7024 3.89057 11.5675C3.89057 11.4622 3.84809 11.3733 3.76313 11.301C3.67817 11.2269 3.52777 11.171 3.31193 11.1332C2.90069 11.0608 2.60157 10.9341 2.41465 10.7531C2.22941 10.5722 2.13679 10.3468 2.13679 10.077C2.13679 9.74136 2.25915 9.4732 2.50387 9.27248C2.74857 9.0718 3.07145 8.97144 3.47253 8.97144C3.74273 8.97144 3.98065 9.02492 4.18629 9.13184C4.39189 9.23876 4.55505 9.39176 4.67569 9.59084L3.99765 9.92892C3.92285 9.81704 3.84213 9.73644 3.75549 9.68708C3.66881 9.63608 3.56005 9.61056 3.42917 9.61056C3.27285 9.61056 3.15389 9.64348 3.07233 9.70928C2.99245 9.77508 2.95249 9.86064 2.95249 9.96592C2.95249 10.0564 2.99073 10.1362 3.06721 10.2053C3.14537 10.2727 3.30173 10.3278 3.53625 10.3706C3.93053 10.443 4.22449 10.5746 4.41825 10.7654C4.61369 10.9546 4.71137 11.194 4.71137 11.4836C4.71137 11.8356 4.59497 12.1145 4.36217 12.3201C4.12933 12.5258 3.79713 12.6286 3.36545 12.6286C3.05277 12.6286 2.77065 12.5628 2.51916 12.4312C2.26935 12.2979 2.08157 12.112 1.95581 11.8734Z"
|
||||
fill="white"
|
||||
/>
|
||||
<path
|
||||
d="M1.95483 11.9088L2.64867 11.5466C2.72687 11.7047 2.82891 11.8212 2.95475 11.896C3.08227 11.9709 3.22171 12.0083 3.37307 12.0083C3.54143 12.0083 3.66983 11.9743 3.75823 11.9062C3.84667 11.8365 3.89087 11.732 3.89087 11.5925C3.89087 11.4837 3.84835 11.3918 3.76335 11.317C3.67831 11.2405 3.52783 11.1827 3.31187 11.1436C2.90031 11.0688 2.60103 10.9378 2.41397 10.7508C2.22862 10.5637 2.13594 10.3307 2.13594 10.0518C2.13594 9.70491 2.25838 9.42775 2.50325 9.22027C2.74815 9.01279 3.07123 8.90907 3.47255 8.90907C3.74295 8.90907 3.98099 8.96435 4.18679 9.07487C4.39255 9.18539 4.55579 9.34355 4.67651 9.54931L3.99803 9.89879C3.92319 9.78315 3.84243 9.69983 3.75571 9.64879C3.66895 9.59607 3.56015 9.56971 3.42919 9.56971C3.27275 9.56971 3.15371 9.60375 3.07207 9.67175C2.99215 9.73979 2.95219 9.82819 2.95219 9.93703C2.95219 10.0306 2.99047 10.113 3.06699 10.1845C3.14519 10.2542 3.30167 10.3112 3.53631 10.3554C3.93083 10.4302 4.22503 10.5662 4.41891 10.7635C4.61447 10.959 4.71223 11.2065 4.71223 11.5058C4.71223 11.8697 4.59575 12.1579 4.36279 12.3705C4.12979 12.583 3.79735 12.6893 3.36543 12.6893C3.05251 12.6893 2.77023 12.6213 2.51856 12.4853C2.26858 12.3475 2.08067 12.1554 1.95483 11.9088Z"
|
||||
fill="white"
|
||||
/>
|
||||
</g>
|
||||
<defs>
|
||||
<linearGradient
|
||||
id="paint0_linear_3016_409"
|
||||
x1="2.5335"
|
||||
y1="1.2"
|
||||
x2="8.9335"
|
||||
y2="9.6"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop stop-color="#00E3DF" />
|
||||
<stop offset="0.410156" stop-color="#0097A8" />
|
||||
<stop offset="1" stop-color="#007791" />
|
||||
</linearGradient>
|
||||
<radialGradient
|
||||
id="paint1_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(7.60222 10.9279) rotate(-112.448) scale(7.37044 13.2516)"
|
||||
>
|
||||
<stop offset="0.28573" stop-color="#003B5D" />
|
||||
<stop offset="0.612265" stop-color="#004A6C" stop-opacity="0.688298" />
|
||||
<stop offset="0.968041" stop-color="#006F94" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint2_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(7.77166 8.81012) rotate(-112.063) scale(6.22076 11.1709)"
|
||||
>
|
||||
<stop offset="0.259744" stop-color="#002A42" />
|
||||
<stop offset="0.612265" stop-color="#004261" stop-opacity="0.688298" />
|
||||
<stop offset="0.968041" stop-color="#006F94" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint3_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(8.87294 0.508276) rotate(124.447) scale(5.20428)"
|
||||
>
|
||||
<stop stop-color="#78EDFF" />
|
||||
<stop offset="1" stop-color="#2CCFCA" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<linearGradient
|
||||
id="paint4_linear_3016_409"
|
||||
x1="7.51172"
|
||||
y1="5.8"
|
||||
x2="12.845"
|
||||
y2="12.8"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop stop-color="#00E3DF" />
|
||||
<stop offset="0.476427" stop-color="#00A2B8" />
|
||||
<stop offset="0.945063" stop-color="#00637C" />
|
||||
</linearGradient>
|
||||
<radialGradient
|
||||
id="paint5_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(8.22004 12.1333) rotate(-70.8012) scale(4.94148 8.90348)"
|
||||
>
|
||||
<stop stop-color="#003B5D" />
|
||||
<stop offset="0.492035" stop-color="#004C6C" stop-opacity="0.72" />
|
||||
<stop offset="0.968041" stop-color="#007A86" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint6_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(12.7946 5.22356) rotate(124.447) scale(4.33692)"
|
||||
>
|
||||
<stop stop-color="#78EDFF" />
|
||||
<stop offset="1" stop-color="#2CCFCA" stop-opacity="0" />
|
||||
</radialGradient>
|
||||
<linearGradient
|
||||
id="paint7_linear_3016_409"
|
||||
x1="4.3535"
|
||||
y1="9.54"
|
||||
x2="7.7535"
|
||||
y2="16.34"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop offset="0.0534989" stop-color="#75FFF6" />
|
||||
<stop offset="0.51144" stop-color="#00C7D1" />
|
||||
<stop offset="0.96002" stop-color="#0096AD" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="paint8_linear_3016_409"
|
||||
x1="10.1179"
|
||||
y1="16.0003"
|
||||
x2="8.30102"
|
||||
y2="13.4503"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
>
|
||||
<stop offset="0.259744" stop-color="#0E5A5D" />
|
||||
<stop offset="0.535716" stop-color="#126C6B" stop-opacity="0.688298" />
|
||||
<stop offset="0.968041" stop-color="#1C948A" stop-opacity="0" />
|
||||
</linearGradient>
|
||||
<radialGradient
|
||||
id="paint9_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(0.133545 7.60001) rotate(45) scale(9.05096)"
|
||||
>
|
||||
<stop offset="0.0625" stop-color="#00B6BD" />
|
||||
<stop offset="0.890131" stop-color="#00495C" />
|
||||
</radialGradient>
|
||||
<radialGradient
|
||||
id="paint10_radial_3016_409"
|
||||
cx="0"
|
||||
cy="0"
|
||||
r="1"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
gradientTransform="translate(3.33354 11.44) rotate(90) scale(4.48 5.1)"
|
||||
>
|
||||
<stop offset="0.566964" stop-color="#1E8581" stop-opacity="0" />
|
||||
<stop offset="0.973806" stop-color="#1ECBE6" />
|
||||
</radialGradient>
|
||||
<clipPath id="clip0_3016_409">
|
||||
<rect
|
||||
width="14.4"
|
||||
height="16"
|
||||
fill="white"
|
||||
transform="translate(0.133545)"
|
||||
/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
);
|
||||
|
||||
export default SharePointIcon;
|
||||
|
|
@ -1,7 +1,8 @@
|
|||
"use client";
|
||||
|
||||
import { ArrowUpRight, Loader2, PlugZap, Plus, RefreshCw } from "lucide-react";
|
||||
import { ArrowUpRight, Loader2, Plus, Minus } from "lucide-react";
|
||||
import { useRouter, useSearchParams } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { Suspense, useCallback, useEffect, useState } from "react";
|
||||
import { useUpdateFlowSettingMutation } from "@/app/api/mutations/useUpdateFlowSettingMutation";
|
||||
import {
|
||||
|
|
@ -12,7 +13,6 @@ import {
|
|||
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
|
||||
import { ConfirmationDialog } from "@/components/confirmation-dialog";
|
||||
import { ProtectedRoute } from "@/components/protected-route";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Card,
|
||||
|
|
@ -21,7 +21,6 @@ import {
|
|||
CardHeader,
|
||||
CardTitle,
|
||||
} from "@/components/ui/card";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
|
|
@ -44,6 +43,10 @@ import { getFallbackModels, type ModelProvider } from "./helpers/model-helpers";
|
|||
import { ModelSelectItems } from "./helpers/model-select-item";
|
||||
import { LabelWrapper } from "@/components/label-wrapper";
|
||||
|
||||
import GoogleDriveIcon from "./icons/google-drive-icon";
|
||||
import OneDriveIcon from "./icons/one-drive-icon";
|
||||
import SharePointIcon from "./icons/share-point-icon";
|
||||
|
||||
const { MAX_SYSTEM_PROMPT_CHARS } = UI_CONSTANTS;
|
||||
|
||||
interface GoogleDriveFile {
|
||||
|
|
@ -92,6 +95,33 @@ interface Connection {
|
|||
last_sync?: string;
|
||||
}
|
||||
|
||||
const DEFAULT_CONNECTORS: Connector[] = [
|
||||
{
|
||||
id: "google_drive",
|
||||
name: "Google Drive",
|
||||
description: "Google Drive is not configured.",
|
||||
icon: <GoogleDriveIcon />,
|
||||
status: "not_connected",
|
||||
type: "google_drive",
|
||||
},
|
||||
{
|
||||
id: "one_drive",
|
||||
name: "OneDrive",
|
||||
description: "OneDrive is not configured.",
|
||||
icon: <OneDriveIcon />,
|
||||
status: "not_connected",
|
||||
type: "one_drive",
|
||||
},
|
||||
{
|
||||
id: "amazon_s3",
|
||||
name: "SharePoint",
|
||||
description: "SharePoint is not configured.",
|
||||
icon: <SharePointIcon />,
|
||||
status: "not_connected",
|
||||
type: "sharepoint",
|
||||
},
|
||||
];
|
||||
|
||||
function KnowledgeSourcesPage() {
|
||||
const { isAuthenticated, isNoAuthMode } = useAuth();
|
||||
const { addTask, tasks } = useTask();
|
||||
|
|
@ -262,22 +292,20 @@ function KnowledgeSourcesPage() {
|
|||
updateFlowSettingMutation.mutate({ picture_descriptions: checked });
|
||||
};
|
||||
|
||||
console.log({ connectors });
|
||||
|
||||
// Helper function to get connector icon
|
||||
const getConnectorIcon = useCallback((iconName: string) => {
|
||||
const iconMap: { [key: string]: React.ReactElement } = {
|
||||
"google-drive": (
|
||||
<div className="w-8 h-8 bg-blue-600 rounded flex items-center justify-center text-white font-bold leading-none shrink-0">
|
||||
G
|
||||
</div>
|
||||
),
|
||||
"google-drive": <GoogleDriveIcon />,
|
||||
sharepoint: (
|
||||
<div className="w-8 h-8 bg-blue-700 rounded flex items-center justify-center text-white font-bold leading-none shrink-0">
|
||||
SP
|
||||
</div>
|
||||
),
|
||||
onedrive: (
|
||||
<div className="w-8 h-8 bg-blue-400 rounded flex items-center justify-center text-white font-bold leading-none shrink-0">
|
||||
OD
|
||||
<div className="w-8 h-8 bg-white border border-gray-300 rounded flex items-center justify-center">
|
||||
<OneDriveIcon />
|
||||
</div>
|
||||
),
|
||||
};
|
||||
|
|
@ -313,7 +341,7 @@ function KnowledgeSourcesPage() {
|
|||
status: "not_connected" as const,
|
||||
type: type,
|
||||
}));
|
||||
|
||||
console.log({ initialConnectors });
|
||||
setConnectors(initialConnectors);
|
||||
|
||||
// Check status for each connector type
|
||||
|
|
@ -454,34 +482,13 @@ function KnowledgeSourcesPage() {
|
|||
const getStatusBadge = (status: Connector["status"]) => {
|
||||
switch (status) {
|
||||
case "connected":
|
||||
return (
|
||||
<Badge
|
||||
variant="default"
|
||||
className="bg-green-500/20 text-green-400 border-green-500/30"
|
||||
>
|
||||
Connected
|
||||
</Badge>
|
||||
);
|
||||
return <div className="h-2 w-2 bg-green-500 rounded-full" />;
|
||||
case "connecting":
|
||||
return (
|
||||
<Badge
|
||||
variant="secondary"
|
||||
className="bg-yellow-500/20 text-yellow-400 border-yellow-500/30"
|
||||
>
|
||||
Connecting...
|
||||
</Badge>
|
||||
);
|
||||
return <div className="h-2 w-2 bg-yellow-500 rounded-full" />;
|
||||
case "error":
|
||||
return <Badge variant="destructive">Error</Badge>;
|
||||
return <div className="h-2 w-2 bg-red-500 rounded-full" />;
|
||||
default:
|
||||
return (
|
||||
<Badge
|
||||
variant="outline"
|
||||
className="bg-muted/20 text-muted-foreground border-muted whitespace-nowrap"
|
||||
>
|
||||
Not Connected
|
||||
</Badge>
|
||||
);
|
||||
return <div className="h-2 w-2 bg-muted rounded-full" />;
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -614,7 +621,7 @@ function KnowledgeSourcesPage() {
|
|||
{/* Connectors Section */}
|
||||
<div className="space-y-6">
|
||||
<div>
|
||||
<h2 className="text-2xl font-semibold tracking-tight mb-2">
|
||||
<h2 className="text-lg font-semibold tracking-tight mb-2">
|
||||
Cloud Connectors
|
||||
</h2>
|
||||
</div>
|
||||
|
|
@ -701,73 +708,85 @@ function KnowledgeSourcesPage() {
|
|||
|
||||
{/* Connectors Grid */}
|
||||
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-3">
|
||||
{connectors.map((connector) => (
|
||||
<Card key={connector.id} className="relative flex flex-col">
|
||||
<CardHeader>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-3">
|
||||
{connector.icon}
|
||||
<div>
|
||||
<CardTitle className="text-lg">
|
||||
{DEFAULT_CONNECTORS.map((connector) => {
|
||||
const actualConnector = connectors.find(
|
||||
(c) => c.id === connector.id
|
||||
);
|
||||
return (
|
||||
<Card key={connector.id} className="relative flex flex-col">
|
||||
<CardHeader>
|
||||
<div className="flex flex-col items-start justify-between">
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="mb-1">
|
||||
<div
|
||||
className={`w-8 h-8 ${
|
||||
actualConnector ? "bg-white" : "bg-muted grayscale"
|
||||
} rounded flex items-center justify-center`}
|
||||
>
|
||||
{connector.icon}
|
||||
</div>
|
||||
</div>
|
||||
<CardTitle className="flex flex-row items-center gap-2">
|
||||
{connector.name}
|
||||
{actualConnector &&
|
||||
getStatusBadge(actualConnector.status)}
|
||||
</CardTitle>
|
||||
<CardDescription className="text-sm">
|
||||
{connector.description}
|
||||
<CardDescription className="text-[13px]">
|
||||
{actualConnector?.description
|
||||
? `${actualConnector.name} is configured.`
|
||||
: connector.description}
|
||||
</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
{getStatusBadge(connector.status)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent className="flex-1 flex flex-col justify-end space-y-4">
|
||||
{connector.status === "connected" ? (
|
||||
<div className="space-y-3">
|
||||
<Button
|
||||
onClick={() => navigateToKnowledgePage(connector)}
|
||||
disabled={isSyncing === connector.id}
|
||||
className="w-full"
|
||||
variant="outline"
|
||||
>
|
||||
<Plus className="h-4 w-4" />
|
||||
Add Knowledge
|
||||
</Button>
|
||||
</CardHeader>
|
||||
<CardContent className="flex-1 flex flex-col justify-end space-y-4">
|
||||
{actualConnector?.status === "connected" ? (
|
||||
<div className="space-y-3">
|
||||
<Button
|
||||
onClick={() => navigateToKnowledgePage(connector)}
|
||||
disabled={isSyncing === connector.id}
|
||||
className="w-full cursor-pointer"
|
||||
size="sm"
|
||||
>
|
||||
<Plus className="h-4 w-4" />
|
||||
Add Knowledge
|
||||
</Button>
|
||||
|
||||
{syncResults[connector.id] && (
|
||||
<div className="text-xs text-muted-foreground bg-muted/50 p-2 rounded">
|
||||
<div>
|
||||
Processed: {syncResults[connector.id]?.processed || 0}
|
||||
{syncResults[connector.id] && (
|
||||
<div className="text-xs text-muted-foreground bg-muted/50 p-2 rounded">
|
||||
<div>
|
||||
Processed:{" "}
|
||||
{syncResults[connector.id]?.processed || 0}
|
||||
</div>
|
||||
<div>
|
||||
Added: {syncResults[connector.id]?.added || 0}
|
||||
</div>
|
||||
{syncResults[connector.id]?.errors && (
|
||||
<div>
|
||||
Errors: {syncResults[connector.id]?.errors}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
Added: {syncResults[connector.id]?.added || 0}
|
||||
</div>
|
||||
{syncResults[connector.id]?.errors && (
|
||||
<div>Errors: {syncResults[connector.id]?.errors}</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<Button
|
||||
onClick={() => handleConnect(connector)}
|
||||
disabled={isConnecting === connector.id}
|
||||
className="w-full"
|
||||
>
|
||||
{isConnecting === connector.id ? (
|
||||
<>
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
Connecting...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<PlugZap className="mr-2 h-4 w-4" />
|
||||
Connect
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
))}
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div className="text-[13px] text-muted-foreground">
|
||||
<p>
|
||||
See our{" "}
|
||||
<Link
|
||||
className="text-accent-pink-foreground"
|
||||
href="https://github.com/langflow-ai/openrag/pull/96/files#diff-06889aa94ccf8dac64e70c8cc30a2ceed32cc3c0c2c14a6ff0336fe882a9c2ccR41"
|
||||
>
|
||||
Cloud Connectors installation guide
|
||||
</Link>{" "}
|
||||
for more detail.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
{/* Agent Behavior Section */}
|
||||
|
|
@ -877,32 +896,31 @@ function KnowledgeSourcesPage() {
|
|||
</LabelWrapper>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="system-prompt" className="text-base font-medium">
|
||||
Agent Instructions
|
||||
</Label>
|
||||
<Textarea
|
||||
id="system-prompt"
|
||||
placeholder="Enter your agent instructions here..."
|
||||
value={systemPrompt}
|
||||
onChange={(e) => setSystemPrompt(e.target.value)}
|
||||
rows={6}
|
||||
className={`resize-none ${
|
||||
systemPrompt.length > MAX_SYSTEM_PROMPT_CHARS
|
||||
? "border-red-500 focus:border-red-500"
|
||||
: ""
|
||||
}`}
|
||||
/>
|
||||
<div className="flex justify-start">
|
||||
<span
|
||||
className={`text-xs ${
|
||||
<LabelWrapper label="Agent Instructions" id="system-prompt">
|
||||
<Textarea
|
||||
id="system-prompt"
|
||||
placeholder="Enter your agent instructions here..."
|
||||
value={systemPrompt}
|
||||
onChange={(e) => setSystemPrompt(e.target.value)}
|
||||
rows={6}
|
||||
className={`resize-none ${
|
||||
systemPrompt.length > MAX_SYSTEM_PROMPT_CHARS
|
||||
? "text-red-500"
|
||||
: "text-muted-foreground"
|
||||
? "border-red-500 focus:border-red-500"
|
||||
: ""
|
||||
}`}
|
||||
>
|
||||
{systemPrompt.length}/{MAX_SYSTEM_PROMPT_CHARS} characters
|
||||
</span>
|
||||
</div>
|
||||
/>
|
||||
<div className="flex justify-start">
|
||||
<span
|
||||
className={`text-xs ${
|
||||
systemPrompt.length > MAX_SYSTEM_PROMPT_CHARS
|
||||
? "text-red-500"
|
||||
: "text-muted-foreground"
|
||||
}`}
|
||||
>
|
||||
{systemPrompt.length}/{MAX_SYSTEM_PROMPT_CHARS} characters
|
||||
</span>
|
||||
</div>
|
||||
</LabelWrapper>
|
||||
</div>
|
||||
<div className="flex justify-end pt-2">
|
||||
<Button
|
||||
|
|
@ -934,7 +952,9 @@ function KnowledgeSourcesPage() {
|
|||
<CardHeader>
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<CardTitle className="text-lg mb-4">Knowledge</CardTitle>
|
||||
<CardTitle className="text-lg mb-4">
|
||||
Knowledge ingestion and retrieval
|
||||
</CardTitle>
|
||||
<CardDescription>
|
||||
Quick knowledge settings. Edit in Langflow for full control.
|
||||
</CardDescription>
|
||||
|
|
@ -1045,47 +1065,94 @@ function KnowledgeSourcesPage() {
|
|||
</div>
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="chunk-size" className="text-base font-medium">
|
||||
Chunk size
|
||||
</Label>
|
||||
<div className="relative">
|
||||
<Input
|
||||
id="chunk-size"
|
||||
type="number"
|
||||
min="1"
|
||||
value={chunkSize}
|
||||
onChange={(e) => handleChunkSizeChange(e.target.value)}
|
||||
className="w-full pr-20"
|
||||
/>
|
||||
<div className="absolute inset-y-0 right-0 flex items-center pr-8 pointer-events-none">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
characters
|
||||
</span>
|
||||
<LabelWrapper id="chunk-size" label="Chunk size">
|
||||
<div className="relative">
|
||||
<Input
|
||||
id="chunk-size"
|
||||
type="number"
|
||||
min="1"
|
||||
value={chunkSize}
|
||||
onChange={(e) => handleChunkSizeChange(e.target.value)}
|
||||
className="w-full pr-20 [appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none"
|
||||
/>
|
||||
<div className="absolute inset-y-0 right-0 flex items-center">
|
||||
<span className="text-sm text-placeholder-foreground mr-4 pointer-events-none">
|
||||
characters
|
||||
</span>
|
||||
<div className="flex flex-col">
|
||||
<Button
|
||||
aria-label="Increase value"
|
||||
className="h-5 rounded-l-none rounded-br-none border-input border-b-[0.5px] focus-visible:relative"
|
||||
variant="outline"
|
||||
size="iconSm"
|
||||
onClick={() =>
|
||||
handleChunkSizeChange((chunkSize + 1).toString())
|
||||
}
|
||||
>
|
||||
<Plus className="text-muted-foreground" size={8} />
|
||||
</Button>
|
||||
<Button
|
||||
aria-label="Decrease value"
|
||||
className="h-5 rounded-l-none rounded-tr-none border-input border-t-[0.5px] focus-visible:relative"
|
||||
variant="outline"
|
||||
size="iconSm"
|
||||
onClick={() =>
|
||||
handleChunkSizeChange((chunkSize - 1).toString())
|
||||
}
|
||||
>
|
||||
<Minus className="text-muted-foreground" size={8} />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</LabelWrapper>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<Label
|
||||
htmlFor="chunk-overlap"
|
||||
className="text-base font-medium"
|
||||
>
|
||||
Chunk overlap
|
||||
</Label>
|
||||
<div className="relative">
|
||||
<Input
|
||||
id="chunk-overlap"
|
||||
type="number"
|
||||
min="0"
|
||||
value={chunkOverlap}
|
||||
onChange={(e) => handleChunkOverlapChange(e.target.value)}
|
||||
className="w-full pr-20"
|
||||
/>
|
||||
<div className="absolute inset-y-0 right-0 flex items-center pr-8 pointer-events-none">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
characters
|
||||
</span>
|
||||
<LabelWrapper id="chunk-overlap" label="Chunk overlap">
|
||||
<div className="relative">
|
||||
<Input
|
||||
id="chunk-overlap"
|
||||
type="number"
|
||||
min="0"
|
||||
value={chunkOverlap}
|
||||
onChange={(e) => handleChunkOverlapChange(e.target.value)}
|
||||
className="w-full pr-20 [appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none"
|
||||
/>
|
||||
<div className="absolute inset-y-0 right-0 flex items-center">
|
||||
<span className="text-sm text-placeholder-foreground mr-4 pointer-events-none">
|
||||
characters
|
||||
</span>
|
||||
<div className="flex flex-col">
|
||||
<Button
|
||||
aria-label="Increase value"
|
||||
className="h-5 rounded-l-none rounded-br-none border-input border-b-[0.5px] focus-visible:relative"
|
||||
variant="outline"
|
||||
size="iconSm"
|
||||
onClick={() =>
|
||||
handleChunkOverlapChange(
|
||||
(chunkOverlap + 1).toString()
|
||||
)
|
||||
}
|
||||
>
|
||||
<Plus className="text-muted-foreground" size={8} />
|
||||
</Button>
|
||||
<Button
|
||||
aria-label="Decrease value"
|
||||
className="h-5 rounded-l-none rounded-tr-none border-input border-t-[0.5px] focus-visible:relative"
|
||||
variant="outline"
|
||||
size="iconSm"
|
||||
onClick={() =>
|
||||
handleChunkOverlapChange(
|
||||
(chunkOverlap - 1).toString()
|
||||
)
|
||||
}
|
||||
>
|
||||
<Minus className="text-muted-foreground" size={8} />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</LabelWrapper>
|
||||
</div>
|
||||
</div>
|
||||
<div className="">
|
||||
|
|
|
|||
|
|
@ -53,6 +53,10 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
|
|||
const authPaths = ["/login", "/auth/callback", "/onboarding"];
|
||||
const isAuthPage = authPaths.includes(pathname);
|
||||
|
||||
// List of paths with smaller max-width
|
||||
const smallWidthPaths = ["/settings", "/settings/connector/new"];
|
||||
const isSmallWidthPath = smallWidthPaths.includes(pathname);
|
||||
|
||||
// Calculate active tasks for the bell icon
|
||||
const activeTasks = tasks.filter(
|
||||
(task) =>
|
||||
|
|
|
|||