Merge branch 'main' into feat-add-run-query-opensearch

Commit 75f01539e0
68 changed files with 4017 additions and 1989 deletions
.env.example (19 changes)

@@ -40,13 +40,28 @@ GOOGLE_OAUTH_CLIENT_SECRET=
MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=

# AWS Access Key ID and Secret Access Key with access to your S3 instance
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=

# OPTIONAL: DNS routable from Google (etc.) to handle continuous ingest (something like ngrok works). This enables continuous ingestion.
WEBHOOK_BASE_URL=

# Model Provider API Keys
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
OLLAMA_ENDPOINT=
WATSONX_API_KEY=
WATSONX_ENDPOINT=
WATSONX_PROJECT_ID=

AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# LLM Provider configuration. Providers can be "anthropic", "watsonx", "ibm", or "ollama".
LLM_PROVIDER=
LLM_MODEL=

# Embedding provider configuration. Providers can be "watsonx", "ibm", or "ollama".
EMBEDDING_PROVIDER=
EMBEDDING_MODEL=

# OPTIONAL: URL for the OpenRAG link to Langflow in the UI
LANGFLOW_PUBLIC_URL=
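For orientation, a minimal sketch of how the provider variables above might be filled in for an Ollama-based deployment. The values are illustrative placeholders, not defaults shipped in the file; `gpt-oss:20b` and `nomic-embed-text` are the models recommended elsewhere in these docs for Ollama users.

```bash
# Hypothetical .env values; substitute your own provider, models, and endpoint
LLM_PROVIDER=ollama
LLM_MODEL=gpt-oss:20b
EMBEDDING_PROVIDER=ollama
EMBEDDING_MODEL=nomic-embed-text
OLLAMA_ENDPOINT=http://localhost:11434
```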
.github/workflows/build-multiarch.yml (vendored, 35 changes)

@@ -14,6 +14,7 @@ jobs:
    outputs:
      skip_release: ${{ steps.version.outputs.skip_release }}
      version: ${{ steps.version.outputs.version }}
      docker_version: ${{ steps.version.outputs.docker_version }}
      is_prerelease: ${{ steps.version.outputs.is_prerelease }}
    steps:
      - name: Checkout

@@ -26,6 +27,12 @@ jobs:
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Version: $VERSION"

          # Normalize version per PEP 440 for Docker tags
          # e.g., "0.1.53-rc2" -> "0.1.53rc2" to match Python's importlib.metadata
          DOCKER_VERSION=$(echo "$VERSION" | sed -E 's/-?(rc|alpha|beta|dev|post)/\1/g')
          echo "docker_version=$DOCKER_VERSION" >> $GITHUB_OUTPUT
          echo "Docker Version: $DOCKER_VERSION"

          # Check if tag already exists
          if git rev-parse "v$VERSION" >/dev/null 2>&1; then
            echo "Tag v$VERSION already exists, skipping release"
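The hunk above derives a Docker-safe version from the project version string. A quick local check of the same `sed` expression, assuming a shell with GNU or BSD `sed -E`:

```bash
# Demonstrates the PEP 440 normalization used in the workflow
for v in 0.1.53-rc2 0.2.0-beta1 1.0.0; do
  echo "$v -> $(echo "$v" | sed -E 's/-?(rc|alpha|beta|dev|post)/\1/g')"
done
# Output:
# 0.1.53-rc2 -> 0.1.53rc2
# 0.2.0-beta1 -> 0.2.0beta1
# 1.0.0 -> 1.0.0
```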
@@ -117,13 +124,6 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Extract version from pyproject.toml
        id: version
        run: |
          VERSION=$(grep '^version = ' pyproject.toml | cut -d '"' -f 2)
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Version: $VERSION"

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -141,7 +141,7 @@ jobs:
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platform }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ matrix.tag }}:${{ steps.version.outputs.version }}-${{ matrix.arch }}
          tags: ${{ matrix.tag }}:${{ needs.check-version.outputs.docker_version }}-${{ matrix.arch }}
          cache-from: type=gha,scope=${{ matrix.image }}-${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.image }}-${{ matrix.arch }}

@@ -153,12 +153,6 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Extract version from pyproject.toml
        id: version
        run: |
          VERSION=$(grep '^version = ' pyproject.toml | cut -d '"' -f 2)
          echo "version=$VERSION" >> $GITHUB_OUTPUT

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:

@@ -167,7 +161,7 @@ jobs:

      - name: Create and push multi-arch manifests
        run: |
          VERSION=${{ steps.version.outputs.version }}
          VERSION=${{ needs.check-version.outputs.docker_version }}

          # Create versioned tags
          docker buildx imagetools create -t langflowai/openrag-backend:$VERSION \

@@ -224,13 +218,6 @@ jobs:
      - name: Install uv
        uses: astral-sh/setup-uv@v3

      - name: Extract version from pyproject.toml
        id: version
        run: |
          VERSION=$(grep '^version = ' pyproject.toml | cut -d '"' -f 2)
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Version: $VERSION"

      - name: Build wheel and source distribution
        run: |
          uv build

@@ -253,8 +240,8 @@ jobs:
      - name: Create Release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: v${{ steps.version.outputs.version }}
          name: Release ${{ steps.version.outputs.version }}
          tag_name: v${{ needs.check-version.outputs.version }}
          name: Release ${{ needs.check-version.outputs.version }}
          draft: false
          prerelease: ${{ needs.check-version.outputs.is_prerelease }}
          generate_release_notes: true
.github/workflows/test-integration.yml (vendored, 6 changes)

@@ -38,8 +38,12 @@ jobs:
          docker builder prune -af || true
          docker-compose -f docker-compose.yml down -v --remove-orphans || true

      - name: Cleanup OpenSearch data (root-owned files)
        run: |
          docker run --rm -v $(pwd):/work alpine rm -rf /work/opensearch-data || true

      - run: df -h

      - name: Checkout
        uses: actions/checkout@v4
docker-compose.yml

@@ -80,10 +80,11 @@ services:
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
    volumes:
      - ./openrag-documents:/app/openrag-documents:Z
      - ./keys:/app/keys:Z
      - ./flows:/app/flows:U,z
      - ./config:/app/config:Z
      - ${OPENRAG_DOCUMENTS_PATH:-./openrag-documents}:/app/openrag-documents:Z
      - ${OPENRAG_KEYS_PATH:-./keys}:/app/keys:Z
      - ${OPENRAG_FLOWS_PATH:-./flows}:/app/flows:U,z
      - ${OPENRAG_CONFIG_PATH:-./config}:/app/config:Z
      - ${OPENRAG_DATA_PATH:-./data}:/app/data:Z

  openrag-frontend:
    image: langflowai/openrag-frontend:${OPENRAG_VERSION:-latest}

@@ -100,7 +101,7 @@ services:

  langflow:
    volumes:
      - ./flows:/app/flows:U,z
      - ${OPENRAG_FLOWS_PATH:-./flows}:/app/flows:U,z
    image: langflowai/openrag-langflow:${OPENRAG_VERSION:-latest}
    build:
      context: .
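The `${VAR:-default}` syntax above means each host path can be overridden through the environment without editing the compose file. A minimal sketch, assuming the variable names shown in the diff; the paths are placeholders:

```bash
# Hypothetical host paths; if unset, the ./openrag-documents, ./keys, ./flows,
# ./config, and ./data directories next to the compose file are used
export OPENRAG_DOCUMENTS_PATH=/srv/openrag/documents
export OPENRAG_DATA_PATH=/srv/openrag/data
docker compose up -d
```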
docs/docs/_partial-docker-compose-down-and-prune.mdx (new file, 9 lines)

@@ -0,0 +1,9 @@
```bash title="Docker"
docker compose down --volumes --remove-orphans --rmi local
docker system prune -f
```

```bash title="Podman"
podman compose down --volumes --remove-orphans --rmi local
podman system prune -f
```
docs/docs/_partial-docker-compose-up.mdx (new file, 7 lines)

@@ -0,0 +1,7 @@
```bash title="Docker"
docker compose up -d
```

```bash title="Podman"
podman compose up -d
```
docs/docs/_partial-docker-remove-and-cleanup-steps.mdx (new file, 49 lines)

@@ -0,0 +1,49 @@
2. Remove all containers, including stopped containers:

```bash title="Docker"
docker rm --force $(docker ps -aq)
```

```bash title="Podman"
podman rm --all --force
```

3. Remove all images:

```bash title="Docker"
docker rmi --force $(docker images -q)
```

```bash title="Podman"
podman rmi --all --force
```

4. Remove all volumes:

```bash title="Docker"
docker volume prune --force
```

```bash title="Podman"
podman volume prune --force
```

5. Remove all networks except the default network:

```bash title="Docker"
docker network prune --force
```

```bash title="Podman"
podman network prune --force
```

6. Clean up any leftover data:

```bash title="Docker"
docker system prune --all --force --volumes
```

```bash title="Podman"
podman system prune --all --force --volumes
```
docs/docs/_partial-docker-stop-all.mdx (new file, 7 lines)

@@ -0,0 +1,7 @@
```bash title="Docker"
docker stop $(docker ps -q)
```

```bash title="Podman"
podman stop --all
```
docs/docs/_partial-factory-reset-warning.mdx (new file, 25 lines)

@@ -0,0 +1,25 @@
:::warning
This is a destructive action that does the following:

* Destroys all OpenRAG containers, volumes, and local images with `docker compose down --volumes --remove-orphans --rmi local`.
* Prunes any additional container objects with `docker system prune -f`.
* Deletes the contents of OpenRAG's `config` and `./opensearch-data` directories.
* Deletes the `conversations.json` file.

<p/>Destroyed containers and deleted data are lost and cannot be recovered after running this operation.

This operation _doesn't_ remove the `.env` file or the contents of the `./openrag-documents` directory.
:::

<!--
Author's note: Don't remove the <p/> tag!
For some reason, this specific usage consistently enforces the line break and indentation whether the partial is nested or not.

Without the <p/> tag, when this partial is used inside a list (ex. nested under a step in a numbered list), there is no implicit line break after the last bullet.
When this partial is used outside of a list (as a top-level paragraph), there is an implicit line break after the last bullet.

Neither <br/> nor wrapping the entire line in <p> </p> worked consistently for both use cases.
Either the line break was missing or the indentation was incorrect.

This behavior was observed in Docusaurus 3.9.2 on 05 Dec 2025. In a future release, if this is no longer an issue, you can remove the tag and this note. :)
-->
docs/docs/_partial-install-next-steps.mdx (new file, 5 lines)

@@ -0,0 +1,5 @@
## Next steps

* Try some of OpenRAG's core features in the [quickstart](/quickstart#chat-with-documents).
* Learn how to [manage OpenRAG services](/manage-services).
* [Upload documents](/ingestion), and then use the [**Chat**](/chat) to explore your data.
@@ -4,8 +4,8 @@ import TabItem from '@theme/TabItem';

1. Open the **OpenRAG OpenSearch Agent** flow in the Langflow visual editor: From the **Chat** window, click <Icon name="Settings2" aria-hidden="true"/> **Settings**, click **Edit in Langflow**, and then click **Proceed**.

2. Create a [Langflow API key](https://docs.langflow.org/api-keys-and-authentication), which is a user-specific token required to send requests to the Langflow server.
This key doesn't grant access to OpenRAG.
2. Optional: If you don't want to use the Langflow API key that is generated automatically when you install OpenRAG, you can create a [Langflow API key](https://docs.langflow.org/api-keys-and-authentication).
This key doesn't grant access to OpenRAG; it is only for authenticating with the Langflow API.

1. In the Langflow visual editor, click your user icon in the header, and then select **Settings**.
2. Click **Langflow API Keys**, and then click <Icon name="Plus" aria-hidden="true"/> **Add New**.
docs/docs/_partial-ollama.mdx (deleted)

@@ -1,24 +0,0 @@
import Icon from "@site/src/components/icon/icon";

Using Ollama for your OpenRAG language model provider offers greater flexibility and configuration, but can also be overwhelming to start.
These recommendations are a reasonable starting point for users with at least one GPU and experience running LLMs locally.

For best performance, OpenRAG recommends OpenAI's `gpt-oss:20b` language model. However, this model uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine.

For generating embeddings, OpenRAG recommends the [`nomic-embed-text`](https://ollama.com/library/nomic-embed-text) embedding model, which provides high-quality embeddings optimized for retrieval tasks.

To run models in [**Ollama Cloud**](https://docs.ollama.com/cloud), follow these steps:

1. Sign in to Ollama Cloud.
In a terminal, enter `ollama signin` to connect your local environment with Ollama Cloud.
2. To run the model, in Ollama, select the `gpt-oss:20b-cloud` model, or run `ollama run gpt-oss:20b-cloud` in a terminal.
Ollama Cloud models are run at the same URL as your local Ollama server at `http://localhost:11434`, and automatically offloaded to Ollama's cloud service.
3. Connect OpenRAG to the same local Ollama server as you would for local models in onboarding, using the default address of `http://localhost:11434`.
4. In the **Language model** field, select the `gpt-oss:20b-cloud` model.
<br></br>
To run models on a **remote Ollama server**, follow these steps:

1. Ensure your remote Ollama server is accessible from your OpenRAG instance.
2. In the **Ollama Base URL** field, enter your remote Ollama server's base URL, such as `http://your-remote-server:11434`.
OpenRAG connects to the remote Ollama server and populates the lists with the server's available models.
3. Select your **Embedding model** and **Language model** from the available options.
@@ -1,79 +1,134 @@
import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOllama from '@site/docs/_partial-ollama.mdx';

## Application onboarding
## Complete the application onboarding process {#application-onboarding}

The first time you start OpenRAG, regardless of how you installed it, you must complete application onboarding.
The first time you start the OpenRAG application, you must complete the application onboarding process to select language and embedding models that are essential for OpenRAG features like the [**Chat**](/chat).

Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.
Others are immutable and require you to destroy and recreate the OpenRAG containers.
For more information, see [Environment variables](/reference/configuration).
For more information, see the [OpenRAG environment variables reference](/reference/configuration).

You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embeddings model.
You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embedding model.
Additionally, you can set multiple embedding models.

You only need to complete onboarding for your preferred providers.

<Tabs groupId="Provider">
<TabItem value="Anthropic" label="Anthropic" default>
<Tabs groupId="Provider">
<TabItem value="Anthropic" label="Anthropic" default>

:::info
Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for embeddings.
:::
:::info
Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for the embedding model.
:::

1. Enable **Use environment Anthropic API key** to automatically use your key from the `.env` file.
Alternatively, paste an Anthropic API key into the field.
2. Under **Advanced settings**, select your **Language Model**.
3. Click **Complete**.
4. In the second onboarding panel, select a provider for embeddings and select your **Embedding Model**.
5. To complete the onboarding tasks, click **What is OpenRAG**, and then click **Add a Document**.
Alternatively, click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
6. Continue with the [Quickstart](/quickstart).
1. Enter your Anthropic API key, or enable **Get API key from environment variable** to pull the key from your [OpenRAG `.env` file](/reference/configuration).

</TabItem>
<TabItem value="OpenAI" label="OpenAI">
If you set `ANTHROPIC_API_KEY` in your OpenRAG `.env` file, this value can be populated automatically.

1. Enable **Get API key from environment variable** to automatically enter your key from the TUI-generated `.env` file.
Alternatively, paste an OpenAI API key into the field.
2. Under **Advanced settings**, select your **Language Model**.
3. Click **Complete**.
4. In the second onboarding panel, select a provider for embeddings and select your **Embedding Model**.
5. To complete the onboarding tasks, click **What is OpenRAG**, and then click **Add a Document**.
Alternatively, click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
6. Continue with the [Quickstart](/quickstart).
2. Under **Advanced settings**, select the language model that you want to use.

</TabItem>
<TabItem value="IBM watsonx.ai" label="IBM watsonx.ai">
3. Click **Complete**.

1. Complete the fields for **watsonx.ai API Endpoint**, **IBM Project ID**, and **IBM API key**.
These values are found in your IBM watsonx deployment.
2. Under **Advanced settings**, select your **Language Model**.
3. Click **Complete**.
4. In the second onboarding panel, select a provider for embeddings and select your **Embedding Model**.
5. To complete the onboarding tasks, click **What is OpenRAG**, and then click **Add a Document**.
Alternatively, click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
6. Continue with the [Quickstart](/quickstart).
4. Select a provider for embeddings, provide the required information, and then select the embedding model you want to use.
For information about another provider's credentials and settings, see the instructions for that provider.

</TabItem>
<TabItem value="Ollama" label="Ollama">
5. Click **Complete**.

:::info
Ollama isn't installed with OpenRAG. To install Ollama, see the [Ollama documentation](https://docs.ollama.com/).
:::
After you configure the embedding model, OpenRAG uses your credentials and models to ingest some [initial documents](/knowledge#default-documents). This tests the connection, and it allows you to ask OpenRAG about itself in the [**Chat**](/chat).
If there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.
Verify that the credential is valid and has access to the selected model, and then click **Complete** to retry ingestion.

1. To connect to an Ollama server running on your local machine, enter your Ollama server's base URL address.
The default Ollama server address is `http://localhost:11434`.
OpenRAG connects to the Ollama server and populates the model lists with the server's available models.
2. Select the **Embedding Model** and **Language Model** your Ollama server is running.
<details closed>
<summary>Ollama model selection and external server configuration</summary>
<PartialOllama />
</details>
3. Click **Complete**.
4. To complete the onboarding tasks, click **What is OpenRAG**, and then click **Add a Document**.
5. Continue with the [Quickstart](/quickstart).
6. Continue through the overview slides for a brief introduction to OpenRAG, or click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
The overview demonstrates some basic functionality that is covered in the [quickstart](/quickstart#chat-with-documents) and in other parts of the OpenRAG documentation.

</TabItem>
</Tabs>
</TabItem>
<TabItem value="IBM watsonx.ai" label="IBM watsonx.ai">

1. Use the values from your IBM watsonx deployment for the **watsonx.ai API Endpoint**, **IBM Project ID**, and **IBM API key** fields.

If you set `WATSONX_API_KEY`, `WATSONX_API_URL`, or `WATSONX_PROJECT_ID` in your [OpenRAG `.env` file](/reference/configuration), these values can be populated automatically.

2. Under **Advanced settings**, select the language model that you want to use.

3. Click **Complete**.

4. Select a provider for embeddings, provide the required information, and then select the embedding model you want to use.
For information about another provider's credentials and settings, see the instructions for that provider.

5. Click **Complete**.

After you configure the embedding model, OpenRAG uses your credentials and models to ingest some [initial documents](/knowledge#default-documents). This tests the connection, and it allows you to ask OpenRAG about itself in the [**Chat**](/chat).
If there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.
Verify that the credentials are valid and have access to the selected model, and then click **Complete** to retry ingestion.

6. Continue through the overview slides for a brief introduction to OpenRAG, or click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
The overview demonstrates some basic functionality that is covered in the [quickstart](/quickstart#chat-with-documents) and in other parts of the OpenRAG documentation.

</TabItem>
<TabItem value="Ollama" label="Ollama">

:::info
Ollama isn't installed with OpenRAG. You must install it separately if you want to use Ollama as a model provider.
:::

Using Ollama as your language and embedding model provider offers greater flexibility and configuration options for hosting models, but it can be advanced for new users.
The recommendations given here are a reasonable starting point for users with at least one GPU and experience running LLMs locally.

The OpenRAG team recommends the OpenAI `gpt-oss:20b` language model and the [`nomic-embed-text`](https://ollama.com/library/nomic-embed-text) embedding model.
However, `gpt-oss:20b` uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine.

1. [Install Ollama locally or on a remote server](https://docs.ollama.com/), or [run models in Ollama Cloud](https://docs.ollama.com/cloud).

If you are running a remote server, it must be accessible from your OpenRAG deployment.

2. In OpenRAG onboarding, connect to your Ollama server:

* **Local Ollama server**: Enter your Ollama server's base URL and port. The default Ollama server address is `http://localhost:11434`.
* **Ollama Cloud**: Because Ollama Cloud models run at the same address as a local Ollama server and automatically offload to Ollama's cloud service, you can use the same base URL and port as you would for a local Ollama server. The default address is `http://localhost:11434`.
* **Remote server**: Enter your remote Ollama server's base URL and port, such as `http://your-remote-server:11434`.

If the connection succeeds, OpenRAG populates the model lists with the server's available models.

3. Select the model that your Ollama server is running.

Language model and embedding model selections are independent.
You can use the same or different servers for each model.

To use different providers for each model, you must configure both providers, and select the relevant model for each provider.

4. Click **Complete**.

After you configure the embedding model, OpenRAG uses the address and models to ingest some [initial documents](/knowledge#default-documents). This tests the connection, and it allows you to ask OpenRAG about itself in the [**Chat**](/chat).
If there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.
Verify that the server address is valid, and that the selected model is running on the server.
Then, click **Complete** to retry ingestion.

5. Continue through the overview slides for a brief introduction to OpenRAG, or click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
The overview demonstrates some basic functionality that is covered in the [quickstart](/quickstart#chat-with-documents) and in other parts of the OpenRAG documentation.

</TabItem>
<TabItem value="OpenAI" label="OpenAI (default)">

1. Enter your OpenAI API key, or enable **Get API key from environment variable** to pull the key from your [OpenRAG `.env` file](/reference/configuration).

If you set `OPENAI_API_KEY` in your OpenRAG `.env` file, this value can be populated automatically.

2. Under **Advanced settings**, select the language model that you want to use.

3. Click **Complete**.

4. Select a provider for embeddings, provide the required information, and then select the embedding model you want to use.
For information about another provider's credentials and settings, see the instructions for that provider.

5. Click **Complete**.

After you configure the embedding model, OpenRAG uses your credentials and models to ingest some [initial documents](/knowledge#default-documents). This tests the connection, and it allows you to ask OpenRAG about itself in the [**Chat**](/chat).
If there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.
Verify that the credential is valid and has access to the selected model, and then click **Complete** to retry ingestion.

6. Continue through the overview slides for a brief introduction to OpenRAG, or click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.
The overview demonstrates some basic functionality that is covered in the [quickstart](/quickstart#chat-with-documents) and in other parts of the OpenRAG documentation.

</TabItem>
</Tabs>
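Before starting Ollama onboarding, it can help to confirm that the server is reachable and see which models it serves. A sketch using Ollama's model-listing endpoint, assuming the default address:

```bash
# Lists the models available on the Ollama server; the onboarding model
# lists are populated from the same server
curl http://localhost:11434/api/tags
```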
docs/docs/_partial-opensearch-auth-mode.mdx (new file, 11 lines)

@@ -0,0 +1,11 @@
* **No-auth mode**: If you select **Basic Setup** in the [TUI](/tui), or your [OpenRAG `.env` file](/reference/configuration) doesn't include OAuth credentials, then the OpenRAG OpenSearch instance runs in no-auth mode.

This mode uses one anonymous JWT token for OpenSearch authentication.
There is no differentiation between users; all users that access your OpenRAG instance can access all documents uploaded to your knowledge base.

* **OAuth mode**: If you select **Advanced Setup** in the [TUI](/tui), or your [OpenRAG `.env` file](/reference/configuration) includes OAuth credentials, then the OpenRAG OpenSearch instance runs in OAuth mode.

This mode uses a unique JWT token for each OpenRAG user, and each document is tagged with user ownership.
Documents are filtered by user owner; users see only the documents that they uploaded or have access to through their cloud storage accounts.

To enable OAuth mode after initial setup, see [Ingest files with OAuth connectors](/ingestion#oauth-ingestion).
docs/docs/_partial-prereq-common.mdx (new file, 12 lines)

@@ -0,0 +1,12 @@
* Gather the credentials and connection details for your preferred model providers.
You must have access to at least one language model and one embedding model.
If a provider offers both types, you can use the same provider for both models.
If a provider offers only one type, you must select two providers.

* **OpenAI**: Create an [OpenAI API key](https://platform.openai.com/api-keys).
* **Anthropic**: Create an [Anthropic API key](https://www.anthropic.com/docs/api/reference).
Anthropic provides language models only; you must select an additional provider for embeddings.
* **IBM watsonx.ai**: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment.
* **Ollama**: Deploy an [Ollama instance and models](https://docs.ollama.com/) locally, in the cloud, or on a remote server, and then get your Ollama server's base URL and the names of the models that you want to use.

* Optional: Install GPU support with an NVIDIA GPU, [CUDA](https://docs.nvidia.com/cuda/) support, and compatible NVIDIA drivers on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment.
docs/docs/_partial-prereq-no-script.mdx (new file, 6 lines)

@@ -0,0 +1,6 @@
* Install [uv](https://docs.astral.sh/uv/getting-started/installation/).

* Install [Podman](https://podman.io/docs/installation) (recommended) or [Docker](https://docs.docker.com/get-docker/).

* Install [`podman-compose`](https://docs.podman.io/en/latest/markdown/podman-compose.1.html) or [Docker Compose](https://docs.docker.com/compose/install/).
To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands, as in the sketch below.
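One way to satisfy that aliasing requirement, assuming a Bash-compatible shell; add the lines to your shell profile to make them persistent:

```bash
# Route Docker CLI invocations to their Podman equivalents
alias docker=podman
alias docker-compose=podman-compose
```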
docs/docs/_partial-prereq-python.mdx (new file, 1 line)

@@ -0,0 +1 @@
* Install [Python](https://www.python.org/downloads/) version 3.13 or later.
docs/docs/_partial-prereq-windows.mdx (new file, 2 lines)

@@ -0,0 +1,2 @@
* For Microsoft Windows, you must use the Windows Subsystem for Linux (WSL).
See [Install OpenRAG on Windows](/install-windows) before proceeding.
docs/docs/_partial-setup.mdx (new file, 135 lines)

@@ -0,0 +1,135 @@
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOpenSearchAuthMode from '@site/docs/_partial-opensearch-auth-mode.mdx';

You can use either **Basic Setup** or **Advanced Setup** to configure OpenRAG.
This choice determines how OpenRAG authenticates with your deployment's [OpenSearch instance](/knowledge), and it controls user access to documents stored in your OpenSearch knowledge base:

<PartialOpenSearchAuthMode />

:::info
You must use **Advanced Setup** if you want to [use OAuth connectors to upload documents from cloud storage](/ingestion#oauth-ingestion).
:::

If OpenRAG detects OAuth credentials during setup, it recommends **Advanced Setup** in the TUI.

<Tabs groupId="Setup method">
<TabItem value="Basic setup" label="Basic setup" default>

1. In the TUI, click **Basic Setup** or press <kbd>1</kbd>.

2. Enter administrator passwords for the OpenRAG OpenSearch and Langflow services, or click **Generate Passwords** to generate passwords automatically.

The OpenSearch password is required.

The Langflow password is recommended but optional.
If the Langflow password is empty, the Langflow server starts without authentication enabled. For more information, see [Langflow settings](/reference/configuration#langflow-settings).

3. Optional: Enter your OpenAI API key, or leave this field empty to provide model provider credentials during the application onboarding process.

There is no material difference between providing the key now or during the [application onboarding process](#application-onboarding).
If you provide a key now, it can be populated automatically during the application onboarding process if you select the OpenAI model provider, and then enable **Get API key from environment variable**.

OpenRAG's core functionality requires access to language and embedding models.
By default, OpenRAG uses OpenAI models.
If you aren't sure which models or providers to use, provide an OpenAI API key so that you can use OpenRAG's default model configuration.
If you want to use a different model provider, you can leave this field empty.

4. Click **Save Configuration**.

Your passwords and API key, if provided, are stored in the [OpenRAG `.env` file](/reference/configuration) in your OpenRAG installation directory.
If you modified any credentials that were pulled from an existing `.env` file, those values are updated in the `.env` file.

5. Click **Start All Services** to start the OpenRAG services that run in containers.

This process can take some time while OpenRAG pulls and runs the container images.
If all services start successfully, the TUI prints a confirmation message:

```text
Services started successfully
Command completed successfully
```

6. Under [**Native Services**](/manage-services), click **Start** to start the Docling service.

7. Launch the OpenRAG application:

* From the TUI main menu, click **Open App**.
* In your browser, navigate to `localhost:3000`.

8. Continue with the [application onboarding process](#application-onboarding).

</TabItem>
<TabItem value="Advanced setup" label="Advanced setup">

1. In the TUI, click **Advanced Setup** or press <kbd>2</kbd>.

2. Enter administrator passwords for the OpenRAG OpenSearch and Langflow services, or click **Generate Passwords** to generate passwords automatically.

The OpenSearch password is required.

The Langflow password is recommended but optional.
If the Langflow password is empty, the Langflow server starts without authentication enabled. For more information, see [Langflow settings](/reference/configuration#langflow-settings).

3. Optional: Enter your OpenAI API key, or leave this field empty to provide model provider credentials during the application onboarding process.

There is no material difference between providing the key now or during the [application onboarding process](#application-onboarding).
If you provide a key now, it can be populated automatically during the application onboarding process if you select the OpenAI model provider, and then enable **Get API key from environment variable**.

OpenRAG's core functionality requires access to language and embedding models.
By default, OpenRAG uses OpenAI models.
If you aren't sure which models or providers to use, provide an OpenAI API key so that you can use OpenRAG's default model configuration.
If you want to use a different model provider, you can leave this field empty.

4. To upload documents from external storage, such as Google Drive, add the required OAuth credentials for the connectors that you want to use. These settings can be populated automatically if OpenRAG detects these credentials in an [OpenRAG `.env` file](/reference/configuration) in the OpenRAG installation directory.

* **Amazon**: Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on [Configuring access to AWS applications](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html).
* **Google**: Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials). For more information, see the [Google OAuth client documentation](https://developers.google.com/identity/protocols/oauth2).
* **Microsoft**: For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide [Azure application registration credentials for SharePoint and OneDrive](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online). For more information, see the [Microsoft Graph OAuth client documentation](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth).

You can [manage OAuth credentials](/ingestion#oauth-ingestion) later, but it is recommended to configure them during initial setup.

5. The OpenRAG TUI presents redirect URIs for your OAuth app.
These are the URLs your OAuth provider will redirect back to after user sign-in.
Register these redirect values with your OAuth provider as they are presented in the TUI.

6. Click **Save Configuration**.

Your passwords, API key, and OAuth credentials, if provided, are stored in the [OpenRAG `.env` file](/reference/configuration) in your OpenRAG installation directory.
If you modified any credentials that were pulled from an existing `.env` file, those values are updated in the `.env` file.

7. Click **Start All Services** to start the OpenRAG services that run in containers.

This process can take some time while OpenRAG pulls and runs the container images.
If all services start successfully, the TUI prints a confirmation message:

```text
Services started successfully
Command completed successfully
```

8. Under [**Native Services**](/manage-services), click **Start** to start the Docling service.

9. Launch the OpenRAG application:

* From the TUI main menu, click **Open App**.
* In your browser, navigate to `localhost:3000`.

10. If you enabled OAuth connectors, you must sign in to your OAuth provider before being redirected to your OpenRAG instance.

11. If required, you can edit the following additional environment variables.
Only change these variables if your OpenRAG deployment has a non-default network configuration, such as a reverse proxy or custom domain.

* `LANGFLOW_PUBLIC_URL`: Sets the base address to access the Langflow web interface. This is where users interact with flows in a browser.
* `WEBHOOK_BASE_URL`: Sets the base address for the following OpenRAG OAuth connector endpoints, as shown in the sketch after these steps:
  * Amazon S3: Not applicable.
  * Google Drive: `WEBHOOK_BASE_URL/connectors/google_drive/webhook`
  * OneDrive: `WEBHOOK_BASE_URL/connectors/onedrive/webhook`
  * SharePoint: `WEBHOOK_BASE_URL/connectors/sharepoint/webhook`

12. Continue with the [application onboarding process](#application-onboarding).
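To make the `WEBHOOK_BASE_URL` mapping in step 11 concrete, here is a sketch with a placeholder domain; any DNS name that your OAuth providers can reach works:

```bash
# Hypothetical base URL set in the OpenRAG .env file
WEBHOOK_BASE_URL=https://openrag.example.com

# Resulting connector webhook endpoints:
#   https://openrag.example.com/connectors/google_drive/webhook
#   https://openrag.example.com/connectors/onedrive/webhook
#   https://openrag.example.com/connectors/sharepoint/webhook
```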
</TabItem>
</Tabs>

<!-- Author's note: Under Advanced Setup, the bullets for `LANGFLOW_PUBLIC_URL` and `WEBHOOK_BASE_URL` must be indented extra spaces in order to render correctly. Not sure why, but please don't outdent them unless you can enforce the rendering. -->
@@ -4,8 +4,6 @@ slug: /agents
---

import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

OpenRAG includes a built-in [Langflow](https://docs.langflow.org/) instance for creating and managing functional application workflows called _flows_.
In a flow, the individual workflow steps are represented by [_components_](https://docs.langflow.org/concepts-components) that are connected together to form a complete process.

@@ -34,7 +32,7 @@ For example, to view and edit the built-in **Chat** flow (the **OpenRAG OpenSearch Agent** flow):

If prompted to acknowledge that you are entering Langflow, click **Proceed**.

If Langflow requests login information, enter the `LANGFLOW_SUPERUSER` and `LANGFLOW_SUPERUSER_PASSWORD` from the `.env` file in your OpenRAG installation directory.
If Langflow requests login information, enter the `LANGFLOW_SUPERUSER` and `LANGFLOW_SUPERUSER_PASSWORD` from your [OpenRAG `.env` file](/reference/configuration) in your OpenRAG installation directory.



@@ -65,7 +63,7 @@ Explore the [Langflow documentation](https://docs.langflow.org/) to learn more

By default, OpenRAG is pinned to the latest Langflow Docker image for stability.

If necessary, you can set a specific Langflow version with the [`LANGFLOW_VERSION`](/reference/configuration). However, there are risks to changing this setting:
If necessary, you can set a specific Langflow version with the `LANGFLOW_VERSION` [environment variable](/reference/configuration). However, there are risks to changing this setting:

* The [Langflow documentation](https://docs.langflow.org/) describes the functionality present in the latest release of the Langflow OSS Python package. If your `LANGFLOW_VERSION` is different, the Langflow documentation might not align with the features and default settings in your OpenRAG installation.
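For illustration, the pin is a single entry in the OpenRAG `.env` file; the version number below is a placeholder, not a recommendation:

```bash
# Hypothetical pin; replace with the exact Langflow release you have validated
LANGFLOW_VERSION=1.2.3
```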
@@ -8,6 +8,8 @@ import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialTempKnowledge from '@site/docs/_partial-temp-knowledge.mdx';
import PartialIngestionFlow from '@site/docs/_partial-ingestion-flow.mdx';
import PartialDockerComposeUp from '@site/docs/_partial-docker-compose-up.mdx';
import PartialDockerStopAll from '@site/docs/_partial-docker-stop-all.mdx';

Upload documents to your [OpenRAG OpenSearch instance](/knowledge) to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.
Documents are processed through OpenRAG's knowledge ingestion flows with Docling.

@@ -62,56 +64,41 @@ Before users can connect their own cloud storage accounts, you must configure the
To enable multiple connectors, you must register an app and generate credentials for each provider.

<Tabs>
<TabItem value="TUI" label="TUI Advanced Setup" default>

If you use the TUI to manage your OpenRAG containers, provide OAuth credentials in the **Advanced Setup**.
<TabItem value="TUI" label="TUI-managed services" default>

If you use the [Terminal User Interface (TUI)](/tui) to manage your OpenRAG services, enter OAuth credentials in the **Advanced Setup** menu.
You can do this during [installation](/install#setup), or you can add the credentials afterwards:

1. If OpenRAG is running, stop it: Go to [**Status**](/install#tui-container-management), and then click **Stop Services**.
1. If OpenRAG is running, open the TUI's **Status** menu (<kbd>3</kbd>), and then click **Stop Services**.

2. Click **Advanced Setup**, and then add the OAuth credentials for the cloud storage providers that you want to use:
2. Open the **Advanced Setup** menu (<kbd>2</kbd>), and then add the OAuth credentials for the cloud storage providers that you want to use:

* **Amazon**: Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on [Configuring access to AWS applications](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html).
* **Google**: Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials). For more information, see the [Google OAuth client documentation](https://developers.google.com/identity/protocols/oauth2).
* **Microsoft**: For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide [Azure application registration credentials for SharePoint and OneDrive](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online). For more information, see the [Microsoft Graph OAuth client documentation](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth).

3. The OpenRAG TUI presents redirect URIs for your OAuth app that you must register with your OAuth provider.
3. The TUI presents redirect URIs for your OAuth app that you must register with your OAuth provider.
These are the URLs your OAuth provider will redirect back to after users authenticate and grant access to their cloud storage.

4. Click **Save Configuration**.
4. Click **Save Configuration** to add the OAuth credentials to your [OpenRAG `.env` file](/reference/configuration).

OpenRAG regenerates the [`.env`](/reference/configuration) file with the given credentials.
5. Click **Start All Services** to restart the OpenRAG containers with OAuth enabled.

5. Click **Start Container Services**.
6. Launch the OpenRAG app.
You should be prompted to sign in to your OAuth provider before being redirected to your OpenRAG instance.

</TabItem>
<TabItem value="env" label="Docker Compose .env file">
<TabItem value="env" label="Self-managed services">

If you [install OpenRAG with self-managed containers](/docker), set OAuth credentials in the `.env` file for Docker Compose.
If you [installed OpenRAG with self-managed services](/docker), set OAuth credentials in your [OpenRAG `.env` file](/reference/configuration).

You can do this during [initial setup](/docker#install-openrag-with-docker-compose), or you can add the credentials afterwards:
You can do this during [initial setup](/docker#setup), or you can add the credentials afterwards:

1. Stop your OpenRAG deployment.
1. Stop all OpenRAG containers:

<Tabs>
<TabItem value="podman" label="Podman">
<PartialDockerStopAll />

```bash
podman stop --all
```

</TabItem>
<TabItem value="docker" label="Docker">

```bash
docker stop $(docker ps -q)
```

</TabItem>
</Tabs>

2. Edit the `.env` file for Docker Compose to add the OAuth credentials for the cloud storage providers that you want to use:
2. Edit your OpenRAG `.env` file to add the OAuth credentials for the cloud storage providers that you want to use:

* **Amazon**: Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on [Configuring access to AWS applications](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html).
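For orientation, the corresponding `.env` entries use the credential variables from `.env.example`. All values below are placeholders, and the Google client ID variable name is assumed to parallel the client secret:

```bash
# Hypothetical OAuth credentials; supply the values from your provider consoles
GOOGLE_OAUTH_CLIENT_ID=example-id.apps.googleusercontent.com
GOOGLE_OAUTH_CLIENT_SECRET=example-google-secret
MICROSOFT_GRAPH_OAUTH_CLIENT_ID=example-azure-app-id
MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=example-azure-secret
AWS_ACCESS_KEY_ID=AKIAEXAMPLEKEYID
AWS_SECRET_ACCESS_KEY=example-aws-secret-key
```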
@@ -136,24 +123,9 @@ You can do this during [initial setup](/docker#install-openrag-with-docker-compose), or you can add the credentials afterwards:

3. Save the `.env` file.

4. Restart your OpenRAG deployment:
4. Restart your OpenRAG containers:

<Tabs>
<TabItem value="podman" label="Podman">

```bash
podman-compose up -d
```

</TabItem>
<TabItem value="docker" label="Docker">

```bash
docker-compose up -d
```

</TabItem>
</Tabs>
<PartialDockerComposeUp />

</TabItem>
</Tabs>

@@ -238,6 +210,7 @@ All errors were file-specific, and they didn't stop the pipeline.

* Machine: Apple M4 Pro
* Podman VM:

  * Name: podman-machine-default
  * Type: applehv
  * vCPUs: 7
@@ -4,8 +4,6 @@ slug: /knowledge-filters
---

import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

OpenRAG's knowledge filters help you organize and manage your [knowledge base](/knowledge) by creating pre-defined views of your documents.
@@ -4,13 +4,12 @@ slug: /knowledge
---

import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOpenSearchAuthMode from '@site/docs/_partial-opensearch-auth-mode.mdx';

OpenRAG includes a built-in [OpenSearch](https://docs.opensearch.org/latest/) instance that serves as the underlying datastore for your _knowledge_ (documents).
This specialized database is used to store and retrieve your documents and the associated vector data (embeddings).

The documents in your OpenSearch knowledge base provide specialized context in addition to the general knowledge available to the language model that you select when you [install OpenRAG](/install) or [edit a flow](/agents).
The documents in your OpenSearch knowledge base provide specialized context in addition to the general knowledge available to the language model that you select when you [install OpenRAG](/install-options) or [edit a flow](/agents).

You can [upload documents](/ingestion) from a variety of sources to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.
Documents are processed through OpenRAG's knowledge ingestion flows with Docling.

@@ -26,26 +25,22 @@ The **Knowledge** page lists the documents OpenRAG has ingested into your OpenSearch
To explore the raw contents of your knowledge base, click <Icon name="Library" aria-hidden="true"/> **Knowledge** to get a list of all ingested documents.
Click a document to view the chunks produced from splitting the document during ingestion.

By default, OpenRAG includes some initial documents about OpenRAG. You can use these documents to ask OpenRAG about itself, and to test the [**Chat**](/chat) feature before uploading your own documents.
If you [delete these documents](#delete-knowledge), you won't be able to ask OpenRAG about itself and its own functionality.
### Default documents {#default-documents}

By default, OpenRAG includes some initial documents about OpenRAG.
These documents are ingested automatically during the [application onboarding process](/install#application-onboarding).

You can use these documents to ask OpenRAG about itself, and to test the [**Chat**](/chat) feature before uploading your own documents.

If you [delete](#delete-knowledge) these documents, you won't be able to ask OpenRAG about itself and its own functionality.
It is recommended that you keep these documents, and use [filters](/knowledge-filters) to separate them from your other knowledge.

## OpenSearch authentication and document access {#auth}

When you [install OpenRAG](/install), you can choose between two setup modes: **Basic Setup** and **Advanced Setup**.
The mode you choose determines how OpenRAG authenticates with OpenSearch and controls access to documents:
When you [install OpenRAG](/install-options), you provide the initial configuration values for your OpenRAG services, including authentication credentials for OpenSearch and OAuth connectors.
This configuration determines how OpenRAG authenticates with your deployment's OpenSearch instance, and it controls user access to documents in your knowledge base:

* **Basic Setup (no-auth mode)**: If you choose **Basic Setup**, then OpenRAG is installed in no-auth mode.
This mode uses one anonymous JWT token for OpenSearch authentication.
There is no differentiation between users.
All users that access your OpenRAG instance can access all documents uploaded to your OpenSearch knowledge base.

* **Advanced Setup (OAuth mode)**: If you choose **Advanced Setup**, then OpenRAG is installed in OAuth mode.
This mode uses a unique JWT token for each OpenRAG user, and each document is tagged with user ownership. Documents are filtered by user owner.
This means users see only the documents that they uploaded or have access to.

You can enable OAuth mode after installation.
For more information, see [Ingest files with OAuth connectors](/ingestion#oauth-ingestion).
<PartialOpenSearchAuthMode />

## OpenSearch indexes

@@ -75,18 +70,18 @@ If needed, you can use [filters](/knowledge-filters) to separate documents that

### Set the embedding model and dimensions {#set-the-embedding-model-and-dimensions}

When you [install OpenRAG](/install), you select at least one embedding model during [application onboarding](/install#application-onboarding).
When you [install OpenRAG](/install-options), you select at least one embedding model during the [application onboarding process](/install#application-onboarding).
OpenRAG automatically detects and configures the appropriate vector dimensions for your selected embedding model, ensuring optimal search performance and compatibility.

In the OpenRAG repository, you can find the complete list of supported models in [`models_service.py`](https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py) and the corresponding vector dimensions in [`settings.py`](https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py).

During application onboarding, you can select from the supported models.
During the application onboarding process, you can select from the supported models.
The default embedding dimension is `1536`, and the default model is the OpenAI `text-embedding-3-small`.

If you want to use an unsupported model, you must manually set the model in your [OpenRAG configuration](/reference/configuration).
If you want to use an unsupported model, you must manually set the model in your [OpenRAG `.env` file](/reference/configuration).
If you use an unsupported embedding model that doesn't have defined dimensions in `settings.py`, then OpenRAG falls back to the default dimensions (1536) and logs a warning. OpenRAG's OpenSearch instance and flows continue to work, but [similarity search](https://www.ibm.com/think/topics/vector-search) quality can be affected if the actual model dimensions aren't 1536.

To change the embedding model after onboarding, it is recommended that you modify the embedding model setting in the OpenRAG **Settings** page or in your [OpenRAG configuration](/reference/configuration).
To change the embedding model after onboarding, it is recommended that you modify the embedding model setting in the OpenRAG **Settings** page or in your [OpenRAG `.env` file](/reference/configuration).
This will automatically update all relevant [OpenRAG flows](/agents) to use the new embedding model configuration.
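A sketch of the manual override described above, using the `EMBEDDING_PROVIDER` and `EMBEDDING_MODEL` variables from `.env.example`; the model shown is the documented default, and the exact provider strings accepted may differ:

```bash
# Documented default: OpenAI text-embedding-3-small (1536 dimensions)
EMBEDDING_PROVIDER=openai
EMBEDDING_MODEL=text-embedding-3-small
```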
### Set Docling parameters
|
||||
|
|
@ -97,35 +92,26 @@ When you [upload documents](/ingestion), Docling processes the files, splits the
|
|||
|
||||
You can use either Docling Serve or OpenRAG's built-in Docling ingestion pipeline to process documents.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="serve" label="Docling Serve ingestion" default>
|
||||
|
||||
By default, OpenRAG uses [Docling Serve](https://github.com/docling-project/docling-serve).
|
||||
* **Docling Serve ingestion**: By default, OpenRAG uses [Docling Serve](https://github.com/docling-project/docling-serve).
|
||||
This means that OpenRAG starts a `docling serve` process on your local machine and runs Docling ingestion through an API service.
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="docling" label="Built-in Docling ingestion">
|
||||
* **Built-in Docling ingestion**: If you want to use OpenRAG's built-in Docling ingestion pipeline instead of the separate Docling Serve service, set `DISABLE_INGEST_WITH_LANGFLOW=true` in your [OpenRAG environment variables](/reference/configuration#document-processing-settings).
|
||||
|
||||
If you want to use OpenRAG's built-in Docling ingestion pipeline instead of the separate Docling Serve service, set `DISABLE_INGEST_WITH_LANGFLOW=true` in your [OpenRAG environment variables](/reference/configuration#document-processing).
|
||||
The built-in pipeline uses the Docling processor directly instead of through the Docling Serve API.
|
||||
|
||||
The built-in pipeline uses the Docling processor directly instead of through the Docling Serve API.
|
||||
|
||||
For the underlying functionality, see [`processors.py`](https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58) in the OpenRAG repository.
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
For the underlying functionality, see [`processors.py`](https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58) in the OpenRAG repository.
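For reference, the toggle is a single line in your `.env` file (the variable name comes from the configuration reference above):

```bash
DISABLE_INGEST_WITH_LANGFLOW=true
```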
To modify the Docling ingestion and embedding parameters, click <Icon name="Settings2" aria-hidden="true"/> **Settings** in the OpenRAG user interface.

:::tip
OpenRAG warns you if `docling serve` isn't running.
You can [start and stop OpenRAG services](/install#tui-container-management) from the TUI main menu with **Start Native Services** or **Stop Native Services**.
For information about starting and stopping OpenRAG native services, like Docling, see [Manage OpenRAG services](/manage-services).
:::

* **Embedding model**: Select the model to use to generate vector embeddings for your documents.

This is initially set during installation.
The recommended way to change this setting is in the OpenRAG **Settings** or your [OpenRAG configuration](/reference/configuration).
The recommended way to change this setting is in the OpenRAG **Settings** or your [OpenRAG `.env` file](/reference/configuration).
This will automatically update all relevant [OpenRAG flows](/agents) to use the new embedding model configuration.

If you uploaded documents prior to changing the embedding model, you can [create filters](/knowledge-filters) to separate documents embedded with different models, or you can reupload all documents to regenerate embeddings with the new model.
@@ -153,7 +139,7 @@ The default value is 200 characters, which represents an overlap of 20 percent i

The default path for local uploads is the `./openrag-documents` subdirectory in your OpenRAG installation directory. This is mounted to the `/app/openrag-documents/` directory inside the OpenRAG container. Files added to the host or container directory are visible in both locations.

To change this location, modify the **Documents Paths** variable in either the [**Advanced Setup** menu](/install#setup) or in the `.env` used by Docker Compose.
To change this location, modify the **Documents Paths** variable in either the [**Advanced Setup** menu](/install#setup) or in your [OpenRAG `.env` file](/reference/configuration).

## Delete knowledge {#delete-knowledge}

@@ -1,118 +1,111 @@
---
title: Install OpenRAG containers
title: Deploy OpenRAG with self-managed services
slug: /docker
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialWsl from '@site/docs/_partial-wsl-install.mdx';
import PartialPrereqCommon from '@site/docs/_partial-prereq-common.mdx';
import PartialPrereqNoScript from '@site/docs/_partial-prereq-no-script.mdx';
import PartialPrereqWindows from '@site/docs/_partial-prereq-windows.mdx';
import PartialPrereqPython from '@site/docs/_partial-prereq-python.mdx';
import PartialInstallNextSteps from '@site/docs/_partial-install-next-steps.mdx';

OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments:
To manage your own OpenRAG services, deploy OpenRAG with Docker or Podman.

- [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml) is an OpenRAG deployment with GPU support for accelerated AI processing. This Docker Compose file requires an NVIDIA GPU with [CUDA](https://docs.nvidia.com/cuda/) support.

- [`docker-compose-cpu.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml) is a CPU-only version of OpenRAG for systems without NVIDIA GPU support. Use this Docker Compose file for environments where GPU drivers aren't available.
Use this installation method if you don't want to [use the Terminal User Interface (TUI)](/tui), or you need to run OpenRAG in an environment where using the TUI is infeasible.

## Prerequisites

- Install the following:
<PartialPrereqWindows />

- [Python](https://www.python.org/downloads/release/python-3100/) version 3.13 or later.
- [uv](https://docs.astral.sh/uv/getting-started/installation/).
- [Podman](https://podman.io/docs/installation) (recommended) or [Docker](https://docs.docker.com/get-docker/).
- [`podman-compose`](https://docs.podman.io/en/latest/markdown/podman-compose.1.html) or [Docker Compose](https://docs.docker.com/compose/install/). To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands, as shown in the sketch after this list.
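For example, a minimal sketch of such aliases in your shell profile (illustrative; adjust for your shell):

```bash
# Route Docker CLI and Docker Compose invocations to their Podman equivalents
alias docker="podman"
alias docker-compose="podman-compose"
```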
<PartialPrereqPython />

- Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL).
<PartialPrereqNoScript />

<details>
<summary>Install WSL for OpenRAG</summary>
<PartialPrereqCommon />

<PartialWsl />
## Prepare your deployment {#setup}

</details>
1. Clone the OpenRAG repository:

- Prepare model providers and credentials.

During [application onboarding](#application-onboarding), you must select language model and embedding model providers.
If your chosen provider offers both types, you can use the same provider for both selections.
If your provider offers only one type, such as Anthropic, you must select two providers.

Gather the credentials and connection details for your chosen model providers before starting onboarding:

- OpenAI: Create an [OpenAI API key](https://platform.openai.com/api-keys).
- Anthropic language models: Create an [Anthropic API key](https://www.anthropic.com/docs/api/reference).
- IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment.
- Ollama: Use the [Ollama documentation](https://docs.ollama.com/) to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL.

- Optional: Install GPU support with an NVIDIA GPU, [CUDA](https://docs.nvidia.com/cuda/) support, and compatible NVIDIA drivers on the OpenRAG host machine. This is required to use the GPU-accelerated Docker Compose file. If you choose not to use GPU support, you must use the CPU-only Docker Compose file instead.
## Install OpenRAG with Docker Compose

To install OpenRAG with Docker Compose, do the following:

1. Clone the OpenRAG repository.

```bash
git clone https://github.com/langflow-ai/openrag.git
```

2. Change to the root of the cloned repository:

```bash
cd openrag
```

2. Install dependencies.
3. Install dependencies:

```bash
uv sync
```

3. Copy the example `.env` file included in the repository root.
The example file includes all environment variables with comments to guide you in finding and setting their values.
4. Create a `.env` file at the root of the cloned repository.

You can create an empty file or copy the repository's [`.env.example`](https://github.com/langflow-ai/openrag/blob/main/.env.example) file.
The example file contains some of the [OpenRAG environment variables](/reference/configuration) to get you started with configuring your deployment.

```bash
cp .env.example .env
```

Alternatively, create a new `.env` file in the repository root.

```bash
touch .env
```

5. Edit the `.env` file to configure your deployment using [OpenRAG environment variables](/reference/configuration).
The OpenRAG Docker Compose files pull values from your `.env` file to configure the OpenRAG containers.
The following variables are required or recommended:

4. The Docker Compose files are populated with the values from your `.env` file.
The `OPENSEARCH_PASSWORD` value must be set.
`OPENSEARCH_PASSWORD` can be automatically generated when using the TUI, but for a Docker Compose installation, you must set it manually. To generate an OpenSearch admin password, see the [OpenSearch documentation](https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password).
* **`OPENSEARCH_PASSWORD` (Required)**: Sets the OpenSearch administrator password. It must adhere to the [OpenSearch password complexity requirements](https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password).
The following values are optional:
* **`LANGFLOW_SUPERUSER`**: The username for the Langflow administrator user. If `LANGFLOW_SUPERUSER` isn't set, then the default value is `admin`.

```bash
OPENAI_API_KEY=your_openai_api_key
LANGFLOW_SECRET_KEY=your_secret_key
```

* **`LANGFLOW_SUPERUSER_PASSWORD` (Strongly recommended)**: Sets the Langflow administrator password, and determines the Langflow server's default authentication mode. If `LANGFLOW_SUPERUSER_PASSWORD` isn't set, then the Langflow server starts without authentication enabled. For more information, see [Langflow settings](/reference/configuration#langflow-settings).

`OPENAI_API_KEY` is optional. You can provide it during [application onboarding](#application-onboarding) or choose a different model provider. If you want to set it in your `.env` file, you can find your OpenAI API key in your [OpenAI account](https://platform.openai.com/api-keys).
* **`LANGFLOW_SECRET_KEY` (Strongly recommended)**: A secret encryption key for internal Langflow operations. It is recommended to [generate your own Langflow secret key](https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key). If `LANGFLOW_SECRET_KEY` isn't set, then Langflow generates a secret key automatically.

`LANGFLOW_SECRET_KEY` is optional. Langflow will auto-generate it if not set. For more information, see the [Langflow documentation](https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key).
* **Model provider credentials**: Provide credentials for your preferred model providers. If none of these are set in the `.env` file, you must configure at least one provider during the [application onboarding process](#application-onboarding).

The following Langflow configuration values are optional but important to consider:
* `OPENAI_API_KEY`
* `ANTHROPIC_API_KEY`
* `OLLAMA_ENDPOINT`
* `WATSONX_API_KEY`
* `WATSONX_ENDPOINT`
* `WATSONX_PROJECT_ID`

```bash
LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER_PASSWORD=your_langflow_password
```

* **OAuth provider credentials**: To upload documents from external storage, such as Google Drive, set the required OAuth credentials for the connectors that you want to use. You can [manage OAuth credentials](/ingestion#oauth-ingestion) later, but it is recommended to configure them during initial setup so you don't have to rebuild the containers.

`LANGFLOW_SUPERUSER` defaults to `admin`. You can omit it or set it to a different username. `LANGFLOW_SUPERUSER_PASSWORD` is optional. If omitted, Langflow runs in [autologin mode](https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login) with no password required. If set, Langflow requires password authentication.

For more information on configuring OpenRAG with environment variables, see [Environment variables](/reference/configuration).
* **Amazon**: Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on [Configuring access to AWS applications](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html).
* **Google**: Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials). For more information, see the [Google OAuth client documentation](https://developers.google.com/identity/protocols/oauth2).
* **Microsoft**: For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide [Azure application registration credentials for SharePoint and OneDrive](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online). For more information, see the [Microsoft Graph OAuth client documentation](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth).

For more information and variables, see [OpenRAG environment variables](/reference/configuration).
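For example, a minimal sketch of a starting `.env` for a Docker Compose deployment. All values are placeholders; the variable names come from the list above:

```bash
# Required: OpenSearch admin password (must meet the complexity requirements)
OPENSEARCH_PASSWORD=YourStr0ng!Passw0rd

# Strongly recommended: Langflow authentication and encryption
LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER_PASSWORD=your_langflow_password
# One way to generate a key: python3 -c "import secrets; print(secrets.token_urlsafe(32))"
LANGFLOW_SECRET_KEY=your_generated_secret_key

# At least one model provider
OPENAI_API_KEY=your_openai_api_key
```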
## Start services

1. Start `docling serve` on port 5001 on the host machine:
5. Start `docling serve` on the host machine.
OpenRAG Docker installations require that `docling serve` is running on port 5001 on the host machine.
This enables [Mac MLX](https://opensource.apple.com/projects/mlx/) support for document processing.

```bash
uv run python scripts/docling_ctl.py start --port 5001
```

6. Confirm `docling serve` is running.

Docling cannot run inside a Docker container due to system-level dependencies, so you must manage it as a separate service on the host machine.
For more information, see [Stop, start, and inspect native services](/manage-services#start-native-services).

This port is required to deploy OpenRAG successfully; don't use a different port.
Additionally, this enables the [MLX framework](https://opensource.apple.com/projects/mlx/) for accelerated performance on Apple Silicon Mac machines.

2. Confirm `docling serve` is running.

```bash
uv run python scripts/docling_ctl.py status
```

Make sure the response shows that `docling serve` is running, for example:
If `docling serve` is running, the output includes the status, address, and process ID (PID):

```bash
Status: running
Endpoint: http://127.0.0.1:5001
PID: 27746
```

@@ -120,239 +113,55 @@ To install OpenRAG with Docker Compose, do the following:
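You can also probe the endpoint directly. This sketch assumes the `/health` route that `docling-serve` versions commonly expose; adjust if your version differs:

```bash
curl -s http://127.0.0.1:5001/health
```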
7. Deploy OpenRAG locally with Docker Compose based on your deployment type.
3. Deploy the OpenRAG containers locally using the appropriate Docker Compose file for your environment.
Both files deploy the same services.

<Tabs groupId="Compose file">
<TabItem value="docker-compose.yml" label="docker-compose.yml" default>

```bash
docker compose build
docker compose up -d
```

</TabItem>
<TabItem value="docker-compose-cpu.yml" label="docker-compose-cpu.yml">

```bash
docker compose -f docker-compose-cpu.yml up -d
```

</TabItem>
</Tabs>
* [`docker-compose.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml): If your host machine has an NVIDIA GPU with CUDA support and compatible NVIDIA drivers, you can use this file to deploy OpenRAG with accelerated processing.

The OpenRAG Docker Compose file starts five containers:

| Container Name | Default Address | Purpose |
|---|---|---|
| OpenRAG Backend | http://localhost:8000 | FastAPI server and core functionality. |
| OpenRAG Frontend | http://localhost:3000 | React web interface for users. |
| Langflow | http://localhost:7860 | AI workflow engine and flow management. |
| OpenSearch | http://localhost:9200 | Vector database for document storage. |
| OpenSearch Dashboards | http://localhost:5601 | Database administration interface. |

8. Verify installation by confirming all services are running.

```bash
docker compose ps
```

You can now access OpenRAG at the following endpoints:

- **Frontend**: http://localhost:3000
- **Backend API**: http://localhost:8000
- **Langflow**: http://localhost:7860

9. Continue with [application onboarding](#application-onboarding).

To stop `docling serve` when you're done with your OpenRAG deployment, run:

```bash
uv run python scripts/docling_ctl.py stop
```

<PartialOnboarding />
## Container management commands

Manage your OpenRAG containers with the following commands.
These commands are also available in the TUI's [Status menu](/install#status).

### Upgrade containers {#upgrade-containers}

Upgrade your containers to the latest version while preserving your data.

```bash
docker compose pull
docker compose up -d --force-recreate
```

### Reset containers (destructive) {#reset-containers}

:::warning
These are destructive operations that reset your OpenRAG deployment to an initial state.
Be aware that data is lost and cannot be recovered after running these commands.
:::
<Tabs>
<TabItem value="docker-compose" label="Docker Compose" default>

* Rebuild containers: This command destroys and recreates the containers. Data stored exclusively on the containers is lost, such as Langflow flows.
The `.env` file, `config` directory, `./openrag-documents` directory, `./opensearch-data` directory, and the `conversations.json` file are preserved.

```bash
docker compose up --build --force-recreate --remove-orphans
```

* Destroy and recreate containers with the option for additional data removal: These commands destroy the containers, and then recreate them.
This allows you to delete other OpenRAG data before recreating the containers.

1. Destroy the containers, volumes, and local images, and then remove (prune) any additional Docker objects:

```bash
docker compose down --volumes --remove-orphans --rmi local
docker system prune -f
```

2. Optional: Remove data that wasn't deleted by the previous commands:

* OpenRAG's `.env` file
* The contents of OpenRAG's `config` directory
* The contents of the `./openrag-documents` directory
* The contents of the `./opensearch-data` directory
* The `conversations.json` file

3. Recreate the containers:

```bash title="Docker"
docker compose build
docker compose up -d
```

</TabItem>
<TabItem value="Podman-compose" label="Podman Compose">

* Rebuild containers: This command destroys and recreates the containers. Data stored exclusively on the containers is lost, such as Langflow flows.
The `.env` file, `config` directory, `./openrag-documents` directory, `./opensearch-data` directory, and the `conversations.json` file are preserved.

```bash
podman-compose up --build --force-recreate --remove-orphans
```

* Destroy and recreate containers with the option for additional data removal: These commands destroy the containers, and then recreate them.
This allows you to delete other OpenRAG data before recreating the containers.

1. Destroy the containers, volumes, and local images, and then remove (prune) any additional Podman objects:

```bash
podman-compose down --volumes --remove-orphans --rmi local
podman system prune -f
```

```bash title="Podman"
podman compose build
podman compose up -d
```

2. Optional: Remove data that wasn't deleted by the previous commands:
* [`docker-compose-cpu.yml`](https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml): If your host machine doesn't have NVIDIA GPU support, use this file for a CPU-only OpenRAG deployment.

* OpenRAG's `.env` file
* The contents of OpenRAG's `config` directory
* The contents of the `./openrag-documents` directory
* The contents of the `./opensearch-data` directory
* The `conversations.json` file

3. Recreate the containers:

```bash
podman-compose up -d
```

```bash title="Docker"
docker compose -f docker-compose-cpu.yml up -d
```

</TabItem>
<TabItem value="docker" label="Docker">

```bash title="Podman"
podman compose -f docker-compose-cpu.yml up -d
```

1. Stop all running containers:
4. Wait for the OpenRAG containers to start, and then confirm that all containers are running:

```bash
docker stop $(docker ps -q)
```

```bash title="Docker"
docker compose ps
```

2. Remove all containers, including stopped containers:

```bash
docker rm --force $(docker ps -aq)
```

```bash title="Podman"
podman compose ps
```

3. Remove all images:
The OpenRAG Docker Compose files deploy the following containers:

```bash
docker rmi --force $(docker images -q)
```

| Container Name | Default address | Purpose |
|---|---|---|
| OpenRAG Backend | http://localhost:8000 | FastAPI server and core functionality. |
| OpenRAG Frontend | http://localhost:3000 | React web interface for user interaction. |
| Langflow | http://localhost:7860 | [AI workflow engine](/agents). |
| OpenSearch | http://localhost:9200 | Datastore for [knowledge](/knowledge). |
| OpenSearch Dashboards | http://localhost:5601 | OpenSearch database administration interface. |

4. Remove all volumes:
When the containers are running, you can access your OpenRAG services at their addresses.

```bash
docker volume prune --force
```

5. Access the OpenRAG frontend at `http://localhost:3000`, and then continue with the [application onboarding process](#application-onboarding).
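To confirm the frontend is answering before opening a browser, a plain HTTP check works; no OpenRAG-specific endpoint is assumed here:

```bash
curl -sI http://localhost:3000 | head -n 1
```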
5. Remove all networks except the default network:
<PartialOnboarding />

```bash
docker network prune --force
```

6. Clean up any leftover data:

```bash
docker system prune --all --force --volumes
```

7. Optional: Remove data that wasn't deleted by the previous commands:

* OpenRAG's `.env` file
* The contents of OpenRAG's `config` directory
* The contents of the `./openrag-documents` directory
* The contents of the `./opensearch-data` directory
* The `conversations.json` file

</TabItem>
<TabItem value="podman" label="Podman">

1. Stop all running containers:

```bash
podman stop --all
```

2. Remove all containers, including stopped containers:

```bash
podman rm --all --force
```

3. Remove all images:

```bash
podman rmi --all --force
```

4. Remove all volumes:

```bash
podman volume prune --force
```

5. Remove all networks except the default network:

```bash
podman network prune --force
```

6. Clean up any leftover data:

```bash
podman system prune --all --force --volumes
```

7. Optional: Remove data that wasn't deleted by the previous commands:

* OpenRAG's `.env` file
* The contents of OpenRAG's `config` directory
* The contents of the `./openrag-documents` directory
* The contents of the `./opensearch-data` directory
* The `conversations.json` file

</TabItem>
</Tabs>

After resetting your containers, you must repeat [application onboarding](#application-onboarding).
<PartialInstallNextSteps />

34 docs/docs/get-started/install-options.mdx Normal file

@@ -0,0 +1,34 @@
---
title: Select an installation method
slug: /install-options
---

The [OpenRAG architecture](/#openrag-architecture) is lightweight and container-based, with a central OpenRAG backend that orchestrates the various services and external connectors.
Depending on your use case, OpenRAG can assist with service management, or you can manage the services yourself.

Select the installation method that best fits your needs:

* **Use the [Terminal User Interface (TUI)](/tui) to manage services**: For guided configuration and simplified service management, install OpenRAG with TUI-managed services. Use one of the following options:

* [**Automatic installer script**](/install): Run one script to install the required dependencies and OpenRAG.
* [**`uv`**](/install-uv): Install OpenRAG as a dependency of a new or existing Python project.
* [**`uvx`**](/install-uvx): Install OpenRAG without creating a project or modifying your project's dependencies.

* [**Install OpenRAG on Microsoft Windows**](/install-windows): On Windows machines, you must install OpenRAG within the Windows Subsystem for Linux (WSL).

:::warning
OpenRAG doesn't support nested virtualization; don't run OpenRAG on a WSL distribution that is inside a Windows VM.
:::

* [**Manage your own services**](/docker): You can use Docker or Podman to deploy self-managed OpenRAG services.

The first time you start OpenRAG, you must complete the application onboarding process.
This is required for all installation methods because it prepares the minimum required configuration for OpenRAG to run.
For TUI-managed services, you must also complete initial setup before you start the OpenRAG services.
For more information, see the instructions for your preferred installation method.

Your OpenRAG configuration is stored in a `.env` file in the OpenRAG installation directory.
When using TUI-managed services, this file is created automatically, or you can provide a pre-populated `.env` file before starting the TUI.
The TUI prompts you for the required values during setup and onboarding, and any values detected in a preexisting `.env` file are populated automatically.
When using self-managed services, you must provide a pre-populated `.env` file, as you would for any Docker or Podman deployment.
For more information, see the instructions for your preferred installation method and the [OpenRAG environment variables reference](/reference/configuration).

123 docs/docs/get-started/install-uv.mdx Normal file

@@ -0,0 +1,123 @@
---
title: Install OpenRAG in a Python project with uv
slug: /install-uv
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialSetup from '@site/docs/_partial-setup.mdx';
import PartialPrereqCommon from '@site/docs/_partial-prereq-common.mdx';
import PartialPrereqNoScript from '@site/docs/_partial-prereq-no-script.mdx';
import PartialPrereqWindows from '@site/docs/_partial-prereq-windows.mdx';
import PartialPrereqPython from '@site/docs/_partial-prereq-python.mdx';
import PartialInstallNextSteps from '@site/docs/_partial-install-next-steps.mdx';
import PartialOpenSearchAuthMode from '@site/docs/_partial-opensearch-auth-mode.mdx';

Use [`uv`](https://docs.astral.sh/uv/getting-started/installation/) to install OpenRAG as a managed or unmanaged dependency in a new or existing Python project.

When you install OpenRAG with `uv`, you will use the [Terminal User Interface (TUI)](/tui) to configure and manage your OpenRAG deployment.

For other installation methods, see [Select an installation method](/install-options).

## Prerequisites

<PartialPrereqWindows />

<PartialPrereqPython />

<PartialPrereqNoScript />

<PartialPrereqCommon />

## Install and start OpenRAG with uv

There are two ways to install OpenRAG with `uv`:

* [**`uv add`** (Recommended)](#uv-add): Install OpenRAG as a managed dependency in a new or existing `uv` Python project.
This is recommended because it adds OpenRAG to your `pyproject.toml` and lockfile for better management of dependencies and the virtual environment.

* [**`uv pip install`**](#uv-pip-install): Use the [`uv pip` interface](https://docs.astral.sh/uv/pip/) to install OpenRAG into an existing Python project that uses `pip`, `pip-tools`, and `virtualenv` commands.

If you encounter errors during installation, see [Troubleshoot OpenRAG](/support/troubleshoot).

### Use uv add {#uv-add}

1. Create a new `uv`-managed Python project:

```bash
uv init PROJECT_NAME
```

2. Change into your new project directory:

```bash
cd PROJECT_NAME
```

Because `uv` manages the virtual environment for you, you won't see a `(venv)` prompt.
`uv` commands automatically use the project's virtual environment.
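To confirm which environment `uv run` uses, a quick illustrative check:

```bash
uv run python -c "import sys; print(sys.prefix)"
```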
3. Add OpenRAG to your project:

* Add the latest version:

```bash
uv add openrag
```

* Add a specific version:

```bash
uv add openrag==0.1.30
```

* Add a local wheel:

```bash
uv add path/to/openrag-VERSION-py3-none-any.whl
```

For more options, see [Managing dependencies with `uv`](https://docs.astral.sh/uv/concepts/projects/dependencies/).

4. Optional: If you want to use a pre-populated [OpenRAG `.env` file](/reference/configuration), copy it to this directory before starting OpenRAG.
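For example, assuming your pre-populated file lives at `~/openrag/.env` (a hypothetical path):

```bash
cp ~/openrag/.env .
```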
5. Start the OpenRAG TUI:

```bash
uv run openrag
```

### Use uv pip install {#uv-pip-install}

1. Activate your virtual environment.

2. Install the OpenRAG Python package:

```bash
uv pip install openrag
```

3. Optional: If you want to use a pre-populated [OpenRAG `.env` file](/reference/configuration), copy it to this directory before starting OpenRAG.

4. Start the OpenRAG TUI:

```bash
uv run openrag
```

## Set up OpenRAG with the TUI {#setup}

When you install OpenRAG with `uv`, you manage the OpenRAG services with the TUI.
The TUI guides you through the initial configuration process before you start the OpenRAG services.

Your configuration values are stored in an [OpenRAG `.env` file](/reference/configuration) that is created automatically in the Python project where you installed OpenRAG.
If OpenRAG detects an existing `.env` file in this directory, then the TUI can populate those values automatically during setup and onboarding.

Container definitions are stored in the `docker-compose` files in the same directory as the OpenRAG `.env` file.

<PartialSetup />

<PartialOnboarding />

<PartialInstallNextSteps />

82 docs/docs/get-started/install-uvx.mdx Normal file

@@ -0,0 +1,82 @@
---
title: Invoke OpenRAG with uvx
slug: /install-uvx
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialSetup from '@site/docs/_partial-setup.mdx';
import PartialPrereqCommon from '@site/docs/_partial-prereq-common.mdx';
import PartialPrereqNoScript from '@site/docs/_partial-prereq-no-script.mdx';
import PartialPrereqWindows from '@site/docs/_partial-prereq-windows.mdx';
import PartialPrereqPython from '@site/docs/_partial-prereq-python.mdx';
import PartialInstallNextSteps from '@site/docs/_partial-install-next-steps.mdx';
import PartialOpenSearchAuthMode from '@site/docs/_partial-opensearch-auth-mode.mdx';

Use [`uvx`](https://docs.astral.sh/uv/guides/tools/#running-tools) to invoke OpenRAG outside of a Python project or without modifying your project's dependencies.

:::tip
The [automatic installer script](/install) also uses `uvx` to install OpenRAG.
:::

When you install OpenRAG with `uvx`, you will use the [Terminal User Interface (TUI)](/tui) to configure and manage your OpenRAG deployment.

This installation method is best for testing OpenRAG by running it outside of a Python project.
For other installation methods, see [Select an installation method](/install-options).

## Prerequisites

<PartialPrereqWindows />

<PartialPrereqPython />

<PartialPrereqNoScript />

<PartialPrereqCommon />

## Install and run OpenRAG with uvx

1. Create a directory to store your OpenRAG configuration files and data, and then change to that directory:

```bash
mkdir openrag-workspace
cd openrag-workspace
```

2. Optional: If you want to use a pre-populated [OpenRAG `.env` file](/reference/configuration), copy it to this directory before invoking OpenRAG.

3. Invoke OpenRAG:

```bash
uvx openrag
```

You can invoke a specific version using any of the [`uvx` version specifiers](https://docs.astral.sh/uv/guides/tools/#requesting-specific-versions), such as `--from`:

```bash
uvx --from openrag==0.1.30 openrag
```

Invoking OpenRAG with `uvx openrag` creates a cached, ephemeral environment for the TUI in your local `uv` cache.
By invoking OpenRAG in a specific directory, your OpenRAG configuration files and data are stored separately from the `uv` cache.
Clearing the `uv` cache doesn't remove your entire OpenRAG installation.
After clearing the cache, you can re-invoke OpenRAG (`uvx openrag`) to restart the TUI with your preserved configuration and data.
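For reference, the `uv` cache is cleared with `uv`'s own command:

```bash
uv cache clean
```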
If you encounter errors during installation, see [Troubleshoot OpenRAG](/support/troubleshoot).

## Set up OpenRAG with the TUI {#setup}

When you install OpenRAG with `uvx`, you manage the OpenRAG services with the TUI.
The TUI guides you through the initial configuration process before you start the OpenRAG services.

Your configuration values are stored in an [OpenRAG `.env` file](/reference/configuration) that is created automatically in the OpenRAG installation directory, which is the directory where you invoked OpenRAG.
If OpenRAG detects an existing `.env` file in this directory, then the TUI can populate those values automatically during setup and onboarding.

Container definitions are stored in the `docker-compose` files in the same directory as the OpenRAG `.env` file.

<PartialSetup />

<PartialOnboarding />

<PartialInstallNextSteps />

@@ -1,4 +1,21 @@

1. [Install WSL](https://learn.microsoft.com/en-us/windows/wsl/install) with the Ubuntu distribution using WSL 2:
---
title: Install OpenRAG on Microsoft Windows
slug: /install-windows
---

If you're using Windows, you must install OpenRAG within the Windows Subsystem for Linux (WSL).

:::warning
Nested virtualization isn't supported.

OpenRAG isn't compatible with nested virtualization, which can cause networking issues.
Don't install OpenRAG on a WSL distribution that is installed inside a Windows VM.
Instead, install OpenRAG on your base OS or a non-nested Linux VM.
:::

## Install OpenRAG in the WSL

1. [Install WSL](https://learn.microsoft.com/en-us/windows/wsl/install) with an Ubuntu distribution using WSL 2:

```powershell
wsl --install -d Ubuntu
```

@@ -8,18 +25,18 @@
For existing WSL installations, you can [change the distribution](https://learn.microsoft.com/en-us/windows/wsl/install#change-the-default-linux-distribution-installed) and [check the WSL version](https://learn.microsoft.com/en-us/windows/wsl/install#upgrade-version-from-wsl-1-to-wsl-2).

:::warning Known limitation
OpenRAG isn't compatible with nested virtualization, which can cause networking issues.
Don't install OpenRAG on a WSL distribution that is installed inside a Windows VM.
Instead, install OpenRAG on your base OS or a non-nested Linux VM.
:::

2. [Start your WSL Ubuntu distribution](https://learn.microsoft.com/en-us/windows/wsl/install#ways-to-run-multiple-linux-distributions-with-wsl) if it doesn't start automatically.

3. [Set up a username and password for your WSL distribution](https://learn.microsoft.com/en-us/windows/wsl/setup/environment#set-up-your-linux-username-and-password).

4. [Install Docker Desktop for Windows with WSL 2](https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-containers). When you reach the Docker Desktop **WSL integration** settings, make sure your Ubuntu distribution is enabled, and then click **Apply & Restart** to enable Docker support in WSL.

The Docker Desktop WSL integration makes Docker available within your WSL distribution.
You don't need to install Docker or Podman separately in your WSL distribution before you install OpenRAG.

5. Install and run OpenRAG from within your WSL Ubuntu distribution.
You can install OpenRAG in your WSL distribution using any of the [OpenRAG installation methods](/install-options).

## Troubleshoot OpenRAG in WSL

If you encounter issues with port forwarding or the Windows Firewall, you might need to adjust the [Hyper-V firewall settings](https://learn.microsoft.com/en-us/windows/security/operating-system-security/network-security/windows-firewall/hyper-v-firewall) to allow communication between your WSL distribution and the Windows host. For more troubleshooting advice for networking issues, see [Troubleshooting WSL common issues](https://learn.microsoft.com/en-us/windows/wsl/troubleshooting#common-issues).

@@ -1,469 +1,81 @@
---
title: Install OpenRAG with TUI
title: Install OpenRAG with the automatic installer script
slug: /install
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialOnboarding from '@site/docs/_partial-onboarding.mdx';
import PartialWsl from '@site/docs/_partial-wsl-install.mdx';
import PartialSetup from '@site/docs/_partial-setup.mdx';
import PartialPrereqCommon from '@site/docs/_partial-prereq-common.mdx';
import PartialPrereqWindows from '@site/docs/_partial-prereq-windows.mdx';
import PartialPrereqPython from '@site/docs/_partial-prereq-python.mdx';
import PartialInstallNextSteps from '@site/docs/_partial-install-next-steps.mdx';
import PartialOpenSearchAuthMode from '@site/docs/_partial-opensearch-auth-mode.mdx';

[Install OpenRAG](#install) and then run the [OpenRAG Terminal User Interface (TUI)](#setup) to start your OpenRAG deployment with a guided setup process.
:::tip
To quickly install and test OpenRAG's core features, try the [quickstart](/quickstart).
:::

The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal.
The installer script installs `uv`, Docker or Podman, and Docker Compose.
Then, it installs and runs OpenRAG with `uvx`.

![OpenRAG TUI](/img/tui-setup.png)
When you install OpenRAG with the installer script, you will use the [Terminal User Interface (TUI)](/tui) to configure and manage your OpenRAG deployment.

Instead of starting OpenRAG using Docker commands and manually editing values in the `.env` file, the TUI walks you through the setup. It prompts for variables where required, creates a `.env` file for you, and then starts OpenRAG.

Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs.

If you prefer running Podman or Docker containers and manually editing `.env` files, see [Install OpenRAG Containers](/docker).
This installation method is best for testing OpenRAG by running it outside of a Python project.
For other installation methods, see [Select an installation method](/install-options).

## Prerequisites

- All OpenRAG installations require [Python](https://www.python.org/downloads/release/python-3100/) version 3.13 or later.
<PartialPrereqWindows />

- If you aren't using the automatic installer script, install the following:
<PartialPrereqPython />

- [uv](https://docs.astral.sh/uv/getting-started/installation/).
- [Podman](https://podman.io/docs/installation) (recommended) or [Docker](https://docs.docker.com/get-docker/).
- [`podman-compose`](https://docs.podman.io/en/latest/markdown/podman-compose.1.html) or [Docker Compose](https://docs.docker.com/compose/install/). To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands.
<PartialPrereqCommon />

- Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL).
## Run the installer script {#install}

<details>
<summary>Install WSL for OpenRAG</summary>
1. Create a directory to store your OpenRAG configuration files and data, and then change to that directory:

<PartialWsl />
```bash
mkdir openrag-workspace
cd openrag-workspace
```

</details>
2. Get and run the installer script:

- Prepare model providers and credentials.
```bash
curl -fsSL https://docs.openr.ag/files/run_openrag_with_prereqs.sh | bash
```

During [application onboarding](#application-onboarding), you must select language model and embedding model providers.
If your chosen provider offers both types, you can use the same provider for both selections.
If your provider offers only one type, such as Anthropic, you must select two providers.
The installer script installs OpenRAG with [`uvx`](https://docs.astral.sh/uv/guides/tools/#running-tools) in the directory where you run the script.

Gather the credentials and connection details for your chosen model providers before starting onboarding:
3. Wait while the installer script prepares your environment and installs OpenRAG.
You might be prompted to install certain dependencies if they aren't already present in your environment.

- OpenAI: Create an [OpenAI API key](https://platform.openai.com/api-keys).
- Anthropic language models: Create an [Anthropic API key](https://www.anthropic.com/docs/api/reference).
- IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment.
- Ollama: Use the [Ollama documentation](https://docs.ollama.com/) to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL.
The entire process can take a few minutes.
Once the environment is ready, the OpenRAG TUI starts.

- Optional: Install GPU support with an NVIDIA GPU, [CUDA](https://docs.nvidia.com/cuda/) support, and compatible NVIDIA drivers on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment.
![OpenRAG TUI](/img/tui-setup.png)
## Install OpenRAG {#install}

Choose an installation method based on your needs:

* For new users, the automatic installer script detects and installs prerequisites and then runs OpenRAG.
* For a quick test, use `uvx` to run OpenRAG without creating a project or modifying files.
* Use `uv add` to install OpenRAG as a managed dependency in a new or existing Python project.
* Use `uv pip install` to install OpenRAG into an existing virtual environment.

<Tabs groupId="Installation method">
<TabItem value="installer" label="Automatic installer" default>

The script detects and installs `uv`, Docker/Podman, and Docker Compose prerequisites, then runs OpenRAG with `uvx`.

1. Create a directory to store the OpenRAG configuration files:
```bash
mkdir openrag-workspace
cd openrag-workspace
```

2. Run the installer:
```bash
curl -fsSL https://docs.openr.ag/files/run_openrag_with_prereqs.sh | bash
```

The TUI creates a `.env` file and docker-compose files in the current working directory.

</TabItem>
<TabItem value="uvx" label="Quick test with uvx">

Use `uvx` to quickly run OpenRAG without creating a project or modifying any files.

1. Create a directory to store the OpenRAG configuration files:
```bash
mkdir openrag-workspace
cd openrag-workspace
```

2. Run OpenRAG:
```bash
uvx openrag
```

To run a specific version:
```bash
uvx --from openrag==0.1.30 openrag
```

The TUI creates a `.env` file and docker-compose files in the current working directory.

</TabItem>
<TabItem value="uv-add" label="Python project with uv add">

Use `uv add` to install OpenRAG as a dependency in your Python project. This adds OpenRAG to your `pyproject.toml` and lockfile, making your installation reproducible and version-controlled.

1. Create a new project with a virtual environment:
```bash
uv init YOUR_PROJECT_NAME
cd YOUR_PROJECT_NAME
```

The `(venv)` prompt doesn't appear, but `uv` commands will automatically use the project's virtual environment.

2. Add OpenRAG to your project:
```bash
uv add openrag
```

To add a specific version:
```bash
uv add openrag==0.1.30
```

3. Start the OpenRAG TUI:
```bash
uv run openrag
```

<details closed>
<summary>Install a local wheel</summary>

If you downloaded the OpenRAG wheel to your local machine, install it by specifying its path:

1. Add the wheel to your project:
```bash
uv add PATH/TO/openrag-VERSION-py3-none-any.whl
```

Replace `PATH/TO/` and `VERSION` with the path and version of your downloaded OpenRAG `.whl` file.

2. Run OpenRAG:
```bash
uv run openrag
```
</details>

</TabItem>
<TabItem value="uv-pip" label="Existing virtual environment with uv pip install">

Use `uv pip install` to install OpenRAG into an existing virtual environment that isn't managed by `uv`.

:::tip
For new projects, `uv add` is recommended as it manages dependencies in your project's lockfile.
:::

1. Activate your virtual environment.

2. Install OpenRAG:
```bash
uv pip install openrag
```

3. Run OpenRAG:
```bash
uv run openrag
```

</TabItem>
</Tabs>

Continue with [Set up OpenRAG with the TUI](#setup).
Because the installer script uses `uvx`, it creates a cached, ephemeral environment in your local `uv` cache, and your OpenRAG configuration files and data are stored separately from the `uv` cache.
Clearing the cache doesn't delete your entire OpenRAG installation, only the temporary TUI environment.
After clearing the cache, run `uvx openrag` to [access the TUI](/tui) and continue with your preserved configuration and data.

If you encounter errors during installation, see [Troubleshoot OpenRAG](/support/troubleshoot).

## Set up OpenRAG with the TUI {#setup}
|
||||
|
||||
The OpenRAG setup process creates a `.env` file at the root of your OpenRAG directory, and then starts OpenRAG.
|
||||
If it detects a `.env` file in the OpenRAG root directory, it sources any variables from the `.env` file.
|
||||
When you install OpenRAG with the installer script, you manage the OpenRAG services with the TUI.
|
||||
The TUI guides you through the initial configuration process before you start the OpenRAG services.
|
||||
|
||||
The TUI offers two setup methods to populate the required values. **Basic Setup** can generate all minimum required values for OpenRAG. However, **Basic Setup** doesn't enable [OAuth connectors for cloud storage](/knowledge#auth). If you want to use OAuth connectors to upload documents from cloud storage, select **Advanced Setup**.
|
||||
If OpenRAG detects OAuth credentials, it recommends **Advanced Setup**.
|
||||
Your configuration values are stored in an [OpenRAG `.env` file](/reference/configuration) that is created automatically in the OpenRAG installation directory, which is the directory where you ran the installer script.
|
||||
If OpenRAG detects an existing `.env` file in this directory, then the TUI can populate those values automatically during setup and onboarding.
|
||||
|
||||
<Tabs groupId="Setup method">
|
||||
<TabItem value="Basic setup" label="Basic setup" default>
|
||||
Container definitions are stored in the `docker-compose` files in the same directory as the OpenRAG `.env` file.
|
||||
|
||||
1. To install OpenRAG with **Basic Setup**, click **Basic Setup** or press <kbd>1</kbd>.
|
||||
2. Click **Generate Passwords** to generate passwords for OpenSearch and Langflow.
|
||||
|
||||
The OpenSearch password is required. The Langflow admin password is optional.
|
||||
If no Langflow admin password is generated, Langflow runs in [autologin mode](https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login) with no password required.
|
||||
|
||||
3. Optional: Paste your OpenAI API key in the OpenAI API key field. You can also provide this during onboarding or choose a different model provider.
|
||||
4. Click **Save Configuration**.
|
||||
Your passwords are saved in the `.env` file used to start OpenRAG.
|
||||
5. To start OpenRAG, click **Start All Services**.
|
||||
Startup pulls container images and runs them, so it can take some time.
|
||||
When startup is complete, the TUI displays the following:
|
||||
```bash
|
||||
Services started successfully
|
||||
Command completed successfully
|
||||
```
|
||||
6. To start the Docling service, under **Native Services**, click **Start**.
|
||||
7. To open the OpenRAG application, navigate to the TUI main menu, and then click **Open App**.
|
||||
Alternatively, in your browser, navigate to `localhost:3000`.
|
||||
8. Continue with [application onboarding](#application-onboarding).
|
||||
</TabItem>
|
||||
<TabItem value="Advanced setup" label="Advanced setup">
|
||||
|
||||
1. To install OpenRAG with **Advanced Setup**, click **Advanced Setup** or press <kbd>2</kbd>.
|
||||
2. Click **Generate Passwords** to generate passwords for OpenSearch and Langflow.
|
||||
|
||||
The OpenSearch password is required. The Langflow admin password is optional.
|
||||
If no Langflow admin password is generated, Langflow runs in [autologin mode](https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login) with no password required.
|
||||
|
||||
3. Paste your OpenAI API key in the OpenAI API key field.
|
||||
4. If you want to upload documents from external storage, such as Google Drive, add the required OAuth credentials for the connectors that you want to use. These settings can be populated automatically if OpenRAG detects these credentials in a `.env` file in the OpenRAG installation directory.
|
||||
|
||||
* **Amazon**: Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on [Configuring access to AWS applications](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html).
|
||||
* **Google**: Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials). For more information, see the [Google OAuth client documentation](https://developers.google.com/identity/protocols/oauth2).
|
||||
* **Microsoft**: For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide [Azure application registration credentials for SharePoint and OneDrive](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online). For more information, see the [Microsoft Graph OAuth client documentation](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth).
|
||||
|
||||
You can [manage OAuth credentials](/ingestion#oauth-ingestion) later, but it is recommended to configure them during initial set up.
|
||||
|
||||
5. The OpenRAG TUI presents redirect URIs for your OAuth app.
|
||||
These are the URLs your OAuth provider will redirect back to after user sign-in.
|
||||
Register these redirect values with your OAuth provider as they are presented in the TUI.
|
||||
6. Click **Save Configuration**.
|
||||
7. To start OpenRAG, click **Start All Services**.
|
||||
Startup pulls container images and runs them, so it can take some time.
|
||||
When startup is complete, the TUI displays the following:
|
||||
```bash
|
||||
Services started successfully
|
||||
Command completed successfully
|
||||
```
|
||||
8. To start the Docling service, under **Native Services**, click **Start**.

9. To open the OpenRAG application, navigate to the TUI main menu, and then click **Open App**.

    Alternatively, in your browser, navigate to `localhost:3000`.

10. If you enabled OAuth connectors, you must sign in to your OAuth provider before being redirected to your OpenRAG instance.

11. Two additional variables are available for **Advanced Setup** at this point.
    Only change these variables if you have a non-default network configuration for your deployment, such as using a reverse proxy or custom domain.

    * `LANGFLOW_PUBLIC_URL`: Sets the base address to access the Langflow web interface. This is where users interact with flows in a browser.
    * `WEBHOOK_BASE_URL`: Sets the base address of the OpenRAG OAuth connector endpoints.
      Supported webhook endpoints (a full-URL sketch follows this list):

      - Amazon S3: Not applicable.
      - Google Drive: `/connectors/google_drive/webhook`
      - OneDrive: `/connectors/onedrive/webhook`
      - SharePoint: `/connectors/sharepoint/webhook`
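
      For example, if you expose your deployment through a public tunnel, the webhook URL that your OAuth provider calls is `WEBHOOK_BASE_URL` plus the connector path. The following `.env` sketch uses a placeholder hostname, not a real endpoint:

      ```bash
      # Hypothetical .env entry; replace the hostname with your routable URL
      WEBHOOK_BASE_URL=https://example-tunnel.ngrok.app

      # Resulting Google Drive webhook endpoint:
      # https://example-tunnel.ngrok.app/connectors/google_drive/webhook
      ```
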
12. Continue with [application onboarding](#application-onboarding).

</TabItem>
</Tabs>

<PartialSetup />

<PartialOnboarding />

## Exit the OpenRAG TUI

To exit the OpenRAG TUI, navigate to the main menu, and then press <kbd>q</kbd>.
The OpenRAG containers continue to run until they are stopped.
For more information, see [Manage OpenRAG containers with the TUI](#tui-container-management).

To relaunch the TUI, run `uv run openrag`.
If you installed OpenRAG with `uvx`, run `uvx openrag`.

## Manage OpenRAG containers with the TUI {#tui-container-management}

After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers.

### Diagnostics

The **Diagnostics** menu provides health monitoring for your container runtimes and for your OpenSearch security.

### Status {#status}

The **Status** menu displays information about your container deployment.
Here you can check container health, find your service ports, view logs, and upgrade your containers.

* **Logs**: To view streaming logs, select the container you want to view, and then press <kbd>l</kbd>.
  To copy the logs, click **Copy to Clipboard**.

* **Upgrade**: Check for updates. For more information, see [Upgrade OpenRAG](#upgrade).

* **Factory Reset**: A destructive action that [resets your containers](#reset-containers).

* **Native services**: [View and manage OpenRAG services](#start-all-services) that run directly on your local machine instead of in a container.

### Reset containers {#reset-containers}

Reset your OpenRAG deployment by recreating the containers and removing some related data.

:::warning
This is a destructive action that destroys the following:

* All OpenRAG containers, volumes, and local images
* Any additional Docker objects
* The contents of OpenRAG's `config` and `./opensearch-data` directories
* The `conversations.json` file

This operation _doesn't_ remove the `.env` file or the contents of the `./openrag-documents` directory.
:::

1. To destroy and recreate your OpenRAG containers, go to the TUI [**Status** menu](#status), and then click **Factory Reset**.

    This function runs the following commands _and_ deletes the contents of OpenRAG's `config` and `./opensearch-data` directories:

    ```bash
    docker compose down --volumes --remove-orphans --rmi local
    docker system prune -f
    ```

2. If you reset your containers as part of reinstalling OpenRAG, continue the [reinstallation process](#reinstall) after resetting the containers.

    Otherwise, in the TUI **Setup** menu, repeat the [setup process](#setup) to start the services and launch the OpenRAG app. Your OpenRAG passwords, OAuth credentials (if previously set), and onboarding configuration are restored from the `.env` file.

### Start all services {#start-all-services}

Through the TUI, you can view and manage OpenRAG services that run in containers and directly on your local machine.

#### Start containers

On the TUI main page or the **Setup** menu, click **Start All Services** to start the OpenRAG containers and launch OpenRAG itself.

When you start all services, the following processes happen:

1. OpenRAG automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for `CUDA`, `NVIDIA_SMI`, and Docker/Podman runtime support. This check determines which Docker Compose file OpenRAG uses, as approximated in the sketch after this list.

2. OpenRAG pulls the OpenRAG container images with `docker compose pull` if any images are missing.

3. OpenRAG deploys the containers with `docker compose up -d`.
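
The detection logic runs inside OpenRAG, but a rough manual approximation of the GPU check, offered as a sketch rather than the actual implementation, looks like this:

```bash
# Approximate, assumed logic: check for a working NVIDIA GPU before choosing a compose file
if command -v nvidia-smi >/dev/null 2>&1 && nvidia-smi >/dev/null 2>&1; then
  echo "GPU detected: OpenRAG would select the GPU Docker Compose file"
else
  echo "No GPU detected: OpenRAG would select the CPU Docker Compose file"
fi
```
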
#### Start native services (Docling)

A _native service_ in OpenRAG is a service that runs locally on your machine, not within a container. For example, the `docling serve` process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers.

From the **Status** menu, you can view the status, port, and process ID (PID) of the OpenRAG native services.
You can also click **Stop** or **Restart** to stop and restart OpenRAG native services.

## Upgrade OpenRAG {#upgrade}

To upgrade OpenRAG, upgrade the OpenRAG Python package, and then upgrade the OpenRAG containers.

This is a two-part process because upgrading the OpenRAG Python package updates the TUI and Python code, but the container versions are controlled by environment variables in your `.env` file.

1. Stop your OpenRAG containers: In the OpenRAG TUI, go to the **Status** menu, and then click **Stop Services**.

2. Upgrade the OpenRAG Python package to the latest version from [PyPI](https://pypi.org/project/openrag/).

<Tabs groupId="Installation method">
<TabItem value="installer" label="Automatic installer or uvx" default>

Use these steps to upgrade the Python package if you installed OpenRAG using the automatic installer or `uvx`:

1. Navigate to your OpenRAG workspace directory:

   ```bash
   cd openrag-workspace
   ```

2. Upgrade the OpenRAG package:

   ```bash
   uvx --from openrag openrag
   ```

   To upgrade to a specific version:

   ```bash
   uvx --from openrag==0.1.33 openrag
   ```

</TabItem>
<TabItem value="uv-add" label="Python project (uv add)">
|
||||
|
||||
Use these steps to upgrade the Python package if you installed OpenRAG in a Python project with `uv add`:
|
||||
|
||||
1. Navigate to your project directory:
|
||||
|
||||
```bash
|
||||
cd YOUR_PROJECT_NAME
|
||||
```
|
||||
|
||||
2. Update OpenRAG to the latest version:
|
||||
|
||||
```bash
|
||||
uv add --upgrade openrag
|
||||
```
|
||||
|
||||
To upgrade to a specific version:
|
||||
|
||||
```bash
|
||||
uv add --upgrade openrag==0.1.33
|
||||
```
|
||||
|
||||
3. Start the OpenRAG TUI:
|
||||
|
||||
```bash
|
||||
uv run openrag
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="uv-pip" label="Virtual environment (uv pip install)">
|
||||
|
||||
Use these steps to upgrade the Python package if you installed OpenRAG in a venv with `uv pip install`:
|
||||
|
||||
1. Activate your virtual environment.
|
||||
|
||||
2. Upgrade OpenRAG:
|
||||
|
||||
```bash
|
||||
uv pip install --upgrade openrag
|
||||
```
|
||||
|
||||
To upgrade to a specific version:
|
||||
|
||||
```bash
|
||||
uv pip install --upgrade openrag==0.1.33
|
||||
```
|
||||
|
||||
3. Start the OpenRAG TUI:
|
||||
|
||||
```bash
|
||||
uv run openrag
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
3. Start the upgraded OpenRAG containers: In the OpenRAG TUI, click **Start All Services**, and then wait while the containers start.

   After upgrading the Python package, OpenRAG runs `docker compose pull` to get the appropriate container images matching the version specified in your OpenRAG `.env` file. Then, it recreates the containers with the new images using `docker compose up -d --force-recreate`.

   In the `.env` file, the `OPENRAG_VERSION` [environment variable](/reference/configuration#system-settings) is set to `latest` by default, which pulls the `latest` available container images.
   To pin a specific container image version, set `OPENRAG_VERSION` to the desired container image version, such as `OPENRAG_VERSION=0.1.33`, as shown in the sketch after this step.

   However, when you upgrade the Python package, OpenRAG automatically attempts to keep `OPENRAG_VERSION` synchronized with the Python package version.
   You might need to edit the `.env` file after upgrading the Python package to enforce a different container version.
   The TUI warns you if it detects a version mismatch.

   If you get a `langflow container already exists` error during upgrade, see [Langflow container already exists during upgrade](/support/troubleshoot#langflow-container-already-exists-during-upgrade).
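
   For example, to hold the containers at a specific release across restarts, the relevant `.env` line might look like the following sketch (the version number is illustrative):

   ```bash
   # Pin the OpenRAG container images instead of tracking `latest`
   OPENRAG_VERSION=0.1.33
   ```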

4. When the upgrade process is complete, you can close the **Status** window and continue using OpenRAG.

## Reinstall OpenRAG {#reinstall}

Reset your OpenRAG deployment by recreating the containers and, optionally, removing related data:

1. In the TUI, [reset your containers](#reset-containers) to destroy the following:

   * All existing OpenRAG containers, volumes, and local images
   * Any additional Docker objects
   * The contents of OpenRAG's `config` and `./opensearch-data` directories
   * The `conversations.json` file

2. Optional: Remove data that wasn't deleted by the **Factory Reset** operation. For a completely fresh installation, delete all of this data.

   * **OpenRAG's `.env` file**: Contains your OpenRAG configuration, including OpenRAG passwords, API keys, OAuth settings, and other [environment variables](/reference/configuration). If you delete this file, you must either repeat the [setup process](#setup) to create a new `.env` file, or add a populated `.env` file to your OpenRAG installation directory before restarting OpenRAG.
   * **The contents of the `./openrag-documents` directory**: Contains documents that you uploaded to OpenRAG. Delete these files to prevent documents from being reingested into your knowledge base after restarting OpenRAG. However, you might want to preserve OpenRAG's [default documents](https://github.com/langflow-ai/openrag/tree/main/openrag-documents).

3. In the TUI **Setup** menu, repeat the [setup process](#setup) to configure OpenRAG, restart the services, and launch the OpenRAG app, and then repeat [application onboarding](#application-onboarding).

   If OpenRAG detects a `.env` file, it automatically populates any OpenRAG passwords, OAuth credentials, and onboarding configuration set in that file.

<PartialInstallNextSteps />

187 docs/docs/get-started/manage-services.mdx Normal file
@@ -0,0 +1,187 @@
---
title: Manage OpenRAG containers and services
slug: /manage-services
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialDockerComposeUp from '@site/docs/_partial-docker-compose-up.mdx';
import PartialDockerComposeDownAndPrune from '@site/docs/_partial-docker-compose-down-and-prune.mdx';
import PartialFactorResetWarning from '@site/docs/_partial-factory-reset-warning.mdx';

Service management is an essential part of maintaining your OpenRAG deployment.

Most OpenRAG services run in containers.
However, some services, like Docling, run directly on the local machine.

If you [installed OpenRAG](/install-options) with the automated installer script, `uv`, or `uvx`, you can use the [Terminal User Interface (TUI)](/tui) to manage your OpenRAG configuration and services.

For [self-managed deployments](/docker), run Docker or Podman commands to manage your OpenRAG services.

## Monitor services

<Tabs>
<TabItem value="TUI" label="TUI-managed services" default>

* **TUI Status menu**: In the **Status** menu (<kbd>3</kbd>), you can access streaming logs for all OpenRAG services.
  Select the service you want to view, and then press <kbd>l</kbd>.
  To copy the logs, click **Copy to Clipboard**.

* **TUI Diagnostics menu**: The TUI's **Diagnostics** menu (<kbd>4</kbd>) provides health monitoring for your container runtimes and monitoring of your OpenSearch instance.

* **Docling**: See [Stop, start, and inspect native services](#start-native-services).

</TabItem>
<TabItem value="env" label="Self-managed services">

* **Containers**: Get container logs with [`docker compose logs`](https://docs.docker.com/reference/cli/docker/compose/logs/) or [`podman logs`](https://docs.podman.io/en/latest/markdown/podman-logs.1.html), as shown in the sketch after this list.

* **Docling**: See [Stop, start, and inspect native services](#start-native-services).
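
For example, to follow the logs of a single service, pass the Compose service name. The service name below is a placeholder; run `docker compose ps` to list the real names in your deployment:

```bash
# List services, then follow the last 100 log lines of one of them
docker compose ps
docker compose logs -f --tail=100 openrag-backend  # placeholder service name
```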

</TabItem>
</Tabs>

## Stop and start containers

<Tabs>
<TabItem value="TUI" label="TUI-managed services" default>

In the TUI's **Status** menu (<kbd>3</kbd>), click **Stop Services** to stop all OpenRAG container-based services.
Then, click **Start All Services** to restart the OpenRAG containers.

When you click **Start All Services**, the following processes are triggered:

1. OpenRAG automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for `CUDA`, `NVIDIA_SMI`, and Docker/Podman runtime support. This check determines which Docker Compose file OpenRAG uses because there are separate Docker Compose files for GPU and CPU deployments.

2. OpenRAG pulls the OpenRAG container images with `docker compose pull` if any images are missing.

3. OpenRAG deploys the containers with `docker compose up -d`.
</TabItem>
<TabItem value="env" label="Self-managed services">

Use [`docker compose down`](https://docs.docker.com/reference/cli/docker/compose/down/) and [`docker compose up -d`](https://docs.docker.com/reference/cli/docker/compose/up/).

To stop or start individual containers, use targeted commands like `docker stop CONTAINER_ID` and `docker start CONTAINER_ID`.
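
For example, to restart a single container, first look up its ID or name. The name filter below is a placeholder pattern, not a guaranteed OpenRAG container name:

```bash
# Find the container (the name filter is a placeholder)
docker ps --filter "name=openrag"

# Stop and start that one container by ID or name
docker stop CONTAINER_ID
docker start CONTAINER_ID
```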

</TabItem>
</Tabs>

## Stop, start, and inspect native services (Docling) {#start-native-services}

A _native service_ in OpenRAG is a service that runs locally on your machine, not within a container. For example, the `docling serve` process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers.

<Tabs>
<TabItem value="TUI" label="TUI-managed services" default>

From the TUI's **Status** menu (<kbd>3</kbd>), click **Native Services** to do the following:

* View the service's status, port, and process ID (PID).
* Stop, start, and restart native services.

</TabItem>
<TabItem value="env" label="Self-managed services">
|
||||
|
||||
Because the Docling service doesn't run in a container, you must start and stop it manually on the host machine:
|
||||
|
||||
* Stop `docling serve`:
|
||||
|
||||
```bash
|
||||
uv run python scripts/docling_ctl.py stop
|
||||
```
|
||||
|
||||
* Start `docling serve`:
|
||||
|
||||
```bash
|
||||
uv run python scripts/docling_ctl.py start --port 5001
|
||||
```
|
||||
|
||||
* Check that `docling serve` is running:
|
||||
|
||||
```bash
|
||||
uv run python scripts/docling_ctl.py status
|
||||
```
|
||||
|
||||
If `docling serve` is running, the output includes the status, address, and process ID (PID):
|
||||
|
||||
```text
|
||||
Status: running
|
||||
Endpoint: http://127.0.0.1:5001
|
||||
Docs: http://127.0.0.1:5001/docs
|
||||
PID: 27746
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## Upgrade services

See [Upgrade OpenRAG](/upgrade).

## Reset containers (destructive) {#reset-containers}

Reset your OpenRAG deployment by recreating the containers and removing some related data.

To completely reset your OpenRAG deployment and delete all OpenRAG data, see [Reinstall OpenRAG](/reinstall).

### Export customized flows before resetting containers {#export-customized-flows-before-resetting-containers}

If you modified the built-in flows or created custom flows in your OpenRAG Langflow instance, and you want to preserve those changes, [export your flows](https://docs.langflow.org/concepts-flows-import) before resetting your OpenRAG containers.

### Factory Reset with the TUI

<PartialFactorResetWarning />

1. To destroy and recreate your OpenRAG containers, open the TUI's **Status** menu (<kbd>3</kbd>), and then click **Factory Reset**.

2. Repeat the [setup process](/install#setup) to restart the services and launch the OpenRAG app. Your OpenRAG passwords, OAuth credentials (if previously set), and onboarding configuration are restored from the `.env` file.

### Rebuild self-managed containers

This command destroys and recreates the containers. Data stored exclusively in the containers, such as Langflow flows, is lost.

If you want to preserve customized flows, see [Export customized flows before resetting containers](#export-customized-flows-before-resetting-containers).

The `.env` file, `config` directory, `./openrag-documents` directory, `./opensearch-data` directory, and the `conversations.json` file are preserved.

```bash title="Docker"
docker compose up --build --force-recreate --remove-orphans
```

```bash title="Podman"
podman compose up --build --force-recreate --remove-orphans
```

### Destroy and recreate self-managed containers

Use separate commands to destroy and recreate the containers if you want to modify the configuration or delete other OpenRAG data before recreating the containers.

:::warning
These are destructive operations that reset your OpenRAG deployment to an initial state.
Destroyed containers and deleted data are lost and cannot be recovered after running this operation.
:::

1. Destroy the containers, volumes, and local images, and then remove (prune) any additional container objects:

   <PartialDockerComposeDownAndPrune />

2. Optional: Remove data that wasn't deleted by the previous commands:

   * OpenRAG's `.env` file
   * The contents of OpenRAG's `config` directory
   * The contents of the `./openrag-documents` directory
   * The contents of the `./opensearch-data` directory
   * The `conversations.json` file

3. If you deleted the `.env` file, prepare a new `.env` before redeploying the containers.
   For more information, see [Deploy OpenRAG with self-managed services](/docker).

4. Recreate the containers:

   <PartialDockerComposeUp />

5. Launch the OpenRAG app, and then repeat the [application onboarding process](/docker#application-onboarding).

## See also

* [Uninstall OpenRAG](/uninstall)

@@ -6,36 +6,28 @@ slug: /quickstart
import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialWsl from '@site/docs/_partial-wsl-install.mdx';
import PartialIntegrateChat from '@site/docs/_partial-integrate-chat.mdx';
import PartialPrereqWindows from '@site/docs/_partial-prereq-windows.mdx';
import PartialPrereqPython from '@site/docs/_partial-prereq-python.mdx';

Use this quickstart to install OpenRAG, and then try some of OpenRAG's core features.

## Prerequisites

<PartialPrereqPython />

* Get an [OpenAI API key](https://platform.openai.com/api-keys).
  This quickstart uses OpenAI for simplicity.
  For other providers, see the other [installation methods](/install-options).

<PartialPrereqWindows />

## Install OpenRAG

For this quickstart, install OpenRAG with the automatic installer script and basic setup.
The script installs OpenRAG dependencies, including Docker or Podman, and then it installs and runs OpenRAG with [`uvx`](https://docs.astral.sh/uv/guides/tools/#running-tools).

1. Create a directory for your OpenRAG installation, and then change to that directory:

   ```bash
   mkdir openrag-workspace
@@ -48,40 +40,42 @@ For this quickstart, install OpenRAG with the automatic installer script and bas

   bash run_openrag_with_prereqs.sh
   ```

   Wait while the installer script prepares your environment and installs OpenRAG.
   You might be prompted to install certain dependencies if they aren't already present in your environment.
   The entire process can take a few minutes.
   Once the environment is ready, the OpenRAG [Terminal User Interface (TUI)](/tui) starts.

3. In the TUI, click **Basic Setup**.

4. Click **Generate Passwords** to create administrator passwords for your OpenRAG OpenSearch and Langflow services.

   

5. Leave the **OpenAI API key** field empty.

6. Click **Save Configuration**, and then click **Start All Services**.

   This process can take some time while OpenRAG pulls and runs the container images.
   If all services start successfully, the TUI prints a confirmation message:

   ```text
   Services started successfully
   Command completed successfully
   ```

   Your OpenRAG configuration and passwords are stored in an [OpenRAG `.env` file](/reference/configuration) that is created automatically in your OpenRAG installation directory, which is the directory where you ran the installer script.
   Container definitions are stored in the `docker-compose` files in the same directory.

7. Under [**Native Services**](/manage-services), click **Start** to start the Docling service.

8. From the TUI main menu, click **Open App** to launch the OpenRAG application and start the application onboarding process.

9. For this quickstart, select the **OpenAI** model provider, enter your OpenAI API key, and then click **Complete**. Use the default settings for all other model options.

10. Click through the overview slides for a brief introduction to OpenRAG, or click <Icon name="ArrowRight" aria-hidden="true"/> **Skip overview**.

    You can complete this quickstart without going through the overview.
    The overview demonstrates some basic functionality that is covered in the next section and in other parts of the OpenRAG documentation.

## Load and chat with documents {#chat-with-documents}

@@ -158,9 +152,8 @@ You can send and receive requests with the Langflow API using Python, TypeScript

## Next steps

* **Reinstall OpenRAG with your preferred settings**: This quickstart used `uvx` and a minimal setup to demonstrate OpenRAG's core functionality.
  It is recommended that you [reinstall OpenRAG](/reinstall) with your preferred configuration and [installation method](/install-options).

* **Learn more about OpenRAG**: Explore OpenRAG and the OpenRAG documentation to learn more about its features and functionality.

90 docs/docs/get-started/reinstall.mdx Normal file
@@ -0,0 +1,90 @@
---
title: Reinstall OpenRAG
slug: /reinstall
---

import PartialDockerComposeUp from '@site/docs/_partial-docker-compose-up.mdx';
import PartialDockerComposeDownAndPrune from '@site/docs/_partial-docker-compose-down-and-prune.mdx';
import PartialDockerStopAll from '@site/docs/_partial-docker-stop-all.mdx';
import PartialDockerRemoveAndCleanupSteps from '@site/docs/_partial-docker-remove-and-cleanup-steps.mdx';
import PartialFactorResetWarning from '@site/docs/_partial-factory-reset-warning.mdx';

You can reset your OpenRAG deployment to its initial state by recreating the containers and deleting accessory data, such as the `.env` file and ingested documents.

:::warning
These are destructive operations that reset your OpenRAG deployment to an initial state.
Destroyed containers and deleted data are lost and cannot be recovered after running these operations.
:::

## Export customized flows before reinstalling

If you modified the built-in flows or created custom flows in your OpenRAG Langflow instance, and you want to preserve those changes, [export your flows](https://docs.langflow.org/concepts-flows-import) before reinstalling OpenRAG.

## Reinstall TUI-managed containers

1. In the TUI's **Status** menu (<kbd>3</kbd>), click **Factory Reset** to destroy your OpenRAG containers and some related data.

   <PartialFactorResetWarning />

2. Exit the TUI with <kbd>q</kbd>.

3. Optional: Remove data that wasn't deleted by the **Factory Reset** operation.
   For a completely fresh installation, delete all of this data.

   * **OpenRAG's `.env` file**: Contains your OpenRAG configuration, including OpenRAG passwords, API keys, OAuth settings, and other [environment variables](/reference/configuration). If you delete this file, OpenRAG automatically generates a new one after you repeat the setup and onboarding process. Alternatively, you can add a prepopulated `.env` file to your OpenRAG installation directory before restarting OpenRAG.
   * **The contents of the `./openrag-documents` directory**: Contains documents that you uploaded to OpenRAG. Delete these files to prevent documents from being reingested into your knowledge base after restarting OpenRAG. However, you might want to preserve OpenRAG's [default documents](https://github.com/langflow-ai/openrag/tree/main/openrag-documents).

4. Restart the TUI with `uv run openrag` or `uvx openrag`.

5. Repeat the [setup process](/install#setup) to configure OpenRAG and restart all services.
   Then, launch the OpenRAG app and repeat the [application onboarding process](/install#application-onboarding).

   If OpenRAG detects a `.env` file during setup and onboarding, it automatically populates any OpenRAG passwords, OAuth credentials, and onboarding configuration set in that file.

## Reinstall self-managed containers with `docker compose` or `podman compose`

Use these steps to reinstall OpenRAG containers with streamlined `docker compose` or `podman compose` commands:

1. Destroy the containers, volumes, and local images, and then remove (prune) any additional container objects:

   <PartialDockerComposeDownAndPrune />

2. Optional: Remove data that wasn't deleted by the previous commands:

   * OpenRAG's `.env` file
   * The contents of OpenRAG's `config` directory
   * The contents of the `./openrag-documents` directory
   * The contents of the `./opensearch-data` directory
   * The `conversations.json` file

3. If you deleted the `.env` file, prepare a new `.env` before redeploying the containers.
   For more information, see [Deploy OpenRAG with self-managed services](/docker).

4. Redeploy OpenRAG:

   <PartialDockerComposeUp />

5. Launch the OpenRAG app, and then repeat the [application onboarding process](/docker#application-onboarding).

## Reinstall self-managed containers with discrete `docker` or `podman` commands

Use these commands to remove and clean up OpenRAG containers with discrete `docker` or `podman` commands.

If you want to reinstall one container, specify the container name in the commands instead of running the commands on all containers.

1. Stop all running containers:

   <PartialDockerStopAll />

<PartialDockerRemoveAndCleanupSteps />

7. Optional: Remove data that wasn't deleted by the previous commands:

   * OpenRAG's `.env` file
   * The contents of OpenRAG's `config` directory
   * The contents of the `./openrag-documents` directory
   * The contents of the `./opensearch-data` directory
   * The `conversations.json` file

8. If you removed all OpenRAG containers, [redeploy OpenRAG](/docker).
   If you removed only one container, redeploy that container with the appropriate `docker run` or `podman run` command.
32 docs/docs/get-started/tui.mdx Normal file
@@ -0,0 +1,32 @@
---
title: Use the TUI
slug: /tui
---

The OpenRAG Terminal User Interface (TUI) provides a simplified and guided experience for configuring, managing, and monitoring your OpenRAG deployment directly from the terminal.



If you install OpenRAG with the [automatic installer script](/install), [`uv`](/install-uv), or [`uvx`](/install-uvx), you use the TUI to manage your OpenRAG deployment.
The TUI guides you through the initial setup, automatically manages your OpenRAG `.env` and `docker-compose` files, and provides convenient access to [service management](/manage-services) controls.

In contrast, when you [deploy OpenRAG with self-managed services](/docker), you must manually configure OpenRAG by preparing a `.env` file and using Docker or Podman commands to deploy and manage your OpenRAG services.

## Access the TUI {#access-the-tui}

If you installed OpenRAG with `uv`, access the TUI with `uv run openrag`.

If you installed OpenRAG with the automatic installer script or `uvx`, access the TUI with `uvx openrag`.

## Manage services with the TUI

Use the TUI's **Status** menu (<kbd>3</kbd>) and **Diagnostics** menu (<kbd>4</kbd>) to access controls and information for your OpenRAG services.
For more information, see [Manage OpenRAG services](/manage-services).

## Exit the OpenRAG TUI

To exit the OpenRAG TUI, go to the TUI main menu, and then press <kbd>q</kbd>.

Your OpenRAG containers continue to run until they are stopped.

To restart the TUI, see [Access the TUI](#access-the-tui).
68 docs/docs/get-started/uninstall.mdx Normal file
@@ -0,0 +1,68 @@
---
title: Remove OpenRAG
slug: /uninstall
---

import PartialDockerComposeDownAndPrune from '@site/docs/_partial-docker-compose-down-and-prune.mdx';
import PartialDockerStopAll from '@site/docs/_partial-docker-stop-all.mdx';
import PartialDockerRemoveAndCleanupSteps from '@site/docs/_partial-docker-remove-and-cleanup-steps.mdx';

:::tip
If you want to reset your OpenRAG containers without removing OpenRAG entirely, see [Reset OpenRAG containers](/manage-services) and [Reinstall OpenRAG](/reinstall).
:::

## Uninstall TUI-managed deployments

If you used the [automated installer script](/install) or [`uvx`](/install-uvx) to install OpenRAG, clear your `uv` cache (`uv cache clean`) to remove the TUI environment, and then delete the directory containing your OpenRAG configuration files and data (where you would invoke OpenRAG), as shown in the sketch below.

If you used [`uv`](/install-uv) to install OpenRAG, run `uv remove openrag` in your Python project.
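
For an installer-script or `uvx` installation, the full removal sequence might look like the following sketch. The directory name is an assumption based on the quickstart; use the directory where you installed OpenRAG:

```bash
# Remove the cached TUI environment
uv cache clean

# Delete the OpenRAG configuration and data directory (name is an assumption)
rm -rf openrag-workspace
```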

## Uninstall self-managed deployments

For self-managed services, destroy the containers, prune any additional container objects, delete any remaining OpenRAG files, and then shut down the Docling service.

### Uninstall with `docker compose` or `podman compose`

Use these steps to uninstall a self-managed OpenRAG deployment with streamlined `docker compose` or `podman compose` commands:

1. Destroy the containers, volumes, and local images, and then remove (prune) any additional container objects:

   <PartialDockerComposeDownAndPrune />

2. Remove data that wasn't deleted by the previous commands:

   * OpenRAG's `.env` file
   * The contents of OpenRAG's `config` directory
   * The contents of the `./openrag-documents` directory
   * The contents of the `./opensearch-data` directory
   * The `conversations.json` file

3. Stop `docling-serve`:

   ```bash
   uv run python scripts/docling_ctl.py stop
   ```

### Uninstall with discrete `docker` or `podman` commands

Use these commands to uninstall a self-managed OpenRAG deployment with discrete `docker` or `podman` commands:

1. Stop all running containers:

   <PartialDockerStopAll />

<PartialDockerRemoveAndCleanupSteps />

7. Remove data that wasn't deleted by the previous commands:

   * OpenRAG's `.env` file
   * The contents of OpenRAG's `config` directory
   * The contents of the `./openrag-documents` directory
   * The contents of the `./opensearch-data` directory
   * The `conversations.json` file

8. Stop `docling-serve`:

   ```bash
   uv run python scripts/docling_ctl.py stop
   ```
149 docs/docs/get-started/upgrade.mdx Normal file
@@ -0,0 +1,149 @@
---
title: Upgrade OpenRAG
slug: /upgrade
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

Use these steps to upgrade your OpenRAG deployment to the latest version or a specific version.

## Export customized flows before upgrading

If you modified the built-in flows or created custom flows in your OpenRAG Langflow instance, [export your flows](https://docs.langflow.org/concepts-flows-import) before upgrading.
This ensures that you won't lose your flows after upgrading, and you can reference the exported flows if there are any breaking changes in the new version.

## Upgrade TUI-managed installations

To upgrade OpenRAG, you need to upgrade the OpenRAG Python package, and then upgrade the OpenRAG containers.

Upgrading the Python package also upgrades Docling by bumping the dependency in `pyproject.toml`.

This is a two-part process because upgrading the OpenRAG Python package updates the Terminal User Interface (TUI) and Python code, but the container versions are controlled by environment variables in your [OpenRAG `.env` file](/reference/configuration).

1. To check for updates, open the TUI's **Status** menu (<kbd>3</kbd>), and then click **Upgrade**.

2. If there is an update, stop all OpenRAG services.
   In the **Status** menu, click **Stop Services**.

3. Upgrade the OpenRAG Python package to the latest version from [PyPI](https://pypi.org/project/openrag/).
   The commands to upgrade the package depend on how you installed OpenRAG.

<Tabs>
<TabItem value="installer" label="Script or uvx" default>

Use these steps to upgrade the Python package if you installed OpenRAG using the [installer script](/install) or [`uvx`](/install-uvx):

1. Navigate to your OpenRAG workspace directory:

   ```bash
   cd openrag-workspace
   ```

2. Upgrade the OpenRAG package:

   ```bash
   uvx --from openrag openrag
   ```

   You can invoke a specific version using any of the [`uvx` version specifiers](https://docs.astral.sh/uv/guides/tools/#requesting-specific-versions), such as `--from`:

   ```bash
   uvx --from openrag==0.1.30 openrag
   ```

</TabItem>
<TabItem value="uv-add" label="uv add">
|
||||
|
||||
Use these steps to upgrade the Python package if you installed OpenRAG with [`uv add`](/install-uv):
|
||||
|
||||
1. Navigate to your project directory:
|
||||
|
||||
```bash
|
||||
cd YOUR_PROJECT_NAME
|
||||
```
|
||||
|
||||
2. Update OpenRAG to the latest version:
|
||||
|
||||
```bash
|
||||
uv add --upgrade openrag
|
||||
```
|
||||
|
||||
To upgrade to a specific version:
|
||||
|
||||
```bash
|
||||
uv add --upgrade openrag==0.1.33
|
||||
```
|
||||
|
||||
3. Start the OpenRAG TUI:
|
||||
|
||||
```bash
|
||||
uv run openrag
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="uv-pip" label="uv pip install">
|
||||
|
||||
Use these steps to upgrade the Python package if you installed OpenRAG with [`uv pip install`](/install-uv):
|
||||
|
||||
1. Activate your virtual environment.
|
||||
|
||||
2. Upgrade OpenRAG:
|
||||
|
||||
```bash
|
||||
uv pip install --upgrade openrag
|
||||
```
|
||||
|
||||
To upgrade to a specific version:
|
||||
|
||||
```bash
|
||||
uv pip install --upgrade openrag==0.1.33
|
||||
```
|
||||
|
||||
3. Start the OpenRAG TUI:
|
||||
|
||||
```bash
|
||||
uv run openrag
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
4. In the OpenRAG TUI, click **Start All Services**, and then wait while the upgraded containers start.

   When you start services after upgrading the Python package, OpenRAG runs `docker compose pull` to get the appropriate container images matching the version specified in your OpenRAG `.env` file. Then, it recreates the containers with the new images using `docker compose up -d --force-recreate`.

   :::tip Pin container versions
   In the `.env` file, the `OPENRAG_VERSION` [environment variable](/reference/configuration#system-settings) is set to `latest` by default, which pulls the `latest` available container images.
   To pin a specific container image version, you can set `OPENRAG_VERSION` to the desired container image version, such as `OPENRAG_VERSION=0.1.33`.

   However, when you upgrade the Python package, OpenRAG automatically attempts to keep `OPENRAG_VERSION` synchronized with the Python package version.
   You might need to edit the `.env` file after upgrading the Python package to enforce a different container version.
   The TUI warns you if it detects a version mismatch.
   :::

   If you get a `langflow container already exists` error during upgrade, see [Langflow container already exists during upgrade](/support/troubleshoot#langflow-container-already-exists-during-upgrade).

5. Under [**Native Services**](/manage-services), click **Start** to start the Docling service.

6. When the upgrade process is complete, you can close the **Status** window and continue using OpenRAG.


## Upgrade self-managed containers

To fetch and apply the latest container images while preserving your OpenRAG data, run the commands for your container management tool.
By default, OpenRAG's `docker-compose` files pull the latest container images.

```bash title="Docker"
docker compose pull
docker compose up -d --force-recreate
```

```bash title="Podman"
podman compose pull
podman compose up -d --force-recreate
```

## See also

* [Manage OpenRAG services](/manage-services)
* [Troubleshoot OpenRAG](/support/troubleshoot)

@@ -3,13 +3,11 @@ title: Environment variables
slug: /reference/configuration
---

import PartialDockerComposeUp from '@site/docs/_partial-docker-compose-up.mdx';

OpenRAG recognizes environment variables from the following sources:

* [Environment variables](#configure-environment-variables): Values set in the `.env` file in the OpenRAG installation directory.
* [Langflow runtime overrides](#langflow-runtime-overrides): Langflow components can set environment variables at runtime.
* [Default or fallback values](#default-values-and-fallbacks): These values are default or fallback values if OpenRAG doesn't find a value.

@@ -25,67 +23,64 @@ Environment variables always take precedence over other variables.

### Set environment variables {#set-environment-variables}

Environment variables are either mutable or immutable.

If you edit mutable environment variables, you can apply the changes by stopping and restarting the OpenRAG services after editing the `.env` file:

1. [Stop the OpenRAG services](/manage-services).

2. Edit your `.env` file, as shown in the sketch after these steps.

3. [Restart the OpenRAG services](/manage-services).
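
For example, the logging settings listed under [system settings](#system-settings) are mutable, so an edit like the following takes effect after a restart:

```bash
# Mutable logging values in the .env file
LOG_LEVEL=DEBUG
LOG_FORMAT=json
```
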
If you edit immutable environment variables, you must [redeploy OpenRAG](/reinstall) with your modified `.env` file.
For example, with self-managed services, do the following:

1. Stop the deployment:

   ```bash title="Docker"
   docker compose down
   ```

   ```bash title="Podman"
   podman compose down
   ```

2. Edit your `.env` file.

3. Redeploy OpenRAG:

   <PartialDockerComposeUp />

4. Restart the Docling service.

5. Launch the OpenRAG app, and then repeat the [application onboarding process](/install#application-onboarding). The values in your `.env` file are automatically populated.

## Supported environment variables

All OpenRAG configuration can be controlled through environment variables.

### Model provider settings {#model-provider-settings}

Configure which models and providers OpenRAG uses to generate text and embeddings.
You only need to provide credentials for the providers you are using in OpenRAG.

These variables are initially set during the [application onboarding process](/install#application-onboarding).
Some of these variables are immutable and can only be changed by redeploying OpenRAG, as explained in [Set environment variables](#set-environment-variables).

| Variable | Default | Description |
|----------|---------|-------------|
| `EMBEDDING_MODEL` | `text-embedding-3-small` | Embedding model for generating vector embeddings for documents in the knowledge base and similarity search queries. Can be changed after the application onboarding process. Accepts one or more models. |
| `LLM_MODEL` | `gpt-4o-mini` | Language model for language processing and text generation in the **Chat** feature. |
| `MODEL_PROVIDER` | `openai` | Model provider, as one of `openai`, `watsonx`, `ollama`, or `anthropic`. |
| `ANTHROPIC_API_KEY` | Not set | API key for the Anthropic language model provider. |
| `OPENAI_API_KEY` | Not set | API key for the OpenAI model provider, which is also the default model provider. |
| `OLLAMA_ENDPOINT` | Not set | Custom provider endpoint for the Ollama model provider. |
| `WATSONX_API_KEY` | Not set | API key for the IBM watsonx.ai model provider. |
| `WATSONX_ENDPOINT` | Not set | Custom provider endpoint for the IBM watsonx.ai model provider. |
| `WATSONX_PROJECT_ID` | Not set | Project ID for the IBM watsonx.ai model provider. |
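
For example, a `.env` sketch that switches OpenRAG to IBM watsonx.ai would combine the variables above. All values shown are placeholders:

```bash
# Placeholder values for a watsonx.ai configuration
MODEL_PROVIDER=watsonx
WATSONX_API_KEY=your-watsonx-api-key
WATSONX_ENDPOINT=https://your-watsonx-endpoint.example.com
WATSONX_PROJECT_ID=your-project-id
```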

### Document processing settings {#document-processing-settings}

Control how OpenRAG [processes and ingests documents](/ingestion) into your knowledge base.

@@ -99,36 +94,42 @@ Control how OpenRAG [processes and ingests documents](/ingestion) into your know

| `OPENRAG_DOCUMENTS_PATHS` | `./openrag-documents` | Document paths for ingestion. |
| `PICTURE_DESCRIPTIONS_ENABLED` | `false` | Enable picture descriptions. |

### Langflow settings {#langflow-settings}

Configure the OpenRAG Langflow server's authentication, contact point, and built-in flow definitions.

:::info
The `LANGFLOW_SUPERUSER_PASSWORD` is set in your `.env` file, and this value determines the default values for several other Langflow authentication variables.

If the `LANGFLOW_SUPERUSER_PASSWORD` variable isn't set, then the Langflow server starts _without_ authentication enabled.

For better security, it is recommended to set `LANGFLOW_SUPERUSER_PASSWORD` so the [Langflow server starts with authentication enabled](https://docs.langflow.org/api-keys-and-authentication#start-a-langflow-server-with-authentication-enabled).
:::

| Variable | Default | Description |
|----------|---------|-------------|
| `LANGFLOW_AUTO_LOGIN` | Determined by `LANGFLOW_SUPERUSER_PASSWORD` | Whether to enable [auto-login mode](https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login) for the Langflow visual editor and CLI. If `LANGFLOW_SUPERUSER_PASSWORD` isn't set, then `LANGFLOW_AUTO_LOGIN` is `True` and auto-login mode is enabled. If `LANGFLOW_SUPERUSER_PASSWORD` is set, then `LANGFLOW_AUTO_LOGIN` is `False` and auto-login mode is disabled. Langflow API calls always require authentication with a Langflow API key regardless of the auto-login setting. |
| `LANGFLOW_ENABLE_SUPERUSER_CLI` | Determined by `LANGFLOW_SUPERUSER_PASSWORD` | Whether to enable the [Langflow CLI `langflow superuser` command](https://docs.langflow.org/api-keys-and-authentication#langflow-enable-superuser-cli). If `LANGFLOW_SUPERUSER_PASSWORD` isn't set, then `LANGFLOW_ENABLE_SUPERUSER_CLI` is `True` and superuser accounts can be created with the Langflow CLI. If `LANGFLOW_SUPERUSER_PASSWORD` is set, then `LANGFLOW_ENABLE_SUPERUSER_CLI` is `False` and the `langflow superuser` command is disabled. |
| `LANGFLOW_NEW_USER_IS_ACTIVE` | Determined by `LANGFLOW_SUPERUSER_PASSWORD` | Whether new [Langflow user accounts are active by default](https://docs.langflow.org/api-keys-and-authentication#langflow-new-user-is-active). If `LANGFLOW_SUPERUSER_PASSWORD` isn't set, then `LANGFLOW_NEW_USER_IS_ACTIVE` is `True` and new user accounts are active by default. If `LANGFLOW_SUPERUSER_PASSWORD` is set, then `LANGFLOW_NEW_USER_IS_ACTIVE` is `False` and new user accounts are inactive by default. |
| `LANGFLOW_PUBLIC_URL` | `http://localhost:7860` | Public URL for the Langflow instance. Forms the base URL for Langflow API calls and other interfaces with your OpenRAG Langflow instance. |
| `LANGFLOW_KEY` | Automatically generated | A Langflow API key to run flows with Langflow API calls. Because Langflow API keys are server-specific, allow OpenRAG to generate this key initially. You can create additional Langflow API keys after deploying OpenRAG. |
| `LANGFLOW_SECRET_KEY` | Automatically generated | Secret encryption key for Langflow internal operations. It is recommended to [generate your own Langflow secret key](https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key) for this variable. If this variable isn't set, then Langflow generates a secret key automatically. |
| `LANGFLOW_SUPERUSER` | `admin` | Username for the Langflow administrator user. |
| `LANGFLOW_SUPERUSER_PASSWORD` | Not set | Langflow administrator password. If this variable isn't set, then the Langflow server starts _without_ authentication enabled. It is recommended to set `LANGFLOW_SUPERUSER_PASSWORD` so the [Langflow server starts with authentication enabled](https://docs.langflow.org/api-keys-and-authentication#start-a-langflow-server-with-authentication-enabled). |
| `LANGFLOW_URL` | `http://localhost:7860` | URL for the Langflow instance. |
| `LANGFLOW_CHAT_FLOW_ID`, `LANGFLOW_INGEST_FLOW_ID`, `NUDGES_FLOW_ID` | Built-in flow IDs | These variables are set automatically to the IDs of the chat, ingestion, and nudges [flows](/agents). The default values are found in [`.env.example`](https://github.com/langflow-ai/openrag/blob/main/.env.example). Only change these values if you want to replace a built-in flow with your own custom flow. The flow JSON must be present in your version of the OpenRAG codebase. For example, if you [deploy self-managed services](/docker), you can add the flow JSON to your local clone of the OpenRAG repository before deploying OpenRAG. |
| `SYSTEM_PROMPT` | `You are a helpful AI assistant with access to a knowledge base. Answer questions based on the provided context.` | System prompt instructions for the agent driving the **Chat** flow. |
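
As one way to generate your own `LANGFLOW_SECRET_KEY`, offered as a sketch rather than the officially documented command, you can use Python's `secrets` module and paste the output into your `.env` file:

```bash
# Print a random URL-safe value suitable as a secret key
python3 -c "from secrets import token_urlsafe; print(token_urlsafe(32))"
```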
|
||||
|
||||
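For example, a minimal `.env` fragment that starts Langflow with authentication enabled might look like the following. The values are placeholders, not defaults; generate your own secret key as described above.

```bash
# Placeholder values; set your own credentials and secret key
LANGFLOW_SUPERUSER=admin
LANGFLOW_SUPERUSER_PASSWORD=ChangeMe-Str0ng!
LANGFLOW_SECRET_KEY=replace-with-your-generated-secret
```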
### OAuth provider settings

Configure OAuth providers and external service integrations.
Configure [OAuth providers](/ingestion#oauth-ingestion) and external service integrations.

| Variable | Default | Description |
|----------|---------|-------------|
| `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY` | - | AWS integrations. |
| `GOOGLE_OAUTH_CLIENT_ID` / `GOOGLE_OAUTH_CLIENT_SECRET` | - | Google OAuth authentication. |
| `MICROSOFT_GRAPH_OAUTH_CLIENT_ID` / `MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET` | - | Microsoft OAuth. |
| `WEBHOOK_BASE_URL` | - | Base URL for webhook endpoints. |
| `AWS_ACCESS_KEY_ID`<br/>`AWS_SECRET_ACCESS_KEY` | Not set | Enable access to AWS S3 with an [AWS OAuth app](https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html) integration. |
| `GOOGLE_OAUTH_CLIENT_ID`<br/>`GOOGLE_OAUTH_CLIENT_SECRET` | Not set | Enable the [Google OAuth client](https://developers.google.com/identity/protocols/oauth2) integration. You can generate these values in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials). |
| `MICROSOFT_GRAPH_OAUTH_CLIENT_ID`<br/>`MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET` | Not set | Enable the [Microsoft Graph OAuth client](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth) integration by providing [Azure application registration credentials for SharePoint and OneDrive](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online). |
| `WEBHOOK_BASE_URL` | Not set | Base URL for OAuth connector webhook endpoints. If this variable isn't set, a default base URL is used. |
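For example, to enable the Google OAuth integration, you might add the following to your `.env`. The values shown are placeholders; obtain real credentials from the Google Cloud Console linked above.

```bash
# Placeholder credentials for the Google OAuth integration
GOOGLE_OAUTH_CLIENT_ID=1234567890-example.apps.googleusercontent.com
GOOGLE_OAUTH_CLIENT_SECRET=replace-with-your-client-secret
# Optional: publicly routable base URL for connector webhooks
WEBHOOK_BASE_URL=https://your-tunnel.example.com
```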
### OpenSearch settings


@ -136,10 +137,10 @@ Configure OpenSearch database authentication.

| Variable | Default | Description |
|----------|---------|-------------|
| `OPENSEARCH_HOST` | `localhost` | OpenSearch host. |
| `OPENSEARCH_PASSWORD` | - | Password for OpenSearch admin user. Required. |
| `OPENSEARCH_PORT` | `9200` | OpenSearch port. |
| `OPENSEARCH_USERNAME` | `admin` | OpenSearch username. |
| `OPENSEARCH_HOST` | `localhost` | OpenSearch instance host. |
| `OPENSEARCH_PORT` | `9200` | OpenSearch instance port. |
| `OPENSEARCH_USERNAME` | `admin` | OpenSearch administrator username. |
| `OPENSEARCH_PASSWORD` | Must be set at startup | Required. OpenSearch administrator password. Must adhere to the [OpenSearch password complexity requirements](https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password). You must set this directly in the `.env` file or in the TUI's [**Basic/Advanced Setup**](/install#setup). |
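To confirm the credentials work against a running instance, you can query the cluster root endpoint. This is a quick sanity check; the `-k` flag assumes the default self-signed certificate.

```bash
# Expect a JSON cluster summary on success, or a 401 on bad credentials
curl -k -u "$OPENSEARCH_USERNAME:$OPENSEARCH_PASSWORD" "https://localhost:9200"
```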
### System settings


@ -150,10 +151,10 @@ Configure general system components, session management, and logging.

| `LANGFLOW_KEY_RETRIES` | `15` | Number of retries for Langflow key generation. |
| `LANGFLOW_KEY_RETRY_DELAY` | `2.0` | Delay between retries in seconds. |
| `LANGFLOW_VERSION` | `OPENRAG_VERSION` | Langflow Docker image version. By default, OpenRAG uses the `OPENRAG_VERSION` value for the Langflow Docker image version. |
| `LOG_FORMAT` | Disabled | Set to `json` to enable JSON-formatted log output. |
| `LOG_LEVEL` | `INFO` | Logging level (DEBUG, INFO, WARNING, ERROR). |
| `LOG_FORMAT` | Not set | Set to `json` to enable JSON-formatted log output. If this variable isn't set, then the default logging format is used. |
| `LOG_LEVEL` | `INFO` | Logging level. Can be one of `DEBUG`, `INFO`, `WARNING`, or `ERROR`. `DEBUG` provides the most detailed logs but can impact performance. |
| `MAX_WORKERS` | `1` | Maximum number of workers for document processing. |
| `OPENRAG_VERSION` | `latest` | The version of the OpenRAG Docker images to run. For more information, see [Upgrade OpenRAG](/install#upgrade). |
| `OPENRAG_VERSION` | `latest` | The version of the OpenRAG Docker images to run. For more information, see [Upgrade OpenRAG](/upgrade). |
| `SERVICE_NAME` | `openrag` | Service name for logging. |
| `SESSION_SECRET` | Automatically generated | Secret used for session management. |
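For example, to capture verbose JSON logs while debugging, you might set the following in your `.env`; unset `LOG_FORMAT` to return to the default format.

```bash
LOG_FORMAT=json
LOG_LEVEL=DEBUG
```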
@ -3,15 +3,13 @@ title: Troubleshoot OpenRAG
slug: /support/troubleshoot
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.

## OpenSearch fails to start

Check that `OPENSEARCH_PASSWORD` set in [Environment variables](/reference/configuration) meets requirements.
The password must contain at least 8 characters, including at least one uppercase letter, one lowercase letter, one digit, and one special character.
Check that the value of the `OPENSEARCH_PASSWORD` [environment variable](/reference/configuration) meets the [OpenSearch password complexity requirements](https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password).
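One way to produce a strong candidate password is with `openssl`. This is only a sketch; verify the result contains all four required character classes before using it.

```bash
# Generates ~24 random characters; append a special character if none is present
openssl rand -base64 18
```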

If you need to change the password, you must [reset the OpenRAG services](/manage-services).

## OpenRAG fails to start from the TUI with operation not supported


@ -33,7 +31,8 @@ Replace `VERSION` with your installed Python version, such as `3.13`.

## Langflow connection issues

Verify the `LANGFLOW_SUPERUSER` credentials set in [Environment variables](/reference/configuration) are correct.
Verify that the value of the `LANGFLOW_SUPERUSER` environment variable is correct.
For more information about this variable and how it controls Langflow access, see [Langflow settings](/reference/configuration#langflow-settings).
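As a quick check, you can attempt a login against the Langflow API with the configured credentials. This sketch assumes Langflow's `/api/v1/login` endpoint and a local instance on the default port.

```bash
# A token in the response means the credentials are valid
curl -s -X POST "http://localhost:7860/api/v1/login" \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "username=$LANGFLOW_SUPERUSER&password=$LANGFLOW_SUPERUSER_PASSWORD"
```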

## Container out of memory errors


@ -53,31 +52,52 @@ podman machine start

## Port conflicts

Ensure ports 3000, 7860, 8000, 9200, 5601 are available.
With the default [environment variable](/reference/configuration) values, OpenRAG requires the following ports to be available on the host machine (see the check after this list):

* 3000: OpenRAG frontend
* 5001: Docling local ingestion service
* 5601: OpenSearch Dashboards
* 7860: Langflow
* 8000: OpenRAG API
* 9200: OpenSearch service
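To see whether another process is already bound to any of these ports, you can run the following; it assumes `lsof` is available on your system.

```bash
# Prints the owning process for any port that is already bound
for port in 3000 5001 5601 7860 8000 9200; do
  lsof -nP -iTCP:"$port" -sTCP:LISTEN
done
```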

## OCR ingestion fails (easyocr not installed)

If Docling ingestion fails with an OCR-related error and mentions `easyocr` is missing, this is likely due to a stale `uv` cache.
Docling ingestion can fail with an OCR-related error that mentions `easyocr` is missing.
This is likely due to a stale `uv` cache when you [install OpenRAG with `uvx`](/install-uvx).

`easyocr` is already included as a dependency in OpenRAG's `pyproject.toml`. Project-managed installations using `uv sync` and `uv run` always sync dependencies directly from your `pyproject.toml`, so they should have `easyocr` installed.
When you invoke OpenRAG with `uvx openrag`, `uvx` creates a cached, ephemeral environment that doesn't modify your project.
The location and path of this cache depends on your operating system.
For example, on macOS, this is typically a user cache directory, such as `/Users/USER_NAME/.cache/uv`.

If you're running OpenRAG with `uvx openrag`, `uvx` creates a cached, ephemeral environment that doesn't modify your project. This cache can become stale.
This cache can become stale, producing errors like missing dependencies.

On macOS, this cache directory is typically a user cache directory such as `/Users/USER_NAME/.cache/uv`.

1. [Exit the TUI](/tui).

1. To clear the uv cache, run:
2. Clear the `uv` cache:

   ```bash
   uv cache clean
   ```

2. Start OpenRAG:
   To clear the OpenRAG cache only, run:

   ```bash
   uv cache clean openrag
   ```

3. Invoke OpenRAG to restart the TUI:

   ```bash
   uvx openrag
   ```

If you don't need OCR, you can disable OCR-based processing in your ingestion settings to avoid requiring `easyocr`.
4. Click **Open App**, and then retry document ingestion.

If you install OpenRAG with `uv`, dependencies are synced directly from your `pyproject.toml` file.
This should automatically install `easyocr` because `easyocr` is included as a dependency in OpenRAG's `pyproject.toml`.

If you don't need OCR, you can disable OCR-based processing in your [ingestion settings](/knowledge#knowledge-ingestion-settings) to avoid requiring `easyocr`.

## Upgrade fails due to Langflow container already exists {#langflow-container-already-exists-during-upgrade}


@ -87,50 +107,33 @@ To resolve this issue, do the following:

1. Remove only the Langflow container:

<Tabs groupId="Container software">
<TabItem value="Podman" label="Podman">

1. Stop the Langflow container:

```bash
```bash title="Docker"
docker stop langflow
```

```bash title="Podman"
podman stop langflow
```

2. Remove the Langflow container:

```bash
podman rm langflow --force
```

</TabItem>
<TabItem value="Docker" label="Docker" default>

1. Stop the Langflow container:

```bash
docker stop langflow
```

2. Remove the Langflow container:

```bash
```bash title="Docker"
docker rm langflow --force
```

</TabItem>
</Tabs>

```bash title="Podman"
podman rm langflow --force
```
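Before retrying, you can confirm the container is gone by listing containers by name; substitute `podman` for `docker` if that's your runtime.

```bash
# No output means the Langflow container was removed
docker ps -a --filter "name=langflow" --format "{{.Names}}  {{.Status}}"
```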

2. Retry the upgrade:
2. Retry the [upgrade](/upgrade).

* [Upgrade self-managed containers](/docker#upgrade-containers)
* [Upgrade TUI-managed containers](/install#upgrade-containers)

3. If reinstalling the Langflow container doesn't resolve the issue, then you must [reset all containers](/manage-services) or [reinstall OpenRAG](/reinstall).

3. If reinstalling the Langflow container doesn't resolve the issue, you must reset your OpenRAG deployment:
4. Retry the [upgrade](/upgrade).

* [Reset self-managed containers](/docker#reset-containers)
* [Reset TUI-managed containers](/install#reset-containers)

4. Retry the upgrade.
If no updates are available after reinstalling OpenRAG, then you reinstalled at the latest version, and your deployment is up to date.

## Document ingestion or similarity search issues

@ -124,6 +124,7 @@ const config = {
prism: {
  theme: prismThemes.github,
  darkTheme: prismThemes.dracula,
  additionalLanguages: ['bash', 'docker', 'yaml'],
},
mermaid: {
  theme: {light: 'neutral', dark: 'forest'},

@ -22,8 +22,39 @@ const sidebars = {
  label: "About OpenRAG"
},
"get-started/quickstart",
"get-started/install",
"get-started/docker",
{
  type: "category",
  label: "Installation",
  items: [
    "get-started/install-options",
    { type: "doc",
      id: "get-started/install",
      label: "Run the installer script",
    },
    { type: "doc",
      id: "get-started/install-uv",
      label: "Install OpenRAG with uv",
    },
    "get-started/install-uvx",
    { type: "doc",
      id: "get-started/install-windows",
      label: "Install OpenRAG on Windows",
    },
    { type: "doc",
      id: "get-started/docker",
      label: "Deploy self-managed services",
    },
    "get-started/upgrade",
    "get-started/reinstall",
    "get-started/uninstall",
  ],
},
"get-started/tui",
{
  type: "doc",
  id: "get-started/manage-services",
  label: "Manage services",
},
{
  type: "doc",
  id: "core-components/agents",

File diff suppressed because one or more lines are too long
36
frontend/app/api/queries/useGetAllFiltersQuery.ts
Normal file
36
frontend/app/api/queries/useGetAllFiltersQuery.ts
Normal file

@ -0,0 +1,36 @@
import {
  type UseQueryOptions,
  useQuery,
  useQueryClient,
} from "@tanstack/react-query";
import type { KnowledgeFilter } from "./useGetFiltersSearchQuery";

export const useGetAllFiltersQuery = (
  options?: Omit<UseQueryOptions<KnowledgeFilter[]>, "queryKey" | "queryFn">,
) => {
  const queryClient = useQueryClient();

  async function getAllFilters(): Promise<KnowledgeFilter[]> {
    const response = await fetch("/api/knowledge-filter/search", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ query: "", limit: 1000 }), // Fetch all filters
    });

    const json = await response.json();
    if (!response.ok || !json.success) {
      // ensure we always return a KnowledgeFilter[] to satisfy the return type
      return [];
    }
    return (json.filters || []) as KnowledgeFilter[];
  }

  return useQuery<KnowledgeFilter[]>(
    {
      queryKey: ["knowledge-filters", "all"],
      queryFn: getAllFilters,
      ...options,
    },
    queryClient,
  );
};

File diff suppressed because it is too large
Load diff

@ -1,165 +1,158 @@
"use client";
|
||||
|
||||
import { Plus } from "lucide-react";
|
||||
import { useState } from "react";
|
||||
import {
|
||||
type KnowledgeFilter,
|
||||
useGetFiltersSearchQuery,
|
||||
} from "@/app/api/queries/useGetFiltersSearchQuery";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useGetAllFiltersQuery } from "@/app/api/queries/useGetAllFiltersQuery";
|
||||
import type { KnowledgeFilter } from "@/app/api/queries/useGetFiltersSearchQuery";
|
||||
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import {
|
||||
type FilterColor,
|
||||
type IconKey,
|
||||
iconKeyToComponent,
|
||||
type FilterColor,
|
||||
type IconKey,
|
||||
iconKeyToComponent,
|
||||
} from "./filter-icon-popover";
|
||||
import { filterAccentClasses } from "./knowledge-filter-panel";
|
||||
|
||||
interface ParsedQueryData {
|
||||
query: string;
|
||||
filters: {
|
||||
data_sources: string[];
|
||||
document_types: string[];
|
||||
owners: string[];
|
||||
};
|
||||
limit: number;
|
||||
scoreThreshold: number;
|
||||
color: FilterColor;
|
||||
icon: IconKey;
|
||||
query: string;
|
||||
filters: {
|
||||
data_sources: string[];
|
||||
document_types: string[];
|
||||
owners: string[];
|
||||
};
|
||||
limit: number;
|
||||
scoreThreshold: number;
|
||||
color: FilterColor;
|
||||
icon: IconKey;
|
||||
}
|
||||
|
||||
interface KnowledgeFilterListProps {
|
||||
selectedFilter: KnowledgeFilter | null;
|
||||
onFilterSelect: (filter: KnowledgeFilter | null) => void;
|
||||
selectedFilter: KnowledgeFilter | null;
|
||||
onFilterSelect: (filter: KnowledgeFilter | null) => void;
|
||||
}
|
||||
|
||||
export function KnowledgeFilterList({
|
||||
selectedFilter,
|
||||
onFilterSelect,
|
||||
selectedFilter,
|
||||
onFilterSelect,
|
||||
}: KnowledgeFilterListProps) {
|
||||
const [searchQuery] = useState("");
|
||||
const { startCreateMode } = useKnowledgeFilter();
|
||||
const { startCreateMode } = useKnowledgeFilter();
|
||||
|
||||
const { data, isFetching: loading } = useGetFiltersSearchQuery(
|
||||
searchQuery,
|
||||
20,
|
||||
);
|
||||
const { data, isFetching: loading } = useGetAllFiltersQuery();
|
||||
|
||||
const filters = data || [];
|
||||
const filters = data || [];
|
||||
|
||||
const handleFilterSelect = (filter: KnowledgeFilter) => {
|
||||
if (filter.id === selectedFilter?.id) {
|
||||
onFilterSelect(null);
|
||||
return;
|
||||
}
|
||||
onFilterSelect(filter);
|
||||
};
|
||||
const handleFilterSelect = (filter: KnowledgeFilter) => {
|
||||
if (filter.id === selectedFilter?.id) {
|
||||
onFilterSelect(null);
|
||||
return;
|
||||
}
|
||||
onFilterSelect(filter);
|
||||
};
|
||||
|
||||
const handleCreateNew = () => {
|
||||
startCreateMode();
|
||||
};
|
||||
const handleCreateNew = () => {
|
||||
startCreateMode();
|
||||
};
|
||||
|
||||
const parseQueryData = (queryData: string): ParsedQueryData => {
|
||||
return JSON.parse(queryData) as ParsedQueryData;
|
||||
};
|
||||
const parseQueryData = (queryData: string): ParsedQueryData => {
|
||||
return JSON.parse(queryData) as ParsedQueryData;
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex-1 min-h-0 flex flex-col">
|
||||
<div className="px-3 flex-1 min-h-0 flex flex-col">
|
||||
<div className="flex-shrink-0">
|
||||
<div className="flex items-center justify-between mb-3 mr-2 ml-4">
|
||||
<h3 className="text-xs font-medium text-muted-foreground">
|
||||
Knowledge Filters
|
||||
</h3>
|
||||
<button
|
||||
type="button"
|
||||
className="p-1 hover:bg-accent rounded"
|
||||
onClick={handleCreateNew}
|
||||
title="Create New Filter"
|
||||
>
|
||||
<Plus className="h-4 w-4 text-muted-foreground" />
|
||||
</button>
|
||||
</div>
|
||||
<div className="overflow-y-auto scrollbar-hide space-y-1">
|
||||
{loading ? (
|
||||
<div className="text-[13px] text-muted-foreground p-2 ml-2">
|
||||
Loading...
|
||||
</div>
|
||||
) : filters.length === 0 ? (
|
||||
<div className="text-[13px] text-muted-foreground pb-2 pt-3 ml-4">
|
||||
{searchQuery ? "No filters found" : "No saved filters"}
|
||||
</div>
|
||||
) : (
|
||||
filters.map((filter) => (
|
||||
<div
|
||||
key={filter.id}
|
||||
onClick={() => handleFilterSelect(filter)}
|
||||
className={cn(
|
||||
"flex items-center gap-3 px-3 py-2 w-full rounded-lg hover:bg-accent hover:text-accent-foreground cursor-pointer group transition-colors",
|
||||
selectedFilter?.id === filter.id &&
|
||||
"active bg-accent text-accent-foreground",
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-col gap-1 flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2">
|
||||
{(() => {
|
||||
const parsed = parseQueryData(
|
||||
filter.query_data,
|
||||
) as ParsedQueryData;
|
||||
const Icon = iconKeyToComponent(parsed.icon);
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"flex items-center justify-center w-5 h-5 rounded transition-colors",
|
||||
filterAccentClasses[parsed.color],
|
||||
parsed.color === "zinc" &&
|
||||
"group-hover:bg-background group-[.active]:bg-background",
|
||||
)}
|
||||
>
|
||||
{Icon && <Icon className="h-3 w-3" />}
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
<div className="text-sm font-medium truncate group-hover:text-accent-foreground">
|
||||
{filter.name}
|
||||
</div>
|
||||
</div>
|
||||
{filter.description && (
|
||||
<div className="text-xs text-muted-foreground line-clamp-2">
|
||||
{filter.description}
|
||||
</div>
|
||||
)}
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{new Date(filter.created_at).toLocaleDateString(
|
||||
undefined,
|
||||
{
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
year: "numeric",
|
||||
},
|
||||
)}
|
||||
</div>
|
||||
<span className="text-xs bg-muted text-muted-foreground px-1 py-0.5 rounded-sm group-hover:bg-background group-[.active]:bg-background transition-colors">
|
||||
{(() => {
|
||||
const dataSources = parseQueryData(filter.query_data)
|
||||
.filters.data_sources;
|
||||
if (dataSources[0] === "*") return "All sources";
|
||||
const count = dataSources.length;
|
||||
return `${count} ${
|
||||
count === 1 ? "source" : "sources"
|
||||
}`;
|
||||
})()}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Create flow moved to panel create mode */}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
return (
|
||||
<div className="flex-1 min-h-0 flex flex-col">
|
||||
<div className="px-3 flex-1 min-h-0 flex flex-col">
|
||||
<div className="flex-shrink-0">
|
||||
<div className="flex items-center justify-between mb-3 mr-2 ml-4">
|
||||
<h3 className="text-xs font-medium text-muted-foreground">
|
||||
Knowledge Filters
|
||||
</h3>
|
||||
<button
|
||||
type="button"
|
||||
className="p-1 hover:bg-accent rounded"
|
||||
onClick={handleCreateNew}
|
||||
title="Create New Filter"
|
||||
>
|
||||
<Plus className="h-4 w-4 text-muted-foreground" />
|
||||
</button>
|
||||
</div>
|
||||
<div className="overflow-y-auto scrollbar-hide space-y-1">
|
||||
{loading ? (
|
||||
<div className="text-[13px] text-muted-foreground p-2 ml-2">
|
||||
Loading...
|
||||
</div>
|
||||
) : filters.length === 0 ? (
|
||||
<div className="text-[13px] text-muted-foreground pb-2 pt-3 ml-4">
|
||||
No saved filters
|
||||
</div>
|
||||
) : (
|
||||
filters.map((filter) => (
|
||||
<div
|
||||
key={filter.id}
|
||||
onClick={() => handleFilterSelect(filter)}
|
||||
className={cn(
|
||||
"flex items-center gap-3 px-3 py-2 w-full rounded-lg hover:bg-accent hover:text-accent-foreground cursor-pointer group transition-colors",
|
||||
selectedFilter?.id === filter.id &&
|
||||
"active bg-accent text-accent-foreground",
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-col gap-1 flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2">
|
||||
{(() => {
|
||||
const parsed = parseQueryData(
|
||||
filter.query_data,
|
||||
) as ParsedQueryData;
|
||||
const Icon = iconKeyToComponent(parsed.icon);
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"flex items-center justify-center w-5 h-5 rounded transition-colors",
|
||||
filterAccentClasses[parsed.color],
|
||||
parsed.color === "zinc" &&
|
||||
"group-hover:bg-background group-[.active]:bg-background",
|
||||
)}
|
||||
>
|
||||
{Icon && <Icon className="h-3 w-3" />}
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
<div className="text-sm font-medium truncate group-hover:text-accent-foreground">
|
||||
{filter.name}
|
||||
</div>
|
||||
</div>
|
||||
{filter.description && (
|
||||
<div className="text-xs text-muted-foreground line-clamp-2">
|
||||
{filter.description}
|
||||
</div>
|
||||
)}
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{new Date(filter.created_at).toLocaleDateString(
|
||||
undefined,
|
||||
{
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
year: "numeric",
|
||||
},
|
||||
)}
|
||||
</div>
|
||||
<span className="text-xs bg-muted text-muted-foreground px-1 py-0.5 rounded-sm group-hover:bg-background group-[.active]:bg-background transition-colors">
|
||||
{(() => {
|
||||
const dataSources = parseQueryData(filter.query_data)
|
||||
.filters.data_sources;
|
||||
if (dataSources[0] === "*") return "All sources";
|
||||
const count = dataSources.length;
|
||||
return `${count} ${
|
||||
count === 1 ? "source" : "sources"
|
||||
}`;
|
||||
})()}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Create flow moved to panel create mode */}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
10
frontend/package-lock.json
generated
10
frontend/package-lock.json
generated

@ -30,6 +30,7 @@
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"dotenv": "^17.2.3",
"fuse.js": "^7.1.0",
"lucide-react": "^0.525.0",
"motion": "^12.23.12",
"next": "15.5.7",

@ -3429,6 +3430,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/fuse.js": {
  "version": "7.1.0",
  "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-7.1.0.tgz",
  "integrity": "sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ==",
  "license": "Apache-2.0",
  "engines": {
    "node": ">=10"
  }
},
"node_modules/get-nonce": {
  "version": "1.0.1",
  "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz",

@ -34,6 +34,7 @@
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"dotenv": "^17.2.3",
"fuse.js": "^7.1.0",
"lucide-react": "^0.525.0",
"motion": "^12.23.12",
"next": "15.5.7",

Binary file not shown.

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "openrag"
version = "0.1.52"
version = "0.1.53"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"

@ -34,7 +34,7 @@ dependencies = [
"structlog>=25.4.0",
"docling-serve==1.5.0",
"docling-core==2.48.1",
"easyocr>=1.7.1"
"easyocr>=1.7.1; sys_platform != 'darwin'"
]

[dependency-groups]

@ -14,10 +14,17 @@ from src.tui.managers.container_manager import ContainerManager
|
|||
async def main():
|
||||
"""Clear OpenSearch data directory."""
|
||||
cm = ContainerManager()
|
||||
|
||||
opensearch_data_path = Path("opensearch-data")
|
||||
|
||||
# Get opensearch data path from env config (same as container_manager uses)
|
||||
from src.tui.managers.env_manager import EnvManager
|
||||
env_manager = EnvManager()
|
||||
env_manager.load_existing_env()
|
||||
opensearch_data_path = Path(
|
||||
env_manager.config.opensearch_data_path.replace("$HOME", str(Path.home()))
|
||||
).expanduser()
|
||||
|
||||
if not opensearch_data_path.exists():
|
||||
print("opensearch-data directory does not exist")
|
||||
print(f"opensearch-data directory does not exist at {opensearch_data_path}")
|
||||
return 0
|
||||
|
||||
print("Clearing OpenSearch data directory...")
|
||||
|
|
|
|||
|
|
@ -36,8 +36,10 @@ class ConnectionConfig:
|
|||
class ConnectionManager:
|
||||
"""Manages multiple connector connections with persistence"""
|
||||
|
||||
def __init__(self, connections_file: str = "connections.json"):
|
||||
def __init__(self, connections_file: str = "data/connections.json"):
|
||||
self.connections_file = Path(connections_file)
|
||||
# Ensure data directory exists
|
||||
self.connections_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
self.connections: Dict[str, ConnectionConfig] = {}
|
||||
self.active_connectors: Dict[str, BaseConnector] = {}
|
||||
|
||||
|
|
|
|||
|
|
@ -96,11 +96,8 @@ class GoogleDriveConnector(BaseConnector):
|
|||
client_id = config.get("client_id") or env_client_id
|
||||
client_secret = config.get("client_secret") or env_client_secret
|
||||
|
||||
# Token file default (so callback & workers don’t need to pass it)
|
||||
project_root = Path(__file__).resolve().parent.parent.parent.parent
|
||||
token_file = config.get("token_file") or str(
|
||||
project_root / "google_drive_token.json"
|
||||
)
|
||||
# Token file default - use data/ directory for persistence
|
||||
token_file = config.get("token_file") or "data/google_drive_token.json"
|
||||
Path(token_file).parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if not isinstance(client_id, str) or not client_id.strip():
|
||||
|
|
|
|||
|
|
@ -58,9 +58,8 @@ class OneDriveConnector(BaseConnector):
|
|||
except Exception as e:
|
||||
logger.debug(f"Failed to get client_secret: {e}")
|
||||
|
||||
# Token file setup
|
||||
project_root = Path(__file__).resolve().parent.parent.parent.parent
|
||||
token_file = config.get("token_file") or str(project_root / "onedrive_token.json")
|
||||
# Token file setup - use data/ directory for persistence
|
||||
token_file = config.get("token_file") or "data/onedrive_token.json"
|
||||
Path(token_file).parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Only initialize OAuth if we have credentials
|
||||
|
|
@ -72,7 +71,7 @@ class OneDriveConnector(BaseConnector):
|
|||
oauth_token_file = config["token_file"]
|
||||
else:
|
||||
# Use a per-connection cache file to avoid collisions with other connectors
|
||||
oauth_token_file = f"onedrive_token_{connection_id}.json"
|
||||
oauth_token_file = f"data/onedrive_token_{connection_id}.json"
|
||||
|
||||
# MSA & org both work via /common for OneDrive personal testing
|
||||
authority = "https://login.microsoftonline.com/common"
|
||||
|
|
|
|||
|
|
@ -66,20 +66,19 @@ class SharePointConnector(BaseConnector):
|
|||
logger.debug(f"Failed to get client_secret: {e}")
|
||||
pass # Credentials not available, that's OK for listing
|
||||
|
||||
# Token file setup
|
||||
project_root = Path(__file__).resolve().parent.parent.parent.parent
|
||||
token_file = config.get("token_file") or str(project_root / "sharepoint_token.json")
|
||||
# Token file setup - use data/ directory for persistence
|
||||
token_file = config.get("token_file") or "data/sharepoint_token.json"
|
||||
Path(token_file).parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# Only initialize OAuth if we have credentials
|
||||
if self.client_id and self.client_secret:
|
||||
connection_id = config.get("connection_id", "default")
|
||||
|
||||
|
||||
# Use token_file from config if provided, otherwise generate one
|
||||
if config.get("token_file"):
|
||||
oauth_token_file = config["token_file"]
|
||||
else:
|
||||
oauth_token_file = f"sharepoint_token_{connection_id}.json"
|
||||
oauth_token_file = f"data/sharepoint_token_{connection_id}.json"
|
||||
|
||||
authority = f"https://login.microsoftonline.com/{self.tenant_id}" if self.tenant_id != "common" else "https://login.microsoftonline.com/common"
|
||||
|
||||
|
|
|
|||
|
|
@ -63,8 +63,8 @@ class AuthService:
|
|||
|
||||
# We'll validate client credentials when creating the connector
|
||||
|
||||
# Create connection configuration
|
||||
token_file = f"{connector_type}_{purpose}_{uuid.uuid4().hex[:8]}.json"
|
||||
# Create connection configuration - use data/ directory for persistence
|
||||
token_file = f"data/{connector_type}_{purpose}_{uuid.uuid4().hex[:8]}.json"
|
||||
config = {
|
||||
"token_file": token_file,
|
||||
"connector_type": connector_type,
|
||||
|
|
|
|||
|
|
@ -15,9 +15,11 @@ logger = get_logger(__name__)
|
|||
|
||||
class ConversationPersistenceService:
|
||||
"""Simple service to persist conversations to disk"""
|
||||
|
||||
def __init__(self, storage_file: str = "conversations.json"):
|
||||
|
||||
def __init__(self, storage_file: str = "data/conversations.json"):
|
||||
self.storage_file = storage_file
|
||||
# Ensure data directory exists
|
||||
os.makedirs(os.path.dirname(self.storage_file), exist_ok=True)
|
||||
self.lock = threading.Lock()
|
||||
self._conversations = self._load_conversations()
|
||||
|
||||
|
|
|
|||
|
|
@ -351,6 +351,7 @@ class ModelsService:
|
|||
if text_response.status_code == 200:
|
||||
text_data = text_response.json()
|
||||
text_models = text_data.get("resources", [])
|
||||
logger.info(f"Retrieved {len(text_models)} text chat models from Watson API")
|
||||
|
||||
for i, model in enumerate(text_models):
|
||||
model_id = model.get("model_id", "")
|
||||
|
|
@ -363,6 +364,11 @@ class ModelsService:
|
|||
"default": i == 0, # First model is default
|
||||
}
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f"Failed to retrieve text chat models. Status: {text_response.status_code}, "
|
||||
f"Response: {text_response.text[:200]}"
|
||||
)
|
||||
|
||||
# Fetch embedding models
|
||||
embed_params = {
|
||||
|
|
@ -379,6 +385,7 @@ class ModelsService:
|
|||
if embed_response.status_code == 200:
|
||||
embed_data = embed_response.json()
|
||||
embed_models = embed_data.get("resources", [])
|
||||
logger.info(f"Retrieved {len(embed_models)} embedding models from Watson API")
|
||||
|
||||
for i, model in enumerate(embed_models):
|
||||
model_id = model.get("model_id", "")
|
||||
|
|
@ -391,6 +398,11 @@ class ModelsService:
|
|||
"default": i == 0, # First model is default
|
||||
}
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
f"Failed to retrieve embedding models. Status: {embed_response.status_code}, "
|
||||
f"Response: {embed_response.text[:200]}"
|
||||
)
|
||||
|
||||
# Lightweight validation: API key is already validated by successfully getting bearer token
|
||||
# No need to make a generation request that consumes credits
|
||||
|
|
@ -400,7 +412,15 @@ class ModelsService:
|
|||
logger.warning("No bearer token available - API key validation may have failed")
|
||||
|
||||
if not language_models and not embedding_models:
|
||||
raise Exception("No IBM models retrieved from API")
|
||||
# Provide more specific error message about missing models
|
||||
error_msg = (
|
||||
"API key is valid, but no models are available. "
|
||||
"This usually means your Watson Machine Learning (WML) project is not properly configured. "
|
||||
"Please ensure: (1) Your watsonx.ai project is associated with a WML service instance, "
|
||||
"and (2) The project has access to foundation models. "
|
||||
"Visit your watsonx.ai project settings to configure the WML service association."
|
||||
)
|
||||
raise Exception(error_msg)
|
||||
|
||||
return {
|
||||
"language_models": language_models,
|
||||
|
|
|
|||
|
|
@ -13,9 +13,11 @@ logger = get_logger(__name__)
|
|||
|
||||
class SessionOwnershipService:
|
||||
"""Simple service to track which user owns which session"""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.ownership_file = "session_ownership.json"
|
||||
self.ownership_file = "data/session_ownership.json"
|
||||
# Ensure data directory exists
|
||||
os.makedirs(os.path.dirname(self.ownership_file), exist_ok=True)
|
||||
self.ownership_data = self._load_ownership_data()
|
||||
|
||||
def _load_ownership_data(self) -> Dict[str, Dict[str, any]]:
|
||||
|
|
|
|||
219
src/tui/main.py
219
src/tui/main.py
|
|
@ -454,8 +454,30 @@ def _copy_assets(resource_tree, destination: Path, allowed_suffixes: Optional[It
|
|||
|
||||
|
||||
def copy_sample_documents(*, force: bool = False) -> None:
|
||||
"""Copy sample documents from package to current directory if they don't exist."""
|
||||
documents_dir = Path("openrag-documents")
|
||||
"""Copy sample documents from package to host directory.
|
||||
|
||||
Uses the first path from OPENRAG_DOCUMENTS_PATHS env var.
|
||||
Defaults to ~/.openrag/documents if not configured.
|
||||
"""
|
||||
from .managers.env_manager import EnvManager
|
||||
from pathlib import Path
|
||||
|
||||
# Get the configured documents path from env
|
||||
env_manager = EnvManager()
|
||||
env_manager.load_existing_env()
|
||||
|
||||
# Parse the first path from the documents paths config
|
||||
documents_path_str = env_manager.config.openrag_documents_paths
|
||||
if documents_path_str:
|
||||
first_path = documents_path_str.split(',')[0].strip()
|
||||
# Expand $HOME and ~
|
||||
first_path = first_path.replace("$HOME", str(Path.home()))
|
||||
documents_dir = Path(first_path).expanduser()
|
||||
else:
|
||||
# Default fallback
|
||||
documents_dir = Path.home() / ".openrag" / "documents"
|
||||
|
||||
documents_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
try:
|
||||
assets_files = files("tui._assets.openrag-documents")
|
||||
|
|
@ -466,8 +488,15 @@ def copy_sample_documents(*, force: bool = False) -> None:
|
|||
|
||||
|
||||
def copy_sample_flows(*, force: bool = False) -> None:
|
||||
"""Copy sample flows from package to current directory if they don't exist."""
|
||||
flows_dir = Path("flows")
|
||||
"""Copy sample flows from package to host directory.
|
||||
|
||||
Flows are placed in ~/.openrag/flows/ which will be volume-mounted to containers.
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
# Flows always go to ~/.openrag/flows/ - this will be volume-mounted
|
||||
flows_dir = Path.home() / ".openrag" / "flows"
|
||||
flows_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
try:
|
||||
assets_files = files("tui._assets.flows")
|
||||
|
|
@ -478,7 +507,9 @@ def copy_sample_flows(*, force: bool = False) -> None:
|
|||
|
||||
|
||||
def copy_compose_files(*, force: bool = False) -> None:
|
||||
"""Copy docker-compose templates into the workspace if they are missing."""
|
||||
"""Copy docker-compose templates into the TUI workspace if they are missing."""
|
||||
from utils.paths import get_tui_compose_file
|
||||
|
||||
try:
|
||||
assets_root = files("tui._assets")
|
||||
except Exception as e:
|
||||
|
|
@ -486,7 +517,9 @@ def copy_compose_files(*, force: bool = False) -> None:
|
|||
return
|
||||
|
||||
for filename in ("docker-compose.yml", "docker-compose.gpu.yml"):
|
||||
destination = Path(filename)
|
||||
is_gpu = "gpu" in filename
|
||||
destination = get_tui_compose_file(gpu=is_gpu)
|
||||
|
||||
if destination.exists() and not force:
|
||||
continue
|
||||
|
||||
|
|
@ -505,11 +538,177 @@ def copy_compose_files(*, force: bool = False) -> None:
|
|||
logger.debug(f"Failed to read existing compose file {destination}: {read_error}")
|
||||
|
||||
destination.write_bytes(resource_bytes)
|
||||
logger.info(f"Copied docker-compose template: {filename}")
|
||||
logger.info(f"Copied docker-compose template to {destination}")
|
||||
except Exception as error:
|
||||
logger.debug(f"Could not copy compose file {filename}: {error}")
|
||||
|
||||
|
||||
def migrate_legacy_data_directories():
|
||||
"""Migrate data from CWD-based directories to ~/.openrag/.
|
||||
|
||||
This is a one-time migration for users upgrading from the old layout.
|
||||
Migrates: documents, flows, keys, config, opensearch-data
|
||||
|
||||
Prompts user for confirmation before migrating. If user declines,
|
||||
exits with a message to downgrade to v0.1.52 or earlier.
|
||||
"""
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
cwd = Path.cwd()
|
||||
target_base = Path.home() / ".openrag"
|
||||
marker = target_base / ".migrated"
|
||||
|
||||
# Check if migration already completed
|
||||
if marker.exists():
|
||||
return
|
||||
|
||||
# Define migration mappings: (source_path, target_path, description)
|
||||
migrations = [
|
||||
(cwd / "openrag-documents", target_base / "documents", "documents"),
|
||||
(cwd / "flows", target_base / "flows", "flows"),
|
||||
(cwd / "keys", target_base / "keys", "keys"),
|
||||
(cwd / "config", target_base / "config", "config"),
|
||||
(cwd / "opensearch-data", target_base / "data" / "opensearch-data", "OpenSearch data"),
|
||||
]
|
||||
|
||||
# Check which sources exist and need migration
|
||||
sources_to_migrate = [(s, t, d) for s, t, d in migrations if s.exists()]
|
||||
|
||||
if not sources_to_migrate:
|
||||
# No legacy data to migrate, just mark as done and update .env paths
|
||||
marker.parent.mkdir(parents=True, exist_ok=True)
|
||||
marker.touch()
|
||||
# Still need to update .env with centralized paths
|
||||
try:
|
||||
from managers.env_manager import EnvManager
|
||||
env_manager = EnvManager()
|
||||
env_manager.load_existing_env()
|
||||
# Explicitly set centralized paths (overrides any old CWD-relative paths)
|
||||
home = str(Path.home())
|
||||
env_manager.config.openrag_documents_paths = f"{home}/.openrag/documents"
|
||||
env_manager.config.openrag_documents_path = f"{home}/.openrag/documents"
|
||||
env_manager.config.openrag_keys_path = f"{home}/.openrag/keys"
|
||||
env_manager.config.openrag_flows_path = f"{home}/.openrag/flows"
|
||||
env_manager.config.openrag_config_path = f"{home}/.openrag/config"
|
||||
env_manager.config.openrag_data_path = f"{home}/.openrag/data"
|
||||
env_manager.config.opensearch_data_path = f"{home}/.openrag/data/opensearch-data"
|
||||
env_manager.save_env()
|
||||
logger.info("Updated .env file with centralized paths")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to update .env paths: {e}")
|
||||
return
|
||||
|
||||
# Prompt user for confirmation
|
||||
print("\n" + "=" * 60)
|
||||
print(" OpenRAG Data Migration Required")
|
||||
print("=" * 60)
|
||||
print(f"\nStarting with this version, OpenRAG stores data in:")
|
||||
print(f" {target_base}")
|
||||
print("\nThe following will be copied from your current directory:")
|
||||
for source, target, desc in sources_to_migrate:
|
||||
print(f" - {desc}: {source.name}/ -> {target}")
|
||||
print("\nThis is a one-time migration.")
|
||||
print("\nIf you don't want to migrate, exit and downgrade to v1.52 or earlier.")
|
||||
|
||||
try:
|
||||
response = input("\nProceed with migration? [y/N]: ").strip().lower()
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
response = ""
|
||||
|
||||
if response != "y":
|
||||
print("\nMigration cancelled. Exiting.")
|
||||
sys.exit(0)
|
||||
|
||||
print("\nMigrating...")
|
||||
|
||||
# Perform migration (always copy, never delete originals)
|
||||
for source, target, description in sources_to_migrate:
|
||||
try:
|
||||
target.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if target.exists():
|
||||
# Target exists - merge contents (copy only new items)
|
||||
logger.info(f"Merging {description} from {source} to {target}")
|
||||
if source.is_dir():
|
||||
for item in source.iterdir():
|
||||
src_item = source / item.name
|
||||
dst_item = target / item.name
|
||||
|
||||
if not dst_item.exists():
|
||||
if src_item.is_dir():
|
||||
shutil.copytree(src_item, dst_item)
|
||||
else:
|
||||
shutil.copy2(src_item, dst_item)
|
||||
logger.debug(f"Copied {src_item} to {dst_item}")
|
||||
else:
|
||||
# Target doesn't exist - copy entire directory
|
||||
logger.info(f"Copying {description} from {source} to {target}")
|
||||
if source.is_dir():
|
||||
shutil.copytree(source, target)
|
||||
else:
|
||||
shutil.copy2(source, target)
|
||||
|
||||
print(f" Migrated {description}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to migrate {description}: {e}")
|
||||
print(f" Warning: Failed to migrate {description}: {e}")
|
||||
|
||||
# Create marker to prevent future migration prompts
|
||||
marker.parent.mkdir(parents=True, exist_ok=True)
|
||||
marker.touch()
|
||||
|
||||
# Update .env file with centralized paths
|
||||
try:
|
||||
from managers.env_manager import EnvManager
|
||||
env_manager = EnvManager()
|
||||
env_manager.load_existing_env()
|
||||
# Explicitly set centralized paths (overrides any old CWD-relative paths)
|
||||
home = str(Path.home())
|
||||
env_manager.config.openrag_documents_paths = f"{home}/.openrag/documents"
|
||||
env_manager.config.openrag_documents_path = f"{home}/.openrag/documents"
|
||||
env_manager.config.openrag_keys_path = f"{home}/.openrag/keys"
|
||||
env_manager.config.openrag_flows_path = f"{home}/.openrag/flows"
|
||||
env_manager.config.openrag_config_path = f"{home}/.openrag/config"
|
||||
env_manager.config.openrag_data_path = f"{home}/.openrag/data"
|
||||
env_manager.config.opensearch_data_path = f"{home}/.openrag/data/opensearch-data"
|
||||
env_manager.save_env()
|
||||
print(" Updated .env with centralized paths")
|
||||
logger.info("Updated .env file with centralized paths")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to update .env paths: {e}")
|
||||
print(f" Warning: Failed to update .env paths: {e}")
|
||||
|
||||
print("\nMigration complete!\n")
|
||||
logger.info("Data migration completed successfully")
|
||||
|
||||
|
||||
def setup_host_directories():
|
||||
"""Initialize OpenRAG directory structure on the host.
|
||||
|
||||
Creates directories that will be volume-mounted into containers:
|
||||
- ~/.openrag/documents/ (for document ingestion)
|
||||
- ~/.openrag/flows/ (for Langflow flows)
|
||||
- ~/.openrag/keys/ (for JWT keys)
|
||||
- ~/.openrag/config/ (for configuration)
|
||||
- ~/.openrag/data/ (for backend data: conversations, OAuth tokens, etc.)
|
||||
- ~/.openrag/data/opensearch-data/ (for OpenSearch index)
|
||||
"""
|
||||
base_dir = Path.home() / ".openrag"
|
||||
directories = [
|
||||
base_dir / "documents",
|
||||
base_dir / "flows",
|
||||
base_dir / "keys",
|
||||
base_dir / "config",
|
||||
base_dir / "data",
|
||||
base_dir / "data" / "opensearch-data",
|
||||
]
|
||||
|
||||
for directory in directories:
|
||||
directory.mkdir(parents=True, exist_ok=True)
|
||||
logger.debug(f"Ensured directory exists: {directory}")
|
||||
|
||||
|
||||
def run_tui():
|
||||
"""Run the OpenRAG TUI application."""
|
||||
# Check for native Windows before launching TUI
|
||||
|
|
@ -526,6 +725,12 @@ def run_tui():
|
|||
|
||||
app = None
|
||||
try:
|
||||
# Migrate legacy data directories from CWD to ~/.openrag/
|
||||
migrate_legacy_data_directories()
|
||||
|
||||
# Initialize host directory structure
|
||||
setup_host_directories()
|
||||
|
||||
# Keep bundled assets aligned with the packaged versions
|
||||
copy_sample_documents(force=True)
|
||||
copy_sample_flows(force=True)
|
||||
|
|
|
|||
|
|
@ -87,11 +87,25 @@ class ContainerManager:
|
|||
}
|
||||
|
||||
def _find_compose_file(self, filename: str) -> Path:
|
||||
"""Find compose file in current directory or package resources."""
|
||||
# First check current working directory
|
||||
cwd_path = Path(filename)
|
||||
"""Find compose file in centralized TUI directory, current directory, or package resources."""
|
||||
from utils.paths import get_tui_compose_file
|
||||
|
||||
self._compose_search_log = f"Searching for {filename}:\n"
|
||||
self._compose_search_log += f" 1. Current directory: {cwd_path.absolute()}"
|
||||
|
||||
# First check centralized TUI directory (~/.openrag/tui/)
|
||||
is_gpu = "gpu" in filename
|
||||
tui_path = get_tui_compose_file(gpu=is_gpu)
|
||||
self._compose_search_log += f" 1. TUI directory: {tui_path.absolute()}"
|
||||
|
||||
if tui_path.exists():
|
||||
self._compose_search_log += " ✓ FOUND"
|
||||
return tui_path
|
||||
else:
|
||||
self._compose_search_log += " ✗ NOT FOUND"
|
||||
|
||||
# Then check current working directory (for backward compatibility)
|
||||
cwd_path = Path(filename)
|
||||
self._compose_search_log += f"\n 2. Current directory: {cwd_path.absolute()}"
|
||||
|
||||
if cwd_path.exists():
|
||||
self._compose_search_log += " ✓ FOUND"
|
||||
|
|
@ -99,28 +113,29 @@ class ContainerManager:
|
|||
else:
|
||||
self._compose_search_log += " ✗ NOT FOUND"
|
||||
|
||||
# Then check package resources
|
||||
self._compose_search_log += f"\n 2. Package resources: "
|
||||
# Finally check package resources
|
||||
self._compose_search_log += f"\n 3. Package resources: "
|
||||
try:
|
||||
pkg_files = files("tui._assets")
|
||||
self._compose_search_log += f"{pkg_files}"
|
||||
compose_resource = pkg_files / filename
|
||||
|
||||
if compose_resource.is_file():
|
||||
self._compose_search_log += f" ✓ FOUND, copying to current directory"
|
||||
# Copy to cwd for compose command to work
|
||||
self._compose_search_log += f" ✓ FOUND, copying to TUI directory"
|
||||
# Copy to TUI directory
|
||||
tui_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
content = compose_resource.read_text()
|
||||
cwd_path.write_text(content)
|
||||
return cwd_path
|
||||
tui_path.write_text(content)
|
||||
return tui_path
|
||||
else:
|
||||
self._compose_search_log += f" ✗ NOT FOUND"
|
||||
except Exception as e:
|
||||
self._compose_search_log += f" ✗ SKIPPED ({e})"
|
||||
# Don't log this as an error since it's expected when running from source
|
||||
|
||||
# Fall back to original path (will fail later if not found)
|
||||
self._compose_search_log += f"\n 3. Falling back to: {cwd_path.absolute()}"
|
||||
return Path(filename)
|
||||
# Fall back to TUI path (will fail later if not found)
|
||||
self._compose_search_log += f"\n 4. Falling back to: {tui_path.absolute()}"
|
||||
return tui_path
|
||||
|
||||
def _get_env_from_file(self) -> Dict[str, str]:
|
||||
"""Read environment variables from .env file, prioritizing file values over os.environ.
|
||||
|
|
@ -136,9 +151,17 @@ class ContainerManager:
|
|||
even if os.environ has stale values.
|
||||
"""
|
||||
from dotenv import load_dotenv
|
||||
from utils.paths import get_tui_env_file
|
||||
|
||||
env = dict(os.environ) # Start with current environment
|
||||
env_file = Path(".env")
|
||||
|
||||
# Check centralized TUI .env location first
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
env_file = tui_env_file
|
||||
else:
|
||||
# Fall back to CWD .env for backward compatibility
|
||||
env_file = Path(".env")
|
||||
|
||||
if env_file.exists():
|
||||
try:
|
||||
|
|
@ -147,6 +170,7 @@ class ContainerManager:
|
|||
load_dotenv(dotenv_path=env_file, override=True)
|
||||
# Update our dict with all environment variables (including those from .env)
|
||||
env.update(os.environ)
|
||||
logger.debug(f"Loaded environment from {env_file}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Error reading .env file for Docker Compose: {e}")
|
||||
|
||||
|
|
@ -269,7 +293,17 @@ class ContainerManager:
|
|||
use_gpu = not cpu_mode
|
||||
|
||||
# Build compose command with override pattern
|
||||
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
|
||||
cmd = self.runtime_info.compose_command.copy()
|
||||
|
||||
# Add --env-file to explicitly specify the .env location
|
||||
from utils.paths import get_tui_env_file
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
cmd.extend(["--env-file", str(tui_env_file)])
|
||||
elif Path(".env").exists():
|
||||
cmd.extend(["--env-file", ".env"])
|
||||
|
||||
cmd.extend(["-f", str(self.compose_file)])
|
||||
if use_gpu and self.gpu_compose_file.exists():
|
||||
cmd.extend(["-f", str(self.gpu_compose_file)])
|
||||
cmd.extend(args)
|
||||
|
|
@ -315,7 +349,17 @@ class ContainerManager:
|
|||
use_gpu = not cpu_mode
|
||||
|
||||
# Build compose command with override pattern
|
||||
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
|
||||
cmd = self.runtime_info.compose_command.copy()
|
||||
|
||||
# Add --env-file to explicitly specify the .env location
|
||||
from utils.paths import get_tui_env_file
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
cmd.extend(["--env-file", str(tui_env_file)])
|
||||
elif Path(".env").exists():
|
||||
cmd.extend(["--env-file", ".env"])
|
||||
|
||||
cmd.extend(["-f", str(self.compose_file)])
|
||||
if use_gpu and self.gpu_compose_file.exists():
|
||||
cmd.extend(["-f", str(self.gpu_compose_file)])
|
||||
cmd.extend(args)
|
||||
|
|
@ -388,7 +432,17 @@ class ContainerManager:
|
|||
use_gpu = not cpu_mode
|
||||
|
||||
# Build compose command with override pattern
|
||||
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
|
||||
cmd = self.runtime_info.compose_command.copy()
|
||||
|
||||
# Add --env-file to explicitly specify the .env location
|
||||
from utils.paths import get_tui_env_file
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
cmd.extend(["--env-file", str(tui_env_file)])
|
||||
elif Path(".env").exists():
|
||||
cmd.extend(["--env-file", ".env"])
|
||||
|
||||
cmd.extend(["-f", str(self.compose_file)])
|
||||
if use_gpu and self.gpu_compose_file.exists():
|
||||
cmd.extend(["-f", str(self.gpu_compose_file)])
|
||||
cmd.extend(args)
|
||||
|
|
@ -794,13 +848,24 @@ class ContainerManager:
|
|||
|
||||
async def _parse_compose_images(self) -> list[str]:
|
||||
"""Get resolved image names from compose files using docker/podman compose, with robust fallbacks."""
|
||||
from utils.paths import get_tui_env_file
|
||||
|
||||
images: set[str] = set()
|
||||
|
||||
# Try both GPU and CPU modes to get all images
|
||||
for use_gpu in [True, False]:
|
||||
try:
|
||||
# Build compose command with override pattern
|
||||
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
|
||||
cmd = self.runtime_info.compose_command.copy()
|
||||
|
||||
# Add --env-file to explicitly specify the .env location
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
cmd.extend(["--env-file", str(tui_env_file)])
|
||||
elif Path(".env").exists():
|
||||
cmd.extend(["--env-file", ".env"])
|
||||
|
||||
cmd.extend(["-f", str(self.compose_file)])
|
||||
if use_gpu and self.gpu_compose_file.exists():
|
||||
cmd.extend(["-f", str(self.gpu_compose_file)])
|
||||
cmd.extend(["config", "--format", "json"])
|
||||
|
|
@ -821,7 +886,16 @@ class ContainerManager:
|
|||
continue
|
||||
|
||||
# Fallback to YAML output (for older compose versions)
|
||||
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
|
||||
cmd = self.runtime_info.compose_command.copy()
|
||||
|
||||
# Add --env-file to explicitly specify the .env location
|
||||
tui_env_file = get_tui_env_file()
|
||||
if tui_env_file.exists():
|
||||
cmd.extend(["--env-file", str(tui_env_file)])
|
||||
elif Path(".env").exists():
|
||||
cmd.extend(["--env-file", ".env"])
|
||||
|
||||
cmd.extend(["-f", str(self.compose_file)])
|
||||
if use_gpu and self.gpu_compose_file.exists():
|
||||
cmd.extend(["-f", str(self.gpu_compose_file)])
|
||||
cmd.append("config")
|
||||
|
|
@ -966,7 +1040,7 @@ class ContainerManager:
|
|||
up_success = {"value": True}
|
||||
error_messages = []
|
||||
|
||||
async for message, replace_last in self._stream_compose_command(["up", "-d"], up_success, cpu_mode):
|
||||
async for message, replace_last in self._stream_compose_command(["up", "-d", "--no-build"], up_success, cpu_mode):
|
||||
# Detect error patterns in the output
|
||||
lower_msg = message.lower()
|
||||
|
||||
|
|
@ -1041,7 +1115,7 @@ class ContainerManager:
|
|||
# Restart with new images using streaming output
|
||||
restart_success = True
|
||||
async for message, replace_last in self._run_compose_command_streaming(
|
||||
["up", "-d", "--force-recreate"], cpu_mode
|
||||
["up", "-d", "--force-recreate", "--no-build"], cpu_mode
|
||||
):
|
||||
yield False, message, replace_last
|
||||
# Check for error patterns in the output
|
||||
|
|
@ -1053,6 +1127,39 @@ class ContainerManager:
|
|||
else:
|
||||
yield False, "Some errors occurred during service restart", False
|
||||
|
||||
async def clear_directory_with_container(self, path: Path) -> tuple[bool, str]:
|
||||
"""Clear a directory using a container to handle container-owned files.
|
||||
|
||||
Args:
|
||||
path: The directory to clear (contents will be deleted, directory recreated)
|
||||
|
||||
Returns:
|
||||
Tuple of (success, message)
|
||||
"""
|
||||
if not self.is_available():
|
||||
return False, "No container runtime available"
|
||||
|
||||
if not path.exists():
|
||||
return True, "Directory does not exist, nothing to clear"
|
||||
|
||||
path = path.absolute()
|
||||
|
||||
# Use alpine container to delete files owned by container user
|
||||
cmd = [
|
||||
"run", "--rm",
|
||||
"-v", f"{path}:/work:Z",
|
||||
"alpine",
|
||||
"sh", "-c",
|
||||
"rm -rf /work/* /work/.[!.]* 2>/dev/null; echo done"
|
||||
]
|
||||
|
||||
success, stdout, stderr = await self._run_runtime_command(cmd)
|
||||
|
||||
if success and "done" in stdout:
|
||||
return True, f"Cleared {path}"
|
||||
else:
|
||||
return False, f"Failed to clear {path}: {stderr or 'Unknown error'}"
async def clear_opensearch_data_volume(self) -> AsyncIterator[tuple[bool, str]]:
"""Clear opensearch data using a temporary container with proper permissions."""
if not self.is_available():

@ -1061,45 +1168,23 @@ class ContainerManager:
yield False, "Clearing OpenSearch data volume..."

# Get the absolute path to opensearch-data directory
opensearch_data_path = Path("opensearch-data").absolute()
# Get opensearch data path from env config
from .env_manager import EnvManager
env_manager = EnvManager()
env_manager.load_existing_env()
opensearch_data_path = Path(env_manager.config.opensearch_data_path.replace("$HOME", str(Path.home()))).expanduser().absolute()

if not opensearch_data_path.exists():
yield True, "OpenSearch data directory does not exist, skipping"
return

# Use the opensearch container with proper volume mount flags
# :Z flag ensures proper SELinux labeling and UID mapping for rootless containers
cmd = [
"run",
"--rm",
"-v", f"{opensearch_data_path}:/usr/share/opensearch/data:Z",
"langflowai/openrag-opensearch:latest",
"bash", "-c",
"rm -rf /usr/share/opensearch/data/* /usr/share/opensearch/data/.[!.]* && echo 'Cleared successfully'"
]

success, stdout, stderr = await self._run_runtime_command(cmd)

if success and "Cleared successfully" in stdout:
# Use alpine with root to clear container-owned files
success, msg = await self.clear_directory_with_container(opensearch_data_path)

if success:
yield True, "OpenSearch data cleared successfully"
else:
# If it fails, try with the base opensearch image
yield False, "Retrying with base OpenSearch image..."
cmd = [
"run",
"--rm",
"-v", f"{opensearch_data_path}:/usr/share/opensearch/data:Z",
"opensearchproject/opensearch:3.0.0",
"bash", "-c",
"rm -rf /usr/share/opensearch/data/* /usr/share/opensearch/data/.[!.]* && echo 'Cleared successfully'"
]
success, stdout, stderr = await self._run_runtime_command(cmd)

if success and "Cleared successfully" in stdout:
yield True, "OpenSearch data cleared successfully"
else:
yield False, f"Failed to clear OpenSearch data: {stderr if stderr else 'Unknown error'}"
yield False, f"Failed to clear OpenSearch data: {msg}"
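Aside (illustration): a minimal sketch of how a caller might consume this progress generator; the ContainerManager construction is assumed and not shown in the diff:

import asyncio

async def demo(manager) -> None:
    # Each yielded tuple is (success, message); only the final message
    # indicates overall success or failure of the clear operation.
    async for ok, message in manager.clear_opensearch_data_volume():
        print(("OK  " if ok else "... ") + message)

# asyncio.run(demo(ContainerManager()))  # wiring of ContainerManager assumed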
async def reset_services(self) -> AsyncIterator[tuple[bool, str]]:
"""Reset all services (stop, remove containers/volumes, clear data) and yield progress updates."""

@ -1145,7 +1230,7 @@ class ContainerManager:
return

# Build compose command with override pattern
cmd = self.runtime_info.compose_command + ["-f", str(self.compose_file)]
cmd = self.runtime_info.compose_command.copy() + ["-f", str(self.compose_file)]
if self.use_gpu_compose and self.gpu_compose_file.exists():
cmd.extend(["-f", str(self.gpu_compose_file)])
cmd.extend(["logs", "-f", service_name])
@ -64,11 +64,16 @@ class EnvConfig:
disable_ingest_with_langflow: str = "False"
nudges_flow_id: str = "ebc01d31-1976-46ce-a385-b0240327226c"

# Document paths (comma-separated)
openrag_documents_paths: str = "./openrag-documents"
# Document paths (comma-separated) - use centralized location by default
openrag_documents_paths: str = "$HOME/.openrag/documents"

# OpenSearch data path
opensearch_data_path: str = "./opensearch-data"
# Volume mount paths - use centralized location by default
openrag_documents_path: str = "$HOME/.openrag/documents" # Primary documents path for compose
openrag_keys_path: str = "$HOME/.openrag/keys"
openrag_flows_path: str = "$HOME/.openrag/flows"
openrag_config_path: str = "$HOME/.openrag/config"
openrag_data_path: str = "$HOME/.openrag/data" # Backend data (conversations, tokens, etc.)
opensearch_data_path: str = "$HOME/.openrag/data/opensearch-data"

# Container version (linked to TUI version)
openrag_version: str = ""

@ -81,7 +86,26 @@ class EnvManager:
"""Manages environment configuration for OpenRAG."""

def __init__(self, env_file: Optional[Path] = None):
self.env_file = env_file or Path(".env")
if env_file:
self.env_file = env_file
else:
# Use centralized location for TUI .env file
from utils.paths import get_tui_env_file, get_legacy_paths
self.env_file = get_tui_env_file()

# Check for legacy .env in current directory and migrate if needed
legacy_env = get_legacy_paths()["tui_env"]
if not self.env_file.exists() and legacy_env.exists():
try:
import shutil
self.env_file.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(legacy_env, self.env_file)
logger.info(f"Migrated .env from {legacy_env} to {self.env_file}")

except Exception as e:
logger.warning(f"Failed to migrate .env file: {e}")

self.config = EnvConfig()
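Aside (illustration): the constructor above boils down to the following resolution order; the helper is a compressed restatement for readability, not code from the diff:

import shutil
from pathlib import Path

def resolve_env_file(explicit: Path | None = None) -> Path:
    """Illustrative: an explicit argument wins, otherwise use
    ~/.openrag/tui/.env, migrating a legacy ./.env there on first use."""
    if explicit:
        return explicit
    target = Path.home() / ".openrag" / "tui" / ".env"  # centralized location
    legacy = Path.cwd() / ".env"                        # pre-migration location
    if not target.exists() and legacy.exists():
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(legacy, target)  # one-time migration; original is kept
    return target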
def generate_secure_password(self) -> str:

@ -155,6 +179,11 @@ class EnvManager:
"AWS_SECRET_ACCESS_KEY": "aws_secret_access_key", # pragma: allowlist secret
"LANGFLOW_PUBLIC_URL": "langflow_public_url",
"OPENRAG_DOCUMENTS_PATHS": "openrag_documents_paths",
"OPENRAG_DOCUMENTS_PATH": "openrag_documents_path",
"OPENRAG_KEYS_PATH": "openrag_keys_path",
"OPENRAG_FLOWS_PATH": "openrag_flows_path",
"OPENRAG_CONFIG_PATH": "openrag_config_path",
"OPENRAG_DATA_PATH": "openrag_data_path",
"OPENSEARCH_DATA_PATH": "opensearch_data_path",
"LANGFLOW_AUTO_LOGIN": "langflow_auto_login",
"LANGFLOW_NEW_USER_IS_ACTIVE": "langflow_new_user_is_active",

@ -348,11 +377,34 @@ class EnvManager:
f.write(f"LANGFLOW_URL_INGEST_FLOW_ID={self._quote_env_value(self.config.langflow_url_ingest_flow_id)}\n")
f.write(f"NUDGES_FLOW_ID={self._quote_env_value(self.config.nudges_flow_id)}\n")
f.write(f"OPENSEARCH_PASSWORD={self._quote_env_value(self.config.opensearch_password)}\n")

# Expand $HOME in paths before writing to .env
# This ensures paths work with all compose implementations (docker, podman)
from utils.paths import expand_path
f.write(
f"OPENRAG_DOCUMENTS_PATHS={self._quote_env_value(self.config.openrag_documents_paths)}\n"
f"OPENRAG_DOCUMENTS_PATHS={self._quote_env_value(expand_path(self.config.openrag_documents_paths))}\n"
)
f.write("\n")

# Volume mount paths for Docker Compose
f.write("# Volume mount paths for Docker Compose\n")
f.write(
f"OPENRAG_DOCUMENTS_PATH={self._quote_env_value(expand_path(self.config.openrag_documents_path))}\n"
)
f.write(
f"OPENSEARCH_DATA_PATH={self._quote_env_value(self.config.opensearch_data_path)}\n"
f"OPENRAG_KEYS_PATH={self._quote_env_value(expand_path(self.config.openrag_keys_path))}\n"
)
f.write(
f"OPENRAG_FLOWS_PATH={self._quote_env_value(expand_path(self.config.openrag_flows_path))}\n"
)
f.write(
f"OPENRAG_CONFIG_PATH={self._quote_env_value(expand_path(self.config.openrag_config_path))}\n"
)
f.write(
f"OPENRAG_DATA_PATH={self._quote_env_value(expand_path(self.config.openrag_data_path))}\n"
)
f.write(
f"OPENSEARCH_DATA_PATH={self._quote_env_value(expand_path(self.config.opensearch_data_path))}\n"
)
# Set OPENRAG_VERSION to TUI version
if self.config.openrag_version:
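Aside (illustration): with the default EnvConfig values, the block written above would come out roughly as below in the generated .env. The double quotes are an assumption about _quote_env_value's behavior, and HOME=/home/alice is invented for the example:

# Volume mount paths for Docker Compose
OPENRAG_DOCUMENTS_PATH="/home/alice/.openrag/documents"
OPENRAG_KEYS_PATH="/home/alice/.openrag/keys"
OPENRAG_FLOWS_PATH="/home/alice/.openrag/flows"
OPENRAG_CONFIG_PATH="/home/alice/.openrag/config"
OPENRAG_DATA_PATH="/home/alice/.openrag/data"
OPENSEARCH_DATA_PATH="/home/alice/.openrag/data/opensearch-data"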
@ -476,7 +528,7 @@ class EnvManager:
(
"openrag_documents_paths",
"Documents Paths",
"./openrag-documents,/path/to/more/docs",
"~/.openrag/documents",
False,
),
]

@ -601,12 +653,13 @@ class EnvManager:
def generate_compose_volume_mounts(self) -> List[str]:
"""Generate Docker Compose volume mount strings from documents paths."""
is_valid, _, validated_paths = validate_documents_paths(
self.config.openrag_documents_paths
)
# Expand $HOME before validation
paths_str = self.config.openrag_documents_paths.replace("$HOME", str(Path.home()))
is_valid, error_msg, validated_paths = validate_documents_paths(paths_str)

if not is_valid:
return ["./openrag-documents:/app/openrag-documents:Z"] # fallback
logger.warning(f"Invalid documents paths: {error_msg}")
return []

volume_mounts = []
for i, path in enumerate(validated_paths):
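Aside (illustration): the expansion step before validation, shown in isolation with an invented config value:

from pathlib import Path

paths_str = "$HOME/.openrag/documents,/srv/shared-docs"  # hypothetical config value
expanded = paths_str.replace("$HOME", str(Path.home()))
# e.g. "/home/alice/.openrag/documents,/srv/shared-docs" when HOME=/home/alice;
# validate_documents_paths() then splits on commas and checks each entry.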
@ -523,7 +523,7 @@ class ConfigScreen(Screen):
yield Label("Documents Paths")
current_value = getattr(self.env_manager.config, "openrag_documents_paths", "")
input_widget = Input(
placeholder="./openrag-documents,/path/to/more/docs",
placeholder="~/.openrag/documents",
value=current_value,
validators=[DocumentsPathValidator()],
id="input-openrag_documents_paths",

@ -544,9 +544,9 @@ class ConfigScreen(Screen):
"Directory to persist OpenSearch indices across upgrades",
classes="helper-text",
)
current_value = getattr(self.env_manager.config, "opensearch_data_path", "./opensearch-data")
current_value = getattr(self.env_manager.config, "opensearch_data_path", "$HOME/.openrag/data/opensearch-data")
input_widget = Input(
placeholder="./opensearch-data",
placeholder="~/.openrag/data/opensearch-data",
value=current_value,
id="input-opensearch_data_path",
)

@ -167,8 +167,8 @@ class DiagnosticsScreen(Screen):
status = self.query_one("#copy-status", Static)

# Create logs directory if it doesn't exist
logs_dir = Path("logs")
logs_dir.mkdir(exist_ok=True)
logs_dir = Path.home() / ".openrag" / "logs"
logs_dir.mkdir(parents=True, exist_ok=True)

# Create a timestamped filename
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
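Aside (illustration): the directory and timestamp pieces above combine roughly like this; the exact log filename pattern is an assumption, since the diff truncates before it:

import datetime
from pathlib import Path

logs_dir = Path.home() / ".openrag" / "logs"
logs_dir.mkdir(parents=True, exist_ok=True)  # parents=True: ~/.openrag may not exist yet
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
log_file = logs_dir / f"diagnostics_{timestamp}.log"  # filename pattern assumed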
@ -481,27 +481,40 @@ class MonitorScreen(Screen):
# Clear config, conversations.json, and optionally flow backups (before stopping containers)
try:
config_path = Path("config")
conversations_file = Path("conversations.json")
flows_backup_path = Path("flows/backup")

# Get paths from env config
from ..managers.env_manager import EnvManager
env_manager = EnvManager()
env_manager.load_existing_env()

def expand_path(path_str: str) -> Path:
return Path(path_str.replace("$HOME", str(Path.home()))).expanduser()

config_path = expand_path(env_manager.config.openrag_config_path)
flows_path = expand_path(env_manager.config.openrag_flows_path)
flows_backup_path = flows_path / "backup"

if config_path.exists():
shutil.rmtree(config_path)
# Use container to handle files owned by container user
success, msg = await self.container_manager.clear_directory_with_container(config_path)
if not success:
# Fallback to regular rmtree if container method fails
shutil.rmtree(config_path)
# Recreate empty config directory
config_path.mkdir(parents=True, exist_ok=True)

if conversations_file.exists():
conversations_file.unlink()

# Delete flow backups only if user chose to (and they actually exist)
if self._check_flow_backups():
if delete_backups:
shutil.rmtree(flows_backup_path)
# Use container to handle files owned by container user
success, msg = await self.container_manager.clear_directory_with_container(flows_backup_path)
if not success:
# Fallback to regular rmtree if container method fails
shutil.rmtree(flows_backup_path)
# Recreate empty backup directory
flows_backup_path.mkdir(parents=True, exist_ok=True)
self.notify("Flow backups deleted", severity="information")
else:
self.notify("Flow backups preserved in ./flows/backup", severity="information")
self.notify(f"Flow backups preserved in {flows_backup_path}", severity="information")

except Exception as e:
self.notify(

@ -531,7 +544,11 @@ class MonitorScreen(Screen):
# Now clear opensearch-data using container
yield False, "Clearing OpenSearch data..."
opensearch_data_path = Path("opensearch-data")
# Get opensearch data path from env config
from ..managers.env_manager import EnvManager
env_manager = EnvManager()
env_manager.load_existing_env()
opensearch_data_path = Path(env_manager.config.opensearch_data_path.replace("$HOME", str(Path.home()))).expanduser()
if opensearch_data_path.exists():
async for success, message in self.container_manager.clear_opensearch_data_volume():
yield success, message

@ -549,10 +566,15 @@ class MonitorScreen(Screen):
yield True, "Factory reset completed successfully"

def _check_flow_backups(self) -> bool:
"""Check if there are any flow backups in ./flows/backup directory."""
"""Check if there are any flow backups in flows/backup directory."""
from pathlib import Path
from ..managers.env_manager import EnvManager

backup_dir = Path("flows/backup")
# Get flows path from env config
env_manager = EnvManager()
env_manager.load_existing_env()
flows_path = Path(env_manager.config.openrag_flows_path.replace("$HOME", str(Path.home()))).expanduser()
backup_dir = flows_path / "backup"
if not backup_dir.exists():
return False

@ -68,11 +68,17 @@ class WelcomeScreen(Screen):
yield Footer()

def _check_flow_backups(self) -> bool:
"""Check if there are any flow backups in ./flows/backup directory."""
backup_dir = Path("flows/backup")
"""Check if there are any flow backups in flows/backup directory."""
from ..managers.env_manager import EnvManager

# Get flows path from env config
env_manager = EnvManager()
env_manager.load_existing_env()
flows_path = Path(env_manager.config.openrag_flows_path.replace("$HOME", str(Path.home()))).expanduser()
backup_dir = flows_path / "backup"
if not backup_dir.exists():
return False

try:
# Check if there are any .json files in the backup directory
backup_files = list(backup_dir.glob("*.json"))
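Aside (illustration): both screens now resolve the backup directory the same way; a compressed sketch of the shared check, with an invented function name:

from pathlib import Path

def has_flow_backups(configured_flows_path: str) -> bool:
    # Mirror the $HOME replacement and ~ expansion used in both screens,
    # then look for any saved flow JSON under <flows>/backup.
    flows = Path(configured_flows_path.replace("$HOME", str(Path.home()))).expanduser()
    backup_dir = flows / "backup"
    return backup_dir.exists() and any(backup_dir.glob("*.json"))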
@ -315,15 +315,22 @@ class CommandOutputModal(ModalScreen):
asyncio.create_task(callback_result)

self.call_after_refresh(_invoke_callback)
except asyncio.CancelledError:
# Modal was dismissed while command was running - this is fine
pass
except Exception as e:
self._update_output(f"Error: {e}", False)
output.text = "\n".join(self._output_lines)
output.move_cursor((len(self._output_lines), 0))
finally:
# Enable the close button and focus it
close_btn = self.query_one("#close-btn", Button)
close_btn.disabled = False
close_btn.focus()
# Enable the close button and focus it (if modal still exists)
try:
close_btn = self.query_one("#close-btn", Button)
close_btn.disabled = False
close_btn.focus()
except Exception:
# Modal was already dismissed
pass

def _update_output(self, message: str, replace_last: bool = False) -> None:
"""Update the output buffer by appending or replacing the last line.

@ -100,9 +100,9 @@ class FlowBackupWarningModal(ModalScreen[tuple[bool, bool]]):
with Container(id="dialog"):
yield Label("⚠ Flow Backups Detected", id="title")
yield Static(
f"Flow backups found in ./flows/backup\n\n"
f"Flow backups found in your flows/backup directory.\n\n"
f"Proceeding with {self.operation} will reset custom flows to defaults.\n"
f"Your customizations are backed up in ./flows/backup/\n\n"
f"Your customizations are backed up in the flows/backup/ directory.\n\n"
f"Choose whether to keep or delete the backup files:",
id="message"
)

@ -92,8 +92,8 @@ class VersionMismatchWarningModal(ModalScreen[bool]):
f"Current TUI version is {self.tui_version}\n\n"
f"Starting services will update containers to version {self.tui_version}.\n"
f"This may cause compatibility issues with your flows.\n\n"
f"⚠️ Please backup your flows before continuing:\n"
f"  Your flows are in ./flows/ directory\n\n"
f"⚠️ Please backup your flows before continuing.\n"
f"  Your flows are in ~/.openrag/flows/\n\n"
f"Do you want to continue?",
id="message"
)
85 src/utils/paths.py Normal file

@ -0,0 +1,85 @@
"""Host-side path management for OpenRAG TUI.

This module provides functions for TUI to get standardized paths on the host machine.
All TUI files are centralized under ~/.openrag/ to avoid cluttering the user's CWD.

Note: This module is for HOST-SIDE (TUI) use only. Container code should not use these paths.
"""

from pathlib import Path


def get_openrag_home() -> Path:
    """Get the OpenRAG home directory on the host.

    Returns:
        Path to ~/.openrag/ directory
    """
    home_dir = Path.home() / ".openrag"
    home_dir.mkdir(parents=True, exist_ok=True)
    return home_dir


def get_tui_dir() -> Path:
    """Get the TUI directory for TUI-specific files.

    Returns:
        Path to ~/.openrag/tui/ directory
    """
    tui_dir = get_openrag_home() / "tui"
    tui_dir.mkdir(parents=True, exist_ok=True)
    return tui_dir


def get_tui_env_file() -> Path:
    """Get the TUI .env file path.

    Returns:
        Path to ~/.openrag/tui/.env file
    """
    return get_tui_dir() / ".env"


def get_tui_compose_file(gpu: bool = False) -> Path:
    """Get the TUI docker-compose file path.

    Args:
        gpu: If True, returns path to docker-compose.gpu.yml

    Returns:
        Path to docker-compose file in ~/.openrag/tui/
    """
    filename = "docker-compose.gpu.yml" if gpu else "docker-compose.yml"
    return get_tui_dir() / filename


def get_legacy_paths() -> dict:
    """Get legacy (CWD-based) paths for migration purposes.

    Returns:
        Dictionary mapping resource names to their old CWD-based paths
    """
    cwd = Path.cwd()
    return {
        "tui_env": cwd / ".env",
        "tui_compose": cwd / "docker-compose.yml",
        "tui_compose_gpu": cwd / "docker-compose.gpu.yml",
    }


def expand_path(path: str) -> str:
    """Expand $HOME and ~ in a path string to the actual home directory.

    Args:
        path: Path string that may contain $HOME or ~

    Returns:
        Path string with $HOME and ~ expanded to actual home directory
    """
    if not path:
        return path
    expanded = path.replace("$HOME", str(Path.home()))
    # Also handle ~ at start of path
    if expanded.startswith("~"):
        expanded = str(Path.home()) + expanded[1:]
    return expanded
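Aside (illustration): expected behavior of expand_path, assuming utils.paths is importable from the checkout:

from pathlib import Path
from utils.paths import expand_path  # module added in this diff

home = str(Path.home())
assert expand_path("$HOME/.openrag/flows") == f"{home}/.openrag/flows"
assert expand_path("~/.openrag/config") == f"{home}/.openrag/config"
assert expand_path("") == ""  # falsy input is returned unchanged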
375 uv.lock generated

@ -1,12 +1,15 @@
version = 1
revision = 1
requires-python = ">=3.13"
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"python_version < '0'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"sys_platform == 'darwin'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]

[[package]]

@ -156,10 +159,13 @@ source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/38/53/946db57842a50b2da2e0c1e34bd37f36f5aadba1a929a3971c5d7841dbca/audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0", size = 30686 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/d4/94d277ca941de5a507b07f0b592f199c22454eeaec8f008a286b3fbbacd6/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800", size = 46523 },
{ url = "https://files.pythonhosted.org/packages/f8/5a/656d1c2da4b555920ce4177167bfeb8623d98765594af59702c8873f60ec/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303", size = 27455 },
{ url = "https://files.pythonhosted.org/packages/1b/83/ea581e364ce7b0d41456fb79d6ee0ad482beda61faf0cab20cbd4c63a541/audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75", size = 26997 },
{ url = "https://files.pythonhosted.org/packages/58/a7/0a764f77b5c4ac58dc13c01a580f5d32ae8c74c92020b961556a43e26d02/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09", size = 47096 },
{ url = "https://files.pythonhosted.org/packages/aa/ed/ebebedde1a18848b085ad0fa54b66ceb95f1f94a3fc04f1cd1b5ccb0ed42/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58", size = 27748 },
{ url = "https://files.pythonhosted.org/packages/cb/6e/11ca8c21af79f15dbb1c7f8017952ee8c810c438ce4e2b25638dfef2b02c/audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19", size = 27329 },
{ url = "https://files.pythonhosted.org/packages/5c/73/413b5a2804091e2c7d5def1d618e4837f1cb82464e230f827226278556b7/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6", size = 47104 },
{ url = "https://files.pythonhosted.org/packages/ae/8c/daa3308dc6593944410c2c68306a5e217f5c05b70a12e70228e7dd42dc5c/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a", size = 27754 },
{ url = "https://files.pythonhosted.org/packages/4e/86/c2e0f627168fcf61781a8f72cab06b228fe1da4b9fa4ab39cfb791b5836b/audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b", size = 27332 },
]

@ -211,6 +217,7 @@ source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/9f/fb37bb8ffc52a8da37b1c03c459a8cd55df7a57bdccd8831d500e994a0ca/Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5", size = 815681 },
{ url = "https://files.pythonhosted.org/packages/06/b3/dbd332a988586fefb0aa49c779f59f47cae76855c2d00f450364bb574cac/Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8", size = 422475 },
]

[[package]]

@ -280,7 +287,8 @@ name = "click"
version = "8.1.8"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 }
wheels = [

@ -293,11 +301,14 @@ version = "8.2.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "colorama", marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform != 'darwin' and sys_platform != 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 }
wheels = [
@ -504,7 +515,8 @@ name = "dill"
version = "0.3.8"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847 }
wheels = [

@ -517,8 +529,11 @@ version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976 }
wheels = [

@ -682,8 +697,11 @@ version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "docling", marker = "sys_platform != 'darwin'" },

@ -703,7 +721,8 @@ name = "docling-mcp"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
dependencies = [
{ name = "docling", marker = "sys_platform == 'darwin'" },

@ -727,7 +746,7 @@ dependencies = [
{ name = "docling-core" },
{ name = "pillow" },
{ name = "pydantic" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
{ name = "pywin32", marker = "(platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform == 'win32') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'win32')" },
{ name = "tabulate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/24/fff30a36af50a720813b1bdbeaee140136ff0fcdfad041ec8127c3115b4f/docling_parse-4.1.0.tar.gz", hash = "sha256:6c2f52c5438ff6158ad2e6d2064b35786f01ce7f1b235c7c882b71ab221549c6", size = 39407179 }

@ -988,7 +1007,8 @@ name = "fsspec"
version = "2025.3.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
sdist = { url = "https://files.pythonhosted.org/packages/34/f4/5721faf47b8c499e776bc34c6a8fc17efdf7fdef0b00f398128bc5dcb4ac/fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972", size = 298491 }
wheels = [

@ -1006,8 +1026,11 @@ version = "2025.5.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033 }
wheels = [

@ -1327,8 +1350,11 @@ version = "0.33.2"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "filelock", marker = "sys_platform != 'darwin'" },

@ -1350,7 +1376,8 @@ name = "huggingface-hub"
version = "0.34.4"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
dependencies = [
{ name = "filelock", marker = "sys_platform == 'darwin'" },

@ -1755,7 +1782,7 @@ dependencies = [
{ name = "pydantic" },
{ name = "pydantic-settings" },
{ name = "python-multipart" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
{ name = "pywin32", marker = "(platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform == 'win32') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'win32')" },
{ name = "sse-starlette" },
{ name = "starlette" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },

@ -1844,7 +1871,7 @@ name = "mlx"
version = "0.29.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mlx-metal", marker = "sys_platform == 'darwin'" },
{ name = "mlx-metal", marker = "platform_system == 'Darwin' and sys_platform == 'darwin'" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/66/62/7691ea664123d6e1fc0626207d5f1a6ed2b92b71059f4be42634e89b479e/mlx-0.29.1-cp313-cp313-macosx_13_0_arm64.whl", hash = "sha256:e86644cef409a00dd46eb9debf0796899623c686d16cc25b6e83078fb5081eba", size = 546904 },

@ -1907,7 +1934,7 @@ version = "2.10.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pygments" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
{ name = "pywin32", marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform != 'darwin' and sys_platform != 'linux')" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3a/93/80ac75c20ce54c785648b4ed363c88f148bf22637e10c9863db4fbe73e74/mpire-2.10.2.tar.gz", hash = "sha256:f66a321e93fadff34585a4bfa05e95bd946cf714b442f51c529038eb45773d97", size = 271270 }

@ -2012,7 +2039,8 @@ name = "multiprocess"
version = "0.70.16"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
dependencies = [
{ name = "dill", version = "0.3.8", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'darwin'" },

@ -2032,8 +2060,11 @@ version = "0.70.18"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "dill", version = "0.4.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'darwin'" },
@ -2115,128 +2146,310 @@ wheels = [
|
|||
name = "nvidia-cublas-cu12"
|
||||
version = "12.8.3.14"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/63/684a6f72f52671ea222c12ecde9bdf748a0ba025e2ad3ec374e466c26eb6/nvidia_cublas_cu12-12.8.3.14-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:93a4e0e386cc7f6e56c822531396de8170ed17068a1e18f987574895044cd8c3", size = 604900717 },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/df/4b01f10069e23c641f116c62fc31e31e8dc361a153175d81561d15c8143b/nvidia_cublas_cu12-12.8.3.14-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:3f0e05e7293598cf61933258b73e66a160c27d59c4422670bf0b79348c04be44", size = 609620630 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cublas-cu12"
|
||||
version = "12.8.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-cupti-cu12"
|
||||
version = "12.8.57"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/53/458956a65283c55c22ba40a65745bbe9ff20c10b68ea241bc575e20c0465/nvidia_cuda_cupti_cu12-12.8.57-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff154211724fd824e758ce176b66007b558eea19c9a5135fc991827ee147e317", size = 9526469 },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/6f/3683ecf4e38931971946777d231c2df00dd5c1c4c2c914c42ad8f9f4dca6/nvidia_cuda_cupti_cu12-12.8.57-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e0b2eb847de260739bee4a3f66fac31378f4ff49538ff527a38a01a9a39f950", size = 10237547 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-cupti-cu12"
|
||||
version = "12.8.90"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-nvrtc-cu12"
|
||||
version = "12.8.61"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/22/32029d4583f7b19cfe75c84399cbcfd23f2aaf41c66fc8db4da460104fff/nvidia_cuda_nvrtc_cu12-12.8.61-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a0fa9c2a21583105550ebd871bd76e2037205d56f33f128e69f6d2a55e0af9ed", size = 88024585 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/98/29f98d57fc40d6646337e942d37509c6d5f8abe29012671f7a6eb9978ebe/nvidia_cuda_nvrtc_cu12-12.8.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1f376bf58111ca73dde4fd4df89a462b164602e074a76a2c29c121ca478dcd4", size = 43097015 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-nvrtc-cu12"
|
||||
version = "12.8.93"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-runtime-cu12"
|
||||
version = "12.8.57"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/9d/e77ec4227e70c6006195bdf410370f2d0e5abfa2dc0d1d315cacd57c5c88/nvidia_cuda_runtime_cu12-12.8.57-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:534ccebd967b6a44292678fa5da4f00666029cb2ed07a79515ea41ef31fe3ec7", size = 965264 },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/f6/0e1ef31f4753a44084310ba1a7f0abaf977ccd810a604035abb43421c057/nvidia_cuda_runtime_cu12-12.8.57-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75342e28567340b7428ce79a5d6bb6ca5ff9d07b69e7ce00d2c7b4dc23eff0be", size = 954762 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cuda-runtime-cu12"
|
||||
version = "12.8.90"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cudnn-cu12"
|
||||
version = "9.7.1.26"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/2e/ec5dda717eeb1de3afbbbb611ca556f9d6d057470759c6abd36d72f0063b/nvidia_cudnn_cu12-9.7.1.26-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:848a61d40ef3b32bd4e1fadb599f0cf04a4b942fbe5fb3be572ad75f9b8c53ef", size = 725862213 },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/dc/dc825c4b1c83b538e207e34f48f86063c88deaa35d46c651c7c181364ba2/nvidia_cudnn_cu12-9.7.1.26-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:6d011159a158f3cfc47bf851aea79e31bcff60d530b70ef70474c84cac484d07", size = 726851421 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cudnn-cu12"
|
||||
version = "9.10.2.21"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cufft-cu12"
|
||||
version = "11.3.3.41"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/72/95/6157cb45a49f5090a470de42353a22a0ed5b13077886dca891b4b0e350fe/nvidia_cufft_cu12-11.3.3.41-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:68509dcd7e3306e69d0e2d8a6d21c8b25ed62e6df8aac192ce752f17677398b5", size = 193108626 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/26/b53c493c38dccb1f1a42e1a21dc12cba2a77fbe36c652f7726d9ec4aba28/nvidia_cufft_cu12-11.3.3.41-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:da650080ab79fcdf7a4b06aa1b460e99860646b176a43f6208099bdc17836b6a", size = 193118795 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cufft-cu12"
|
||||
version = "11.3.3.83"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cufile-cu12"
|
||||
version = "1.13.0.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/9c/1f3264d0a84c8a031487fb7f59780fc78fa6f1c97776233956780e3dc3ac/nvidia_cufile_cu12-1.13.0.11-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:483f434c541806936b98366f6d33caef5440572de8ddf38d453213729da3e7d4", size = 1197801 },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/80/f6a0fc90ab6fa4ac916f3643e5b620fd19724626c59ae83b74f5efef0349/nvidia_cufile_cu12-1.13.0.11-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:2acbee65dc2eaf58331f0798c5e6bcdd790c4acb26347530297e63528c9eba5d", size = 1120660 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cufile-cu12"
|
||||
version = "1.13.1.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-curand-cu12"
|
||||
version = "10.3.9.55"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/13/bbcf48e2f8a6a9adef58f130bc968810528a4e66bbbe62fad335241e699f/nvidia_curand_cu12-10.3.9.55-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b6bb90c044fa9b07cedae2ef29077c4cf851fb6fdd6d862102321f359dca81e9", size = 63623836 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/fc/7be5d0082507269bb04ac07cc614c84b78749efb96e8cf4100a8a1178e98/nvidia_curand_cu12-10.3.9.55-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8387d974240c91f6a60b761b83d4b2f9b938b7e0b9617bae0f0dafe4f5c36b86", size = 63618038 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-curand-cu12"
|
||||
version = "10.3.9.90"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cusolver-cu12"
|
||||
version = "11.7.2.55"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-cusparse-cu12", version = "12.5.7.53", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/ce/4214a892e804b20bf66d04f04a473006fc2d3dac158160ef85f1bc906639/nvidia_cusolver_cu12-11.7.2.55-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:0fd9e98246f43c15bee5561147ad235dfdf2d037f5d07c9d41af3f7f72feb7cc", size = 260094827 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/08/953675873a136d96bb12f93b49ba045d1107bc94d2551c52b12fa6c7dec3/nvidia_cusolver_cu12-11.7.2.55-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4d1354102f1e922cee9db51920dba9e2559877cf6ff5ad03a00d853adafb191b", size = 260373342 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cusolver-cu12"
|
||||
version = "11.7.3.90"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
|
||||
{ name = "nvidia-cusparse-cu12", version = "12.5.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
|
||||
{ name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nvidia-cusparse-cu12"
|
||||
version = "12.5.7.53"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"platform_machine == 'x86_64' and sys_platform == 'linux'",
|
||||
]
|
||||
dependencies = [
|
||||
{ name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
{ name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
|
||||
]
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/a2/313db0453087f5324a5900380ca2e57e050c8de76f407b5e11383dc762ae/nvidia_cusparse_cu12-12.5.7.53-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d869c6146ca80f4305b62e02d924b4aaced936f8173e3cef536a67eed2a91af1", size = 291963692 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/ab/31e8149c66213b846c082a3b41b1365b831f41191f9f40c6ddbc8a7d550e/nvidia_cusparse_cu12-12.5.7.53-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c1b61eb8c85257ea07e9354606b26397612627fdcd327bfd91ccf6155e7c86d", size = 292064180 },
|
||||
]
|
||||
|
||||
[[package]]
name = "nvidia-cusparse-cu12"
version = "12.5.8.93"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]

[[package]]
name = "nvidia-cusparselt-cu12"
version = "0.6.3"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/62/da/4de092c61c6dea1fc9c936e69308a02531d122e12f1f649825934ad651b5/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8371549623ba601a06322af2133c4a44350575f5a3108fb75f3ef20b822ad5f1", size = 156402859 },
{ url = "https://files.pythonhosted.org/packages/3b/9a/72ef35b399b0e183bc2e8f6f558036922d453c4d8237dab26c666a04244b/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e5c8a26c36445dd2e6812f1177978a24e2d37cacce7e090f297a688d1ec44f46", size = 156785796 },
]

[[package]]
name = "nvidia-cusparselt-cu12"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]

[[package]]
name = "nvidia-nccl-cu12"
version = "2.26.2"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/69/5b/ca2f213f637305633814ae8c36b153220e40a07ea001966dcd87391f3acb/nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c196e95e832ad30fbbb50381eb3cbd1fadd5675e587a548563993609af19522", size = 291671495 },
{ url = "https://files.pythonhosted.org/packages/67/ca/f42388aed0fddd64ade7493dbba36e1f534d4e6fdbdd355c6a90030ae028/nvidia_nccl_cu12-2.26.2-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:694cf3879a206553cc9d7dbda76b13efaf610fdb70a50cba303de1b0d1530ac6", size = 201319755 },
]

[[package]]
name = "nvidia-nccl-cu12"
version = "2.27.3"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]

[[package]]
name = "nvidia-nvjitlink-cu12"
version = "12.8.61"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/f8/9d85593582bd99b8d7c65634d2304780aefade049b2b94d96e44084be90b/nvidia_nvjitlink_cu12-12.8.61-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:45fd79f2ae20bd67e8bc411055939049873bfd8fac70ff13bd4865e0b9bdab17", size = 39243473 },
{ url = "https://files.pythonhosted.org/packages/af/53/698f3758f48c5fcb1112721e40cc6714da3980d3c7e93bae5b29dafa9857/nvidia_nvjitlink_cu12-12.8.61-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b80ecab31085dda3ce3b41d043be0ec739216c3fc633b8abe212d5a30026df0", size = 38374634 },
]

[[package]]
name = "nvidia-nvjitlink-cu12"
version = "12.8.93"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]

[[package]]
name = "nvidia-nvtx-cu12"
version = "12.8.55"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/bb/e8/ae6ecbdade8bb9174d75db2b302c57c1c27d9277d6531c62aafde5fb32a3/nvidia_nvtx_cu12-12.8.55-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c38405335fbc0f0bf363eaeaeb476e8dfa8bae82fada41d25ace458b9ba9f3db", size = 91103 },
{ url = "https://files.pythonhosted.org/packages/8d/cd/0e8c51b2ae3a58f054f2e7fe91b82d201abfb30167f2431e9bd92d532f42/nvidia_nvtx_cu12-12.8.55-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2dd0780f1a55c21d8e06a743de5bd95653de630decfff40621dbde78cc307102", size = 89896 },
]

[[package]]
name = "nvidia-nvtx-cu12"
version = "12.8.90"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]

[[package]]
name = "oauthlib"
version = "3.3.1"
@@ -2320,6 +2533,7 @@ dependencies = [
sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/b70a2d9ab205110d715906fc8ec83fbb00404aeb3a37a0654fdb68eb0c8c/opencv-python-4.10.0.84.tar.gz", hash = "sha256:72d234e4582e9658ffea8e9cae5b63d488ad06994ef12d81dc303b17472f3526", size = 95103981 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/66/82/564168a349148298aca281e342551404ef5521f33fba17b388ead0a84dc5/opencv_python-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc182f8f4cda51b45f01c64e4cbedfc2f00aff799debebc305d8d0210c43f251", size = 54835524 },
{ url = "https://files.pythonhosted.org/packages/64/4a/016cda9ad7cf18c58ba074628a4eaae8aa55f3fd06a266398cef8831a5b9/opencv_python-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:71e575744f1d23f79741450254660442785f45a0797212852ee5199ef12eed98", size = 56475426 },
]

[[package]]
@@ -2353,7 +2567,7 @@ wheels = [

[[package]]
name = "openrag"
version = "0.1.52"
version = "0.1.53"
source = { editable = "." }
dependencies = [
{ name = "agentd" },
@@ -2932,7 +3146,7 @@ name = "pytest"
version = "8.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "colorama", marker = "(platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform == 'win32') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'win32')" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
@@ -3354,6 +3568,7 @@ version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428 },
{ url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543 },
]
@@ -3738,27 +3953,29 @@ dependencies = [
{ name = "fsspec", version = "2025.5.1", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "jinja2", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "networkx", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "nvidia-cublas-cu12", version = "12.8.3.14", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-cupti-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-nvrtc-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cuda-runtime-cu12", version = "12.8.57", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cudnn-cu12", version = "9.7.1.26", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cufft-cu12", version = "11.3.3.41", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cufile-cu12", version = "1.13.0.11", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-curand-cu12", version = "10.3.9.55", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cusolver-cu12", version = "11.7.2.55", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cusparse-cu12", version = "12.5.7.53", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-cusparselt-cu12", version = "0.6.3", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-nccl-cu12", version = "2.26.2", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-nvjitlink-cu12", version = "12.8.61", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "nvidia-nvtx-cu12", version = "12.8.55", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "setuptools", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "sympy", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
{ name = "triton", version = "3.3.1", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
]
wheels = [
{ url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:d56d29a6ad7758ba5173cc2b0c51c93e126e2b0a918e874101dc66545283967f" },
{ url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9560425f9ea1af1791507e8ca70d5b9ecf62fed7ca226a95fcd58d0eb2cca78f" },
{ url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:f112465fdf42eb1297c6dddda1a8b7f411914428b704e1b8a47870c52e290909" },
{ url = "https://download.pytorch.org/whl/cu128/torch-2.7.1%2Bcu128-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c355db49c218ada70321d5c5c9bb3077312738b99113c8f3723ef596b554a7b9" },
]
@@ -3767,9 +3984,13 @@ name = "torch"
version = "2.8.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"sys_platform == 'darwin'",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
dependencies = [
{ name = "filelock", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
@@ -3777,8 +3998,23 @@ dependencies = [
{ name = "fsspec", version = "2025.5.1", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
{ name = "jinja2", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
{ name = "networkx", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
{ name = "nvidia-cublas-cu12", version = "12.8.4.1", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cuda-cupti-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cuda-nvrtc-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cudnn-cu12", version = "9.10.2.21", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cufft-cu12", version = "11.3.3.83", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cufile-cu12", version = "1.13.1.3", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-curand-cu12", version = "10.3.9.90", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cusolver-cu12", version = "11.7.3.90", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cusparse-cu12", version = "12.5.8.93", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-cusparselt-cu12", version = "0.7.1", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-nccl-cu12", version = "2.27.3", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-nvjitlink-cu12", version = "12.8.93", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "nvidia-nvtx-cu12", version = "12.8.90", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "setuptools", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
{ name = "sympy", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
{ name = "triton", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'" },
{ name = "typing-extensions", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
]
wheels = [
@@ -3805,7 +4041,9 @@ dependencies = [
{ name = "torch", version = "2.7.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/55/f4/b45f6cd92fa0acfac5e31b8e9258232f25bcdb0709a604e8b8a39d76e411/torchvision-0.22.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:4a614a6a408d2ed74208d0ea6c28a2fbb68290e9a7df206c5fef3f0b6865d307", size = 2471597 },
{ url = "https://files.pythonhosted.org/packages/8d/b0/3cffd6a285b5ffee3fe4a31caff49e350c98c5963854474d1c4f7a51dea5/torchvision-0.22.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:7ee682be589bb1a002b7704f06b8ec0b89e4b9068f48e79307d2c6e937a9fdf4", size = 7485894 },
{ url = "https://files.pythonhosted.org/packages/ab/c8/2ebe90f18e7ffa2120f5c3eab62aa86923185f78d2d051a455ea91461608/torchvision-0.22.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:27142bcc8a984227a6dcf560985e83f52b82a7d3f5fe9051af586a2ccc46ef26", size = 2476561 },
{ url = "https://files.pythonhosted.org/packages/94/8b/04c6b15f8c29b39f0679589753091cec8b192ab296d4fdaf9055544c4ec9/torchvision-0.22.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ef46e065502f7300ad6abc98554131c35dc4c837b978d91306658f1a65c00baa", size = 7658543 },
]
@@ -3814,9 +4052,13 @@ name = "torchvision"
version = "0.23.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'aarch64' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
"sys_platform == 'darwin'",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'",
"platform_machine == 'aarch64' and platform_system != 'Linux' and sys_platform == 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'",
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
"platform_system == 'Darwin' and sys_platform == 'darwin'",
"platform_system != 'Darwin' and sys_platform == 'darwin'",
]
dependencies = [
{ name = "numpy", marker = "platform_machine != 'x86_64' or sys_platform != 'linux'" },
@@ -3839,7 +4081,7 @@ name = "tqdm"
version = "4.67.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "colorama", marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform != 'darwin' and sys_platform != 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
wheels = [
@@ -3872,6 +4114,9 @@ wheels = [
name = "triton"
version = "3.3.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"platform_machine == 'x86_64' and sys_platform == 'linux'",
]
dependencies = [
{ name = "setuptools", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" },
]
@@ -3880,6 +4125,17 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/28/71/bd20ffcb7a64c753dc2463489a61bf69d531f308e390ad06390268c4ea04/triton-3.3.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3198adb9d78b77818a5388bff89fa72ff36f9da0bc689db2f0a651a67ce6a42", size = 155735832 },
]

[[package]]
name = "triton"
version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"(platform_machine != 'aarch64' and platform_machine != 'x86_64' and platform_system != 'Darwin' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')",
]
dependencies = [
{ name = "setuptools", marker = "(platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" },
]

[[package]]
name = "typer"
version = "0.16.0"
@@ -3993,7 +4249,7 @@ wheels = [

[package.optional-dependencies]
standard = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "colorama", marker = "(platform_machine != 'aarch64' and platform_system == 'Linux' and sys_platform == 'win32') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'win32')" },
{ name = "httptools" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
@@ -4114,6 +4370,7 @@ version = "3.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795 },
{ url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792 },
]
@@ -4172,4 +4429,4 @@ source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 },
]
]