Merge branch 'main' into 159-docker-npm
commit b8cf5656d5
38 changed files with 1334 additions and 972 deletions
3 .github/workflows/deploy-gh-pages.yml vendored

@@ -6,8 +6,7 @@ on:
- main
paths:
- 'docs/**'
# Review gh actions docs if you want to further define triggers, paths, etc
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
workflow_dispatch:

jobs:
deploy:
2 .gitignore vendored

@@ -19,4 +19,4 @@ wheels/
*.json
.DS_Store

config.yaml
config/
15 config/config.example.yaml Normal file

@@ -0,0 +1,15 @@
# OpenRAG Configuration File
provider:
  model_provider: "openai" # openai, anthropic, azure, etc.
  api_key: "your-api-key" # or use OPENAI_API_KEY env var

knowledge:
  embedding_model: "text-embedding-3-small"
  chunk_size: 1000
  chunk_overlap: 200
  ocr: true
  picture_descriptions: false

agent:
  llm_model: "gpt-4o-mini"
  system_prompt: "You are a helpful AI assistant..."
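
For orientation, here is a minimal sketch of reading a config file with the keys shown above from Python. It is illustrative only: the `config/config.yaml` path, the use of PyYAML, and the fallback defaults are assumptions, not OpenRAG's actual configuration loader.

```python
# Illustrative sketch only -- not OpenRAG's config loader.
import yaml  # PyYAML, assumed available

with open("config/config.yaml") as f:  # path is an assumption
    config = yaml.safe_load(f)

provider = config["provider"]["model_provider"]            # e.g. "openai"
embedding_model = config["knowledge"]["embedding_model"]   # "text-embedding-3-small"
chunk_size = config["knowledge"].get("chunk_size", 1000)   # fallback values assumed
chunk_overlap = config["knowledge"].get("chunk_overlap", 200)
llm_model = config["agent"]["llm_model"]                   # "gpt-4o-mini"
```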

@@ -74,6 +74,7 @@ services:
- ./documents:/app/documents:Z
- ./keys:/app/keys:Z
- ./flows:/app/flows:Z
- ./config:/app/config:z

openrag-frontend:
image: phact/openrag-frontend:${OPENRAG_VERSION:-latest}
@@ -73,6 +73,7 @@ services:
- ./documents:/app/documents:Z
- ./keys:/app/keys:Z
- ./flows:/app/flows:z
- ./config:/app/config:z
gpus: all

openrag-frontend:
111 docs/VERSIONING_SETUP.md Normal file

@@ -0,0 +1,111 @@
# Docusaurus versioning setup

Docs versioning is currently **DISABLED** but configured and ready to enable.
The configuration is in `docusaurus.config.js` as commented-out sections.

To enable versioning, do the following:

1. Open `docusaurus.config.js`.
2. Find the versioning configuration section (around line 57).
3. Uncomment the versioning configuration:

   ```javascript
   docs: {
     // ... other config
     lastVersion: 'current', // Use 'current' to make ./docs the latest version
     versions: {
       current: {
         label: 'Next (unreleased)',
         path: 'next',
       },
     },
     onlyIncludeVersions: ['current'], // Limit versions for faster builds
   },
   ```

## Create docs versions

See the [Docusaurus docs](https://docusaurus.io/docs/versioning) for more info.

1. Use the Docusaurus CLI command to create a version.
   You can use `yarn` instead of `npm`.

   ```bash
   # Create version 1.0.0 from current docs
   npm run docusaurus docs:version 1.0.0
   ```

   This command will:

   - Copy the full `docs/` folder contents into `versioned_docs/version-1.0.0/`
   - Create a versioned sidebar file at `versioned_sidebars/version-1.0.0-sidebars.json`
   - Append the new version to `versions.json`

2. After creating a version, update the Docusaurus configuration to include multiple versions.
   `lastVersion: '1.0.0'` makes the '1.0.0' release the `latest` version.
   `current` is the work-in-progress docset, accessible at `/docs/next`.
   To remove a version, remove it from `onlyIncludeVersions`.

   ```javascript
   docs: {
     // ... other config
     lastVersion: '1.0.0', // Make 1.0.0 the latest version
     versions: {
       current: {
         label: 'Next (unreleased)',
         path: 'next',
       },
       '1.0.0': {
         label: '1.0.0',
         path: '1.0.0',
       },
     },
     onlyIncludeVersions: ['current', '1.0.0'], // Include both versions
   },
   ```

3. Test the deployment locally.

   ```bash
   npm run build
   npm run serve
   ```

4. To add subsequent versions, repeat the process: run the CLI command, then update `docusaurus.config.js`.

   ```bash
   # Create version 2.0.0 from current docs
   npm run docusaurus docs:version 2.0.0
   ```

   After creating a new version, update `docusaurus.config.js`.

   ```javascript
   docs: {
     lastVersion: '2.0.0', // Make 2.0.0 the latest version
     versions: {
       current: {
         label: 'Next (unreleased)',
         path: 'next',
       },
       '2.0.0': {
         label: '2.0.0',
         path: '2.0.0',
       },
       '1.0.0': {
         label: '1.0.0',
         path: '1.0.0',
       },
     },
     onlyIncludeVersions: ['current', '2.0.0', '1.0.0'], // Include all versions
   },
   ```

## Disable versioning

1. Remove the `versions` configuration from `docusaurus.config.js`.
2. Delete the `docs/versioned_docs/` and `docs/versioned_sidebars/` directories.
3. Delete `docs/versions.json`.

## References

- [Official Docusaurus Versioning Documentation](https://docusaurus.io/docs/versioning)
- [Docusaurus Versioning Best Practices](https://docusaurus.io/docs/versioning#recommended-practices)
50 docs/docs/core-components/ingestion.mdx Normal file

@@ -0,0 +1,50 @@
---
title: Docling Ingestion
slug: /ingestion
---

import Icon from "@site/src/components/icon/icon";
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import PartialModifyFlows from '@site/docs/_partial-modify-flows.mdx';

OpenRAG uses [Docling](https://docling-project.github.io/docling/) for its document ingestion pipeline.
More specifically, OpenRAG uses [Docling Serve](https://github.com/docling-project/docling-serve), which starts a `docling-serve` process on your local machine and runs Docling ingestion through an API service.

Docling ingests documents from your local machine or OAuth connectors, splits them into chunks, and stores them as separate, structured documents in the OpenSearch `documents` index.

OpenRAG chose Docling for its support for a wide variety of file formats, its high performance, and its advanced understanding of tables and images.

## Docling ingestion settings

These settings configure the Docling ingestion parameters.

OpenRAG warns you if `docling-serve` is not running.
To start or stop `docling-serve` or any other native services, in the TUI main menu, click **Start Native Services** or **Stop Native Services**.

**Embedding model** determines which AI model is used to create vector embeddings. The default is `text-embedding-3-small`.
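
To illustrate the default embedding model named above, the sketch below creates a single embedding with the OpenAI Python client. This is a hypothetical example, not OpenRAG's internal code; it assumes the `openai` package is installed and `OPENAI_API_KEY` is set.

```python
# Hypothetical illustration of the default embedding model; not OpenRAG code.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment
response = client.embeddings.create(
    model="text-embedding-3-small",
    input="OpenRAG stores chunked documents in the OpenSearch documents index.",
)
vector = response.data[0].embedding  # list of floats used for vector search
print(len(vector))
```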

**Chunk size** determines how large each text chunk is, in characters.
Larger chunks yield more context per chunk but may include irrelevant information. Smaller chunks yield more precise semantic search but may lack context.
The default value of `1000` characters is a good starting point that balances these considerations.

**Chunk overlap** controls the number of characters that overlap across chunk boundaries.
Use larger overlap values for documents where preserving context is most important, and smaller overlap values for simpler documents or when efficiency is the priority.
The default of 200 characters of overlap with a chunk size of 1000 (20% overlap) suits general use cases. Decrease the overlap to 10% for a more efficient pipeline, or increase it to 40% for more complex documents.
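
To make the size and overlap settings concrete, here is a minimal sketch of fixed-size character chunking with overlap, using the defaults described above (1000 characters with 200 characters of overlap). It is illustrative only and is not OpenRAG's or Docling's actual chunker.

```python
# Illustrative fixed-size character chunking with overlap; not the real pipeline.
def chunk_text(text: str, chunk_size: int = 1000, chunk_overlap: int = 200) -> list[str]:
    if chunk_overlap >= chunk_size:
        raise ValueError("chunk_overlap must be smaller than chunk_size")
    step = chunk_size - chunk_overlap  # with the defaults, each chunk starts 800 characters later
    chunks = []
    for start in range(0, len(text), step):
        chunks.append(text[start:start + chunk_size])
        if start + chunk_size >= len(text):
            break
    return chunks

# 2,500 characters with the defaults -> chunks of 1000, 1000, and 900 characters,
# each sharing 200 characters with its neighbor.
print([len(c) for c in chunk_text("x" * 2500)])
```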

**OCR** enables or disables OCR processing when extracting text from images and scanned documents.
OCR is disabled by default. Leaving it disabled is best suited for processing text-based documents as quickly as possible with Docling's [`DocumentConverter`](https://docling-project.github.io/docling/reference/document_converter/); images are ignored and not processed.

Enable OCR when you are processing scanned documents or documents containing images with text that must be extracted. Enabling OCR can slow ingestion performance.

If OpenRAG detects that the local machine is running macOS, it uses the [ocrmac](https://www.piwheels.org/project/ocrmac/) OCR engine. Other platforms use [easyocr](https://www.jaided.ai/easyocr/).

**Picture descriptions** adds image descriptions generated by the [SmolVLM-256M-Instruct](https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct) model to OCR processing. Enabling picture descriptions can slow ingestion performance.

## Use OpenRAG default ingestion instead of Docling Serve

If you want to use OpenRAG's built-in pipeline instead of Docling Serve, set `DISABLE_INGEST_WITH_LANGFLOW=true` in [Environment variables](/configure/configuration#ingestion-configuration).

The built-in pipeline still uses the Docling processor, but calls it directly without the Docling Serve API.

For more information, see [`processors.py` in the OpenRAG repository](https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58).
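
For a rough idea of what calling the Docling processor directly looks like, the sketch below converts a single document with Docling's `DocumentConverter`. The file name is a placeholder, and OpenRAG's own logic in `processors.py` differs in detail; treat this as a minimal illustration of the library, not as OpenRAG's implementation.

```python
# Minimal Docling usage sketch; "report.pdf" is a placeholder file name.
from docling.document_converter import DocumentConverter

converter = DocumentConverter()
result = converter.convert("report.pdf")          # local path or URL
markdown = result.document.export_to_markdown()   # structured text, ready for chunking
print(markdown[:500])
```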

@@ -97,6 +97,10 @@ You can monitor the sync progress in the <Icon name="Bell" aria-hidden="true"/>

Once processing is complete, the synced documents become available in your knowledge base and can be searched through the chat interface or Knowledge page.

### Knowledge ingestion settings

To configure the knowledge ingestion pipeline parameters, see [Docling Ingestion](/ingestion).

## Create knowledge filters

OpenRAG includes a knowledge filter system for organizing and managing document collections.
@@ -20,10 +20,10 @@ const config = {
},

// Set the production url of your site here
url: 'https://langflow-ai.github.io',
url: 'https://docs.openr.ag',
// Set the /<baseUrl>/ pathname under which your site is served
// For GitHub pages deployment, it is often '/<projectName>/'
baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/openrag/',
baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/',

// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.

@@ -53,6 +53,16 @@ const config = {
editUrl:
'https://github.com/openrag/openrag/tree/main/docs/',
routeBasePath: '/',
// Versioning configuration - see VERSIONING_SETUP.md
// To enable versioning, uncomment the following lines:
// lastVersion: 'current',
// versions: {
// current: {
// label: 'Next (unreleased)',
// path: 'next',
// },
// },
// onlyIncludeVersions: ['current'],
},
theme: {
customCss: './src/css/custom.css',

@@ -65,12 +75,13 @@ const config = {
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
// Replace with your project's social card
image: 'img/docusaurus-social-card.jpg',
// image: 'img/docusaurus-social-card.jpg',
navbar: {
title: 'OpenRAG',
// title: 'OpenRAG',
logo: {
alt: 'OpenRAG Logo',
src: 'img/logo.svg',
src: "img/logo-openrag-light.svg",
srcDark: "img/logo-openrag-dark.svg",
href: '/',
},
items: [
@@ -60,6 +60,11 @@ const sidebars = {
type: "doc",
id: "core-components/knowledge",
label: "OpenSearch Knowledge"
},
{
type: "doc",
id: "core-components/ingestion",
label: "Docling Ingestion"
}
],
},
1 docs/static/CNAME vendored Normal file

@@ -0,0 +1 @@
docs.openr.ag
16 docs/static/img/logo-openrag-dark.svg vendored Normal file

@@ -0,0 +1,16 @@
<svg width="1335" height="185" viewBox="0 0 1335 185" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M172.336 97.5947H201.395C207.095 97.5948 211.713 102.212 211.713 107.912V125.055C211.713 130.755 207.095 135.372 201.395 135.372H176.05C173.312 135.372 170.687 136.462 168.752 138.397L125.259 181.883C123.324 183.819 120.699 184.908 117.961 184.908H97.0534C91.4612 184.908 86.8796 180.449 86.736 174.856L86.2848 157.354C86.1343 151.554 90.7947 146.765 96.6022 146.765H114.598C117.337 146.765 119.961 145.675 121.897 143.739L165.023 100.613C166.959 98.6775 169.583 97.5879 172.322 97.5879L172.336 97.5947Z" fill="white"/>
|
||||
<path d="M201.395 22.083C207.095 22.083 211.713 26.7004 211.713 32.4004V49.543C211.713 55.2429 207.095 59.8604 201.395 59.8604H176.05C173.312 59.8604 170.687 60.95 168.752 62.8857L125.259 106.378C123.324 108.314 120.699 109.403 117.961 109.403H92.5582C89.8983 109.403 87.339 110.429 85.4176 112.271L36.5914 159.061C34.6699 160.903 32.1098 161.929 29.4498 161.929H11.6549C5.95497 161.929 1.33753 157.304 1.33753 151.611V133.995C1.33777 128.295 5.95512 123.679 11.6549 123.679H29.3209C32.0598 123.679 34.6839 122.588 36.6198 120.652L82.9869 74.2861C84.9228 72.3503 87.5469 71.2598 90.2858 71.2598H114.598C117.337 71.2598 119.961 70.1702 121.897 68.2344L165.023 25.1084C166.959 23.1726 169.583 22.083 172.322 22.083H201.395Z" fill="white"/>
|
||||
<path d="M115.114 0C120.814 0 125.431 4.61743 125.431 10.3174V27.46C125.431 33.1599 120.814 37.7773 115.114 37.7773H89.7692C87.0304 37.7773 84.4062 38.867 82.4703 40.8027L38.9782 84.2949C37.0423 86.2306 34.418 87.3203 31.6793 87.3203H10.7731C5.1807 87.3203 0.599138 82.8609 0.455697 77.2686L0.0035481 59.7676C-0.147015 53.9673 4.5135 49.1847 10.3209 49.1846H28.317C31.0558 49.1846 33.68 48.0949 35.6159 46.1592L78.7418 3.0332C80.6776 1.09742 83.3019 0.00689882 86.0407 0.00683594L86.0553 0H115.114Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M445.716 55.0117C452.121 55.0117 457.897 56.6483 463.043 59.9229C468.225 63.1614 472.327 67.9658 475.35 74.335C478.408 80.6683 479.937 88.4412 479.938 97.6533C479.938 106.757 478.445 114.494 475.458 120.863C472.471 127.233 468.405 132.091 463.259 135.438C458.113 138.784 452.284 140.457 445.771 140.457C441.021 140.457 437.08 139.665 433.949 138.082C430.819 136.499 428.3 134.591 426.393 132.36C424.521 130.093 423.064 127.953 422.021 125.938H421.211V170.091H401.671V56.0908H420.887V69.8008H422.021C423.028 67.7858 424.449 65.6448 426.284 63.3779C428.119 61.0749 430.603 59.1135 433.733 57.4941C436.864 55.8389 440.858 55.0118 445.716 55.0117ZM440.318 71.043C436.144 71.043 432.599 72.1399 429.685 74.335C426.77 76.53 424.557 79.6072 423.046 83.5654C421.571 87.5238 420.833 92.1842 420.833 97.5459C420.833 102.907 421.588 107.604 423.1 111.634C424.647 115.664 426.861 118.812 429.739 121.079C432.654 123.31 436.18 124.426 440.318 124.426C444.637 124.426 448.253 123.275 451.168 120.972C454.083 118.633 456.278 115.448 457.753 111.418C459.264 107.352 460.02 102.728 460.021 97.5459C460.021 92.4001 459.283 87.8293 457.808 83.835C456.332 79.8407 454.136 76.7104 451.222 74.4434C448.307 72.1764 444.672 71.043 440.318 71.043Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M526.27 55.0117C531.379 55.0117 536.201 55.8389 540.735 57.4941C545.305 59.1134 549.336 61.6324 552.826 65.0508C556.353 68.4692 559.124 72.8237 561.139 78.1133C563.154 83.367 564.162 89.6286 564.162 96.8975V102.89H506.411C506.469 107.504 507.347 111.48 509.051 114.818C510.814 118.237 513.28 120.864 516.446 122.699C519.613 124.498 523.319 125.397 527.565 125.397C530.408 125.397 532.981 125.002 535.284 124.21C537.587 123.382 539.584 122.177 541.275 120.594C542.967 119.01 544.244 117.049 545.107 114.71L563.353 116.762C562.201 121.584 560.005 125.793 556.767 129.392C553.564 132.954 549.462 135.725 544.46 137.704C539.458 139.647 533.736 140.619 527.295 140.619C518.983 140.619 511.804 138.892 505.759 135.438C499.749 131.947 495.125 127.017 491.887 120.647C488.648 114.242 487.028 106.704 487.028 98.0312C487.028 89.5029 488.648 82.0184 491.887 75.5771C495.161 69.0999 499.731 64.0614 505.597 60.4629C511.462 56.8286 518.353 55.0118 526.27 55.0117ZM526.54 70.2334C522.546 70.2334 519.036 71.2043 516.014 73.1475C512.991 75.0546 510.635 77.5737 508.943 80.7041C507.466 83.4653 506.636 86.4706 506.448 89.7188H545.432C545.396 85.9765 544.586 82.6481 543.003 79.7334C541.42 76.7826 539.206 74.4608 536.363 72.7695C533.557 71.0784 530.282 70.2335 526.54 70.2334Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M338.89 26.9434C348.569 26.9434 357.224 29.1924 364.853 33.6904C372.517 38.1885 378.562 44.6836 382.988 53.1758C387.45 61.6322 389.682 71.8166 389.682 83.7275C389.682 95.6384 387.45 105.84 382.988 114.332C378.562 122.788 372.517 129.267 364.853 133.765C357.224 138.263 348.569 140.512 338.89 140.512C329.21 140.512 320.537 138.263 312.872 133.765C305.243 129.231 299.198 122.735 294.736 114.278C290.31 105.786 288.097 95.6024 288.097 83.7275C288.097 71.8166 290.31 61.6322 294.736 53.1758C299.198 44.6836 305.244 38.1885 312.872 33.6904C320.537 29.1923 329.21 26.9434 338.89 26.9434ZM338.89 44.918C332.88 44.918 327.572 46.4107 322.966 49.3975C318.36 52.3482 314.743 56.7211 312.116 62.5146C309.525 68.2721 308.23 75.3433 308.23 83.7275C308.23 92.1118 309.525 99.2007 312.116 104.994C314.743 110.752 318.36 115.124 322.966 118.11C327.572 121.061 332.88 122.537 338.89 122.537C344.899 122.537 350.207 121.061 354.812 118.11C359.418 115.124 363.018 110.752 365.608 104.994C368.235 99.2007 369.549 92.1118 369.549 83.7275C369.549 75.3433 368.235 68.2721 365.608 62.5146C363.017 56.7211 359.419 52.3482 354.812 49.3975C350.207 46.4109 344.899 44.918 338.89 44.918Z" fill="white"/>
|
||||
<path d="M901.341 26.9434C907.458 26.9434 913.162 27.8424 918.452 29.6416C923.742 31.4049 928.438 33.9246 932.54 37.1992C936.678 40.4738 940.079 44.3604 942.742 48.8584C945.405 53.3564 947.15 58.3224 947.978 63.7559H927.574C926.675 60.8411 925.433 58.2317 923.85 55.9287C922.302 53.5899 920.431 51.5927 918.236 49.9375C916.077 48.2822 913.594 47.0405 910.787 46.2129C907.98 45.3493 904.922 44.918 901.611 44.918C895.674 44.918 890.384 46.4109 885.742 49.3975C881.1 52.3842 877.447 56.7748 874.784 62.5684C872.157 68.3259 870.844 75.3428 870.844 83.6191C870.844 91.9675 872.157 99.0385 874.784 104.832C877.411 110.626 881.064 115.034 885.742 118.057C890.42 121.043 895.854 122.537 902.043 122.537C907.657 122.537 912.515 121.457 916.617 119.298C920.755 117.139 923.94 114.08 926.171 110.122C928.304 106.303 929.414 101.859 929.508 96.79H904.04V81.1367H948.896V94.415C948.895 103.879 946.88 112.066 942.85 118.975C938.819 125.884 933.278 131.21 926.225 134.952C919.172 138.659 911.075 140.512 901.935 140.512C891.751 140.512 882.809 138.226 875.108 133.656C867.444 129.05 861.452 122.519 857.134 114.062C852.852 105.57 850.711 95.4941 850.711 83.835C850.711 74.9108 851.97 66.9398 854.489 59.9229C857.044 52.906 860.607 46.9505 865.177 42.0566C869.747 37.1269 875.108 33.384 881.262 30.8291C887.415 28.2383 894.108 26.9434 901.341 26.9434Z" fill="white"/>
|
||||
<path d="M618.981 55.0117C624.703 55.0117 629.687 56.2347 633.934 58.6816C638.216 61.1286 641.526 64.6734 643.865 69.3154C646.24 73.9574 647.41 79.5889 647.374 86.21V139H627.834V89.2334C627.834 83.6917 626.394 79.3553 623.516 76.2246C620.673 73.0939 616.732 71.5283 611.694 71.5283C608.276 71.5284 605.235 72.2846 602.572 73.7959C599.946 75.2713 597.877 77.4121 596.365 80.2188C594.89 83.0256 594.152 86.4266 594.152 90.4209V139H574.612V56.0908H593.288V70.1787H594.26C596.167 65.5368 599.208 61.8481 603.382 59.1133C607.592 56.3785 612.792 55.0118 618.981 55.0117Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M703.476 28.4541C711.968 28.4541 719.093 29.9301 724.851 32.8809C730.644 35.8316 735.016 39.9702 737.967 45.2959C740.953 50.5856 742.447 56.7567 742.447 63.8096C742.447 70.8985 740.936 77.0518 737.913 82.2695C734.926 87.4513 730.518 91.4638 724.688 94.3066C724.005 94.6355 723.304 94.944 722.584 95.2344L746.495 139H724.148L702.424 98.5166H682.047V139H662.021V28.4541H703.476ZM682.047 81.8916H700.507C705.473 81.8916 709.539 81.2082 712.706 79.8408C715.873 78.4374 718.211 76.404 719.723 73.7412C721.27 71.0424 722.044 67.7318 722.044 63.8096C722.044 59.8873 721.27 56.5403 719.723 53.7695C718.175 50.9629 715.818 48.8397 712.651 47.4004C709.485 45.9251 705.4 45.1875 700.398 45.1875H682.047V81.8916Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M850.232 139H828.857L819.698 111.741H778.139L768.996 139H747.621L786.539 28.4541H811.261L850.232 139ZM783.533 95.6562H814.294L799.332 51.125H798.468L783.533 95.6562Z" fill="white"/>
|
||||
<path d="M1037.86 139.705H1008.9V54.1694H1038.44C1046.93 54.1694 1054.22 55.8818 1060.31 59.3066C1066.43 62.7035 1071.13 67.5901 1074.41 73.9663C1077.7 80.3425 1079.34 87.9717 1079.34 96.8538C1079.34 105.764 1077.68 113.421 1074.37 119.825C1071.09 126.229 1066.34 131.143 1060.14 134.568C1053.96 137.993 1046.54 139.705 1037.86 139.705ZM1024.38 126.299H1037.11C1043.06 126.299 1048.02 125.213 1052 123.041C1055.98 120.841 1058.97 117.57 1060.98 113.226C1062.98 108.854 1063.98 103.397 1063.98 96.8538C1063.98 90.3105 1062.98 84.881 1060.98 80.5653C1058.97 76.2216 1056.01 72.9779 1052.09 70.8339C1048.19 68.6621 1043.35 67.5762 1037.56 67.5762H1024.38V126.299Z" fill="#E0E0E0"/>
|
||||
<path d="M1170.41 96.9374C1170.41 106.154 1168.69 114.047 1165.24 120.618C1161.82 127.162 1157.14 132.174 1151.22 135.654C1145.32 139.134 1138.63 140.875 1131.15 140.875C1123.66 140.875 1116.96 139.134 1111.03 135.654C1105.13 132.146 1100.46 127.12 1097.01 120.577C1093.59 114.006 1091.88 106.126 1091.88 96.9374C1091.88 87.7211 1093.59 79.8413 1097.01 73.2981C1100.46 66.7269 1105.13 61.7012 1111.03 58.2207C1116.96 54.7402 1123.66 53 1131.15 53C1138.63 53 1145.32 54.7402 1151.22 58.2207C1157.14 61.7012 1161.82 66.7269 1165.24 73.2981C1168.69 79.8413 1170.41 87.7211 1170.41 96.9374ZM1154.85 96.9374C1154.85 90.4498 1153.83 84.9785 1151.8 80.5235C1149.8 76.0407 1147.02 72.6577 1143.46 70.3745C1139.9 68.0634 1135.79 66.9079 1131.15 66.9079C1126.5 66.9079 1122.4 68.0634 1118.83 70.3745C1115.27 72.6577 1112.48 76.0407 1110.45 80.5235C1108.44 84.9785 1107.44 90.4498 1107.44 96.9374C1107.44 103.425 1108.44 108.91 1110.45 113.393C1112.48 117.848 1115.27 121.231 1118.83 123.542C1122.4 125.825 1126.5 126.967 1131.15 126.967C1135.79 126.967 1139.9 125.825 1143.46 123.542C1147.02 121.231 1149.8 117.848 1151.8 113.393C1153.83 108.91 1154.85 103.425 1154.85 96.9374Z" fill="#E0E0E0"/>
|
||||
<path d="M1258.2 83.0294H1242.6C1242.15 80.4678 1241.33 78.1985 1240.14 76.2216C1238.94 74.2169 1237.45 72.5184 1235.67 71.1262C1233.89 69.7341 1231.86 68.6899 1229.58 67.9938C1227.32 67.2699 1224.89 66.9079 1222.27 66.9079C1217.63 66.9079 1213.51 68.0774 1209.92 70.4162C1206.33 72.7273 1203.52 76.1242 1201.49 80.607C1199.46 85.062 1198.45 90.5055 1198.45 96.9374C1198.45 103.481 1199.46 108.994 1201.49 113.477C1203.55 117.931 1206.36 121.301 1209.92 123.584C1213.51 125.839 1217.61 126.967 1222.23 126.967C1224.79 126.967 1227.19 126.633 1229.41 125.964C1231.66 125.268 1233.68 124.252 1235.46 122.916C1237.27 121.579 1238.79 119.936 1240.01 117.987C1241.26 116.038 1242.12 113.811 1242.6 111.305L1258.2 111.388C1257.62 115.453 1256.35 119.268 1254.41 122.832C1252.49 126.396 1249.97 129.542 1246.85 132.271C1243.74 134.972 1240.09 137.088 1235.92 138.619C1231.75 140.123 1227.12 140.875 1222.02 140.875C1214.51 140.875 1207.81 139.134 1201.91 135.654C1196.01 132.174 1191.37 127.148 1187.97 120.577C1184.58 114.006 1182.88 106.126 1182.88 96.9374C1182.88 87.7211 1184.59 79.8413 1188.01 73.2981C1191.44 66.7269 1196.1 61.7012 1201.99 58.2207C1207.89 54.7402 1214.57 53 1222.02 53C1226.78 53 1231.21 53.6682 1235.29 55.0047C1239.38 56.3412 1243.03 58.3042 1246.23 60.8937C1249.43 63.4553 1252.06 66.6016 1254.12 70.3327C1256.2 74.0359 1257.57 78.2682 1258.2 83.0294Z" fill="#E0E0E0"/>
|
||||
<path d="M1318.83 77.6834C1318.44 74.0359 1316.8 71.1959 1313.91 69.1633C1311.04 67.1307 1307.31 66.1144 1302.72 66.1144C1299.49 66.1144 1296.73 66.6016 1294.42 67.5762C1292.11 68.5507 1290.34 69.8733 1289.12 71.5439C1287.89 73.2145 1287.27 75.1218 1287.24 77.2658C1287.24 79.0478 1287.64 80.5931 1288.45 81.9018C1289.28 83.2104 1290.41 84.3242 1291.83 85.243C1293.25 86.134 1294.82 86.8858 1296.55 87.4983C1298.27 88.1109 1300.01 88.626 1301.76 89.0437L1309.77 91.0484C1313 91.8002 1316.1 92.8165 1319.08 94.0973C1322.08 95.3781 1324.77 96.993 1327.13 98.9421C1329.53 100.891 1331.42 103.244 1332.81 106C1334.2 108.757 1334.9 111.987 1334.9 115.69C1334.9 120.702 1333.62 125.115 1331.06 128.93C1328.5 132.716 1324.8 135.682 1319.96 137.826C1315.14 139.942 1309.31 141 1302.47 141C1295.82 141 1290.05 139.97 1285.15 137.909C1280.28 135.849 1276.47 132.842 1273.72 128.888C1270.99 124.934 1269.52 120.117 1269.29 114.437H1284.53C1284.75 117.416 1285.67 119.894 1287.28 121.871C1288.89 123.848 1290.99 125.324 1293.58 126.299C1296.2 127.273 1299.12 127.76 1302.35 127.76C1305.71 127.76 1308.66 127.259 1311.19 126.257C1313.75 125.227 1315.76 123.807 1317.2 121.997C1318.65 120.159 1319.39 118.015 1319.41 115.565C1319.39 113.337 1318.73 111.5 1317.45 110.052C1316.17 108.576 1314.38 107.351 1312.07 106.376C1309.79 105.374 1307.12 104.483 1304.06 103.703L1294.33 101.197C1287.29 99.3876 1281.73 96.645 1277.64 92.9696C1273.58 89.2664 1271.55 84.352 1271.55 78.2264C1271.55 73.1867 1272.91 68.7735 1275.64 64.9867C1278.39 61.2 1282.13 58.2625 1286.86 56.1742C1291.59 54.0581 1296.95 53 1302.93 53C1308.99 53 1314.31 54.0581 1318.87 56.1742C1323.46 58.2625 1327.06 61.1721 1329.68 64.9032C1332.29 68.6064 1333.64 72.8665 1333.73 77.6834H1318.83Z" fill="#E0E0E0"/>
|
||||
</svg>
16 docs/static/img/logo-openrag-light.svg vendored Normal file

@@ -0,0 +1,16 @@
<svg width="1335" height="185" viewBox="0 0 1335 185" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M172.336 97.5947H201.395C207.095 97.5948 211.713 102.212 211.713 107.912V125.055C211.713 130.755 207.095 135.372 201.395 135.372H176.05C173.312 135.372 170.687 136.462 168.752 138.397L125.259 181.883C123.324 183.819 120.699 184.908 117.961 184.908H97.0534C91.4612 184.908 86.8796 180.449 86.736 174.856L86.2848 157.354C86.1343 151.554 90.7947 146.765 96.6022 146.765H114.598C117.337 146.765 119.961 145.675 121.897 143.739L165.023 100.613C166.959 98.6775 169.583 97.5879 172.322 97.5879L172.336 97.5947Z" fill="black"/>
|
||||
<path d="M201.395 22.083C207.095 22.083 211.713 26.7004 211.713 32.4004V49.543C211.713 55.2429 207.095 59.8604 201.395 59.8604H176.05C173.312 59.8604 170.687 60.95 168.752 62.8857L125.259 106.378C123.324 108.314 120.699 109.403 117.961 109.403H92.5582C89.8983 109.403 87.339 110.429 85.4176 112.271L36.5914 159.061C34.6699 160.903 32.1098 161.929 29.4498 161.929H11.6549C5.95497 161.929 1.33753 157.304 1.33753 151.611V133.995C1.33777 128.295 5.95512 123.679 11.6549 123.679H29.3209C32.0598 123.679 34.6839 122.588 36.6198 120.652L82.9869 74.2861C84.9228 72.3503 87.5469 71.2598 90.2858 71.2598H114.598C117.337 71.2598 119.961 70.1702 121.897 68.2344L165.023 25.1084C166.959 23.1726 169.583 22.083 172.322 22.083H201.395Z" fill="black"/>
|
||||
<path d="M115.114 0C120.814 0 125.431 4.61743 125.431 10.3174V27.46C125.431 33.1599 120.814 37.7773 115.114 37.7773H89.7692C87.0304 37.7773 84.4062 38.867 82.4703 40.8027L38.9782 84.2949C37.0423 86.2306 34.418 87.3203 31.6793 87.3203H10.7731C5.1807 87.3203 0.599138 82.8609 0.455697 77.2686L0.0035481 59.7676C-0.147015 53.9673 4.5135 49.1847 10.3209 49.1846H28.317C31.0558 49.1846 33.68 48.0949 35.6159 46.1592L78.7418 3.0332C80.6776 1.09742 83.3019 0.00689882 86.0407 0.00683594L86.0553 0H115.114Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M445.716 55.0117C452.121 55.0117 457.897 56.6483 463.043 59.9229C468.225 63.1614 472.327 67.9658 475.35 74.335C478.408 80.6683 479.937 88.4412 479.938 97.6533C479.938 106.757 478.445 114.494 475.458 120.863C472.471 127.233 468.405 132.091 463.259 135.438C458.113 138.784 452.284 140.457 445.771 140.457C441.021 140.457 437.08 139.665 433.949 138.082C430.819 136.499 428.3 134.591 426.393 132.36C424.521 130.093 423.064 127.953 422.021 125.938H421.211V170.091H401.671V56.0908H420.887V69.8008H422.021C423.028 67.7858 424.449 65.6448 426.284 63.3779C428.119 61.0749 430.603 59.1135 433.733 57.4941C436.864 55.8389 440.858 55.0118 445.716 55.0117ZM440.318 71.043C436.144 71.043 432.599 72.1399 429.685 74.335C426.77 76.53 424.557 79.6072 423.046 83.5654C421.571 87.5238 420.833 92.1842 420.833 97.5459C420.833 102.907 421.588 107.604 423.1 111.634C424.647 115.664 426.861 118.812 429.739 121.079C432.654 123.31 436.18 124.426 440.318 124.426C444.637 124.426 448.253 123.275 451.168 120.972C454.083 118.633 456.278 115.448 457.753 111.418C459.264 107.352 460.02 102.728 460.021 97.5459C460.021 92.4001 459.283 87.8293 457.808 83.835C456.332 79.8407 454.136 76.7104 451.222 74.4434C448.307 72.1764 444.672 71.043 440.318 71.043Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M526.27 55.0117C531.379 55.0117 536.201 55.8389 540.735 57.4941C545.305 59.1134 549.336 61.6324 552.826 65.0508C556.353 68.4692 559.124 72.8237 561.139 78.1133C563.154 83.367 564.162 89.6286 564.162 96.8975V102.89H506.411C506.469 107.504 507.347 111.48 509.051 114.818C510.814 118.237 513.28 120.864 516.446 122.699C519.613 124.498 523.319 125.397 527.565 125.397C530.408 125.397 532.981 125.002 535.284 124.21C537.587 123.382 539.584 122.177 541.275 120.594C542.967 119.01 544.244 117.049 545.107 114.71L563.353 116.762C562.201 121.584 560.005 125.793 556.767 129.392C553.564 132.954 549.462 135.725 544.46 137.704C539.458 139.647 533.736 140.619 527.295 140.619C518.983 140.619 511.804 138.892 505.759 135.438C499.749 131.947 495.125 127.017 491.887 120.647C488.648 114.242 487.028 106.704 487.028 98.0312C487.028 89.5029 488.648 82.0184 491.887 75.5771C495.161 69.0999 499.731 64.0614 505.597 60.4629C511.462 56.8286 518.353 55.0118 526.27 55.0117ZM526.54 70.2334C522.546 70.2334 519.036 71.2043 516.014 73.1475C512.991 75.0546 510.635 77.5737 508.943 80.7041C507.466 83.4653 506.636 86.4706 506.448 89.7188H545.432C545.396 85.9765 544.586 82.6481 543.003 79.7334C541.42 76.7826 539.206 74.4608 536.363 72.7695C533.557 71.0784 530.282 70.2335 526.54 70.2334Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M338.89 26.9434C348.569 26.9434 357.224 29.1924 364.853 33.6904C372.517 38.1885 378.562 44.6836 382.988 53.1758C387.45 61.6322 389.682 71.8166 389.682 83.7275C389.682 95.6384 387.45 105.84 382.988 114.332C378.562 122.788 372.517 129.267 364.853 133.765C357.224 138.263 348.569 140.512 338.89 140.512C329.21 140.512 320.537 138.263 312.872 133.765C305.243 129.231 299.198 122.735 294.736 114.278C290.31 105.786 288.097 95.6024 288.097 83.7275C288.097 71.8166 290.31 61.6322 294.736 53.1758C299.198 44.6836 305.244 38.1885 312.872 33.6904C320.537 29.1923 329.21 26.9434 338.89 26.9434ZM338.89 44.918C332.88 44.918 327.572 46.4107 322.966 49.3975C318.36 52.3482 314.743 56.7211 312.116 62.5146C309.525 68.2721 308.23 75.3433 308.23 83.7275C308.23 92.1118 309.525 99.2007 312.116 104.994C314.743 110.752 318.36 115.124 322.966 118.11C327.572 121.061 332.88 122.537 338.89 122.537C344.899 122.537 350.207 121.061 354.812 118.11C359.418 115.124 363.018 110.752 365.608 104.994C368.235 99.2007 369.549 92.1118 369.549 83.7275C369.549 75.3433 368.235 68.2721 365.608 62.5146C363.017 56.7211 359.419 52.3482 354.812 49.3975C350.207 46.4109 344.899 44.918 338.89 44.918Z" fill="black"/>
|
||||
<path d="M901.341 26.9434C907.458 26.9434 913.162 27.8424 918.452 29.6416C923.742 31.4049 928.438 33.9246 932.54 37.1992C936.678 40.4738 940.079 44.3604 942.742 48.8584C945.405 53.3564 947.15 58.3224 947.978 63.7559H927.574C926.675 60.8411 925.433 58.2317 923.85 55.9287C922.302 53.5899 920.431 51.5927 918.236 49.9375C916.077 48.2822 913.594 47.0405 910.787 46.2129C907.98 45.3493 904.922 44.918 901.611 44.918C895.674 44.918 890.384 46.4109 885.742 49.3975C881.1 52.3842 877.447 56.7748 874.784 62.5684C872.157 68.3259 870.844 75.3428 870.844 83.6191C870.844 91.9675 872.157 99.0385 874.784 104.832C877.411 110.626 881.064 115.034 885.742 118.057C890.42 121.043 895.854 122.537 902.043 122.537C907.657 122.537 912.515 121.457 916.617 119.298C920.755 117.139 923.94 114.08 926.171 110.122C928.304 106.303 929.414 101.859 929.508 96.79H904.04V81.1367H948.896V94.415C948.895 103.879 946.88 112.066 942.85 118.975C938.819 125.884 933.278 131.21 926.225 134.952C919.172 138.659 911.075 140.512 901.935 140.512C891.751 140.512 882.809 138.226 875.108 133.656C867.444 129.05 861.452 122.519 857.134 114.062C852.852 105.57 850.711 95.4941 850.711 83.835C850.711 74.9108 851.97 66.9398 854.489 59.9229C857.044 52.906 860.607 46.9505 865.177 42.0566C869.747 37.1269 875.108 33.384 881.262 30.8291C887.415 28.2383 894.108 26.9434 901.341 26.9434Z" fill="black"/>
|
||||
<path d="M618.981 55.0117C624.703 55.0117 629.687 56.2347 633.934 58.6816C638.216 61.1286 641.526 64.6734 643.865 69.3154C646.24 73.9574 647.41 79.5889 647.374 86.21V139H627.834V89.2334C627.834 83.6917 626.394 79.3553 623.516 76.2246C620.673 73.0939 616.732 71.5283 611.694 71.5283C608.276 71.5284 605.235 72.2846 602.572 73.7959C599.946 75.2713 597.877 77.4121 596.365 80.2188C594.89 83.0256 594.152 86.4266 594.152 90.4209V139H574.612V56.0908H593.288V70.1787H594.26C596.167 65.5368 599.208 61.8481 603.382 59.1133C607.592 56.3785 612.792 55.0118 618.981 55.0117Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M703.476 28.4541C711.968 28.4541 719.093 29.9301 724.851 32.8809C730.644 35.8316 735.016 39.9702 737.967 45.2959C740.953 50.5856 742.447 56.7567 742.447 63.8096C742.447 70.8985 740.936 77.0518 737.913 82.2695C734.926 87.4513 730.518 91.4638 724.688 94.3066C724.005 94.6355 723.304 94.944 722.584 95.2344L746.495 139H724.148L702.424 98.5166H682.047V139H662.021V28.4541H703.476ZM682.047 81.8916H700.507C705.473 81.8916 709.539 81.2082 712.706 79.8408C715.873 78.4374 718.211 76.404 719.723 73.7412C721.27 71.0424 722.044 67.7318 722.044 63.8096C722.044 59.8873 721.27 56.5403 719.723 53.7695C718.175 50.9629 715.818 48.8397 712.651 47.4004C709.485 45.9251 705.4 45.1875 700.398 45.1875H682.047V81.8916Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M850.232 139H828.857L819.698 111.741H778.139L768.996 139H747.621L786.539 28.4541H811.261L850.232 139ZM783.533 95.6562H814.294L799.332 51.125H798.468L783.533 95.6562Z" fill="black"/>
|
||||
<path d="M1037.86 139.705H1008.9V54.1694H1038.44C1046.93 54.1694 1054.22 55.8818 1060.31 59.3066C1066.43 62.7035 1071.13 67.5901 1074.41 73.9663C1077.7 80.3425 1079.34 87.9717 1079.34 96.8538C1079.34 105.764 1077.68 113.421 1074.37 119.825C1071.09 126.229 1066.34 131.143 1060.14 134.568C1053.96 137.993 1046.54 139.705 1037.86 139.705ZM1024.38 126.299H1037.11C1043.06 126.299 1048.02 125.213 1052 123.041C1055.98 120.841 1058.97 117.57 1060.98 113.226C1062.98 108.854 1063.98 103.397 1063.98 96.8538C1063.98 90.3105 1062.98 84.881 1060.98 80.5653C1058.97 76.2216 1056.01 72.9779 1052.09 70.8339C1048.19 68.6621 1043.35 67.5762 1037.56 67.5762H1024.38V126.299Z" fill="#9F9FA9"/>
|
||||
<path d="M1170.41 96.9374C1170.41 106.154 1168.69 114.047 1165.24 120.618C1161.82 127.162 1157.14 132.174 1151.22 135.654C1145.32 139.134 1138.63 140.875 1131.15 140.875C1123.66 140.875 1116.96 139.134 1111.03 135.654C1105.13 132.146 1100.46 127.12 1097.01 120.577C1093.59 114.006 1091.88 106.126 1091.88 96.9374C1091.88 87.7211 1093.59 79.8413 1097.01 73.2981C1100.46 66.7269 1105.13 61.7012 1111.03 58.2207C1116.96 54.7402 1123.66 53 1131.15 53C1138.63 53 1145.32 54.7402 1151.22 58.2207C1157.14 61.7012 1161.82 66.7269 1165.24 73.2981C1168.69 79.8413 1170.41 87.7211 1170.41 96.9374ZM1154.85 96.9374C1154.85 90.4498 1153.83 84.9785 1151.8 80.5235C1149.8 76.0407 1147.02 72.6577 1143.46 70.3745C1139.9 68.0634 1135.79 66.9079 1131.15 66.9079C1126.5 66.9079 1122.4 68.0634 1118.83 70.3745C1115.27 72.6577 1112.48 76.0407 1110.45 80.5235C1108.44 84.9785 1107.44 90.4498 1107.44 96.9374C1107.44 103.425 1108.44 108.91 1110.45 113.393C1112.48 117.848 1115.27 121.231 1118.83 123.542C1122.4 125.825 1126.5 126.967 1131.15 126.967C1135.79 126.967 1139.9 125.825 1143.46 123.542C1147.02 121.231 1149.8 117.848 1151.8 113.393C1153.83 108.91 1154.85 103.425 1154.85 96.9374Z" fill="#9F9FA9"/>
|
||||
<path d="M1258.2 83.0294H1242.6C1242.15 80.4678 1241.33 78.1985 1240.14 76.2216C1238.94 74.2169 1237.45 72.5184 1235.67 71.1262C1233.89 69.7341 1231.86 68.6899 1229.58 67.9938C1227.32 67.2699 1224.89 66.9079 1222.27 66.9079C1217.63 66.9079 1213.51 68.0774 1209.92 70.4162C1206.33 72.7273 1203.52 76.1242 1201.49 80.607C1199.46 85.062 1198.45 90.5055 1198.45 96.9374C1198.45 103.481 1199.46 108.994 1201.49 113.477C1203.55 117.931 1206.36 121.301 1209.92 123.584C1213.51 125.839 1217.61 126.967 1222.23 126.967C1224.79 126.967 1227.19 126.633 1229.41 125.964C1231.66 125.268 1233.68 124.252 1235.46 122.916C1237.27 121.579 1238.79 119.936 1240.01 117.987C1241.26 116.038 1242.12 113.811 1242.6 111.305L1258.2 111.388C1257.62 115.453 1256.35 119.268 1254.41 122.832C1252.49 126.396 1249.97 129.542 1246.85 132.271C1243.74 134.972 1240.09 137.088 1235.92 138.619C1231.75 140.123 1227.12 140.875 1222.02 140.875C1214.51 140.875 1207.81 139.134 1201.91 135.654C1196.01 132.174 1191.37 127.148 1187.97 120.577C1184.58 114.006 1182.88 106.126 1182.88 96.9374C1182.88 87.7211 1184.59 79.8413 1188.01 73.2981C1191.44 66.7269 1196.1 61.7012 1201.99 58.2207C1207.89 54.7402 1214.57 53 1222.02 53C1226.78 53 1231.21 53.6682 1235.29 55.0047C1239.38 56.3412 1243.03 58.3042 1246.23 60.8937C1249.43 63.4553 1252.06 66.6016 1254.12 70.3327C1256.2 74.0359 1257.57 78.2682 1258.2 83.0294Z" fill="#9F9FA9"/>
|
||||
<path d="M1318.83 77.6834C1318.44 74.0359 1316.8 71.1959 1313.91 69.1633C1311.04 67.1307 1307.31 66.1144 1302.72 66.1144C1299.49 66.1144 1296.73 66.6016 1294.42 67.5762C1292.11 68.5507 1290.34 69.8733 1289.12 71.5439C1287.89 73.2145 1287.27 75.1218 1287.24 77.2658C1287.24 79.0478 1287.64 80.5931 1288.45 81.9018C1289.28 83.2104 1290.41 84.3242 1291.83 85.243C1293.25 86.134 1294.82 86.8858 1296.55 87.4983C1298.27 88.1109 1300.01 88.626 1301.76 89.0437L1309.77 91.0484C1313 91.8002 1316.1 92.8165 1319.08 94.0973C1322.08 95.3781 1324.77 96.993 1327.13 98.9421C1329.53 100.891 1331.42 103.244 1332.81 106C1334.2 108.757 1334.9 111.987 1334.9 115.69C1334.9 120.702 1333.62 125.115 1331.06 128.93C1328.5 132.716 1324.8 135.682 1319.96 137.826C1315.14 139.942 1309.31 141 1302.47 141C1295.82 141 1290.05 139.97 1285.15 137.909C1280.28 135.849 1276.47 132.842 1273.72 128.888C1270.99 124.934 1269.52 120.117 1269.29 114.437H1284.53C1284.75 117.416 1285.67 119.894 1287.28 121.871C1288.89 123.848 1290.99 125.324 1293.58 126.299C1296.2 127.273 1299.12 127.76 1302.35 127.76C1305.71 127.76 1308.66 127.259 1311.19 126.257C1313.75 125.227 1315.76 123.807 1317.2 121.997C1318.65 120.159 1319.39 118.015 1319.41 115.565C1319.39 113.337 1318.73 111.5 1317.45 110.052C1316.17 108.576 1314.38 107.351 1312.07 106.376C1309.79 105.374 1307.12 104.483 1304.06 103.703L1294.33 101.197C1287.29 99.3876 1281.73 96.645 1277.64 92.9696C1273.58 89.2664 1271.55 84.352 1271.55 78.2264C1271.55 73.1867 1272.91 68.7735 1275.64 64.9867C1278.39 61.2 1282.13 58.2625 1286.86 56.1742C1291.59 54.0581 1296.95 53 1302.93 53C1308.99 53 1314.31 54.0581 1318.87 56.1742C1323.46 58.2625 1327.06 61.1721 1329.68 64.9032C1332.29 68.6064 1333.64 72.8665 1333.73 77.6834H1318.83Z" fill="#9F9FA9"/>
|
||||
</svg>
1 docs/static/img/logo.svg vendored

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="22" viewBox="0 0 24 22" fill="currentColor" class="h-6 w-6"><path d="M13.0486 0.462158H9.75399C9.44371 0.462158 9.14614 0.586082 8.92674 0.806667L4.03751 5.72232C3.81811 5.9429 3.52054 6.06682 3.21026 6.06682H1.16992C0.511975 6.06682 -0.0165756 6.61212 0.000397655 7.2734L0.0515933 9.26798C0.0679586 9.90556 0.586745 10.4139 1.22111 10.4139H3.59097C3.90124 10.4139 4.19881 10.2899 4.41821 10.0694L9.34823 5.11269C9.56763 4.89211 9.8652 4.76818 10.1755 4.76818H13.0486C13.6947 4.76818 14.2185 4.24157 14.2185 3.59195V1.63839C14.2185 0.988773 13.6947 0.462158 13.0486 0.462158Z"></path><path d="M19.5355 11.5862H22.8301C23.4762 11.5862 24 12.1128 24 12.7624V14.716C24 15.3656 23.4762 15.8922 22.8301 15.8922H19.957C19.6467 15.8922 19.3491 16.0161 19.1297 16.2367L14.1997 21.1934C13.9803 21.414 13.6827 21.5379 13.3725 21.5379H11.0026C10.3682 21.5379 9.84945 21.0296 9.83309 20.392L9.78189 18.3974C9.76492 17.7361 10.2935 17.1908 10.9514 17.1908H12.9918C13.302 17.1908 13.5996 17.0669 13.819 16.8463L18.7082 11.9307C18.9276 11.7101 19.2252 11.5862 19.5355 11.5862Z"></path><path d="M19.5355 2.9796L22.8301 2.9796C23.4762 2.9796 24 3.50622 24 4.15583V6.1094C24 6.75901 23.4762 7.28563 22.8301 7.28563H19.957C19.6467 7.28563 19.3491 7.40955 19.1297 7.63014L14.1997 12.5868C13.9803 12.8074 13.6827 12.9313 13.3725 12.9313H10.493C10.1913 12.9313 9.90126 13.0485 9.68346 13.2583L4.14867 18.5917C3.93087 18.8016 3.64085 18.9187 3.33917 18.9187H1.32174C0.675616 18.9187 0.151832 18.3921 0.151832 17.7425V15.7343C0.151832 15.0846 0.675616 14.558 1.32174 14.558H3.32468C3.63496 14.558 3.93253 14.4341 4.15193 14.2135L9.40827 8.92878C9.62767 8.70819 9.92524 8.58427 10.2355 8.58427H12.9918C13.302 8.58427 13.5996 8.46034 13.819 8.23976L18.7082 3.32411C18.9276 3.10353 19.2252 2.9796 19.5355 2.9796Z"></path></svg>
@@ -37,7 +37,7 @@ export function LabelWrapper({
>
<Label
htmlFor={id}
className="!text-mmd font-medium flex items-center gap-1.5"
className={cn("font-medium flex items-center gap-1.5", description ? "!text-sm" : "!text-mmd")}
>
{label}
{required && <span className="text-red-500">*</span>}
@@ -8,11 +8,24 @@ export default function IBMLogo(props: React.SVGProps<SVGSVGElement>) {
|
|||
fill="none"
|
||||
{...props}
|
||||
>
|
||||
<title>IBM Logo</title>
|
||||
<path
|
||||
d="M15.696 10.9901C15.7213 10.9901 15.7356 10.979 15.7356 10.9552V10.9313C15.7356 10.9076 15.7213 10.8964 15.696 10.8964H15.6359V10.9901H15.696ZM15.6359 11.1649H15.5552V10.8329H15.7055C15.7799 10.8329 15.8179 10.8773 15.8179 10.9378C15.8179 10.9901 15.7942 11.0235 15.7577 11.0378L15.8321 11.1649H15.7436L15.6818 11.0504H15.6359V11.1649ZM15.9255 11.0171V10.9759C15.9255 10.8424 15.821 10.7376 15.6833 10.7376C15.5456 10.7376 15.4412 10.8424 15.4412 10.9759V11.0171C15.4412 11.1505 15.5456 11.2554 15.6833 11.2554C15.821 11.2554 15.9255 11.1505 15.9255 11.0171ZM15.3668 10.9964C15.3668 10.8107 15.5077 10.6693 15.6833 10.6693C15.859 10.6693 16 10.8107 16 10.9964C16 11.1823 15.859 11.3237 15.6833 11.3237C15.5077 11.3237 15.3668 11.1823 15.3668 10.9964ZM10.8069 5.74885L10.6627 5.33301H8.28904V5.74885H10.8069ZM11.0821 6.54285L10.9379 6.12691H8.28904V6.54285H11.0821ZM12.8481 11.3067H14.9203V10.8908H12.8481V11.3067ZM12.8481 10.5126H14.9203V10.0968H12.8481V10.5126ZM12.8481 9.71873H14.0914V9.3028H12.8481V9.71873ZM12.8481 8.92474H14.0914V8.50889H12.8481V8.92474ZM12.8481 8.13084H14.0914V7.7149H11.7212L11.6047 8.05102L11.4882 7.7149H9.11794V8.13084H10.3613V7.74863L10.4951 8.13084H12.7143L12.8481 7.74863V8.13084ZM14.0914 6.921H11.9964L11.8522 7.33675H14.0914V6.921ZM9.11794 8.92474H10.3613V8.50889H9.11794V8.92474ZM9.11794 9.71873H10.3613V9.3028H9.11794V9.71873ZM8.28904 10.5126H10.3613V10.0968H8.28904V10.5126ZM8.28904 11.3067H10.3613V10.8908H8.28904V11.3067ZM12.5466 5.33301L12.4025 5.74885H14.9203V5.33301H12.5466ZM12.1273 6.54285H14.9203V6.12691H12.2714L12.1273 6.54285ZM9.11794 7.33675H11.3572L11.213 6.921H9.11794V7.33675ZM10.7727 8.92474H12.4366L12.5821 8.50889H10.6272L10.7727 8.92474ZM11.0505 9.71873H12.1588L12.3042 9.3028H10.9051L11.0505 9.71873ZM11.3283 10.5126H11.881L12.0265 10.0969H11.1828L11.3283 10.5126ZM11.604 11.3067L11.7487 10.8908H11.4606L11.604 11.3067ZM3.31561 11.3026L6.36754 11.3067C6.78195 11.3067 7.15365 11.1491 7.43506 10.8908H3.31561V11.3026ZM6.55592 9.3028V9.71873H7.94994C7.94994 9.57477 7.93029 9.43551 7.89456 9.3028H6.55592ZM4.14452 9.71873H5.38783V9.3028H4.14452V9.71873ZM6.55592 7.33675H7.89456C7.93029 7.20422 7.94994 7.06486 7.94994 6.921H6.55592V7.33675ZM4.14452 7.33675H5.38783V6.9209H4.14452V7.33675ZM6.36754 5.33301H3.31561V5.74885H7.43506C7.15365 5.49061 6.77892 5.33301 6.36754 5.33301ZM7.73778 6.12691H3.31561V6.54285H7.90448C7.86839 6.39502 7.81172 6.25539 7.73778 6.12691ZM4.14452 7.7149V8.13084H7.39152C7.5292 8.01333 7.64621 7.87268 7.73732 7.7149H4.14452ZM7.39152 8.50889H4.14452V8.92474H7.73732C7.64621 8.76695 7.5292 8.62631 7.39152 8.50889ZM3.31561 10.5126H7.73778C7.81172 10.3843 7.86839 10.2447 7.90448 10.0969H3.31561V10.5126ZM0 5.74885H2.90121V5.33301H0V5.74885ZM0 6.54285H2.90121V6.12691H0V6.54285ZM0.828996 7.33684H2.0723V6.921H0.828996V7.33684ZM0.828996 8.13084H2.0723V7.7149H0.828996V8.13084ZM0.828996 8.92474H2.0723V8.50889H0.828996V8.92474ZM0.828996 9.71873H2.0723V9.3028H0.828996V9.71873ZM0 10.5126H2.90121V10.0968H0V10.5126ZM0 11.3067H2.90121V10.8908H0V11.3067Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
<title>IBM watsonx.ai Logo</title>
|
||||
<g clip-path="url(#clip0_2620_2081)">
|
||||
<path
|
||||
d="M13 12.0007C12.4477 12.0007 12 12.4484 12 13.0007C12 13.0389 12.0071 13.0751 12.0112 13.1122C10.8708 14.0103 9.47165 14.5007 8 14.5007C5.86915 14.5007 4 12.5146 4 10.2507C4 7.90722 5.9065 6.00072 8.25 6.00072H8.5V5.00072H8.25C5.3552 5.00072 3 7.35592 3 10.2507C3 11.1927 3.2652 12.0955 3.71855 12.879C2.3619 11.6868 1.5 9.94447 1.5 8.00072C1.5 6.94312 1.74585 5.93432 2.23095 5.00292L1.34375 4.54102C0.79175 5.60157 0.5 6.79787 0.5 8.00072C0.5 12.1362 3.8645 15.5007 8 15.5007C9.6872 15.5007 11.2909 14.9411 12.6024 13.9176C12.7244 13.9706 12.8586 14.0007 13 14.0007C13.5523 14.0007 14 13.553 14 13.0007C14 12.4484 13.5523 12.0007 13 12.0007Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
<path d="M6.5 11V10H5.5V11H6.5Z" fill="currentColor" />
|
||||
<path d="M10.5 6V5H9.5V6H10.5Z" fill="currentColor" />
|
||||
<path
|
||||
d="M8 0.5C6.3128 0.5 4.7091 1.05965 3.3976 2.0831C3.2756 2.0301 3.14145 2 3 2C2.4477 2 2 2.4477 2 3C2 3.5523 2.4477 4 3 4C3.5523 4 4 3.5523 4 3C4 2.9618 3.9929 2.9256 3.98875 2.88855C5.12915 1.9904 6.52835 1.5 8 1.5C10.1308 1.5 12 3.4861 12 5.75C12 8.0935 10.0935 10 7.75 10H7.5V11H7.75C10.6448 11 13 8.6448 13 5.75C13 4.80735 12.7339 3.90415 12.28 3.12035C13.6375 4.3125 14.5 6.05555 14.5 8C14.5 9.0576 14.2541 10.0664 13.769 10.9978L14.6562 11.4597C15.2083 10.3991 15.5 9.20285 15.5 8C15.5 3.8645 12.1355 0.5 8 0.5Z"
|
||||
fill="currentColor"
|
||||
/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_2620_2081">
|
||||
<rect width="16" height="16" fill="white" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
@@ -3,7 +3,7 @@ import { cva, type VariantProps } from "class-variance-authority";
import * as React from "react";

const buttonVariants = cva(
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:opacity-50 disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:opacity-50 disabled:pointer-events-none disabled:select-none [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
{
variants: {
variant: {
@@ -8,7 +8,7 @@ const Card = React.forwardRef<
<div
ref={ref}
className={cn(
"rounded-lg border border-border bg-card text-card-foreground shadow-sm",
"rounded-xl border border-border bg-card text-card-foreground shadow-sm",
className,
)}
{...props}
@@ -1,6 +1,6 @@
|
|||
"use client"
|
||||
|
||||
import { useEffect, useState, useRef, Suspense } from "react"
|
||||
import { useEffect, useState, Suspense } from "react"
|
||||
import { useRouter, useSearchParams } from "next/navigation"
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
|
||||
import { Button } from "@/components/ui/button"
|
||||
|
|
@@ -14,17 +14,20 @@ function AuthCallbackContent() {
|
|||
const [status, setStatus] = useState<"processing" | "success" | "error">("processing")
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [purpose, setPurpose] = useState<string>("app_auth")
|
||||
const hasProcessed = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
// Prevent double execution in React Strict Mode
|
||||
if (hasProcessed.current) return
|
||||
hasProcessed.current = true
|
||||
const code = searchParams.get('code')
|
||||
const callbackKey = `callback_processed_${code}`
|
||||
|
||||
// Prevent double execution across component remounts
|
||||
if (sessionStorage.getItem(callbackKey)) {
|
||||
return
|
||||
}
|
||||
sessionStorage.setItem(callbackKey, 'true')
|
||||
|
||||
const handleCallback = async () => {
|
||||
try {
|
||||
// Get parameters from URL
|
||||
const code = searchParams.get('code')
|
||||
const state = searchParams.get('state')
|
||||
const errorParam = searchParams.get('error')
|
||||
|
||||
|
|
|
|||
|
|
@@ -121,7 +121,6 @@ function ChatPage() {
|
|||
>(new Set());
|
||||
// previousResponseIds now comes from useChat context
|
||||
const [isUploading, setIsUploading] = useState(false);
|
||||
const [isDragOver, setIsDragOver] = useState(false);
|
||||
const [isFilterDropdownOpen, setIsFilterDropdownOpen] = useState(false);
|
||||
const [availableFilters, setAvailableFilters] = useState<
|
||||
KnowledgeFilterData[]
|
||||
|
|
@@ -132,7 +131,6 @@ function ChatPage() {
|
|||
const [dropdownDismissed, setDropdownDismissed] = useState(false);
|
||||
const [isUserInteracting, setIsUserInteracting] = useState(false);
|
||||
const [isForkingInProgress, setIsForkingInProgress] = useState(false);
|
||||
const dragCounterRef = useRef(0);
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<HTMLTextAreaElement>(null);
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
|
|
@@ -275,43 +273,6 @@ function ChatPage() {
|
|||
}
|
||||
};
|
||||
|
||||
// Remove the old pollTaskStatus function since we're using centralized system
|
||||
|
||||
const handleDragEnter = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
dragCounterRef.current++;
|
||||
if (dragCounterRef.current === 1) {
|
||||
setIsDragOver(true);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDragOver = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
};
|
||||
|
||||
const handleDragLeave = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
dragCounterRef.current--;
|
||||
if (dragCounterRef.current === 0) {
|
||||
setIsDragOver(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDrop = (e: React.DragEvent) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
dragCounterRef.current = 0;
|
||||
setIsDragOver(false);
|
||||
|
||||
const files = Array.from(e.dataTransfer.files);
|
||||
if (files.length > 0) {
|
||||
handleFileUpload(files[0]); // Upload first file only
|
||||
}
|
||||
};
|
||||
|
||||
const handleFilePickerClick = () => {
|
||||
fileInputRef.current?.click();
|
||||
};
|
||||
|
|
@@ -1958,31 +1919,12 @@ function ChatPage() {
|
|||
<div className="flex-1 flex flex-col gap-4 min-h-0 overflow-hidden">
|
||||
{/* Messages Area */}
|
||||
<div
|
||||
className={`flex-1 overflow-y-auto overflow-x-hidden scrollbar-hide space-y-6 min-h-0 transition-all relative ${
|
||||
isDragOver
|
||||
? "bg-primary/10 border-2 border-dashed border-primary rounded-lg p-4"
|
||||
: ""
|
||||
}`}
|
||||
onDragEnter={handleDragEnter}
|
||||
onDragOver={handleDragOver}
|
||||
onDragLeave={handleDragLeave}
|
||||
onDrop={handleDrop}
|
||||
className={`flex-1 overflow-y-auto overflow-x-hidden scrollbar-hide space-y-6 min-h-0 transition-all relative`}
|
||||
>
|
||||
{messages.length === 0 && !streamingMessage ? (
|
||||
<div className="flex items-center justify-center h-full text-muted-foreground">
|
||||
<div className="text-center">
|
||||
{isDragOver ? (
|
||||
<>
|
||||
<Upload className="h-12 w-12 mx-auto mb-4 text-primary" />
|
||||
<p className="text-primary font-medium">
|
||||
Drop your document here
|
||||
</p>
|
||||
<p className="text-sm mt-2">
|
||||
I'll process it and add it to our conversation
|
||||
context
|
||||
</p>
|
||||
</>
|
||||
) : isUploading ? (
|
||||
{isUploading ? (
|
||||
<>
|
||||
<Loader2 className="h-12 w-12 mx-auto mb-4 animate-spin" />
|
||||
<p>Processing your document...</p>
|
||||
|
|
@@ -1999,8 +1941,8 @@ function ChatPage() {
|
|||
<div key={index} className="space-y-6 group">
|
||||
{message.role === "user" && (
|
||||
<div className="flex gap-3">
|
||||
<Avatar className="w-8 h-8 flex-shrink-0">
|
||||
<AvatarImage src={user?.picture} alt={user?.name} />
|
||||
<Avatar className="w-8 h-8 flex-shrink-0 select-none">
|
||||
<AvatarImage draggable={false} src={user?.picture} alt={user?.name} />
|
||||
<AvatarFallback className="text-sm bg-primary/20 text-primary">
|
||||
{user?.name ? (
|
||||
user.name.charAt(0).toUpperCase()
|
||||
|
|
@@ -2019,7 +1961,7 @@ function ChatPage() {
|
|||
|
||||
{message.role === "assistant" && (
|
||||
<div className="flex gap-3">
|
||||
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0">
|
||||
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
|
||||
<Bot className="h-4 w-4 text-accent-foreground" />
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
|
|
@@ -2083,18 +2025,6 @@ function ChatPage() {
|
|||
<div ref={messagesEndRef} />
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Drag overlay for existing messages */}
|
||||
{isDragOver && messages.length > 0 && (
|
||||
<div className="absolute inset-0 bg-primary/20 backdrop-blur-sm flex items-center justify-center rounded-lg">
|
||||
<div className="text-center">
|
||||
<Upload className="h-8 w-8 mx-auto mb-2 text-primary" />
|
||||
<p className="text-primary font-medium">
|
||||
Drop document to add context
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
@@ -2344,12 +2274,14 @@ function ChatPage() {
|
|||
<button
|
||||
key={filter.id}
|
||||
onClick={() => handleFilterSelect(filter)}
|
||||
className={`w-full text-left px-2 py-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
|
||||
className={`w-full overflow-hidden text-left px-2 py-2 gap-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
|
||||
index === selectedFilterIndex ? "bg-muted/50" : ""
|
||||
}`}
|
||||
>
|
||||
<div>
|
||||
<div className="font-medium">{filter.name}</div>
|
||||
<div className="overflow-hidden">
|
||||
<div className="font-medium truncate">
|
||||
{filter.name}
|
||||
</div>
|
||||
{filter.description && (
|
||||
<div className="text-xs text-muted-foreground truncate">
|
||||
{filter.description}
|
||||
|
|
@@ -2357,7 +2289,7 @@ function ChatPage() {
|
|||
)}
|
||||
</div>
|
||||
{selectedFilter?.id === filter.id && (
|
||||
<div className="w-2 h-2 rounded-full bg-blue-500" />
|
||||
<div className="w-2 h-2 shrink-0 rounded-full bg-blue-500" />
|
||||
)}
|
||||
</button>
|
||||
))}
|
||||
|
|
|
|||
|
|
@@ -64,7 +64,7 @@ function SearchPage() {
|
|||
};
|
||||
|
||||
// Convert TaskFiles to File format and merge with backend results
|
||||
const taskFilesAsFiles: File[] = taskFiles.map((taskFile) => {
|
||||
const taskFilesAsFiles: File[] = taskFiles.map(taskFile => {
|
||||
return {
|
||||
filename: taskFile.filename,
|
||||
mimetype: taskFile.mimetype,
|
||||
|
|
@@ -77,11 +77,11 @@ function SearchPage() {

const backendFiles = data as File[];

const filteredTaskFiles = taskFilesAsFiles.filter((taskFile) => {
const filteredTaskFiles = taskFilesAsFiles.filter(taskFile => {
return (
taskFile.status !== "active" &&
!backendFiles.some(
(backendFile) => backendFile.filename === taskFile.filename
backendFile => backendFile.filename === taskFile.filename
)
);
});
@@ -123,7 +123,7 @@ function SearchPage() {
{
field: "size",
headerName: "Size",
valueFormatter: (params) =>
valueFormatter: params =>
params.value ? `${Math.round(params.value / 1024)} KB` : "-",
},
{
@@ -133,13 +133,13 @@ function SearchPage() {
{
field: "owner",
headerName: "Owner",
valueFormatter: (params) =>
valueFormatter: params =>
params.data?.owner_name || params.data?.owner_email || "—",
},
{
field: "chunkCount",
headerName: "Chunks",
valueFormatter: (params) => params.data?.chunkCount?.toString() || "-",
valueFormatter: params => params.data?.chunkCount?.toString() || "-",
},
{
field: "avgScore",
@@ -201,7 +201,7 @@ function SearchPage() {

try {
// Delete each file individually since the API expects one filename at a time
const deletePromises = selectedRows.map((row) =>
const deletePromises = selectedRows.map(row =>
deleteDocumentMutation.mutateAsync({ filename: row.filename })
);
@@ -318,18 +318,17 @@ function SearchPage() {
rowSelection="multiple"
rowMultiSelectWithClick={false}
suppressRowClickSelection={true}
getRowId={(params) => params.data.filename}
domLayout="autoHeight"
getRowId={params => params.data.filename}
domLayout="normal"
onSelectionChanged={onSelectionChanged}
noRowsOverlayComponent={() => (
<div className="text-center">
<Search className="h-12 w-12 mx-auto mb-4 text-muted-foreground/50" />
<p className="text-lg text-muted-foreground">
No documents found
</p>
<p className="text-sm text-muted-foreground/70 mt-2">
Try adjusting your search terms
</p>
<div className="text-center pb-[45px]">
<div className="text-lg text-primary font-semibold">
No knowledge
</div>
<div className="text-sm mt-1 text-muted-foreground">
Add files from local or your preferred cloud.
</div>
</div>
)}
/>
@@ -347,7 +346,7 @@ function SearchPage() {
}? This will remove all chunks and data associated with these documents. This action cannot be undone.

Documents to be deleted:
${selectedRows.map((row) => `• ${row.filename}`).join("\n")}`}
${selectedRows.map(row => `• ${row.filename}`).join("\n")}`}
confirmText="Delete All"
onConfirm={handleBulkDelete}
isLoading={deleteDocumentMutation.isPending}

@@ -69,18 +69,12 @@ function LoginPageContent() {
/>
<div className="flex flex-col items-center justify-center gap-4 z-10">
<Logo className="fill-primary" width={32} height={28} />
<div className="flex flex-col items-center justify-center gap-8">
<h1 className="text-2xl font-medium font-chivo">Welcome to OpenRAG</h1>
<p className="text-sm text-muted-foreground">
All your knowledge at your fingertips.
</p>
<Button onClick={login} className="w-80 gap-1.5" size="lg">
<GoogleLogo className="h-4 w-4" />
Continue with Google
</Button>
</div>
<div className="flex items-center justify-center gap-2 absolute bottom-6 text-xs text-muted-foreground z-10">
<p className="text-accent-emerald-foreground">Systems Operational</p>•
<p>Privacy Policy</p>
</Button></div>
</div>
</div>
);

@@ -111,6 +111,7 @@ export function IBMOnboarding({
<ModelSelector
options={options}
value={endpoint}
custom
onValueChange={setEndpoint}
searchPlaceholder="Search endpoint..."
noOptionsPlaceholder="No endpoints available"
@@ -118,8 +119,17 @@ export function IBMOnboarding({
/>
</LabelWrapper>
<LabelInput
label="IBM API key"
helperText="The API key for your watsonx.ai account."
label="watsonx Project ID"
helperText="Project ID for the model"
id="project-id"
required
placeholder="your-project-id"
value={projectId}
onChange={(e) => setProjectId(e.target.value)}
/>
<LabelInput
label="watsonx API key"
helperText="API key to access watsonx.ai"
id="api-key"
type="password"
required
@@ -127,15 +137,6 @@ export function IBMOnboarding({
value={apiKey}
onChange={(e) => setApiKey(e.target.value)}
/>
<LabelInput
label="IBM Project ID"
helperText="The project ID for your watsonx.ai account."
id="project-id"
required
placeholder="your-project-id"
value={projectId}
onChange={(e) => setProjectId(e.target.value)}
/>
{isLoadingModels && (
<p className="text-mmd text-muted-foreground">
Validating configuration...

@ -1,115 +1,158 @@
|
|||
import { CheckIcon, ChevronsUpDownIcon } from "lucide-react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Command,
|
||||
CommandEmpty,
|
||||
CommandGroup,
|
||||
CommandInput,
|
||||
CommandItem,
|
||||
CommandList,
|
||||
Command,
|
||||
CommandEmpty,
|
||||
CommandGroup,
|
||||
CommandInput,
|
||||
CommandItem,
|
||||
CommandList,
|
||||
} from "@/components/ui/command";
|
||||
import {
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverTrigger,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverTrigger,
|
||||
} from "@/components/ui/popover";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
export function ModelSelector({
|
||||
options,
|
||||
value,
|
||||
onValueChange,
|
||||
icon,
|
||||
placeholder = "Select model...",
|
||||
searchPlaceholder = "Search model...",
|
||||
noOptionsPlaceholder = "No models available",
|
||||
options,
|
||||
value,
|
||||
onValueChange,
|
||||
icon,
|
||||
placeholder = "Select model...",
|
||||
searchPlaceholder = "Search model...",
|
||||
noOptionsPlaceholder = "No models available",
|
||||
custom = false,
|
||||
}: {
|
||||
options: {
|
||||
value: string;
|
||||
label: string;
|
||||
default?: boolean;
|
||||
}[];
|
||||
value: string;
|
||||
icon?: React.ReactNode;
|
||||
placeholder?: string;
|
||||
searchPlaceholder?: string;
|
||||
noOptionsPlaceholder?: string;
|
||||
onValueChange: (value: string) => void;
|
||||
options: {
|
||||
value: string;
|
||||
label: string;
|
||||
default?: boolean;
|
||||
}[];
|
||||
value: string;
|
||||
icon?: React.ReactNode;
|
||||
placeholder?: string;
|
||||
searchPlaceholder?: string;
|
||||
noOptionsPlaceholder?: string;
|
||||
custom?: boolean;
|
||||
onValueChange: (value: string) => void;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
useEffect(() => {
|
||||
if (value && !options.find((option) => option.value === value)) {
|
||||
onValueChange("");
|
||||
}
|
||||
}, [options, value, onValueChange]);
|
||||
return (
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
{/** biome-ignore lint/a11y/useSemanticElements: has to be a Button */}
|
||||
<Button
|
||||
variant="outline"
|
||||
role="combobox"
|
||||
disabled={options.length === 0}
|
||||
aria-expanded={open}
|
||||
className="w-full gap-2 justify-between font-normal text-sm"
|
||||
>
|
||||
{value ? (
|
||||
<div className="flex items-center gap-2">
|
||||
{icon && <div className="w-4 h-4">{icon}</div>}
|
||||
{options.find((framework) => framework.value === value)?.label}
|
||||
{options.find((framework) => framework.value === value)
|
||||
?.default && (
|
||||
<span className="text-xs text-foreground p-1 rounded-md bg-muted">
|
||||
Default
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
) : options.length === 0 ? (
|
||||
noOptionsPlaceholder
|
||||
) : (
|
||||
placeholder
|
||||
)}
|
||||
<ChevronsUpDownIcon className="ml-2 h-4 w-4 shrink-0 opacity-50" />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align="start" className="w-[400px] p-0">
|
||||
<Command>
|
||||
<CommandInput placeholder={searchPlaceholder} />
|
||||
<CommandList>
|
||||
<CommandEmpty>{noOptionsPlaceholder}</CommandEmpty>
|
||||
<CommandGroup>
|
||||
{options.map((option) => (
|
||||
<CommandItem
|
||||
key={option.value}
|
||||
value={option.value}
|
||||
onSelect={(currentValue) => {
|
||||
if (currentValue !== value) {
|
||||
onValueChange(currentValue);
|
||||
}
|
||||
setOpen(false);
|
||||
}}
|
||||
>
|
||||
<CheckIcon
|
||||
className={cn(
|
||||
"mr-2 h-4 w-4",
|
||||
value === option.value ? "opacity-100" : "opacity-0",
|
||||
)}
|
||||
/>
|
||||
<div className="flex items-center gap-2">
|
||||
{option.label}
|
||||
{option.default && (
|
||||
<span className="text-xs text-foreground p-1 rounded-md bg-muted">
|
||||
Default
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</CommandItem>
|
||||
))}
|
||||
</CommandGroup>
|
||||
</CommandList>
|
||||
</Command>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
const [open, setOpen] = useState(false);
|
||||
const [searchValue, setSearchValue] = useState("");
|
||||
|
||||
useEffect(() => {
|
||||
if (value && (!options.find((option) => option.value === value) && !custom)) {
|
||||
onValueChange("");
|
||||
}
|
||||
}, [options, value, custom, onValueChange]);
|
||||
return (
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
{/** biome-ignore lint/a11y/useSemanticElements: has to be a Button */}
|
||||
<Button
|
||||
variant="outline"
|
||||
role="combobox"
|
||||
disabled={options.length === 0}
|
||||
aria-expanded={open}
|
||||
className="w-full gap-2 justify-between font-normal text-sm"
|
||||
>
|
||||
{value ? (
|
||||
<div className="flex items-center gap-2">
|
||||
{icon && <div className="w-4 h-4">{icon}</div>}
|
||||
{options.find((framework) => framework.value === value)?.label ||
|
||||
value}
|
||||
{/* {options.find((framework) => framework.value === value)
|
||||
?.default && (
|
||||
<span className="text-xs text-foreground p-1 rounded-md bg-muted">
|
||||
Default
|
||||
</span>
|
||||
)} */}
|
||||
{custom &&
|
||||
value &&
|
||||
!options.find((framework) => framework.value === value) && (
|
||||
<Badge variant="outline" className="text-xs">
|
||||
CUSTOM
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
) : options.length === 0 ? (
|
||||
noOptionsPlaceholder
|
||||
) : (
|
||||
placeholder
|
||||
)}
|
||||
<ChevronsUpDownIcon className="ml-2 h-4 w-4 shrink-0 opacity-50" />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align="start" className=" p-0 w-[var(--radix-popover-trigger-width)]">
|
||||
<Command>
|
||||
<CommandInput
|
||||
placeholder={searchPlaceholder}
|
||||
value={searchValue}
|
||||
onValueChange={setSearchValue}
|
||||
/>
|
||||
<CommandList>
|
||||
<CommandEmpty>{noOptionsPlaceholder}</CommandEmpty>
|
||||
<CommandGroup>
|
||||
{options.map((option) => (
|
||||
<CommandItem
|
||||
key={option.value}
|
||||
value={option.value}
|
||||
onSelect={(currentValue) => {
|
||||
if (currentValue !== value) {
|
||||
onValueChange(currentValue);
|
||||
}
|
||||
setOpen(false);
|
||||
}}
|
||||
>
|
||||
<CheckIcon
|
||||
className={cn(
|
||||
"mr-2 h-4 w-4",
|
||||
value === option.value ? "opacity-100" : "opacity-0",
|
||||
)}
|
||||
/>
|
||||
<div className="flex items-center gap-2">
|
||||
{option.label}
|
||||
{/* {option.default && (
|
||||
<span className="text-xs text-foreground p-1 rounded-md bg-muted"> // DISABLING DEFAULT TAG FOR NOW
|
||||
Default
|
||||
</span>
|
||||
)} */}
|
||||
</div>
|
||||
</CommandItem>
|
||||
))}
|
||||
{custom &&
|
||||
searchValue &&
|
||||
!options.find((option) => option.value === searchValue) && (
|
||||
<CommandItem
|
||||
value={searchValue}
|
||||
onSelect={(currentValue) => {
|
||||
if (currentValue !== value) {
|
||||
onValueChange(currentValue);
|
||||
}
|
||||
setOpen(false);
|
||||
}}
|
||||
>
|
||||
<CheckIcon
|
||||
className={cn(
|
||||
"mr-2 h-4 w-4",
|
||||
value === searchValue ? "opacity-100" : "opacity-0",
|
||||
)}
|
||||
/>
|
||||
<div className="flex items-center gap-2">
|
||||
{searchValue}
|
||||
<span className="text-xs text-foreground p-1 rounded-md bg-muted">
|
||||
Custom
|
||||
</span>
|
||||
</div>
|
||||
</CommandItem>
|
||||
)}
|
||||
</CommandGroup>
|
||||
</CommandList>
|
||||
</Command>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ import { useState } from "react";
import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper";
import OpenAILogo from "@/components/logo/openai-logo";
import { Checkbox } from "@/components/ui/checkbox";
import { Switch } from "@/components/ui/switch";
import { useDebouncedValue } from "@/lib/debounce";
import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation";
import { useGetOpenAIModelsQuery } from "../../api/queries/useGetModelsQuery";
@ -11,121 +11,114 @@ import { useUpdateSettings } from "../hooks/useUpdateSettings";
|
|||
import { AdvancedOnboarding } from "./advanced";
|
||||
|
||||
export function OpenAIOnboarding({
|
||||
setSettings,
|
||||
sampleDataset,
|
||||
setSampleDataset,
|
||||
setSettings,
|
||||
sampleDataset,
|
||||
setSampleDataset,
|
||||
}: {
|
||||
setSettings: (settings: OnboardingVariables) => void;
|
||||
sampleDataset: boolean;
|
||||
setSampleDataset: (dataset: boolean) => void;
|
||||
setSettings: (settings: OnboardingVariables) => void;
|
||||
sampleDataset: boolean;
|
||||
setSampleDataset: (dataset: boolean) => void;
|
||||
}) {
|
||||
const [apiKey, setApiKey] = useState("");
|
||||
const [getFromEnv, setGetFromEnv] = useState(true);
|
||||
const debouncedApiKey = useDebouncedValue(apiKey, 500);
|
||||
const [apiKey, setApiKey] = useState("");
|
||||
const [getFromEnv, setGetFromEnv] = useState(true);
|
||||
const debouncedApiKey = useDebouncedValue(apiKey, 500);
|
||||
|
||||
// Fetch models from API when API key is provided
|
||||
const {
|
||||
data: modelsData,
|
||||
isLoading: isLoadingModels,
|
||||
error: modelsError,
|
||||
} = useGetOpenAIModelsQuery(
|
||||
getFromEnv
|
||||
? { apiKey: "" }
|
||||
: debouncedApiKey
|
||||
? { apiKey: debouncedApiKey }
|
||||
: undefined,
|
||||
{ enabled: debouncedApiKey !== "" || getFromEnv },
|
||||
);
|
||||
// Use custom hook for model selection logic
|
||||
const {
|
||||
languageModel,
|
||||
embeddingModel,
|
||||
setLanguageModel,
|
||||
setEmbeddingModel,
|
||||
languageModels,
|
||||
embeddingModels,
|
||||
} = useModelSelection(modelsData);
|
||||
const handleSampleDatasetChange = (dataset: boolean) => {
|
||||
setSampleDataset(dataset);
|
||||
};
|
||||
// Fetch models from API when API key is provided
|
||||
const {
|
||||
data: modelsData,
|
||||
isLoading: isLoadingModels,
|
||||
error: modelsError,
|
||||
} = useGetOpenAIModelsQuery(
|
||||
getFromEnv
|
||||
? { apiKey: "" }
|
||||
: debouncedApiKey
|
||||
? { apiKey: debouncedApiKey }
|
||||
: undefined,
|
||||
{ enabled: debouncedApiKey !== "" || getFromEnv },
|
||||
);
|
||||
// Use custom hook for model selection logic
|
||||
const {
|
||||
languageModel,
|
||||
embeddingModel,
|
||||
setLanguageModel,
|
||||
setEmbeddingModel,
|
||||
languageModels,
|
||||
embeddingModels,
|
||||
} = useModelSelection(modelsData);
|
||||
const handleSampleDatasetChange = (dataset: boolean) => {
|
||||
setSampleDataset(dataset);
|
||||
};
|
||||
|
||||
const handleGetFromEnvChange = (fromEnv: boolean) => {
|
||||
setGetFromEnv(fromEnv);
|
||||
if (fromEnv) {
|
||||
setApiKey("");
|
||||
}
|
||||
setLanguageModel("");
|
||||
setEmbeddingModel("");
|
||||
};
|
||||
const handleGetFromEnvChange = (fromEnv: boolean) => {
|
||||
setGetFromEnv(fromEnv);
|
||||
if (fromEnv) {
|
||||
setApiKey("");
|
||||
}
|
||||
setLanguageModel("");
|
||||
setEmbeddingModel("");
|
||||
};
|
||||
|
||||
// Update settings when values change
|
||||
useUpdateSettings(
|
||||
"openai",
|
||||
{
|
||||
apiKey,
|
||||
languageModel,
|
||||
embeddingModel,
|
||||
},
|
||||
setSettings,
|
||||
);
|
||||
return (
|
||||
<>
|
||||
<div className="space-y-5">
|
||||
<LabelWrapper
|
||||
label="Use environment OpenAI API key"
|
||||
id="get-api-key"
|
||||
helperText={
|
||||
<>
|
||||
Reuse the key from your environment config.
|
||||
<br />
|
||||
Uncheck to enter a different key.
|
||||
</>
|
||||
}
|
||||
flex
|
||||
start
|
||||
>
|
||||
<Checkbox
|
||||
checked={getFromEnv}
|
||||
onCheckedChange={handleGetFromEnvChange}
|
||||
/>
|
||||
</LabelWrapper>
|
||||
{!getFromEnv && (
|
||||
<div className="space-y-1">
|
||||
<LabelInput
|
||||
label="OpenAI API key"
|
||||
helperText="The API key for your OpenAI account."
|
||||
className={modelsError ? "!border-destructive" : ""}
|
||||
id="api-key"
|
||||
type="password"
|
||||
required
|
||||
placeholder="sk-..."
|
||||
value={apiKey}
|
||||
onChange={(e) => setApiKey(e.target.value)}
|
||||
/>
|
||||
{isLoadingModels && (
|
||||
<p className="text-mmd text-muted-foreground">
|
||||
Validating API key...
|
||||
</p>
|
||||
)}
|
||||
{modelsError && (
|
||||
<p className="text-mmd text-destructive">
|
||||
Invalid OpenAI API key. Verify or replace the key.
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<AdvancedOnboarding
|
||||
icon={<OpenAILogo className="w-4 h-4" />}
|
||||
languageModels={languageModels}
|
||||
embeddingModels={embeddingModels}
|
||||
languageModel={languageModel}
|
||||
embeddingModel={embeddingModel}
|
||||
sampleDataset={sampleDataset}
|
||||
setLanguageModel={setLanguageModel}
|
||||
setSampleDataset={handleSampleDatasetChange}
|
||||
setEmbeddingModel={setEmbeddingModel}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
// Update settings when values change
|
||||
useUpdateSettings(
|
||||
"openai",
|
||||
{
|
||||
apiKey,
|
||||
languageModel,
|
||||
embeddingModel,
|
||||
},
|
||||
setSettings,
|
||||
);
|
||||
return (
|
||||
<>
|
||||
<div className="space-y-5">
|
||||
<LabelWrapper
|
||||
label="Use environment OpenAI API key"
|
||||
id="get-api-key"
|
||||
description="Reuse the key from your environment config. Turn off to enter a different key."
|
||||
flex
|
||||
>
|
||||
<Switch
|
||||
checked={getFromEnv}
|
||||
onCheckedChange={handleGetFromEnvChange}
|
||||
/>
|
||||
</LabelWrapper>
|
||||
{!getFromEnv && (
|
||||
<div className="space-y-1">
|
||||
<LabelInput
|
||||
label="OpenAI API key"
|
||||
helperText="The API key for your OpenAI account."
|
||||
className={modelsError ? "!border-destructive" : ""}
|
||||
id="api-key"
|
||||
type="password"
|
||||
required
|
||||
placeholder="sk-..."
|
||||
value={apiKey}
|
||||
onChange={(e) => setApiKey(e.target.value)}
|
||||
/>
|
||||
{isLoadingModels && (
|
||||
<p className="text-mmd text-muted-foreground">
|
||||
Validating API key...
|
||||
</p>
|
||||
)}
|
||||
{modelsError && (
|
||||
<p className="text-mmd text-destructive">
|
||||
Invalid OpenAI API key. Verify or replace the key.
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<AdvancedOnboarding
|
||||
icon={<OpenAILogo className="w-4 h-4" />}
|
||||
languageModels={languageModels}
|
||||
embeddingModels={embeddingModels}
|
||||
languageModel={languageModel}
|
||||
embeddingModel={embeddingModel}
|
||||
sampleDataset={sampleDataset}
|
||||
setLanguageModel={setLanguageModel}
|
||||
setSampleDataset={handleSampleDatasetChange}
|
||||
setEmbeddingModel={setEmbeddingModel}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -68,7 +68,6 @@ function OnboardingPage() {
// Mutations
const onboardingMutation = useOnboardingMutation({
onSuccess: (data) => {
toast.success("Onboarding completed successfully!");
console.log("Onboarding completed successfully", data);
router.push(redirect);
},
@@ -137,7 +136,7 @@ function OnboardingPage() {
Connect a model provider
</h1>
</div>
<Card className="w-full max-w-[580px]">
<Card className="w-full max-w-[600px]">
<Tabs
defaultValue={modelProvider}
onValueChange={handleSetModelProvider}
@@ -150,7 +149,7 @@ function OnboardingPage() {
</TabsTrigger>
<TabsTrigger value="watsonx">
<IBMLogo className="w-4 h-4" />
IBM
IBM watsonx.ai
</TabsTrigger>
<TabsTrigger value="ollama">
<OllamaLogo className="w-4 h-4" />
@@ -192,7 +191,7 @@ function OnboardingPage() {
disabled={!isComplete}
loading={onboardingMutation.isPending}
>
Complete
<span className="select-none">Complete</span>
</Button>
</div>
</TooltipTrigger>

@@ -1,7 +1,7 @@
"use client";

import { ArrowUpRight, Loader2, PlugZap, RefreshCw } from "lucide-react";
import { useSearchParams } from "next/navigation";
import { ArrowUpRight, Loader2, PlugZap, Plus, RefreshCw } from "lucide-react";
import { useRouter, useSearchParams } from "next/navigation";
import { Suspense, useCallback, useEffect, useState } from "react";
import { useUpdateFlowSettingMutation } from "@/app/api/mutations/useUpdateFlowSettingMutation";
import {
@@ -35,7 +35,11 @@ import { Textarea } from "@/components/ui/textarea";
import { useAuth } from "@/contexts/auth-context";
import { useTask } from "@/contexts/task-context";
import { useDebounce } from "@/lib/debounce";
import { DEFAULT_AGENT_SETTINGS, DEFAULT_KNOWLEDGE_SETTINGS, UI_CONSTANTS } from "@/lib/constants";
import {
DEFAULT_AGENT_SETTINGS,
DEFAULT_KNOWLEDGE_SETTINGS,
UI_CONSTANTS,
} from "@/lib/constants";
import { getFallbackModels, type ModelProvider } from "./helpers/model-helpers";
import { ModelSelectItems } from "./helpers/model-select-item";
import { LabelWrapper } from "@/components/label-wrapper";
@@ -92,6 +96,7 @@ function KnowledgeSourcesPage() {
const { isAuthenticated, isNoAuthMode } = useAuth();
const { addTask, tasks } = useTask();
const searchParams = useSearchParams();
const router = useRouter();

// Connectors state
const [connectors, setConnectors] = useState<Connector[]>([]);
@@ -159,7 +164,7 @@ function KnowledgeSourcesPage() {
onSuccess: () => {
console.log("Setting updated successfully");
},
onError: (error) => {
onError: error => {
console.error("Failed to update setting:", error.message);
},
});
@@ -298,8 +303,8 @@ function KnowledgeSourcesPage() {

// Initialize connectors list with metadata from backend
const initialConnectors = connectorTypes
.filter((type) => connectorsResult.connectors[type].available) // Only show available connectors
.map((type) => ({
.filter(type => connectorsResult.connectors[type].available) // Only show available connectors
.map(type => ({
id: type,
name: connectorsResult.connectors[type].name,
description: connectorsResult.connectors[type].description,
@@ -322,8 +327,8 @@ function KnowledgeSourcesPage() {
);
const isConnected = activeConnection !== undefined;

setConnectors((prev) =>
prev.map((c) =>
setConnectors(prev =>
prev.map(c =>
c.type === connectorType
? {
...c,
@@ -342,7 +347,7 @@ function KnowledgeSourcesPage() {

const handleConnect = async (connector: Connector) => {
setIsConnecting(connector.id);
setSyncResults((prev) => ({ ...prev, [connector.id]: null }));
setSyncResults(prev => ({ ...prev, [connector.id]: null }));

try {
// Use the shared auth callback URL, same as connectors page
@ -392,58 +397,58 @@ function KnowledgeSourcesPage() {
|
|||
}
|
||||
};
|
||||
|
||||
const handleSync = async (connector: Connector) => {
|
||||
if (!connector.connectionId) return;
|
||||
// const handleSync = async (connector: Connector) => {
|
||||
// if (!connector.connectionId) return;
|
||||
|
||||
setIsSyncing(connector.id);
|
||||
setSyncResults((prev) => ({ ...prev, [connector.id]: null }));
|
||||
// setIsSyncing(connector.id);
|
||||
// setSyncResults(prev => ({ ...prev, [connector.id]: null }));
|
||||
|
||||
try {
|
||||
const syncBody: {
|
||||
connection_id: string;
|
||||
max_files?: number;
|
||||
selected_files?: string[];
|
||||
} = {
|
||||
connection_id: connector.connectionId,
|
||||
max_files: syncAllFiles ? 0 : maxFiles || undefined,
|
||||
};
|
||||
// try {
|
||||
// const syncBody: {
|
||||
// connection_id: string;
|
||||
// max_files?: number;
|
||||
// selected_files?: string[];
|
||||
// } = {
|
||||
// connection_id: connector.connectionId,
|
||||
// max_files: syncAllFiles ? 0 : maxFiles || undefined,
|
||||
// };
|
||||
|
||||
// Note: File selection is now handled via the cloud connectors dialog
|
||||
// // Note: File selection is now handled via the cloud connectors dialog
|
||||
|
||||
const response = await fetch(`/api/connectors/${connector.type}/sync`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(syncBody),
|
||||
});
|
||||
// const response = await fetch(`/api/connectors/${connector.type}/sync`, {
|
||||
// method: "POST",
|
||||
// headers: {
|
||||
// "Content-Type": "application/json",
|
||||
// },
|
||||
// body: JSON.stringify(syncBody),
|
||||
// });
|
||||
|
||||
const result = await response.json();
|
||||
// const result = await response.json();
|
||||
|
||||
if (response.status === 201) {
|
||||
const taskId = result.task_id;
|
||||
if (taskId) {
|
||||
addTask(taskId);
|
||||
setSyncResults((prev) => ({
|
||||
...prev,
|
||||
[connector.id]: {
|
||||
processed: 0,
|
||||
total: result.total_files || 0,
|
||||
},
|
||||
}));
|
||||
}
|
||||
} else if (response.ok) {
|
||||
setSyncResults((prev) => ({ ...prev, [connector.id]: result }));
|
||||
// Note: Stats will auto-refresh via task completion watcher for async syncs
|
||||
} else {
|
||||
console.error("Sync failed:", result.error);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Sync error:", error);
|
||||
} finally {
|
||||
setIsSyncing(null);
|
||||
}
|
||||
};
|
||||
// if (response.status === 201) {
|
||||
// const taskId = result.task_id;
|
||||
// if (taskId) {
|
||||
// addTask(taskId);
|
||||
// setSyncResults(prev => ({
|
||||
// ...prev,
|
||||
// [connector.id]: {
|
||||
// processed: 0,
|
||||
// total: result.total_files || 0,
|
||||
// },
|
||||
// }));
|
||||
// }
|
||||
// } else if (response.ok) {
|
||||
// setSyncResults(prev => ({ ...prev, [connector.id]: result }));
|
||||
// // Note: Stats will auto-refresh via task completion watcher for async syncs
|
||||
// } else {
|
||||
// console.error("Sync failed:", result.error);
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.error("Sync error:", error);
|
||||
// } finally {
|
||||
// setIsSyncing(null);
|
||||
// }
|
||||
// };
|
||||
|
||||
const getStatusBadge = (status: Connector["status"]) => {
|
||||
switch (status) {
|
||||
|
|
@@ -479,6 +484,11 @@ function KnowledgeSourcesPage() {
}
};

const navigateToKnowledgePage = (connector: Connector) => {
const provider = connector.type.replace(/-/g, "_");
router.push(`/upload/${provider}`);
};

// Check connector status on mount and when returning from OAuth
useEffect(() => {
if (isAuthenticated) {
@@ -498,9 +508,9 @@ function KnowledgeSourcesPage() {
// Watch for task completions and refresh stats
useEffect(() => {
// Find newly completed tasks by comparing with previous state
const newlyCompletedTasks = tasks.filter((task) => {
const newlyCompletedTasks = tasks.filter(task => {
const wasCompleted =
prevTasks.find((prev) => prev.task_id === task.task_id)?.status ===
prevTasks.find(prev => prev.task_id === task.task_id)?.status ===
"completed";
return task.status === "completed" && !wasCompleted;
});
@@ -554,7 +564,7 @@ function KnowledgeSourcesPage() {
fetch(`/api/reset-flow/retrieval`, {
method: "POST",
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -567,7 +577,7 @@ function KnowledgeSourcesPage() {
handleModelChange(DEFAULT_AGENT_SETTINGS.llm_model);
closeDialog(); // Close after successful completion
})
.catch((error) => {
.catch(error => {
console.error("Error restoring retrieval flow:", error);
closeDialog(); // Close even on error (could show error toast instead)
});
@@ -577,7 +587,7 @@ function KnowledgeSourcesPage() {
fetch(`/api/reset-flow/ingest`, {
method: "POST",
})
.then((response) => {
.then(response => {
if (response.ok) {
return response.json();
}
@@ -592,7 +602,7 @@ function KnowledgeSourcesPage() {
setPictureDescriptions(false);
closeDialog(); // Close after successful completion
})
.catch((error) => {
.catch(error => {
console.error("Error restoring ingest flow:", error);
closeDialog(); // Close even on error (could show error toast instead)
});
@ -609,85 +619,88 @@ function KnowledgeSourcesPage() {
|
|||
</div>
|
||||
|
||||
{/* Conditional Sync Settings or No-Auth Message */}
|
||||
{isNoAuthMode ? (
|
||||
<Card className="border-yellow-500/50 bg-yellow-500/5">
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg text-yellow-600">
|
||||
Cloud connectors are only available with auth mode enabled
|
||||
</CardTitle>
|
||||
<CardDescription className="text-sm">
|
||||
Please provide the following environment variables and restart:
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="bg-muted rounded-md p-4 font-mono text-sm">
|
||||
<div className="text-muted-foreground mb-2">
|
||||
# make here https://console.cloud.google.com/apis/credentials
|
||||
{
|
||||
isNoAuthMode ? (
|
||||
<Card className="border-yellow-500/50 bg-yellow-500/5">
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg text-yellow-600">
|
||||
Cloud connectors are only available with auth mode enabled
|
||||
</CardTitle>
|
||||
<CardDescription className="text-sm">
|
||||
Please provide the following environment variables and
|
||||
restart:
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="bg-muted rounded-md p-4 font-mono text-sm">
|
||||
<div className="text-muted-foreground mb-2">
|
||||
# make here
|
||||
https://console.cloud.google.com/apis/credentials
|
||||
</div>
|
||||
<div>GOOGLE_OAUTH_CLIENT_ID=</div>
|
||||
<div>GOOGLE_OAUTH_CLIENT_SECRET=</div>
|
||||
</div>
|
||||
<div>GOOGLE_OAUTH_CLIENT_ID=</div>
|
||||
<div>GOOGLE_OAUTH_CLIENT_SECRET=</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
) : (
|
||||
<div className="flex items-center justify-between py-4">
|
||||
<div>
|
||||
<h3 className="text-lg font-medium">Sync Settings</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Configure how many files to sync when manually triggering a sync
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex items-center space-x-2">
|
||||
<Checkbox
|
||||
id="syncAllFiles"
|
||||
checked={syncAllFiles}
|
||||
onCheckedChange={(checked) => {
|
||||
setSyncAllFiles(!!checked);
|
||||
if (checked) {
|
||||
setMaxFiles(0);
|
||||
} else {
|
||||
setMaxFiles(10);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
<Label
|
||||
htmlFor="syncAllFiles"
|
||||
className="font-medium whitespace-nowrap"
|
||||
>
|
||||
Sync all files
|
||||
</Label>
|
||||
</div>
|
||||
<Label
|
||||
htmlFor="maxFiles"
|
||||
className="font-medium whitespace-nowrap"
|
||||
>
|
||||
Max files per sync:
|
||||
</Label>
|
||||
<div className="relative">
|
||||
<Input
|
||||
id="maxFiles"
|
||||
type="number"
|
||||
value={syncAllFiles ? 0 : maxFiles}
|
||||
onChange={(e) => setMaxFiles(parseInt(e.target.value) || 10)}
|
||||
disabled={syncAllFiles}
|
||||
className="w-16 min-w-16 max-w-16 flex-shrink-0 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
min="1"
|
||||
max="100"
|
||||
title={
|
||||
syncAllFiles
|
||||
? "Disabled when 'Sync all files' is checked"
|
||||
: "Leave blank or set to 0 for unlimited"
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
) : null
|
||||
// <div className="flex items-center justify-between py-4">
|
||||
// <div>
|
||||
// <h3 className="text-lg font-medium">Sync Settings</h3>
|
||||
// <p className="text-sm text-muted-foreground">
|
||||
// Configure how many files to sync when manually triggering a sync
|
||||
// </p>
|
||||
// </div>
|
||||
// <div className="flex items-center gap-4">
|
||||
// <div className="flex items-center space-x-2">
|
||||
// <Checkbox
|
||||
// id="syncAllFiles"
|
||||
// checked={syncAllFiles}
|
||||
// onCheckedChange={checked => {
|
||||
// setSyncAllFiles(!!checked);
|
||||
// if (checked) {
|
||||
// setMaxFiles(0);
|
||||
// } else {
|
||||
// setMaxFiles(10);
|
||||
// }
|
||||
// }}
|
||||
// />
|
||||
// <Label
|
||||
// htmlFor="syncAllFiles"
|
||||
// className="font-medium whitespace-nowrap"
|
||||
// >
|
||||
// Sync all files
|
||||
// </Label>
|
||||
// </div>
|
||||
// <Label
|
||||
// htmlFor="maxFiles"
|
||||
// className="font-medium whitespace-nowrap"
|
||||
// >
|
||||
// Max files per sync:
|
||||
// </Label>
|
||||
// <div className="relative">
|
||||
// <Input
|
||||
// id="maxFiles"
|
||||
// type="number"
|
||||
// value={syncAllFiles ? 0 : maxFiles}
|
||||
// onChange={e => setMaxFiles(parseInt(e.target.value) || 10)}
|
||||
// disabled={syncAllFiles}
|
||||
// className="w-16 min-w-16 max-w-16 flex-shrink-0 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
// min="1"
|
||||
// max="100"
|
||||
// title={
|
||||
// syncAllFiles
|
||||
// ? "Disabled when 'Sync all files' is checked"
|
||||
// : "Leave blank or set to 0 for unlimited"
|
||||
// }
|
||||
// />
|
||||
// </div>
|
||||
// </div>
|
||||
// </div>
|
||||
}
|
||||
|
||||
{/* Connectors Grid */}
|
||||
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-3">
|
||||
{connectors.map((connector) => (
|
||||
{connectors.map(connector => (
|
||||
<Card key={connector.id} className="relative flex flex-col">
|
||||
<CardHeader>
|
||||
<div className="flex items-center justify-between">
|
||||
|
|
@@ -709,22 +722,13 @@ function KnowledgeSourcesPage() {
{connector.status === "connected" ? (
<div className="space-y-3">
<Button
onClick={() => handleSync(connector)}
onClick={() => navigateToKnowledgePage(connector)}
disabled={isSyncing === connector.id}
className="w-full"
variant="outline"
>
{isSyncing === connector.id ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Syncing...
</>
) : (
<>
<RefreshCw className="mr-2 h-4 w-4" />
Sync Now
</>
)}
<Plus className="h-4 w-4" />
Add Knowledge
</Button>

{syncResults[connector.id] && (
@@ -830,7 +834,7 @@ function KnowledgeSourcesPage() {
}
confirmText="Proceed"
confirmIcon={<ArrowUpRight />}
onConfirm={(closeDialog) =>
onConfirm={closeDialog =>
handleEditInLangflow("chat", closeDialog)
}
variant="warning"
@@ -850,8 +854,7 @@ function KnowledgeSourcesPage() {
<Select
value={
settings.agent?.llm_model ||
modelsData?.language_models?.find((m) => m.default)
?.value ||
modelsData?.language_models?.find(m => m.default)?.value ||
"gpt-4"
}
onValueChange={handleModelChange}
@@ -879,7 +882,7 @@ function KnowledgeSourcesPage() {
id="system-prompt"
placeholder="Enter your agent instructions here..."
value={systemPrompt}
onChange={(e) => setSystemPrompt(e.target.value)}
onChange={e => setSystemPrompt(e.target.value)}
rows={6}
className={`resize-none ${
systemPrompt.length > MAX_SYSTEM_PROMPT_CHARS
@@ -990,7 +993,7 @@ function KnowledgeSourcesPage() {
confirmText="Proceed"
confirmIcon={<ArrowUpRight />}
variant="warning"
onConfirm={(closeDialog) =>
onConfirm={closeDialog =>
handleEditInLangflow("ingest", closeDialog)
}
/>
@@ -1010,8 +1013,7 @@ function KnowledgeSourcesPage() {
disabled={true}
value={
settings.knowledge?.embedding_model ||
modelsData?.embedding_models?.find((m) => m.default)
?.value ||
modelsData?.embedding_models?.find(m => m.default)?.value ||
"text-embedding-ada-002"
}
onValueChange={handleEmbeddingModelChange}
@@ -1049,7 +1051,7 @@ function KnowledgeSourcesPage() {
type="number"
min="1"
value={chunkSize}
onChange={(e) => handleChunkSizeChange(e.target.value)}
onChange={e => handleChunkSizeChange(e.target.value)}
className="w-full pr-20"
/>
<div className="absolute inset-y-0 right-0 flex items-center pr-8 pointer-events-none">
@@ -1072,7 +1074,7 @@ function KnowledgeSourcesPage() {
type="number"
min="0"
value={chunkOverlap}
onChange={(e) => handleChunkOverlapChange(e.target.value)}
onChange={e => handleChunkOverlapChange(e.target.value)}
className="w-full pr-20"
/>
<div className="absolute inset-y-0 right-0 flex items-center pr-8 pointer-events-none">

@ -1,386 +1,378 @@
|
|||
"use client";
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
import { AlertCircle, ArrowLeft } from "lucide-react";
|
||||
import { useParams, useRouter } from "next/navigation";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { ArrowLeft, AlertCircle } from "lucide-react";
|
||||
import { UnifiedCloudPicker, CloudFile } from "@/components/cloud-picker";
|
||||
import { useEffect, useState } from "react";
|
||||
import { type CloudFile, UnifiedCloudPicker } from "@/components/cloud-picker";
|
||||
import type { IngestSettings } from "@/components/cloud-picker/types";
|
||||
import { useTask } from "@/contexts/task-context";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Toast } from "@/components/ui/toast";
|
||||
import { useTask } from "@/contexts/task-context";
|
||||
|
||||
// CloudFile interface is now imported from the unified cloud picker
|
||||
|
||||
interface CloudConnector {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
status: "not_connected" | "connecting" | "connected" | "error";
|
||||
type: string;
|
||||
connectionId?: string;
|
||||
clientId: string;
|
||||
hasAccessToken: boolean;
|
||||
accessTokenError?: string;
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
status: "not_connected" | "connecting" | "connected" | "error";
|
||||
type: string;
|
||||
connectionId?: string;
|
||||
clientId: string;
|
||||
hasAccessToken: boolean;
|
||||
accessTokenError?: string;
|
||||
}
|
||||
|
||||
export default function UploadProviderPage() {
|
||||
const params = useParams();
|
||||
const router = useRouter();
|
||||
const provider = params.provider as string;
|
||||
const { addTask, tasks } = useTask();
|
||||
const params = useParams();
|
||||
const router = useRouter();
|
||||
const provider = params.provider as string;
|
||||
const { addTask, tasks } = useTask();
|
||||
|
||||
const [connector, setConnector] = useState<CloudConnector | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [accessToken, setAccessToken] = useState<string | null>(null);
|
||||
const [selectedFiles, setSelectedFiles] = useState<CloudFile[]>([]);
|
||||
const [isIngesting, setIsIngesting] = useState<boolean>(false);
|
||||
const [currentSyncTaskId, setCurrentSyncTaskId] = useState<string | null>(
|
||||
null
|
||||
);
|
||||
const [showSuccessToast, setShowSuccessToast] = useState(false);
|
||||
const [ingestSettings, setIngestSettings] = useState<IngestSettings>({
|
||||
chunkSize: 1000,
|
||||
chunkOverlap: 200,
|
||||
ocr: false,
|
||||
pictureDescriptions: false,
|
||||
embeddingModel: "text-embedding-3-small",
|
||||
});
|
||||
const [connector, setConnector] = useState<CloudConnector | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [accessToken, setAccessToken] = useState<string | null>(null);
|
||||
const [selectedFiles, setSelectedFiles] = useState<CloudFile[]>([]);
|
||||
const [isIngesting, setIsIngesting] = useState<boolean>(false);
|
||||
const [currentSyncTaskId, setCurrentSyncTaskId] = useState<string | null>(
|
||||
null,
|
||||
);
|
||||
const [ingestSettings, setIngestSettings] = useState<IngestSettings>({
|
||||
chunkSize: 1000,
|
||||
chunkOverlap: 200,
|
||||
ocr: false,
|
||||
pictureDescriptions: false,
|
||||
embeddingModel: "text-embedding-3-small",
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const fetchConnectorInfo = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
useEffect(() => {
|
||||
const fetchConnectorInfo = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
// Fetch available connectors to validate the provider
|
||||
const connectorsResponse = await fetch("/api/connectors");
|
||||
if (!connectorsResponse.ok) {
|
||||
throw new Error("Failed to load connectors");
|
||||
}
|
||||
try {
|
||||
// Fetch available connectors to validate the provider
|
||||
const connectorsResponse = await fetch("/api/connectors");
|
||||
if (!connectorsResponse.ok) {
|
||||
throw new Error("Failed to load connectors");
|
||||
}
|
||||
|
||||
const connectorsResult = await connectorsResponse.json();
|
||||
const providerInfo = connectorsResult.connectors[provider];
|
||||
const connectorsResult = await connectorsResponse.json();
|
||||
const providerInfo = connectorsResult.connectors[provider];
|
||||
|
||||
if (!providerInfo || !providerInfo.available) {
|
||||
setError(
|
||||
`Cloud provider "${provider}" is not available or configured.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (!providerInfo || !providerInfo.available) {
|
||||
setError(
|
||||
`Cloud provider "${provider}" is not available or configured.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check connector status
|
||||
const statusResponse = await fetch(
|
||||
`/api/connectors/${provider}/status`
|
||||
);
|
||||
if (!statusResponse.ok) {
|
||||
throw new Error(`Failed to check ${provider} status`);
|
||||
}
|
||||
// Check connector status
|
||||
const statusResponse = await fetch(
|
||||
`/api/connectors/${provider}/status`,
|
||||
);
|
||||
if (!statusResponse.ok) {
|
||||
throw new Error(`Failed to check ${provider} status`);
|
||||
}
|
||||
|
||||
const statusData = await statusResponse.json();
|
||||
const connections = statusData.connections || [];
|
||||
const activeConnection = connections.find(
|
||||
(conn: { is_active: boolean; connection_id: string }) =>
|
||||
conn.is_active
|
||||
);
|
||||
const isConnected = activeConnection !== undefined;
|
||||
const statusData = await statusResponse.json();
|
||||
const connections = statusData.connections || [];
|
||||
const activeConnection = connections.find(
|
||||
(conn: { is_active: boolean; connection_id: string }) =>
|
||||
conn.is_active,
|
||||
);
|
||||
const isConnected = activeConnection !== undefined;
|
||||
|
||||
let hasAccessToken = false;
|
||||
let accessTokenError: string | undefined = undefined;
|
||||
let hasAccessToken = false;
|
||||
let accessTokenError: string | undefined;
|
||||
|
||||
// Try to get access token for connected connectors
|
||||
if (isConnected && activeConnection) {
|
||||
try {
|
||||
const tokenResponse = await fetch(
|
||||
`/api/connectors/${provider}/token?connection_id=${activeConnection.connection_id}`
|
||||
);
|
||||
if (tokenResponse.ok) {
|
||||
const tokenData = await tokenResponse.json();
|
||||
if (tokenData.access_token) {
|
||||
hasAccessToken = true;
|
||||
setAccessToken(tokenData.access_token);
|
||||
}
|
||||
} else {
|
||||
const errorData = await tokenResponse
|
||||
.json()
|
||||
.catch(() => ({ error: "Token unavailable" }));
|
||||
accessTokenError = errorData.error || "Access token unavailable";
|
||||
}
|
||||
} catch {
|
||||
accessTokenError = "Failed to fetch access token";
|
||||
}
|
||||
}
|
||||
// Try to get access token for connected connectors
|
||||
if (isConnected && activeConnection) {
|
||||
try {
|
||||
const tokenResponse = await fetch(
|
||||
`/api/connectors/${provider}/token?connection_id=${activeConnection.connection_id}`,
|
||||
);
|
||||
if (tokenResponse.ok) {
|
||||
const tokenData = await tokenResponse.json();
|
||||
if (tokenData.access_token) {
|
||||
hasAccessToken = true;
|
||||
setAccessToken(tokenData.access_token);
|
||||
}
|
||||
} else {
|
||||
const errorData = await tokenResponse
|
||||
.json()
|
||||
.catch(() => ({ error: "Token unavailable" }));
|
||||
accessTokenError = errorData.error || "Access token unavailable";
|
||||
}
|
||||
} catch {
|
||||
accessTokenError = "Failed to fetch access token";
|
||||
}
|
||||
}
|
||||
|
||||
setConnector({
|
||||
id: provider,
|
||||
name: providerInfo.name,
|
||||
description: providerInfo.description,
|
||||
status: isConnected ? "connected" : "not_connected",
|
||||
type: provider,
|
||||
connectionId: activeConnection?.connection_id,
|
||||
clientId: activeConnection?.client_id,
|
||||
hasAccessToken,
|
||||
accessTokenError,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to load connector info:", error);
|
||||
setError(
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: "Failed to load connector information"
|
||||
);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
setConnector({
|
||||
id: provider,
|
||||
name: providerInfo.name,
|
||||
description: providerInfo.description,
|
||||
status: isConnected ? "connected" : "not_connected",
|
||||
type: provider,
|
||||
connectionId: activeConnection?.connection_id,
|
||||
clientId: activeConnection?.client_id,
|
||||
hasAccessToken,
|
||||
accessTokenError,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to load connector info:", error);
|
||||
setError(
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: "Failed to load connector information",
|
||||
);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
if (provider) {
|
||||
fetchConnectorInfo();
|
||||
}
|
||||
}, [provider]);
|
||||
if (provider) {
|
||||
fetchConnectorInfo();
|
||||
}
|
||||
}, [provider]);
|
||||
|
||||
// Watch for sync task completion and redirect
|
||||
useEffect(() => {
|
||||
if (!currentSyncTaskId) return;
|
||||
// Watch for sync task completion and redirect
|
||||
useEffect(() => {
|
||||
if (!currentSyncTaskId) return;
|
||||
|
||||
const currentTask = tasks.find(task => task.task_id === currentSyncTaskId);
|
||||
const currentTask = tasks.find(
|
||||
(task) => task.task_id === currentSyncTaskId,
|
||||
);
|
||||
|
||||
if (currentTask && currentTask.status === "completed") {
|
||||
// Task completed successfully, show toast and redirect
|
||||
setIsIngesting(false);
|
||||
setShowSuccessToast(true);
|
||||
setTimeout(() => {
|
||||
router.push("/knowledge");
|
||||
}, 2000); // 2 second delay to let user see toast
|
||||
} else if (currentTask && currentTask.status === "failed") {
|
||||
// Task failed, clear the tracking but don't redirect
|
||||
setIsIngesting(false);
|
||||
setCurrentSyncTaskId(null);
|
||||
}
|
||||
}, [tasks, currentSyncTaskId, router]);
|
||||
if (currentTask && currentTask.status === "completed") {
|
||||
// Task completed successfully, show toast and redirect
|
||||
setIsIngesting(false);
|
||||
setTimeout(() => {
|
||||
router.push("/knowledge");
|
||||
}, 2000); // 2 second delay to let user see toast
|
||||
} else if (currentTask && currentTask.status === "failed") {
|
||||
// Task failed, clear the tracking but don't redirect
|
||||
setIsIngesting(false);
|
||||
setCurrentSyncTaskId(null);
|
||||
}
|
||||
}, [tasks, currentSyncTaskId, router]);
|
||||
|
||||
const handleFileSelected = (files: CloudFile[]) => {
|
||||
setSelectedFiles(files);
|
||||
console.log(`Selected ${files.length} files from ${provider}:`, files);
|
||||
// You can add additional handling here like triggering sync, etc.
|
||||
};
|
||||
const handleFileSelected = (files: CloudFile[]) => {
|
||||
setSelectedFiles(files);
|
||||
console.log(`Selected ${files.length} files from ${provider}:`, files);
|
||||
// You can add additional handling here like triggering sync, etc.
|
||||
};
|
||||
|
||||
const handleSync = async (connector: CloudConnector) => {
|
||||
if (!connector.connectionId || selectedFiles.length === 0) return;
|
||||
const handleSync = async (connector: CloudConnector) => {
|
||||
if (!connector.connectionId || selectedFiles.length === 0) return;
|
||||
|
||||
setIsIngesting(true);
|
||||
setIsIngesting(true);
|
||||
|
||||
try {
|
||||
const syncBody: {
|
||||
connection_id: string;
|
||||
max_files?: number;
|
||||
selected_files?: string[];
|
||||
settings?: IngestSettings;
|
||||
} = {
|
||||
connection_id: connector.connectionId,
|
||||
selected_files: selectedFiles.map(file => file.id),
|
||||
settings: ingestSettings,
|
||||
};
|
||||
try {
|
||||
const syncBody: {
|
||||
connection_id: string;
|
||||
max_files?: number;
|
||||
selected_files?: string[];
|
||||
settings?: IngestSettings;
|
||||
} = {
|
||||
connection_id: connector.connectionId,
|
||||
selected_files: selectedFiles.map((file) => file.id),
|
||||
settings: ingestSettings,
|
||||
};
|
||||
|
||||
const response = await fetch(`/api/connectors/${connector.type}/sync`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(syncBody),
|
||||
});
|
||||
const response = await fetch(`/api/connectors/${connector.type}/sync`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(syncBody),
|
||||
});
|
||||
|
||||
const result = await response.json();
|
||||
const result = await response.json();
|
||||
|
||||
if (response.status === 201) {
|
||||
const taskIds = result.task_ids;
|
||||
if (taskIds && taskIds.length > 0) {
|
||||
const taskId = taskIds[0]; // Use the first task ID
|
||||
addTask(taskId);
|
||||
setCurrentSyncTaskId(taskId);
|
||||
}
|
||||
} else {
|
||||
console.error("Sync failed:", result.error);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Sync error:", error);
|
||||
setIsIngesting(false);
|
||||
}
|
||||
};
|
||||
if (response.status === 201) {
|
||||
const taskIds = result.task_ids;
|
||||
if (taskIds && taskIds.length > 0) {
|
||||
const taskId = taskIds[0]; // Use the first task ID
|
||||
addTask(taskId);
|
||||
setCurrentSyncTaskId(taskId);
|
||||
}
|
||||
} else {
|
||||
console.error("Sync failed:", result.error);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Sync error:", error);
|
||||
setIsIngesting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const getProviderDisplayName = () => {
|
||||
const nameMap: { [key: string]: string } = {
|
||||
google_drive: "Google Drive",
|
||||
onedrive: "OneDrive",
|
||||
sharepoint: "SharePoint",
|
||||
};
|
||||
return nameMap[provider] || provider;
|
||||
};
|
||||
const getProviderDisplayName = () => {
|
||||
const nameMap: { [key: string]: string } = {
|
||||
google_drive: "Google Drive",
|
||||
onedrive: "OneDrive",
|
||||
sharepoint: "SharePoint",
|
||||
};
|
||||
return nameMap[provider] || provider;
|
||||
};
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="container mx-auto p-6">
|
||||
<div className="flex items-center justify-center py-12">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary mx-auto mb-4"></div>
|
<p>Loading {getProviderDisplayName()} connector...</p>
</div>
</div>
</div>
);
}

if (error || !connector) {
return (
<div className="container mx-auto p-6">
<div className="mb-6">
<Button
variant="ghost"
onClick={() => router.back()}
className="mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back
</Button>
</div>

<div className="flex items-center justify-center py-12">
<div className="text-center max-w-md">
<AlertCircle className="h-12 w-12 text-red-500 mx-auto mb-4" />
<h2 className="text-xl font-semibold mb-2">
Provider Not Available
</h2>
<p className="text-muted-foreground mb-4">{error}</p>
<Button onClick={() => router.push("/settings")}>
Configure Connectors
</Button>
</div>
</div>
</div>
);
}

if (connector.status !== "connected") {
return (
<div className="container mx-auto p-6">
<div className="mb-6">
<Button
variant="ghost"
onClick={() => router.back()}
className="mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back
</Button>
</div>

<div className="flex items-center justify-center py-12">
<div className="text-center max-w-md">
<AlertCircle className="h-12 w-12 text-yellow-500 mx-auto mb-4" />
<h2 className="text-xl font-semibold mb-2">
{connector.name} Not Connected
</h2>
<p className="text-muted-foreground mb-4">
You need to connect your {connector.name} account before you can
select files.
</p>
<Button onClick={() => router.push("/settings")}>
Connect {connector.name}
</Button>
</div>
</div>
</div>
);
}

if (!connector.hasAccessToken) {
return (
<div className="container mx-auto p-6">
<div className="mb-6">
<Button
variant="ghost"
onClick={() => router.back()}
className="mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back
</Button>
</div>

<div className="flex items-center justify-center py-12">
<div className="text-center max-w-md">
<AlertCircle className="h-12 w-12 text-red-500 mx-auto mb-4" />
<h2 className="text-xl font-semibold mb-2">
Access Token Required
</h2>
<p className="text-muted-foreground mb-4">
{connector.accessTokenError ||
`Unable to get access token for ${connector.name}. Try reconnecting your account.`}
</p>
<Button onClick={() => router.push("/settings")}>
Reconnect {connector.name}
</Button>
</div>
</div>
</div>
);
}

return (
<div className="container mx-auto max-w-3xl p-6">
<div className="mb-6 flex gap-2 items-center">
<Button variant="ghost" onClick={() => router.back()}>
<ArrowLeft className="h-4 w-4 scale-125" />
</Button>
<h2 className="text-2xl font-bold">
Add from {getProviderDisplayName()}
</h2>
</div>

<div className="max-w-3xl mx-auto">
<UnifiedCloudPicker
provider={
connector.type as "google_drive" | "onedrive" | "sharepoint"
}
onFileSelected={handleFileSelected}
selectedFiles={selectedFiles}
isAuthenticated={true}
accessToken={accessToken || undefined}
clientId={connector.clientId}
onSettingsChange={setIngestSettings}
/>
</div>

<div className="max-w-3xl mx-auto mt-6">
<div className="flex justify-between gap-3 mb-4">
<Button
variant="ghost"
className=" border bg-transparent border-border rounded-lg text-secondary-foreground"
onClick={() => router.back()}
>
Back
</Button>
<Button
variant="secondary"
onClick={() => handleSync(connector)}
disabled={selectedFiles.length === 0 || isIngesting}
>
{isIngesting ? (
<>Ingesting {selectedFiles.length} Files...</>
) : (
<>Start ingest</>
)}
</Button>
</div>
</div>

{/* Success toast notification */}
<Toast
message="Ingested successfully!"
show={showSuccessToast}
onHide={() => setShowSuccessToast(false)}
duration={20000}
/>
</div>
);
}

@ -197,7 +197,7 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
newTask.status === "completed"
) {
// Task just completed - show success toast
toast.success("Task completed successfully!", {
toast.success("Task completed successfully", {
description: `Task ${newTask.task_id} has finished processing.`,
action: {
label: "View",

@ -2,7 +2,12 @@
from starlette.requests import Request

from config.settings import DISABLE_INGEST_WITH_LANGFLOW
from config.settings import (
DISABLE_INGEST_WITH_LANGFLOW,
clients,
INDEX_NAME,
INDEX_BODY,
)
from utils.logging_config import get_logger

logger = get_logger(__name__)

@ -12,19 +17,19 @@ class ConnectorRouter:
"""
Router that automatically chooses between LangflowConnectorService and ConnectorService
based on the DISABLE_INGEST_WITH_LANGFLOW configuration.

- If DISABLE_INGEST_WITH_LANGFLOW is False (default): uses LangflowConnectorService
- If DISABLE_INGEST_WITH_LANGFLOW is True: uses traditional ConnectorService
"""

def __init__(self, langflow_connector_service, openrag_connector_service):
self.langflow_connector_service = langflow_connector_service
self.openrag_connector_service = openrag_connector_service
logger.debug(
"ConnectorRouter initialized",
disable_langflow_ingest=DISABLE_INGEST_WITH_LANGFLOW
disable_langflow_ingest=DISABLE_INGEST_WITH_LANGFLOW,
)

def get_active_service(self):
"""Get the currently active connector service based on configuration."""
if DISABLE_INGEST_WITH_LANGFLOW:

@ -33,28 +38,32 @@ class ConnectorRouter:
else:
logger.debug("Using Langflow connector service")
return self.langflow_connector_service

# Proxy all connector service methods to the active service

async def initialize(self):
"""Initialize the active connector service."""
# Initialize OpenSearch index if using traditional OpenRAG connector service
return await self.get_active_service().initialize()

@property
def connection_manager(self):
"""Get the connection manager from the active service."""
return self.get_active_service().connection_manager

async def get_connector(self, connection_id: str):
"""Get a connector instance from the active service."""
return await self.get_active_service().get_connector(connection_id)

async def sync_specific_files(self, connection_id: str, user_id: str, file_list: list, jwt_token: str = None):
async def sync_specific_files(
self, connection_id: str, user_id: str, file_list: list, jwt_token: str = None
):
"""Sync specific files using the active service."""
return await self.get_active_service().sync_specific_files(
connection_id, user_id, file_list, jwt_token
)

def __getattr__(self, name):
"""
Proxy any other method calls to the active service.

@ -64,4 +73,6 @@ class ConnectorRouter:
if hasattr(active_service, name):
return getattr(active_service, name)
else:
raise AttributeError(f"'{type(active_service).__name__}' object has no attribute '{name}'")
raise AttributeError(
f"'{type(active_service).__name__}' object has no attribute '{name}'"
)

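The routing described above boils down to a small delegation pattern: the flag picks the active service, and __getattr__ forwards any method the router does not define itself. A minimal, self-contained sketch of that pattern follows; the two stub services and the demo call are illustrative stand-ins, only the flag check and the __getattr__ proxying mirror the diff.

# Minimal sketch of the flag-based delegation used by ConnectorRouter.
# The stub services are hypothetical; only the routing idea mirrors the diff.
import asyncio

DISABLE_INGEST_WITH_LANGFLOW = False  # assumption: mirrors config.settings

class LangflowStub:
    async def sync_specific_files(self, connection_id, user_id, file_list, jwt_token=None):
        return f"langflow ingest of {len(file_list)} files for {connection_id}"

class OpenRAGStub:
    async def sync_specific_files(self, connection_id, user_id, file_list, jwt_token=None):
        return f"traditional ingest of {len(file_list)} files for {connection_id}"

class RouterSketch:
    def __init__(self, langflow_service, openrag_service):
        self.langflow_service = langflow_service
        self.openrag_service = openrag_service

    def get_active_service(self):
        # The flag decides which backend receives every call.
        return self.openrag_service if DISABLE_INGEST_WITH_LANGFLOW else self.langflow_service

    def __getattr__(self, name):
        # Anything not defined on the router is looked up on the active service.
        return getattr(self.get_active_service(), name)

async def demo():
    router = RouterSketch(LangflowStub(), OpenRAGStub())
    print(await router.sync_specific_files("conn-1", "user-1", ["a.pdf", "b.pdf"]))

asyncio.run(demo())
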
@ -4,6 +4,7 @@ from starlette.responses import JSONResponse
from utils.container_utils import transform_localhost_url
from utils.logging_config import get_logger
from config.settings import (
DISABLE_INGEST_WITH_LANGFLOW,
LANGFLOW_URL,
LANGFLOW_CHAT_FLOW_ID,
LANGFLOW_INGEST_FLOW_ID,

@ -450,7 +451,7 @@ async def onboarding(request, flows_service):
config_updated = True

# Update knowledge settings
if "embedding_model" in body:
if "embedding_model" in body and not DISABLE_INGEST_WITH_LANGFLOW:
if (
not isinstance(body["embedding_model"], str)
or not body["embedding_model"].strip()

@ -600,11 +601,16 @@ async def onboarding(request, flows_service):
# Import here to avoid circular imports
from main import init_index

logger.info("Initializing OpenSearch index after onboarding configuration")
logger.info(
"Initializing OpenSearch index after onboarding configuration"
)
await init_index()
logger.info("OpenSearch index initialization completed successfully")
except Exception as e:
logger.error("Failed to initialize OpenSearch index after onboarding", error=str(e))
logger.error(
"Failed to initialize OpenSearch index after onboarding",
error=str(e),
)
# Don't fail the entire onboarding process if index creation fails
# The application can still work, but document operations may fail

@ -73,7 +73,7 @@ class ConfigManager:
Args:
config_file: Path to configuration file. Defaults to 'config.yaml' in project root.
"""
self.config_file = Path(config_file) if config_file else Path("config.yaml")
self.config_file = Path(config_file) if config_file else Path("config/config.yaml")
self._config: Optional[OpenRAGConfig] = None

def load_config(self) -> OpenRAGConfig:

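With this change the default location moves from the repository root into the mounted config/ directory. A tiny sketch of that fallback, assuming the same constructor behavior; the helper below is illustrative and not part of ConfigManager.

# Illustrative sketch of the new default-path fallback; not part of ConfigManager.
from pathlib import Path
from typing import Optional

def resolve_config_path(config_file: Optional[str] = None) -> Path:
    # Explicit path wins; otherwise fall back to config/config.yaml.
    return Path(config_file) if config_file else Path("config/config.yaml")

print(resolve_config_path())               # config/config.yaml (new default)
print(resolve_config_path("custom.yaml"))  # explicit override still honored
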
@ -73,7 +73,7 @@ class GoogleDriveConnector(BaseConnector):

# Connector metadata
CONNECTOR_NAME = "Google Drive"
CONNECTOR_DESCRIPTION = "Connect your Google Drive to automatically sync documents"
CONNECTOR_DESCRIPTION = "Add knowledge from Google Drive"
CONNECTOR_ICON = "google-drive"

# Supported alias keys coming from various frontends / pickers

@ -19,7 +19,7 @@ class OneDriveConnector(BaseConnector):

# Connector metadata
CONNECTOR_NAME = "OneDrive"
CONNECTOR_DESCRIPTION = "Connect to OneDrive (personal) to sync documents and files"
CONNECTOR_DESCRIPTION = "Add knowledge from OneDrive"
CONNECTOR_ICON = "onedrive"

def __init__(self, config: Dict[str, Any]):

@ -20,7 +20,7 @@ class SharePointConnector(BaseConnector):

# Connector metadata
CONNECTOR_NAME = "SharePoint"
CONNECTOR_DESCRIPTION = "Connect to SharePoint to sync documents and files"
CONNECTOR_DESCRIPTION = "Add knowledge from SharePoint"
CONNECTOR_ICON = "sharepoint"

def __init__(self, config: Dict[str, Any]):

57
src/main.py
@ -53,6 +53,7 @@ from auth_middleware import optional_auth, require_auth
from config.settings import (
DISABLE_INGEST_WITH_LANGFLOW,
EMBED_MODEL,
INDEX_BODY,
INDEX_NAME,
SESSION_SECRET,
clients,

@ -82,6 +83,7 @@ logger.info(
cuda_version=torch.version.cuda,
)

async def wait_for_opensearch():
"""Wait for OpenSearch to be ready with retries"""
max_retries = 30

@ -128,6 +130,34 @@ async def configure_alerting_security():
# Don't fail startup if alerting config fails

async def _ensure_opensearch_index():
"""Ensure OpenSearch index exists when using traditional connector service."""
try:
# Check if index already exists
if await clients.opensearch.indices.exists(index=INDEX_NAME):
logger.debug("OpenSearch index already exists", index_name=INDEX_NAME)
return

# Create the index with hard-coded INDEX_BODY (uses OpenAI embedding dimensions)
await clients.opensearch.indices.create(index=INDEX_NAME, body=INDEX_BODY)
logger.info(
"Created OpenSearch index for traditional connector service",
index_name=INDEX_NAME,
vector_dimensions=INDEX_BODY["mappings"]["properties"]["chunk_embedding"][
"dimension"
],
)

except Exception as e:
logger.error(
"Failed to initialize OpenSearch index for traditional connector service",
error=str(e),
index_name=INDEX_NAME,
)
# Don't raise the exception to avoid breaking the initialization
# The service can still function, document operations might fail later

async def init_index():
"""Initialize OpenSearch index and security roles"""
await wait_for_opensearch()

@ -141,10 +171,20 @@ async def init_index():

# Create documents index
if not await clients.opensearch.indices.exists(index=INDEX_NAME):
await clients.opensearch.indices.create(index=INDEX_NAME, body=dynamic_index_body)
logger.info("Created OpenSearch index", index_name=INDEX_NAME, embedding_model=embedding_model)
await clients.opensearch.indices.create(
index=INDEX_NAME, body=dynamic_index_body
)
logger.info(
"Created OpenSearch index",
index_name=INDEX_NAME,
embedding_model=embedding_model,
)
else:
logger.info("Index already exists, skipping creation", index_name=INDEX_NAME, embedding_model=embedding_model)
logger.info(
"Index already exists, skipping creation",
index_name=INDEX_NAME,
embedding_model=embedding_model,
)

# Create knowledge filters index
knowledge_filter_index_name = "knowledge_filters"

@ -402,6 +442,9 @@ async def startup_tasks(services):
# Index will be created after onboarding when we know the embedding model
await wait_for_opensearch()

if DISABLE_INGEST_WITH_LANGFLOW:
await _ensure_opensearch_index()

# Configure alerting security
await configure_alerting_security()

@ -1075,14 +1118,6 @@ async def create_app():
return app

async def startup():
"""Application startup tasks"""
await init_index()
# Get services from app state if needed for initialization
# services = app.state.services
# await services['connector_service'].initialize()

def cleanup():
"""Cleanup on application shutdown"""
# Cleanup process pools only (webhooks handled by Starlette shutdown)

@ -1,5 +1,6 @@
import asyncio
from config.settings import (
DISABLE_INGEST_WITH_LANGFLOW,
NUDGES_FLOW_ID,
LANGFLOW_URL,
LANGFLOW_CHAT_FLOW_ID,

@ -73,17 +74,17 @@ class FlowsService:
# Scan all JSON files in the flows directory
try:
for filename in os.listdir(flows_dir):
if not filename.endswith('.json'):
if not filename.endswith(".json"):
continue

file_path = os.path.join(flows_dir, filename)

try:
with open(file_path, 'r') as f:
with open(file_path, "r") as f:
flow_data = json.load(f)

# Check if this file contains the flow we're looking for
if flow_data.get('id') == flow_id:
if flow_data.get("id") == flow_id:
# Cache the result
self._flow_file_cache[flow_id] = file_path
logger.info(f"Found flow {flow_id} in file: {filename}")

@ -99,6 +100,7 @@ class FlowsService:

logger.warning(f"Flow with ID {flow_id} not found in flows directory")
return None

async def reset_langflow_flow(self, flow_type: str):
"""Reset a Langflow flow by uploading the corresponding JSON file

@ -135,7 +137,9 @@ class FlowsService:
try:
with open(flow_path, "r") as f:
flow_data = json.load(f)
logger.info(f"Successfully loaded flow data for {flow_type} from {os.path.basename(flow_path)}")
logger.info(
f"Successfully loaded flow data for {flow_type} from {os.path.basename(flow_path)}"
)
except json.JSONDecodeError as e:
raise ValueError(f"Invalid JSON in flow file {flow_path}: {e}")
except FileNotFoundError:

@ -161,43 +165,62 @@ class FlowsService:

# Check if configuration has been edited (onboarding completed)
if config.edited:
logger.info(f"Updating {flow_type} flow with current configuration settings")
logger.info(
f"Updating {flow_type} flow with current configuration settings"
)

provider = config.provider.model_provider.lower()

# Step 1: Assign model provider (replace components) if not OpenAI
if provider != "openai":
logger.info(f"Assigning {provider} components to {flow_type} flow")
logger.info(
f"Assigning {provider} components to {flow_type} flow"
)
provider_result = await self.assign_model_provider(provider)

if not provider_result.get("success"):
logger.warning(f"Failed to assign {provider} components: {provider_result.get('error', 'Unknown error')}")
logger.warning(
f"Failed to assign {provider} components: {provider_result.get('error', 'Unknown error')}"
)
# Continue anyway, maybe just value updates will work

# Step 2: Update model values for the specific flow being reset
single_flow_config = [{
"name": flow_type,
"flow_id": flow_id,
}]
single_flow_config = [
{
"name": flow_type,
"flow_id": flow_id,
}
]

logger.info(f"Updating {flow_type} flow model values")
update_result = await self.change_langflow_model_value(
provider=provider,
embedding_model=config.knowledge.embedding_model,
llm_model=config.agent.llm_model,
endpoint=config.provider.endpoint if config.provider.endpoint else None,
flow_configs=single_flow_config
endpoint=config.provider.endpoint
if config.provider.endpoint
else None,
flow_configs=single_flow_config,
)

if update_result.get("success"):
logger.info(f"Successfully updated {flow_type} flow with current configuration")
logger.info(
f"Successfully updated {flow_type} flow with current configuration"
)
else:
logger.warning(f"Failed to update {flow_type} flow with current configuration: {update_result.get('error', 'Unknown error')}")
logger.warning(
f"Failed to update {flow_type} flow with current configuration: {update_result.get('error', 'Unknown error')}"
)
else:
logger.info(f"Configuration not yet edited (onboarding not completed), skipping model updates for {flow_type} flow")
logger.info(
f"Configuration not yet edited (onboarding not completed), skipping model updates for {flow_type} flow"
)

except Exception as e:
logger.error(f"Error updating {flow_type} flow with current configuration", error=str(e))
logger.error(
f"Error updating {flow_type} flow with current configuration",
error=str(e),
)
# Don't fail the entire reset operation if configuration update fails

return {

@ -243,7 +266,9 @@ class FlowsService:

try:
# Load component templates based on provider
llm_template, embedding_template, llm_text_template = self._load_component_templates(provider)
llm_template, embedding_template, llm_text_template = (
self._load_component_templates(provider)
)

logger.info(f"Assigning {provider} components")

@ -358,7 +383,9 @@ class FlowsService:
logger.info(f"Loaded component templates for {provider}")
return llm_template, embedding_template, llm_text_template

async def _update_flow_components(self, config, llm_template, embedding_template, llm_text_template):
async def _update_flow_components(
self, config, llm_template, embedding_template, llm_text_template
):
"""Update components in a specific flow"""
flow_name = config["name"]
flow_id = config["flow_id"]

@ -383,20 +410,23 @@ class FlowsService:
components_updated = []

# Replace embedding component
embedding_node = self._find_node_by_id(flow_data, old_embedding_id)
if embedding_node:
# Preserve position
original_position = embedding_node.get("position", {})
if not DISABLE_INGEST_WITH_LANGFLOW:
embedding_node = self._find_node_by_id(flow_data, old_embedding_id)
if embedding_node:
# Preserve position
original_position = embedding_node.get("position", {})

# Replace with new template
new_embedding_node = embedding_template.copy()
new_embedding_node["position"] = original_position
# Replace with new template
new_embedding_node = embedding_template.copy()
new_embedding_node["position"] = original_position

# Replace in flow
self._replace_node_in_flow(flow_data, old_embedding_id, new_embedding_node)
components_updated.append(
f"embedding: {old_embedding_id} -> {new_embedding_id}"
)
# Replace in flow
self._replace_node_in_flow(
flow_data, old_embedding_id, new_embedding_node
)
components_updated.append(
f"embedding: {old_embedding_id} -> {new_embedding_id}"
)

# Replace LLM component (if exists in this flow)
if old_llm_id:

@ -425,27 +455,30 @@ class FlowsService:
new_llm_text_node["position"] = original_position

# Replace in flow
self._replace_node_in_flow(flow_data, old_llm_text_id, new_llm_text_node)
components_updated.append(f"llm: {old_llm_text_id} -> {new_llm_text_id}")
self._replace_node_in_flow(
flow_data, old_llm_text_id, new_llm_text_node
)
components_updated.append(
f"llm: {old_llm_text_id} -> {new_llm_text_id}"
)

# Update all edge references using regex replacement
flow_json_str = json.dumps(flow_data)

# Replace embedding ID references
flow_json_str = re.sub(
re.escape(old_embedding_id), new_embedding_id, flow_json_str
)
flow_json_str = re.sub(
re.escape(old_embedding_id.split("-")[0]),
new_embedding_id.split("-")[0],
flow_json_str,
)
if not DISABLE_INGEST_WITH_LANGFLOW:
flow_json_str = re.sub(
re.escape(old_embedding_id), new_embedding_id, flow_json_str
)
flow_json_str = re.sub(
re.escape(old_embedding_id.split("-")[0]),
new_embedding_id.split("-")[0],
flow_json_str,
)

# Replace LLM ID references (if applicable)
if old_llm_id:
flow_json_str = re.sub(
re.escape(old_llm_id), new_llm_id, flow_json_str
)
flow_json_str = re.sub(re.escape(old_llm_id), new_llm_id, flow_json_str)
if old_llm_text_id:
flow_json_str = re.sub(
re.escape(old_llm_text_id), new_llm_text_id, flow_json_str

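The edge update above relies on a JSON round trip: the whole flow is serialized, every occurrence of the old node id (and its bare component prefix) is substituted, and the string is parsed back, so edges pick up the new ids without walking the graph. A small self-contained illustration follows; the flow dict is a made-up minimal example, not a real Langflow export.

# Self-contained illustration of rewriting node ids through a JSON round trip.
# The flow dict is a made-up minimal example, not a real Langflow export.
import json
import re

flow_data = {
    "nodes": [{"id": "OpenAIEmbeddings-abc12"}],
    "edges": [{"source": "OpenAIEmbeddings-abc12", "target": "OpenSearch-xyz34"}],
}

old_id, new_id = "OpenAIEmbeddings-abc12", "OllamaEmbeddings-def56"

flow_json_str = json.dumps(flow_data)
# Replace the full id first, then the bare component prefix used in other fields.
flow_json_str = re.sub(re.escape(old_id), new_id, flow_json_str)
flow_json_str = re.sub(
    re.escape(old_id.split("-")[0]), new_id.split("-")[0], flow_json_str
)
flow_data = json.loads(flow_json_str)

print(flow_data["edges"][0]["source"])  # OllamaEmbeddings-def56
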
@ -506,7 +539,14 @@ class FlowsService:

return None, None

async def _update_flow_field(self, flow_id: str, field_name: str, field_value: str, node_display_name: str = None, node_id: str = None):
async def _update_flow_field(
self,
flow_id: str,
field_name: str,
field_value: str,
node_display_name: str = None,
node_id: str = None,
):
"""
Generic helper function to update any field in any Langflow component.

@ -521,22 +561,26 @@ class FlowsService:
raise ValueError("flow_id is required")

# Get the current flow data from Langflow
response = await clients.langflow_request(
"GET", f"/api/v1/flows/{flow_id}"
)
response = await clients.langflow_request("GET", f"/api/v1/flows/{flow_id}")

if response.status_code != 200:
raise Exception(f"Failed to get flow: HTTP {response.status_code} - {response.text}")
raise Exception(
f"Failed to get flow: HTTP {response.status_code} - {response.text}"
)

flow_data = response.json()

# Find the target component by display name first, then by ID as fallback
target_node, target_node_index = None, None
if node_display_name:
target_node, target_node_index = self._find_node_in_flow(flow_data, display_name=node_display_name)
target_node, target_node_index = self._find_node_in_flow(
flow_data, display_name=node_display_name
)

if target_node is None and node_id:
target_node, target_node_index = self._find_node_in_flow(flow_data, node_id=node_id)
target_node, target_node_index = self._find_node_in_flow(
flow_data, node_id=node_id
)

if target_node is None:
identifier = node_display_name or node_id

@ -545,7 +589,9 @@ class FlowsService:
# Update the field value directly in the existing node
template = target_node.get("data", {}).get("node", {}).get("template", {})
if template.get(field_name):
flow_data["data"]["nodes"][target_node_index]["data"]["node"]["template"][field_name]["value"] = field_value
flow_data["data"]["nodes"][target_node_index]["data"]["node"]["template"][
field_name
]["value"] = field_value
else:
identifier = node_display_name or node_id
raise Exception(f"{field_name} field not found in {identifier} component")

@ -556,21 +602,31 @@ class FlowsService:
)

if patch_response.status_code != 200:
raise Exception(f"Failed to update flow: HTTP {patch_response.status_code} - {patch_response.text}")
raise Exception(
f"Failed to update flow: HTTP {patch_response.status_code} - {patch_response.text}"
)

async def update_chat_flow_model(self, model_name: str):
"""Helper function to update the model in the chat flow"""
if not LANGFLOW_CHAT_FLOW_ID:
raise ValueError("LANGFLOW_CHAT_FLOW_ID is not configured")
await self._update_flow_field(LANGFLOW_CHAT_FLOW_ID, "model_name", model_name,
node_display_name="Language Model")
await self._update_flow_field(
LANGFLOW_CHAT_FLOW_ID,
"model_name",
model_name,
node_display_name="Language Model",
)

async def update_chat_flow_system_prompt(self, system_prompt: str):
"""Helper function to update the system prompt in the chat flow"""
if not LANGFLOW_CHAT_FLOW_ID:
raise ValueError("LANGFLOW_CHAT_FLOW_ID is not configured")
await self._update_flow_field(LANGFLOW_CHAT_FLOW_ID, "system_prompt", system_prompt,
node_display_name="Agent")
await self._update_flow_field(
LANGFLOW_CHAT_FLOW_ID,
"system_prompt",
system_prompt,
node_display_name="Agent",
)

async def update_flow_docling_preset(self, preset: str, preset_config: dict):
"""Helper function to update docling preset in the ingest flow"""

@ -578,29 +634,46 @@ class FlowsService:
raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")

from config.settings import DOCLING_COMPONENT_ID
await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "docling_serve_opts", preset_config,
node_id=DOCLING_COMPONENT_ID)

await self._update_flow_field(
LANGFLOW_INGEST_FLOW_ID,
"docling_serve_opts",
preset_config,
node_id=DOCLING_COMPONENT_ID,
)

async def update_ingest_flow_chunk_size(self, chunk_size: int):
"""Helper function to update chunk size in the ingest flow"""
if not LANGFLOW_INGEST_FLOW_ID:
raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "chunk_size", chunk_size,
node_display_name="Split Text")
await self._update_flow_field(
LANGFLOW_INGEST_FLOW_ID,
"chunk_size",
chunk_size,
node_display_name="Split Text",
)

async def update_ingest_flow_chunk_overlap(self, chunk_overlap: int):
"""Helper function to update chunk overlap in the ingest flow"""
if not LANGFLOW_INGEST_FLOW_ID:
raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "chunk_overlap", chunk_overlap,
node_display_name="Split Text")
await self._update_flow_field(
LANGFLOW_INGEST_FLOW_ID,
"chunk_overlap",
chunk_overlap,
node_display_name="Split Text",
)

async def update_ingest_flow_embedding_model(self, embedding_model: str):
"""Helper function to update embedding model in the ingest flow"""
if not LANGFLOW_INGEST_FLOW_ID:
raise ValueError("LANGFLOW_INGEST_FLOW_ID is not configured")
await self._update_flow_field(LANGFLOW_INGEST_FLOW_ID, "model", embedding_model,
node_display_name="Embedding Model")
await self._update_flow_field(
LANGFLOW_INGEST_FLOW_ID,
"model",
embedding_model,
node_display_name="Embedding Model",
)

def _replace_node_in_flow(self, flow_data, old_id, new_node):
"""Replace a node in the flow data"""

@ -612,7 +685,12 @@ class FlowsService:
return False

async def change_langflow_model_value(
self, provider: str, embedding_model: str, llm_model: str, endpoint: str = None, flow_configs: list = None
self,
provider: str,
embedding_model: str,
llm_model: str,
endpoint: str = None,
flow_configs: list = None,
):
"""
Change dropdown values for provider-specific components across flows

@ -656,8 +734,8 @@ class FlowsService:
]

# Determine target component IDs based on provider
target_embedding_id, target_llm_id, target_llm_text_id = self._get_provider_component_ids(
provider
target_embedding_id, target_llm_id, target_llm_text_id = (
self._get_provider_component_ids(provider)
)

results = []

@ -713,12 +791,24 @@ class FlowsService:
def _get_provider_component_ids(self, provider: str):
"""Get the component IDs for a specific provider"""
if provider == "watsonx":
return WATSONX_EMBEDDING_COMPONENT_ID, WATSONX_LLM_COMPONENT_ID, WATSONX_LLM_TEXT_COMPONENT_ID
return (
WATSONX_EMBEDDING_COMPONENT_ID,
WATSONX_LLM_COMPONENT_ID,
WATSONX_LLM_TEXT_COMPONENT_ID,
)
elif provider == "ollama":
return OLLAMA_EMBEDDING_COMPONENT_ID, OLLAMA_LLM_COMPONENT_ID, OLLAMA_LLM_TEXT_COMPONENT_ID
return (
OLLAMA_EMBEDDING_COMPONENT_ID,
OLLAMA_LLM_COMPONENT_ID,
OLLAMA_LLM_TEXT_COMPONENT_ID,
)
elif provider == "openai":
# OpenAI components are the default ones
return OPENAI_EMBEDDING_COMPONENT_ID, OPENAI_LLM_COMPONENT_ID, OPENAI_LLM_TEXT_COMPONENT_ID
return (
OPENAI_EMBEDDING_COMPONENT_ID,
OPENAI_LLM_COMPONENT_ID,
OPENAI_LLM_TEXT_COMPONENT_ID,
)
else:
raise ValueError(f"Unsupported provider: {provider}")

@ -738,26 +828,25 @@ class FlowsService:
flow_id = config["flow_id"]

# Get flow data from Langflow API instead of file
response = await clients.langflow_request(
"GET", f"/api/v1/flows/{flow_id}"
)

response = await clients.langflow_request("GET", f"/api/v1/flows/{flow_id}")

if response.status_code != 200:
raise Exception(
f"Failed to get flow from Langflow: HTTP {response.status_code} - {response.text}"
)

flow_data = response.json()

updates_made = []

# Update embedding component
embedding_node = self._find_node_by_id(flow_data, target_embedding_id)
if embedding_node:
if self._update_component_fields(
embedding_node, provider, embedding_model, endpoint
):
updates_made.append(f"embedding model: {embedding_model}")
if not DISABLE_INGEST_WITH_LANGFLOW:
embedding_node = self._find_node_by_id(flow_data, target_embedding_id)
if embedding_node:
if self._update_component_fields(
embedding_node, provider, embedding_model, endpoint
):
updates_made.append(f"embedding model: {embedding_model}")

# Update LLM component (if exists in this flow)
if target_llm_id:

@ -21,6 +21,27 @@ class ModelsService:
"jina-embeddings-v2-base-en",
]

OPENAI_TOOL_CALLING_MODELS = [
"gpt-5",
"gpt-5-mini",
"gpt-5-nano",
"gpt-4o-mini",
"gpt-4o",
"gpt-4.1",
"gpt-4.1-mini",
"gpt-4.1-nano",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4",
"gpt-3.5-turbo",
"o1",
"o3-mini",
"o3",
"o3-pro",
"o4-mini",
"o4-mini-high",
]

def __init__(self):
self.session_manager = None

@ -49,12 +70,12 @@ class ModelsService:
model_id = model.get("id", "")

# Language models (GPT models)
if any(prefix in model_id for prefix in ["gpt-4", "gpt-3.5"]):
if model_id in self.OPENAI_TOOL_CALLING_MODELS:
language_models.append(
{
"value": model_id,
"label": model_id,
"default": model_id == "gpt-4o-mini",
"default": model_id == "gpt-5",
}
)

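The filter now tests membership in the explicit OPENAI_TOOL_CALLING_MODELS allow-list instead of matching on the old "gpt-4"/"gpt-3.5" prefixes, which admits the newer o-series and gpt-5 ids while keeping unrelated variants out. A quick self-contained comparison of the two checks; the sample ids and the shortened allow-list are illustrative.

# Quick comparison of the old prefix check and the new allow-list check.
# The sample ids and the shortened allow-list are illustrative.
ALLOW_LIST = {"gpt-5", "gpt-4o", "gpt-4o-mini", "o3-mini", "gpt-4-turbo"}

def old_check(model_id: str) -> bool:
    return any(prefix in model_id for prefix in ["gpt-4", "gpt-3.5"])

def new_check(model_id: str) -> bool:
    return model_id in ALLOW_LIST

for model_id in ["gpt-5", "o3-mini", "gpt-4o", "gpt-4o-audio-preview"]:
    print(model_id, old_check(model_id), new_check(model_id))
# gpt-5 and o3-mini were missed by the prefix check; gpt-4o-audio-preview
# passed it but is excluded by the allow-list.
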