Merge branch 'main' into docs-install-methods-anchor
commit bb9aa4e3ea
9 changed files with 66 additions and 17 deletions

@@ -39,7 +39,7 @@ This command will:
- Create a versioned sidebar file at `versioned_sidebars/version-1.0.0-sidebars.json`
- Append the new version to `versions.json`

3. After creating a version, update the Docusaurus configuration to include multiple versions.
2. After creating a version, update the Docusaurus configuration to include multiple versions.
`lastVersion: '1.0.0'` makes the '1.0.0' release the `latest` version.
`current` is the work-in-progress docset, accessible at `/docs/next`.
To remove a version, remove it from `onlyIncludeVersions`.
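
For orientation, here is a minimal sketch of the kind of `versions` block these steps refer to, assuming the classic preset and a `docusaurus.config.ts` written in TypeScript; the labels and paths are placeholders, not the project's actual values:

```ts
// Illustrative sketch only — adapt to the site's real docusaurus.config.
import type { Config } from "@docusaurus/types";

const config: Config = {
  presets: [
    [
      "classic",
      {
        docs: {
          // '1.0.0' is served as the default (`latest`) version.
          lastVersion: "1.0.0",
          // Remove an entry here to stop publishing that version.
          onlyIncludeVersions: ["current", "1.0.0"],
          versions: {
            // 'current' is the work-in-progress docset at /docs/next.
            current: { label: "Next", path: "next" },
            "1.0.0": { label: "1.0.0" },
          },
        },
      },
    ],
  ],
};

export default config;
```

Only the option names used above (`lastVersion`, `onlyIncludeVersions`, `versions`) come from these docs; the rest is scaffolding around the fragment shown in the next hunk.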

@@ -62,14 +62,14 @@ docs: {
},
```

4. Test the deployment locally.
3. Test the deployment locally.

```bash
npm run build
npm run serve
```

5. To add subsequent versions, repeat the process, first running the CLI command and then updating `docusaurus.config.js`.
4. To add subsequent versions, repeat the process, first running the CLI command and then updating `docusaurus.config.js`.

```bash
# Create version 2.0.0 from current docs

@@ -86,6 +86,19 @@ You can select multiple files.
The ingestion process may take some time, depending on the size of your documents.
4. When ingestion is complete, your documents are available in the Knowledge screen.

If ingestion fails, click **Status** to view the logged error.

## Monitor ingestion tasks

When you upload files, process folders, or sync documents, OpenRAG processes them as background tasks.
A badge appears on the <Icon name="Bell" aria-hidden="true"/> **Tasks** icon when there are active tasks running.
To open the Tasks menu, click <Icon name="Bell" aria-hidden="true"/> **Tasks**.

**Active Tasks** shows tasks that are currently processing.
A **Pending** task is queued and waiting to start, a **Running** task is actively processing files, and a **Processing** task is performing ingestion operations. For each active task, you can find the task ID, start time, duration, the number of files processed so far, and the total number of files.

You can cancel active tasks by clicking <Icon name="X" aria-hidden="true"/> **Cancel**. Canceling a task stops processing immediately and marks the task as failed.
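
As a rough illustration of the fields and states listed above (not OpenRAG's actual task schema — the type and property names here are hypothetical), an active task could be modeled like this:

```ts
// Hypothetical sketch of a Tasks-menu entry, based only on the fields
// described above; OpenRAG's real task API may differ.
type TaskStatus = "pending" | "running" | "processing";

interface ActiveTask {
  id: string;              // task ID
  status: TaskStatus;      // Pending = queued, Running = processing files, Processing = ingestion operations
  startedAt: string;       // start time, e.g. an ISO timestamp
  durationSeconds: number; // elapsed duration
  filesProcessed: number;  // files processed so far
  totalFiles: number;      // total files in the task
}
```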

## Explore knowledge

The **Knowledge** page lists the documents OpenRAG has ingested into the OpenSearch vector database's `documents` index.
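
To make that concrete, you can peek at the same `documents` index directly by querying OpenSearch. This sketch assumes the `@opensearch-project/opensearch` Node client, the default local endpoint on port 9200 with the `admin` user, and the `OPENSEARCH_PASSWORD` from your environment; adjust all of these to your deployment:

```ts
// Illustrative check only — endpoint, user, and TLS settings depend on your deployment.
import { Client } from "@opensearch-project/opensearch";

const client = new Client({
  node: "https://localhost:9200", // default OpenSearch endpoint (assumption)
  auth: { username: "admin", password: process.env.OPENSEARCH_PASSWORD ?? "" },
  ssl: { rejectUnauthorized: false }, // local setups often use self-signed certificates
});

async function countIngestedDocuments(): Promise<number> {
  // The Knowledge page reflects what is stored in the `documents` index.
  const { body } = await client.count({ index: "documents" });
  return body.count;
}

countIngestedDocuments().then((n) => console.log(`documents index contains ${n} entries`));
```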

@@ -19,7 +19,7 @@ Once OpenRAG is running, use the TUI to monitor your application, control your c

If you prefer running Podman or Docker containers and manually editing `.env` files, see [Install OpenRAG Containers](/get-started/docker).

## Prerequisites
## Prerequisites

- Install [Python Version 3.10 to 3.13](https://www.python.org/downloads/release/python-3100/)
- Install [uv](https://docs.astral.sh/uv/getting-started/installation/)

@@ -185,7 +185,8 @@ If the TUI detects OAuth credentials, it enforces the **Advanced Setup** path.
Command completed successfully
```
6. To start the Docling service, under **Native Services**, click **Start**.
7. To open the OpenRAG application, click **Open App**.
7. To open the OpenRAG application, navigate to the TUI main menu, and then click **Open App**.
Alternatively, in your browser, navigate to `localhost:3000`.
8. Continue with [Application Onboarding](#application-onboarding).
</TabItem>
<TabItem value="Advanced setup" label="Advanced setup">

@@ -212,7 +213,8 @@ If the TUI detects OAuth credentials, it enforces the **Advanced Setup** path.
Command completed successfully
```
8. To start the Docling service, under **Native Services**, click **Start**.
9. To open the OpenRAG application, click **Open App**.
9. To open the OpenRAG application, navigate to the TUI main menu, and then click **Open App**.
Alternatively, in your browser, navigate to `localhost:3000`.
You are presented with your provider's OAuth sign-in screen.
After sign-in, you are redirected to the redirect URI.

@@ -13,6 +13,14 @@ This page provides troubleshooting advice for issues you might encounter when us
Check that the `OPENSEARCH_PASSWORD` set in [Environment variables](/reference/configuration) meets the requirements.
The password must be strong: at least 8 characters, including at least one uppercase letter, one lowercase letter, one digit, and one special character.
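
As an illustration of those rules only (this is not OpenRAG's or OpenSearch's actual validation code, and `isStrongOpenSearchPassword` is a hypothetical helper), a candidate password can be checked like this:

```ts
// Hypothetical helper mirroring the documented OPENSEARCH_PASSWORD requirements:
// at least 8 characters, with at least one uppercase letter, one lowercase letter,
// one digit, and one special character.
function isStrongOpenSearchPassword(password: string): boolean {
  return (
    password.length >= 8 &&
    /[A-Z]/.test(password) &&
    /[a-z]/.test(password) &&
    /[0-9]/.test(password) &&
    /[^A-Za-z0-9]/.test(password)
  );
}

console.log(isStrongOpenSearchPassword("Sup3r*secret")); // true
console.log(isStrongOpenSearchPassword("password"));     // false — missing uppercase, digit, and special character
```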

## OpenRAG fails to start from the TUI with "Operation not supported" error

This error occurs when starting OpenRAG with the TUI in [WSL (Windows Subsystem for Linux)](https://learn.microsoft.com/en-us/windows/wsl/install).

It happens because OpenRAG is running inside a WSL environment, where `webbrowser.open()` can't launch a browser automatically.

To access the OpenRAG application, open a web browser and enter `http://localhost:3000` in the address bar.

## Langflow connection issues

Verify that the `LANGFLOW_SUPERUSER` credentials set in [Environment variables](/reference/configuration) are correct.

@@ -7,6 +7,7 @@ import { useProviderHealthQuery } from "@/src/app/api/queries/useProviderHealthQ
import { Button } from "./ui/button";
import { useRouter } from "next/navigation";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { ModelProvider } from "@/app/settings/helpers/model-helpers";

interface ProviderHealthBannerProps {
  className?: string;

@@ -42,12 +43,22 @@ export function useProviderHealth() {
  };
}

const providerTitleMap: Record<ModelProvider, string> = {
  openai: "OpenAI",
  ollama: "Ollama",
  watsonx: "IBM watsonx.ai",
};

export function ProviderHealthBanner({ className }: ProviderHealthBannerProps) {
  const { isLoading, isHealthy, isUnhealthy, health } = useProviderHealth();
  const router = useRouter();

  const { data: settings = {} } = useGetSettingsQuery();

  const providerTitle =
    providerTitleMap[settings.provider?.model_provider as ModelProvider] ||
    "Provider";

  // Only show banner when provider is unhealthy (not when backend is unavailable)
  if (isLoading || isHealthy) {
    return null;

@@ -71,7 +82,7 @@ export function ProviderHealthBanner({ className }: ProviderHealthBannerProps) {
        icon={AlertTriangle}
      />
      <BannerTitle className="font-medium flex items-center gap-2">
        {errorMessage}
        {providerTitle} error - {errorMessage}
      </BannerTitle>
      <Button size="sm" onClick={() => router.push(settingsUrl)}>
        Fix Setup

@@ -4,6 +4,7 @@ import {
  useQueryClient,
} from "@tanstack/react-query";
import type { Settings } from "../queries/useGetSettingsQuery";
import { useGetCurrentProviderModelsQuery } from "../queries/useGetModelsQuery";

export interface UpdateSettingsRequest {
  // Agent settings

@@ -37,6 +38,7 @@ export const useUpdateSettingsMutation = (
  >
) => {
  const queryClient = useQueryClient();
  const { refetch: refetchModels } = useGetCurrentProviderModelsQuery();

  async function updateSettings(
    variables: UpdateSettingsRequest

@@ -63,6 +65,7 @@ export const useUpdateSettingsMutation = (
      queryClient.invalidateQueries({
        queryKey: ["settings"],
      });
      refetchModels(); // Refetch models for the settings page
      options?.onSuccess?.(...args);
    },
    onError: options?.onError,

@@ -1,9 +1,9 @@
import { ModelProvider } from "@/app/settings/helpers/model-helpers";
import {
  type UseQueryOptions,
  useQuery,
  useQueryClient,
} from "@tanstack/react-query";
import { useGetSettingsQuery } from "./useGetSettingsQuery";

export interface ProviderHealthDetails {
  llm_model: string;

@@ -22,12 +22,6 @@ export interface ProviderHealthParams {
  provider?: "openai" | "ollama" | "watsonx";
}

const providerTitleMap: Record<ModelProvider, string> = {
  openai: "OpenAI",
  ollama: "Ollama",
  watsonx: "IBM watsonx.ai",
};

export const useProviderHealthQuery = (
  params?: ProviderHealthParams,
  options?: Omit<

@@ -37,6 +31,8 @@ export const useProviderHealthQuery = (
) => {
  const queryClient = useQueryClient();

  const { data: settings = {} } = useGetSettingsQuery();

  async function checkProviderHealth(): Promise<ProviderHealthResponse> {
    try {
      const url = new URL("/api/provider/health", window.location.origin);

@@ -84,6 +80,7 @@ export const useProviderHealthQuery = (
      queryKey: ["provider", "health"],
      queryFn: checkProviderHealth,
      retry: false, // Don't retry health checks automatically
      enabled: !!settings?.edited && options?.enabled !== false, // Only run after onboarding is complete
      ...options,
    },
    queryClient

@@ -57,7 +57,7 @@ export function ModelSelector({
  }, [options, value, custom, onValueChange]);

  return (
    <Popover open={open} onOpenChange={setOpen}>
    <Popover open={open} onOpenChange={setOpen} modal={false}>
      <PopoverTrigger asChild>
        {/** biome-ignore lint/a11y/useSemanticElements: has to be a Button */}
        <Button

@@ -99,7 +99,8 @@ export function ModelSelector({
      </PopoverTrigger>
      <PopoverContent
        align="start"
        className=" p-0 w-[var(--radix-popover-trigger-width)]"
        className="p-0 w-[var(--radix-popover-trigger-width)]"
        onOpenAutoFocus={(e) => e.preventDefault()}
      >
        <Command>
          <CommandInput

@@ -107,7 +108,10 @@ export function ModelSelector({
            value={searchValue}
            onValueChange={setSearchValue}
          />
          <CommandList>
          <CommandList
            className="max-h-[300px] overflow-y-auto"
            onWheel={(e) => e.stopPropagation()}
          >
            <CommandEmpty>{noOptionsPlaceholder}</CommandEmpty>
            <CommandGroup>
              {options.map((option) => (

@@ -26,6 +26,8 @@ import { IBMOnboarding } from "./ibm-onboarding";
import { OllamaOnboarding } from "./ollama-onboarding";
import { OpenAIOnboarding } from "./openai-onboarding";
import { TabTrigger } from "./tab-trigger";
import { ProviderHealthResponse } from "@/app/api/queries/useProviderHealthQuery";
import { useQueryClient } from "@tanstack/react-query";

interface OnboardingCardProps {
  onComplete: () => void;

@@ -57,6 +59,8 @@ const OnboardingCard = ({

  const [loadingStep, setLoadingStep] = useState<number>(0);

  const queryClient = useQueryClient();

  // Reset loading step when models start loading
  useEffect(() => {
    if (isLoadingModels) {

@@ -129,6 +133,13 @@
  const onboardingMutation = useOnboardingMutation({
    onSuccess: (data) => {
      console.log("Onboarding completed successfully", data);
      // Update provider health cache to healthy since backend just validated
      const healthData: ProviderHealthResponse = {
        status: "healthy",
        message: "Provider is configured and working correctly",
        provider: settings.model_provider,
      };
      queryClient.setQueryData(["provider", "health"], healthData);
      setCurrentStep(0);
      setError(null);
    },