Merge branch 'main' into add-notes-to-flows

This commit is contained in:
Mendon Kissling 2025-10-24 11:02:04 -04:00 committed by GitHub
commit ff344b88b6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 4621 additions and 3721 deletions

View file

@ -1262,7 +1262,7 @@
"display_name": "as_dataframe",
"name": "as_dataframe",
"readonly": false,
"status": true,
"status": false,
"tags": [
"as_dataframe"
]
@ -1281,7 +1281,7 @@
"display_name": "as_vector_store",
"name": "as_vector_store",
"readonly": false,
"status": true,
"status": false,
"tags": [
"as_vector_store"
]
@ -2087,7 +2087,7 @@
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "You are a helpful assistant that can use tools to answer questions and perform tasks."
"value": "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides informations about them. When asked about what is OpenRAG, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://opensearch.org/)\n\n**Docling** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.docling.ai/)\""
},
"tools": {
"_input_type": "HandleInput",

View file

@ -1,147 +1,148 @@
"use client";
import { AlertTriangle, ExternalLink, Copy } from "lucide-react";
import { useDoclingHealthQuery } from "@/src/app/api/queries/useDoclingHealthQuery";
import { Banner, BannerIcon, BannerTitle, BannerAction } from "@/components/ui/banner";
import { AlertTriangle, Copy, ExternalLink } from "lucide-react";
import { useState } from "react";
import {
Banner,
BannerAction,
BannerIcon,
BannerTitle,
} from "@/components/ui/banner";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
DialogDescription,
DialogFooter
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { HEADER_HEIGHT } from "@/lib/constants";
import { cn } from "@/lib/utils";
import { useState } from "react";
import { useDoclingHealthQuery } from "@/src/app/api/queries/useDoclingHealthQuery";
interface DoclingHealthBannerProps {
className?: string;
className?: string;
}
// DoclingSetupDialog component
interface DoclingSetupDialogProps {
open: boolean;
onOpenChange: (open: boolean) => void;
className?: string;
open: boolean;
onOpenChange: (open: boolean) => void;
className?: string;
}
function DoclingSetupDialog({
open,
onOpenChange,
className
open,
onOpenChange,
className,
}: DoclingSetupDialogProps) {
const [copied, setCopied] = useState(false);
const [copied, setCopied] = useState(false);
const handleCopy = async () => {
await navigator.clipboard.writeText("uv run openrag");
setCopied(true);
setTimeout(() => setCopied(false), 2000);
};
const handleCopy = async () => {
await navigator.clipboard.writeText("uv run openrag");
setCopied(true);
setTimeout(() => setCopied(false), 2000);
};
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className={cn("max-w-lg", className)}>
<DialogHeader>
<DialogTitle className="flex items-center gap-2 text-base">
<AlertTriangle className="h-4 w-4 text-amber-600 dark:text-amber-400" />
docling-serve is stopped. Knowledge ingest is unavailable.
</DialogTitle>
<DialogDescription>
Start docling-serve by running:
</DialogDescription>
</DialogHeader>
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className={cn("max-w-lg", className)}>
<DialogHeader>
<DialogTitle className="flex items-center gap-2 text-base">
<AlertTriangle className="h-4 w-4 text-amber-600 dark:text-amber-400" />
docling-serve is stopped. Knowledge ingest is unavailable.
</DialogTitle>
<DialogDescription>Start docling-serve by running:</DialogDescription>
</DialogHeader>
<div className="space-y-4">
<div className="flex items-center gap-2">
<code className="flex-1 bg-muted px-3 py-2.5 rounded-md text-sm font-mono">
uv run openrag
</code>
<Button
variant="ghost"
size="icon"
onClick={handleCopy}
className="shrink-0"
title={copied ? "Copied!" : "Copy to clipboard"}
>
<Copy className="h-4 w-4" />
</Button>
</div>
<div className="space-y-4">
<div className="flex items-center gap-2">
<code className="flex-1 bg-muted px-3 py-2.5 rounded-md text-sm font-mono">
uv run openrag
</code>
<Button
variant="ghost"
size="icon"
onClick={handleCopy}
className="shrink-0"
title={copied ? "Copied!" : "Copy to clipboard"}
>
<Copy className="h-4 w-4" />
</Button>
</div>
<DialogDescription>
Then, select <span className="font-semibold text-foreground">Start All Services</span> in the TUI. Once docling-serve is running, refresh OpenRAG.
</DialogDescription>
</div>
<DialogDescription>
Then, select{" "}
<span className="font-semibold text-foreground">
Start All Services
</span>{" "}
in the TUI. Once docling-serve is running, refresh OpenRAG.
</DialogDescription>
</div>
<DialogFooter>
<Button
variant="default"
onClick={() => onOpenChange(false)}
>
Close
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
<DialogFooter>
<Button variant="default" onClick={() => onOpenChange(false)}>
Close
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}
// Custom hook to check docling health status
export function useDoclingHealth() {
const { data: health, isLoading, isError } = useDoclingHealthQuery();
const { data: health, isLoading, isError } = useDoclingHealthQuery();
const isHealthy = health?.status === "healthy" && !isError;
const isUnhealthy = health?.status === "unhealthy" || isError;
const isHealthy = health?.status === "healthy" && !isError;
const isUnhealthy = health?.status === "unhealthy" || isError;
return {
health,
isLoading,
isError,
isHealthy,
isUnhealthy,
};
return {
health,
isLoading,
isError,
isHealthy,
isUnhealthy,
};
}
export function DoclingHealthBanner({ className }: DoclingHealthBannerProps) {
const { isLoading, isHealthy, isUnhealthy } = useDoclingHealth();
const [showDialog, setShowDialog] = useState(false);
const { isLoading, isHealthy, isUnhealthy } = useDoclingHealth();
const [showDialog, setShowDialog] = useState(false);
// Only show banner when service is unhealthy
if (isLoading || isHealthy) {
return null;
}
// Only show banner when service is unhealthy
if (isLoading || isHealthy) {
return null;
}
if (isUnhealthy) {
return (
<>
<Banner
className={cn(
"bg-amber-50 text-amber-900 dark:bg-amber-950 dark:text-amber-200 border-amber-200 dark:border-amber-800",
className
)}
>
<BannerIcon
icon={AlertTriangle}
/>
<BannerTitle className="font-medium">
docling-serve native service is stopped. Knowledge ingest is unavailable.
</BannerTitle>
<BannerAction
onClick={() => setShowDialog(true)}
className="bg-foreground text-background hover:bg-primary/90"
>
Setup Docling Serve
<ExternalLink className="h-3 w-3 ml-1" />
</BannerAction>
</Banner>
if (isUnhealthy) {
return (
<>
<Banner
className={cn(
`bg-amber-50 text-amber-900 dark:bg-amber-950 dark:text-amber-200 border-amber-200 dark:border-amber-800`,
className,
)}
>
<BannerIcon icon={AlertTriangle} />
<BannerTitle className="font-medium">
docling-serve native service is stopped. Knowledge ingest is
unavailable.
</BannerTitle>
<BannerAction
onClick={() => setShowDialog(true)}
className="bg-foreground text-background hover:bg-primary/90"
>
Setup Docling Serve
<ExternalLink className="h-3 w-3 ml-1" />
</BannerAction>
</Banner>
<DoclingSetupDialog
open={showDialog}
onOpenChange={setShowDialog}
/>
</>
);
}
<DoclingSetupDialog open={showDialog} onOpenChange={setShowDialog} />
</>
);
}
return null;
}
return null;
}

View file

@ -32,12 +32,16 @@ import {
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { useTask } from "@/contexts/task-context";
import { cn } from "@/lib/utils";
import {
duplicateCheck,
uploadFile as uploadFileUtil,
} from "@/lib/upload-utils";
import type { File as SearchFile } from "@/src/app/api/queries/useGetSearchQuery";
import GoogleDriveIcon from "@/app/settings/icons/google-drive-icon";
import OneDriveIcon from "@/app/settings/icons/one-drive-icon";
import SharePointIcon from "@/app/settings/icons/share-point-icon";
import AwsIcon from "@/app/settings/icons/aws-icon";
import { cn } from "@/lib/utils";
export function KnowledgeDropdown() {
const { addTask } = useTask();
@ -155,45 +159,33 @@ export function KnowledgeDropdown() {
fileInputRef.current?.click();
};
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const files = e.target.files;
const resetFileInput = () => {
if (fileInputRef.current) {
fileInputRef.current.value = "";
}
};
const handleFileChange = async (
event: React.ChangeEvent<HTMLInputElement>
) => {
const files = event.target.files;
if (files && files.length > 0) {
const file = files[0];
// File selection will close dropdown automatically
try {
// Check if filename already exists (using ORIGINAL filename)
console.log("[Duplicate Check] Checking file:", file.name);
const checkResponse = await fetch(
`/api/documents/check-filename?filename=${encodeURIComponent(
file.name
)}`
);
console.log("[Duplicate Check] Response status:", checkResponse.status);
if (!checkResponse.ok) {
const errorText = await checkResponse.text();
console.error("[Duplicate Check] Error response:", errorText);
throw new Error(
`Failed to check duplicates: ${checkResponse.statusText}`
);
}
const checkData = await checkResponse.json();
const checkData = await duplicateCheck(file);
console.log("[Duplicate Check] Result:", checkData);
if (checkData.exists) {
// Show duplicate handling dialog
console.log("[Duplicate Check] Duplicate detected, showing dialog");
setPendingFile(file);
setDuplicateFilename(file.name);
setShowDuplicateDialog(true);
// Reset file input
if (fileInputRef.current) {
fileInputRef.current.value = "";
}
resetFileInput();
return;
}
@ -208,105 +200,20 @@ export function KnowledgeDropdown() {
}
}
// Reset file input
if (fileInputRef.current) {
fileInputRef.current.value = "";
}
resetFileInput();
};
const uploadFile = async (file: File, replace: boolean) => {
setFileUploading(true);
// Trigger the same file upload event as the chat page
window.dispatchEvent(
new CustomEvent("fileUploadStart", {
detail: { filename: file.name },
})
);
try {
const formData = new FormData();
formData.append("file", file);
formData.append("replace_duplicates", replace.toString());
// Use router upload and ingest endpoint (automatically routes based on configuration)
const uploadIngestRes = await fetch("/api/router/upload_ingest", {
method: "POST",
body: formData,
});
const uploadIngestJson = await uploadIngestRes.json();
if (!uploadIngestRes.ok) {
throw new Error(uploadIngestJson?.error || "Upload and ingest failed");
}
// Extract results from the response - handle both unified and simple formats
const fileId =
uploadIngestJson?.upload?.id ||
uploadIngestJson?.id ||
uploadIngestJson?.task_id;
const filePath =
uploadIngestJson?.upload?.path || uploadIngestJson?.path || "uploaded";
const runJson = uploadIngestJson?.ingestion;
const deleteResult = uploadIngestJson?.deletion;
console.log("c", uploadIngestJson);
if (!fileId) {
throw new Error("Upload successful but no file id returned");
}
// Check if ingestion actually succeeded
if (
runJson &&
runJson.status !== "COMPLETED" &&
runJson.status !== "SUCCESS"
) {
const errorMsg = runJson.error || "Ingestion pipeline failed";
throw new Error(
`Ingestion failed: ${errorMsg}. Try setting DISABLE_INGEST_WITH_LANGFLOW=true if you're experiencing Langflow component issues.`
);
}
// Log deletion status if provided
if (deleteResult) {
if (deleteResult.status === "deleted") {
console.log(
"File successfully cleaned up from Langflow:",
deleteResult.file_id
);
} else if (deleteResult.status === "delete_failed") {
console.warn(
"Failed to cleanup file from Langflow:",
deleteResult.error
);
}
}
// Notify UI
window.dispatchEvent(
new CustomEvent("fileUploaded", {
detail: {
file: file,
result: {
file_id: fileId,
file_path: filePath,
run: runJson,
deletion: deleteResult,
unified: true,
},
},
})
);
await uploadFileUtil(file, replace);
refetchTasks();
} catch (error) {
window.dispatchEvent(
new CustomEvent("fileUploadError", {
detail: {
filename: file.name,
error: error instanceof Error ? error.message : "Upload failed",
},
})
);
toast.error("Upload failed", {
description: error instanceof Error ? error.message : "Unknown error",
});
} finally {
window.dispatchEvent(new CustomEvent("fileUploadComplete"));
setFileUploading(false);
}
};
@ -323,6 +230,7 @@ export function KnowledgeDropdown() {
});
await uploadFile(pendingFile, true);
setPendingFile(null);
setDuplicateFilename("");
}

View file

@ -1,100 +1,100 @@
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
import {
ChangeEvent,
FormEvent,
useCallback,
useEffect,
useState,
} from "react";
import { filterAccentClasses } from "./knowledge-filter-panel";
import { ArrowRight, Search, X } from "lucide-react";
import {
type ChangeEvent,
type FormEvent,
useCallback,
useEffect,
useState,
} from "react";
import { Button } from "@/components/ui/button";
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
import { cn } from "@/lib/utils";
import { filterAccentClasses } from "./knowledge-filter-panel";
export const KnowledgeSearchInput = () => {
const {
selectedFilter,
setSelectedFilter,
parsedFilterData,
queryOverride,
setQueryOverride,
} = useKnowledgeFilter();
const {
selectedFilter,
setSelectedFilter,
parsedFilterData,
queryOverride,
setQueryOverride,
} = useKnowledgeFilter();
const [searchQueryInput, setSearchQueryInput] = useState(queryOverride || "");
const [searchQueryInput, setSearchQueryInput] = useState(queryOverride || "");
const handleSearch = useCallback(
(e?: FormEvent<HTMLFormElement>) => {
if (e) e.preventDefault();
setQueryOverride(searchQueryInput.trim());
},
[searchQueryInput, setQueryOverride]
);
const handleSearch = useCallback(
(e?: FormEvent<HTMLFormElement>) => {
if (e) e.preventDefault();
setQueryOverride(searchQueryInput.trim());
},
[searchQueryInput, setQueryOverride],
);
// Reset the query text when the selected filter changes
useEffect(() => {
setSearchQueryInput(queryOverride);
}, [queryOverride]);
// Reset the query text when the selected filter changes
useEffect(() => {
setSearchQueryInput(queryOverride);
}, [queryOverride]);
return (
<form
className="flex flex-1 max-w-[min(640px,100%)] min-w-[100px]"
onSubmit={handleSearch}
>
<div className="primary-input group/input min-h-10 !flex items-center flex-nowrap focus-within:border-foreground transition-colors !p-[0.3rem]">
{selectedFilter?.name && (
<div
title={selectedFilter?.name}
className={`flex items-center gap-1 h-full px-1.5 py-0.5 mr-1 rounded max-w-[25%] ${
filterAccentClasses[parsedFilterData?.color || "zinc"]
}`}
>
<span className="truncate">{selectedFilter?.name}</span>
<X
aria-label="Remove filter"
className="h-4 w-4 flex-shrink-0 cursor-pointer"
onClick={() => setSelectedFilter(null)}
/>
</div>
)}
<Search
className="h-4 w-4 ml-1 flex-shrink-0 text-placeholder-foreground"
strokeWidth={1.5}
/>
<input
className="bg-transparent w-full h-full ml-2 focus:outline-none focus-visible:outline-none font-mono placeholder:font-mono"
name="search-query"
id="search-query"
type="text"
placeholder="Search your documents..."
value={searchQueryInput}
onChange={(e: ChangeEvent<HTMLInputElement>) =>
setSearchQueryInput(e.target.value)
}
/>
{queryOverride && (
<Button
variant="ghost"
className="h-full rounded-sm !px-1.5 !py-0"
type="button"
onClick={() => {
setSearchQueryInput("");
setQueryOverride("");
}}
>
<X className="h-4 w-4" />
</Button>
)}
<Button
variant="ghost"
className={cn(
"h-full rounded-sm !px-1.5 !py-0 hidden group-focus-within/input:block",
searchQueryInput && "block"
)}
type="submit"
>
<ArrowRight className="h-4 w-4" />
</Button>
</div>
</form>
);
return (
<form
className="flex flex-1 max-w-[min(640px,100%)] min-w-[100px]"
onSubmit={handleSearch}
>
<div className="primary-input group/input min-h-10 !flex items-center flex-nowrap focus-within:border-foreground transition-colors !p-[0.3rem]">
{selectedFilter?.name && (
<div
title={selectedFilter?.name}
className={`flex items-center gap-1 h-full px-1.5 py-0.5 mr-1 rounded max-w-[25%] ${
filterAccentClasses[parsedFilterData?.color || "zinc"]
}`}
>
<span className="truncate text-xs font-medium">{selectedFilter?.name}</span>
<X
aria-label="Remove filter"
className="h-4 w-4 flex-shrink-0 cursor-pointer"
onClick={() => setSelectedFilter(null)}
/>
</div>
)}
<Search
className="h-4 w-4 ml-1 flex-shrink-0 text-placeholder-foreground"
strokeWidth={1.5}
/>
<input
className="bg-transparent w-full h-full ml-2 focus:outline-none focus-visible:outline-none font-mono placeholder:font-mono"
name="search-query"
id="search-query"
type="text"
placeholder="Search your documents..."
value={searchQueryInput}
onChange={(e: ChangeEvent<HTMLInputElement>) =>
setSearchQueryInput(e.target.value)
}
/>
{queryOverride && (
<Button
variant="ghost"
className="h-full rounded-sm !px-1.5 !py-0"
type="button"
onClick={() => {
setSearchQueryInput("");
setQueryOverride("");
}}
>
<X className="h-4 w-4" />
</Button>
)}
<Button
variant="ghost"
className={cn(
"h-full rounded-sm !px-1.5 !py-0 hidden group-focus-within/input:block",
searchQueryInput && "block",
)}
type="submit"
>
<ArrowRight className="h-4 w-4" />
</Button>
</div>
</form>
);
};

View file

@ -3,25 +3,22 @@ interface DogIconProps extends React.SVGProps<SVGSVGElement> {
}
const DogIcon = ({ disabled = false, stroke, ...props }: DogIconProps) => {
const strokeColor = disabled ? "#71717A" : (stroke || "#0F62FE");
const fillColor = disabled ? "#71717A" : (stroke || "#22A7AF");
return (
<svg
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
xmlns="http://www.w3.org/2000/svg"
{...props}
>
<path
d="M19.9049 23H17.907C17.907 23 15.4096 20.5 16.908 16C17.3753 14.2544 17.3813 12.4181 17.2439 11C17.161 10.1434 17.0256 9.43934 16.908 9C16.7416 8.33333 16.8081 7 18.4065 7C19.5457 7 20.9571 6.92944 21.4034 6.5C22.3268 5.61145 21.9029 4 21.9029 4C21.9029 4 20.9039 3 18.906 3C18.7395 2.33333 17.7072 1 14.9101 1C12.113 1 11.5835 2.16589 10.9143 4C10.4155 5.36686 10.423 6.99637 11.1692 7.71747M14.4106 4C14.2441 5.33333 14.4106 8 11.9132 8C11.5968 8 11.3534 7.89548 11.1692 7.71747M14.9101 23H12.4127M7.91738 23H12.4127M10.4148 15.5C11.5715 16.1667 13.5905 18.6 12.4127 23M3.42204 15C1.02177 18.5 1.64205 23 5.41997 23C5.41997 22 5.71966 19.2 6.91841 16C8.41686 12 11.1692 11.4349 11.1692 7.71747M16.908 4V4.5"
stroke={strokeColor}
strokeWidth="1.5"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
disabled ? (
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="18" viewBox="0 0 24 18" fill={fillColor}>
<path d="M8 18H2V16H8V18Z"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M20 2H22V6H24V10H20V14H24V16H14V14H2V16H0V8H2V6H8V10H10V12H16V6H14V10H12V8H10V2H12V0H20V2ZM18 6H20V4H18V6Z"/>
</svg>
) : (
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="20" viewBox="0 0 24 20" fill={fillColor}>
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.0769 10.9091H16.6154V5.45455H14.7692V9.09091H12.9231V7.27273H11.0769V1.81818H12.9231V0H20.3077V1.81818H22.1538V5.45455H24V9.09091H20.3077V14.5455H18.4615V20H14.7692V16.3636H12.9231V14.5455H7.38462V16.3636H5.53846V20H1.84615V10.9091H3.69231V9.09091H11.0769V10.9091ZM18.4615 5.45455H20.3077V3.63636H18.4615V5.45455Z"/>
<path d="M1.84615 10.9091H0V7.27273H1.84615V10.9091Z"/>
<path d="M3.69231 7.27273H1.84615V5.45455H3.69231V7.27273Z"/>
<path d="M5.53846 5.45455H3.69231V3.63636H5.53846V5.45455Z"/>
</svg>
)
)
}

View file

@ -1,19 +1,9 @@
export default function Logo(props: React.SVGProps<SVGSVGElement>) {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width="24"
height="22"
viewBox="0 0 24 22"
fill="currentColor"
className="h-6 w-6 text-primary"
aria-label="OpenRAG Logo"
{...props}
>
<svg xmlns="http://www.w3.org/2000/svg" width="50" height="40" viewBox="0 0 50 40" fill="white" {...props}>
<title>OpenRAG Logo</title>
<path d="M13.0486 0.462158H9.75399C9.44371 0.462158 9.14614 0.586082 8.92674 0.806667L4.03751 5.72232C3.81811 5.9429 3.52054 6.06682 3.21026 6.06682H1.16992C0.511975 6.06682 -0.0165756 6.61212 0.000397655 7.2734L0.0515933 9.26798C0.0679586 9.90556 0.586745 10.4139 1.22111 10.4139H3.59097C3.90124 10.4139 4.19881 10.2899 4.41821 10.0694L9.34823 5.11269C9.56763 4.89211 9.8652 4.76818 10.1755 4.76818H13.0486C13.6947 4.76818 14.2185 4.24157 14.2185 3.59195V1.63839C14.2185 0.988773 13.6947 0.462158 13.0486 0.462158Z"></path>
<path d="M19.5355 11.5862H22.8301C23.4762 11.5862 24 12.1128 24 12.7624V14.716C24 15.3656 23.4762 15.8922 22.8301 15.8922H19.957C19.6467 15.8922 19.3491 16.0161 19.1297 16.2367L14.1997 21.1934C13.9803 21.414 13.6827 21.5379 13.3725 21.5379H11.0026C10.3682 21.5379 9.84945 21.0296 9.83309 20.392L9.78189 18.3974C9.76492 17.7361 10.2935 17.1908 10.9514 17.1908H12.9918C13.302 17.1908 13.5996 17.0669 13.819 16.8463L18.7082 11.9307C18.9276 11.7101 19.2252 11.5862 19.5355 11.5862Z"></path>
<path d="M19.5355 2.9796L22.8301 2.9796C23.4762 2.9796 24 3.50622 24 4.15583V6.1094C24 6.75901 23.4762 7.28563 22.8301 7.28563H19.957C19.6467 7.28563 19.3491 7.40955 19.1297 7.63014L14.1997 12.5868C13.9803 12.8074 13.6827 12.9313 13.3725 12.9313H10.493C10.1913 12.9313 9.90126 13.0485 9.68346 13.2583L4.14867 18.5917C3.93087 18.8016 3.64085 18.9187 3.33917 18.9187H1.32174C0.675616 18.9187 0.151832 18.3921 0.151832 17.7425V15.7343C0.151832 15.0846 0.675616 14.558 1.32174 14.558H3.32468C3.63496 14.558 3.93253 14.4341 4.15193 14.2135L9.40827 8.92878C9.62767 8.70819 9.92524 8.58427 10.2355 8.58427H12.9918C13.302 8.58427 13.5996 8.46034 13.819 8.23976L18.7082 3.32411C18.9276 3.10353 19.2252 2.9796 19.5355 2.9796Z"></path>
<path d="M27.9909 33.9523H22.1153V31.4062H27.9909V33.9523Z"/>
<path fillRule="evenodd" clipRule="evenodd" d="M34.9164 0V2.22796H39.7422V4.66797H42.1551V7.2154H44.7791V9.65541H47.192V14.1113H49.5V20.2651H47.192V22.8111H44.7791V25.2525H42.889V29.7084H40.3713V27.4805H39.6374V8.8058H37.1196L37.1183 34.2704H38.5875V40H11.4139V34.2704H12.8818V8.8058H10.364V27.4805H9.63008V29.7084H7.11234V25.2525H5.22369V22.8111H2.80942V20.2651H0.5V14.1113H2.80942V9.65541H5.22369V7.2154H7.84628V4.66797H10.2592V2.22796H15.0863V0H34.9164ZM21.38 19.947V23.8728H23.7942V27.8181L22.7706 29.1643H19.5962V27.2684H17.4978V23.5547H15.3995V27.4805H17.4978V29.3903H19.4913V34.0444H20.5412V35.12H22.1153V36.2863H27.9909V35.12H29.565V34.0444H30.6135V29.3903H32.6056V27.2684H30.5073V29.1643H27.4074L26.312 27.7232V23.8728H28.7249V19.947H21.38ZM32.607 23.5547V27.4805H34.7054V23.5547H32.607ZM17.4978 12.0954V15.067H20.4363V12.0954H17.4978ZM29.6699 12.0954V15.067H32.607V12.0954H29.6699Z"/>
</svg>
);
}

View file

@ -7,6 +7,7 @@ import CodeComponent from "./code-component";
type MarkdownRendererProps = {
chatMessage: string;
className?: string;
};
const preprocessChatMessage = (text: string): string => {
@ -48,7 +49,7 @@ export const cleanupTableEmptyCells = (text: string): string => {
})
.join("\n");
};
export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => {
export const MarkdownRenderer = ({ chatMessage, className }: MarkdownRendererProps) => {
// Process the chat message to handle <think> tags and clean up tables
const processedChatMessage = preprocessChatMessage(chatMessage);
@ -57,6 +58,7 @@ export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => {
className={cn(
"markdown prose flex w-full max-w-full flex-col items-baseline text-base font-normal word-break-break-word dark:prose-invert",
!chatMessage ? "text-muted-foreground" : "text-primary",
className,
)}
>
<Markdown
@ -65,11 +67,14 @@ export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => {
urlTransform={(url) => url}
components={{
p({ node, ...props }) {
return <p className="w-fit max-w-full">{props.children}</p>;
return <p className="w-fit max-w-full first:mt-0 last:mb-0 my-2">{props.children}</p>;
},
ol({ node, ...props }) {
return <ol className="max-w-full">{props.children}</ol>;
},
strong({ node, ...props }) {
return <strong className="font-bold">{props.children}</strong>;
},
h1({ node, ...props }) {
return <h1 className="mb-6 mt-4">{props.children}</h1>;
},

File diff suppressed because it is too large Load diff

View file

@ -14,7 +14,7 @@ const AccordionItem = React.forwardRef<
>(({ className, ...props }, ref) => (
<AccordionPrimitive.Item
ref={ref}
className={cn("border rounded-md", className)}
className={cn("border rounded-xl", className)}
{...props}
/>
));

View file

@ -13,7 +13,7 @@ const TabsList = React.forwardRef<
<TabsPrimitive.List
ref={ref}
className={cn(
"inline-flex h-12 gap-3 items-center justify-center p-0 text-muted-foreground w-full",
"inline-flex h-fit gap-3 items-center justify-center p-0 text-muted-foreground w-full",
className,
)}
{...props}
@ -28,7 +28,7 @@ const TabsTrigger = React.forwardRef<
<TabsPrimitive.Trigger
ref={ref}
className={cn(
"inline-flex w-full h-full border border-border gap-1.5 items-center justify-center whitespace-nowrap rounded-lg px-3 py-1.5 text-sm font-medium transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:border-accent-pink-foreground data-[state=active]:text-foreground",
"flex flex-col items-start justify-between p-5 gap-4 w-full h-full border border-border whitespace-nowrap rounded-lg text-sm font-medium transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:border-muted-foreground data-[state=active]:text-foreground",
className,
)}
{...props}

View file

@ -0,0 +1,227 @@
export interface DuplicateCheckResponse {
exists: boolean;
[key: string]: unknown;
}
export interface UploadFileResult {
fileId: string;
filePath: string;
run: unknown;
deletion: unknown;
unified: boolean;
raw: unknown;
}
export async function duplicateCheck(
file: File
): Promise<DuplicateCheckResponse> {
const response = await fetch(
`/api/documents/check-filename?filename=${encodeURIComponent(file.name)}`
);
if (!response.ok) {
const errorText = await response.text();
throw new Error(
errorText || `Failed to check duplicates: ${response.statusText}`
);
}
return response.json();
}
export async function uploadFileForContext(
file: File
): Promise<UploadFileResult> {
window.dispatchEvent(
new CustomEvent("fileUploadStart", {
detail: { filename: file.name },
})
);
try {
const formData = new FormData();
formData.append("file", file);
const uploadResponse = await fetch("/api/upload_context", {
method: "POST",
body: formData,
});
let payload: unknown;
try {
payload = await uploadResponse.json();
} catch (error) {
throw new Error("Upload failed: unable to parse server response");
}
const uploadJson =
typeof payload === "object" && payload !== null ? payload : {};
if (!uploadResponse.ok) {
const errorMessage =
(uploadJson as { error?: string }).error ||
"Upload failed";
throw new Error(errorMessage);
}
const fileId =
(uploadJson as { response_id?: string }).response_id || "uploaded";
const filePath =
(uploadJson as { filename?: string }).filename || file.name;
const pages = (uploadJson as { pages?: number }).pages;
const contentLength = (uploadJson as { content_length?: number }).content_length;
const confirmation = (uploadJson as { confirmation?: string }).confirmation;
const result: UploadFileResult = {
fileId,
filePath,
run: null,
deletion: null,
unified: false,
raw: uploadJson,
};
window.dispatchEvent(
new CustomEvent("fileUploaded", {
detail: {
file,
result: {
file_id: fileId,
file_path: filePath,
filename: filePath,
pages: pages,
content_length: contentLength,
confirmation: confirmation,
response_id: fileId,
run: null,
deletion: null,
unified: false,
},
},
})
);
return result;
} catch (error) {
window.dispatchEvent(
new CustomEvent("fileUploadError", {
detail: {
filename: file.name,
error:
error instanceof Error ? error.message : "Upload failed",
},
})
);
throw error;
} finally {
window.dispatchEvent(new CustomEvent("fileUploadComplete"));
}
}
export async function uploadFile(
file: File,
replace = false
): Promise<UploadFileResult> {
window.dispatchEvent(
new CustomEvent("fileUploadStart", {
detail: { filename: file.name },
})
);
try {
const formData = new FormData();
formData.append("file", file);
formData.append("replace_duplicates", replace.toString());
const uploadResponse = await fetch("/api/router/upload_ingest", {
method: "POST",
body: formData,
});
let payload: unknown;
try {
payload = await uploadResponse.json();
} catch (error) {
throw new Error("Upload failed: unable to parse server response");
}
const uploadIngestJson =
typeof payload === "object" && payload !== null ? payload : {};
if (!uploadResponse.ok) {
const errorMessage =
(uploadIngestJson as { error?: string }).error ||
"Upload and ingest failed";
throw new Error(errorMessage);
}
const fileId =
(uploadIngestJson as { upload?: { id?: string } }).upload?.id ||
(uploadIngestJson as { id?: string }).id ||
(uploadIngestJson as { task_id?: string }).task_id;
const filePath =
(uploadIngestJson as { upload?: { path?: string } }).upload?.path ||
(uploadIngestJson as { path?: string }).path ||
"uploaded";
const runJson = (uploadIngestJson as { ingestion?: unknown }).ingestion;
const deletionJson = (uploadIngestJson as { deletion?: unknown }).deletion;
if (!fileId) {
throw new Error("Upload successful but no file id returned");
}
if (
runJson &&
typeof runJson === "object" &&
"status" in (runJson as Record<string, unknown>) &&
(runJson as { status?: string }).status !== "COMPLETED" &&
(runJson as { status?: string }).status !== "SUCCESS"
) {
const errorMsg =
(runJson as { error?: string }).error ||
"Ingestion pipeline failed";
throw new Error(
`Ingestion failed: ${errorMsg}. Try setting DISABLE_INGEST_WITH_LANGFLOW=true if you're experiencing Langflow component issues.`
);
}
const result: UploadFileResult = {
fileId,
filePath,
run: runJson,
deletion: deletionJson,
unified: true,
raw: uploadIngestJson,
};
window.dispatchEvent(
new CustomEvent("fileUploaded", {
detail: {
file,
result: {
file_id: fileId,
file_path: filePath,
run: runJson,
deletion: deletionJson,
unified: true,
},
},
})
);
return result;
} catch (error) {
window.dispatchEvent(
new CustomEvent("fileUploadError", {
detail: {
filename: file.name,
error:
error instanceof Error ? error.message : "Upload failed",
},
})
);
throw error;
} finally {
window.dispatchEvent(new CustomEvent("fileUploadComplete"));
}
}

View file

@ -52,6 +52,7 @@
"sonner": "^2.0.6",
"tailwind-merge": "^3.3.1",
"tailwindcss-animate": "^1.0.7",
"use-stick-to-bottom": "^1.1.1",
"zustand": "^5.0.8"
},
"devDependencies": {
@ -10224,6 +10225,15 @@
}
}
},
"node_modules/use-stick-to-bottom": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/use-stick-to-bottom/-/use-stick-to-bottom-1.1.1.tgz",
"integrity": "sha512-JkDp0b0tSmv7HQOOpL1hT7t7QaoUBXkq045WWWOFDTlLGRzgIIyW7vyzOIJzY7L2XVIG7j1yUxeDj2LHm9Vwng==",
"license": "MIT",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/use-sync-external-store": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz",

View file

@ -53,6 +53,7 @@
"sonner": "^2.0.6",
"tailwind-merge": "^3.3.1",
"tailwindcss-animate": "^1.0.7",
"use-stick-to-bottom": "^1.1.1",
"zustand": "^5.0.8"
},
"devDependencies": {

View file

@ -106,9 +106,8 @@ async function proxyRequest(
}
const response = await fetch(backendUrl, init);
const responseBody = await response.text();
const responseHeaders = new Headers();
// Copy response headers
for (const [key, value] of response.headers.entries()) {
if (!key.toLowerCase().startsWith('transfer-encoding') &&
@ -117,11 +116,22 @@ async function proxyRequest(
}
}
return new NextResponse(responseBody, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
});
// For streaming responses, pass the body directly without buffering
if (response.body) {
return new NextResponse(response.body, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
});
} else {
// Fallback for non-streaming responses
const responseBody = await response.text();
return new NextResponse(responseBody, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
});
}
} catch (error) {
console.error('Proxy error:', error);
return NextResponse.json(

View file

@ -158,8 +158,8 @@ function AuthCallbackContent() {
}
return (
<div className="min-h-screen flex items-center justify-center bg-background">
<Card className="w-full max-w-md">
<div className="min-h-screen flex items-center justify-center bg-card rounded-lg m-4">
<Card className="w-full max-w-md bg-card rounded-lg m-4">
<CardHeader className="text-center">
<CardTitle className="flex items-center justify-center gap-2">
{status === "processing" && (

View file

@ -1,63 +1,87 @@
import { Bot, GitBranch } from "lucide-react";
import { GitBranch } from "lucide-react";
import { motion } from "motion/react";
import DogIcon from "@/components/logo/dog-icon";
import { MarkdownRenderer } from "@/components/markdown-renderer";
import { cn } from "@/lib/utils";
import type { FunctionCall } from "../types";
import { FunctionCalls } from "./function-calls";
import { Message } from "./message";
import type { FunctionCall } from "../types";
import DogIcon from "@/components/logo/dog-icon";
interface AssistantMessageProps {
content: string;
functionCalls?: FunctionCall[];
messageIndex?: number;
expandedFunctionCalls: Set<string>;
onToggle: (functionCallId: string) => void;
isStreaming?: boolean;
showForkButton?: boolean;
onFork?: (e: React.MouseEvent) => void;
content: string;
functionCalls?: FunctionCall[];
messageIndex?: number;
expandedFunctionCalls: Set<string>;
onToggle: (functionCallId: string) => void;
isStreaming?: boolean;
showForkButton?: boolean;
onFork?: (e: React.MouseEvent) => void;
isCompleted?: boolean;
animate?: boolean;
delay?: number;
}
export function AssistantMessage({
content,
functionCalls = [],
messageIndex,
expandedFunctionCalls,
onToggle,
isStreaming = false,
showForkButton = false,
onFork,
content,
functionCalls = [],
messageIndex,
expandedFunctionCalls,
onToggle,
isStreaming = false,
showForkButton = false,
onFork,
isCompleted = false,
animate = true,
delay = 0.2,
}: AssistantMessageProps) {
const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true";
const IconComponent = updatedOnboarding ? DogIcon : Bot;
return (
<Message
icon={
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<IconComponent className="h-4 w-4 text-accent-foreground" />
</div>
}
actions={
showForkButton && onFork ? (
<button
onClick={onFork}
className="opacity-0 group-hover:opacity-100 transition-opacity p-1 hover:bg-accent rounded text-muted-foreground hover:text-foreground"
title="Fork conversation from here"
>
<GitBranch className="h-3 w-3" />
</button>
) : undefined
}
>
<FunctionCalls
functionCalls={functionCalls}
messageIndex={messageIndex}
expandedFunctionCalls={expandedFunctionCalls}
onToggle={onToggle}
/>
<MarkdownRenderer chatMessage={content} />
{isStreaming && (
<span className="inline-block w-2 h-4 bg-blue-400 ml-1 animate-pulse"></span>
)}
</Message>
);
return (
<motion.div
initial={animate ? { opacity: 0, y: -20 } : { opacity: 1, y: 0 }}
animate={{ opacity: 1, y: 0 }}
transition={animate ? { duration: 0.4, delay: delay, ease: "easeOut" } : { duration: 0 }}
className={isCompleted ? "opacity-50" : ""}
>
<Message
icon={
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<DogIcon
className="h-6 w-6 transition-colors duration-300"
disabled={isCompleted}
/>
</div>
}
actions={
showForkButton && onFork ? (
<button
type="button"
onClick={onFork}
className="opacity-0 group-hover:opacity-100 transition-opacity p-1 hover:bg-accent rounded text-muted-foreground hover:text-foreground"
title="Fork conversation from here"
>
<GitBranch className="h-3 w-3" />
</button>
) : undefined
}
>
<FunctionCalls
functionCalls={functionCalls}
messageIndex={messageIndex}
expandedFunctionCalls={expandedFunctionCalls}
onToggle={onToggle}
/>
<div className="relative">
<MarkdownRenderer
className={cn("text-sm py-1.5 transition-colors duration-300", isCompleted ? "text-placeholder-foreground" : "text-foreground")}
chatMessage={
isStreaming
? content +
' <span class="inline-block w-1 h-4 bg-primary ml-1 animate-pulse"></span>'
: content
}
/>
</div>
</Message>
</motion.div>
);
}

View file

@ -1,282 +1,281 @@
import { Check, Funnel, Loader2, Plus, X } from "lucide-react";
import TextareaAutosize from "react-textarea-autosize";
import { ArrowRight, Check, Funnel, Loader2, Plus, X } from "lucide-react";
import { forwardRef, useImperativeHandle, useRef } from "react";
import TextareaAutosize from "react-textarea-autosize";
import type { FilterColor } from "@/components/filter-icon-popover";
import { filterAccentClasses } from "@/components/knowledge-filter-panel";
import { Button } from "@/components/ui/button";
import {
Popover,
PopoverAnchor,
PopoverContent,
Popover,
PopoverAnchor,
PopoverContent,
} from "@/components/ui/popover";
import type { KnowledgeFilterData } from "../types";
import { FilterColor } from "@/components/filter-icon-popover";
export interface ChatInputHandle {
focusInput: () => void;
clickFileInput: () => void;
focusInput: () => void;
clickFileInput: () => void;
}
interface ChatInputProps {
input: string;
loading: boolean;
isUploading: boolean;
selectedFilter: KnowledgeFilterData | null;
isFilterDropdownOpen: boolean;
availableFilters: KnowledgeFilterData[];
filterSearchTerm: string;
selectedFilterIndex: number;
anchorPosition: { x: number; y: number } | null;
textareaHeight: number;
parsedFilterData: { color?: FilterColor } | null;
onSubmit: (e: React.FormEvent) => void;
onChange: (e: React.ChangeEvent<HTMLTextAreaElement>) => void;
onKeyDown: (e: React.KeyboardEvent<HTMLTextAreaElement>) => void;
onHeightChange: (height: number) => void;
onFilterSelect: (filter: KnowledgeFilterData | null) => void;
onAtClick: () => void;
onFilePickerChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
onFilePickerClick: () => void;
setSelectedFilter: (filter: KnowledgeFilterData | null) => void;
setIsFilterHighlighted: (highlighted: boolean) => void;
setIsFilterDropdownOpen: (open: boolean) => void;
input: string;
loading: boolean;
isUploading: boolean;
selectedFilter: KnowledgeFilterData | null;
isFilterDropdownOpen: boolean;
availableFilters: KnowledgeFilterData[];
filterSearchTerm: string;
selectedFilterIndex: number;
anchorPosition: { x: number; y: number } | null;
textareaHeight: number;
parsedFilterData: { color?: FilterColor } | null;
onSubmit: (e: React.FormEvent) => void;
onChange: (e: React.ChangeEvent<HTMLTextAreaElement>) => void;
onKeyDown: (e: React.KeyboardEvent<HTMLTextAreaElement>) => void;
onHeightChange: (height: number) => void;
onFilterSelect: (filter: KnowledgeFilterData | null) => void;
onAtClick: () => void;
onFilePickerChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
onFilePickerClick: () => void;
setSelectedFilter: (filter: KnowledgeFilterData | null) => void;
setIsFilterHighlighted: (highlighted: boolean) => void;
setIsFilterDropdownOpen: (open: boolean) => void;
}
export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
{
input,
loading,
isUploading,
selectedFilter,
isFilterDropdownOpen,
availableFilters,
filterSearchTerm,
selectedFilterIndex,
anchorPosition,
textareaHeight,
parsedFilterData,
onSubmit,
onChange,
onKeyDown,
onHeightChange,
onFilterSelect,
onAtClick,
onFilePickerChange,
onFilePickerClick,
setSelectedFilter,
setIsFilterHighlighted,
setIsFilterDropdownOpen,
},
ref
) => {
const inputRef = useRef<HTMLTextAreaElement>(null);
const fileInputRef = useRef<HTMLInputElement>(null);
export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>(
(
{
input,
loading,
isUploading,
selectedFilter,
isFilterDropdownOpen,
availableFilters,
filterSearchTerm,
selectedFilterIndex,
anchorPosition,
textareaHeight,
parsedFilterData,
onSubmit,
onChange,
onKeyDown,
onHeightChange,
onFilterSelect,
onAtClick,
onFilePickerChange,
onFilePickerClick,
setSelectedFilter,
setIsFilterHighlighted,
setIsFilterDropdownOpen,
},
ref,
) => {
const inputRef = useRef<HTMLTextAreaElement>(null);
const fileInputRef = useRef<HTMLInputElement>(null);
useImperativeHandle(ref, () => ({
focusInput: () => {
inputRef.current?.focus();
},
clickFileInput: () => {
fileInputRef.current?.click();
},
}));
useImperativeHandle(ref, () => ({
focusInput: () => {
inputRef.current?.focus();
},
clickFileInput: () => {
fileInputRef.current?.click();
},
}));
return (
<div className="pb-8 pt-4 flex px-6">
<div className="w-full">
<form onSubmit={onSubmit} className="relative">
<div className="relative w-full bg-muted/20 rounded-lg border border-border/50 focus-within:ring-1 focus-within:ring-ring">
{selectedFilter && (
<div className="flex items-center gap-2 px-4 pt-3 pb-1">
<span
className={`inline-flex items-center gap-1 px-2 py-1 rounded-full text-xs font-medium transition-colors ${
filterAccentClasses[parsedFilterData?.color || "zinc"]
}`}
>
@filter:{selectedFilter.name}
<button
type="button"
onClick={() => {
setSelectedFilter(null);
setIsFilterHighlighted(false);
}}
className="ml-1 rounded-full p-0.5"
>
<X className="h-3 w-3" />
</button>
</span>
</div>
)}
<div
className="relative"
style={{ height: `${textareaHeight + 60}px` }}
>
<TextareaAutosize
ref={inputRef}
value={input}
onChange={onChange}
onKeyDown={onKeyDown}
onHeightChange={onHeightChange}
maxRows={7}
minRows={2}
placeholder="Type to ask a question..."
disabled={loading}
className={`w-full bg-transparent px-4 ${
selectedFilter ? "pt-2" : "pt-4"
} focus-visible:outline-none resize-none`}
rows={2}
/>
{/* Safe area at bottom for buttons */}
<div
className="absolute bottom-0 left-0 right-0 bg-transparent pointer-events-none"
style={{ height: "60px" }}
/>
</div>
</div>
<input
ref={fileInputRef}
type="file"
onChange={onFilePickerChange}
className="hidden"
accept=".pdf,.doc,.docx,.txt,.md,.rtf,.odt"
/>
<Button
type="button"
variant="outline"
size="iconSm"
className="absolute bottom-3 left-3 h-8 w-8 p-0 rounded-full hover:bg-muted/50"
onMouseDown={e => {
e.preventDefault();
}}
onClick={onAtClick}
data-filter-button
>
<Funnel className="h-4 w-4" />
</Button>
<Popover
open={isFilterDropdownOpen}
onOpenChange={open => {
setIsFilterDropdownOpen(open);
}}
>
{anchorPosition && (
<PopoverAnchor
asChild
style={{
position: "fixed",
left: anchorPosition.x,
top: anchorPosition.y,
width: 1,
height: 1,
pointerEvents: "none",
}}
>
<div />
</PopoverAnchor>
)}
<PopoverContent
className="w-64 p-2"
side="top"
align="start"
sideOffset={6}
alignOffset={-18}
onOpenAutoFocus={e => {
// Prevent auto focus on the popover content
e.preventDefault();
// Keep focus on the input
}}
>
<div className="space-y-1">
{filterSearchTerm && (
<div className="px-2 py-1.5 text-xs font-medium text-muted-foreground">
Searching: @{filterSearchTerm}
</div>
)}
{availableFilters.length === 0 ? (
<div className="px-2 py-3 text-sm text-muted-foreground">
No knowledge filters available
</div>
) : (
<>
{!filterSearchTerm && (
<button
type="button"
onClick={() => onFilterSelect(null)}
className={`w-full text-left px-2 py-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
selectedFilterIndex === -1 ? "bg-muted/50" : ""
}`}
>
<span>No knowledge filter</span>
{!selectedFilter && (
<Check className="h-4 w-4 shrink-0" />
)}
</button>
)}
{availableFilters
.filter(filter =>
filter.name
.toLowerCase()
.includes(filterSearchTerm.toLowerCase())
)
.map((filter, index) => (
<button
key={filter.id}
type="button"
onClick={() => onFilterSelect(filter)}
className={`w-full overflow-hidden text-left px-2 py-2 gap-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
index === selectedFilterIndex ? "bg-muted/50" : ""
}`}
>
<div className="overflow-hidden">
<div className="font-medium truncate">
{filter.name}
</div>
{filter.description && (
<div className="text-xs text-muted-foreground truncate">
{filter.description}
</div>
)}
</div>
{selectedFilter?.id === filter.id && (
<Check className="h-4 w-4 shrink-0" />
)}
</button>
))}
{availableFilters.filter(filter =>
filter.name
.toLowerCase()
.includes(filterSearchTerm.toLowerCase())
).length === 0 &&
filterSearchTerm && (
<div className="px-2 py-3 text-sm text-muted-foreground">
No filters match &quot;{filterSearchTerm}&quot;
</div>
)}
</>
)}
</div>
</PopoverContent>
</Popover>
<Button
type="button"
variant="outline"
size="iconSm"
onClick={onFilePickerClick}
disabled={isUploading}
className="absolute bottom-3 left-12 h-8 w-8 p-0 rounded-full hover:bg-muted/50"
>
<Plus className="h-4 w-4" />
</Button>
<Button
type="submit"
disabled={!input.trim() || loading}
className="absolute bottom-3 right-3 rounded-lg h-10 px-4"
>
{loading ? <Loader2 className="h-4 w-4 animate-spin" /> : "Send"}
</Button>
</form>
</div>
</div>
);
});
return (
<div className="w-full">
<form onSubmit={onSubmit} className="relative">
<div className="relative flex items-center w-full p-2 gap-2 rounded-xl border border-input focus-within:ring-1 focus-within:ring-ring">
{selectedFilter ? (
<span
className={`inline-flex items-center p-1 rounded-sm text-xs font-medium transition-colors ${
filterAccentClasses[parsedFilterData?.color || "zinc"]
}`}
>
{selectedFilter.name}
<button
type="button"
onClick={() => {
setSelectedFilter(null);
setIsFilterHighlighted(false);
}}
className="ml-0.5 rounded-full p-0.5"
>
<X className="h-4 w-4" />
</button>
</span>
) : (
<Button
type="button"
variant="ghost"
size="iconSm"
className="h-8 w-8 p-0 rounded-md hover:bg-muted/50"
onMouseDown={(e) => {
e.preventDefault();
}}
onClick={onAtClick}
data-filter-button
>
<Funnel className="h-4 w-4" />
</Button>
)}
<div
className="relative flex-1"
style={{ height: `${textareaHeight}px` }}
>
<TextareaAutosize
ref={inputRef}
value={input}
onChange={onChange}
onKeyDown={onKeyDown}
onHeightChange={onHeightChange}
maxRows={7}
minRows={1}
placeholder="Ask a question..."
disabled={loading}
className={`w-full text-sm bg-transparent focus-visible:outline-none resize-none`}
rows={1}
/>
</div>
<Button
type="button"
variant="ghost"
size="iconSm"
onClick={onFilePickerClick}
disabled={isUploading}
className="h-8 w-8 p-0 !rounded-md hover:bg-muted/50"
>
<Plus className="h-4 w-4" />
</Button>
<Button
variant="default"
type="submit"
size="iconSm"
disabled={!input.trim() || loading}
className="!rounded-md h-8 w-8 p-0"
>
{loading ? (
<Loader2 className="h-4 w-4 animate-spin" />
) : (
<ArrowRight className="h-4 w-4" />
)}
</Button>
</div>
<input
ref={fileInputRef}
type="file"
onChange={onFilePickerChange}
className="hidden"
accept=".pdf,.doc,.docx,.txt,.md,.rtf,.odt"
/>
<Popover
open={isFilterDropdownOpen}
onOpenChange={(open) => {
setIsFilterDropdownOpen(open);
}}
>
{anchorPosition && (
<PopoverAnchor
asChild
style={{
position: "fixed",
left: anchorPosition.x,
top: anchorPosition.y,
width: 1,
height: 1,
pointerEvents: "none",
}}
>
<div />
</PopoverAnchor>
)}
<PopoverContent
className="w-64 p-2"
side="top"
align="start"
sideOffset={6}
alignOffset={-18}
onOpenAutoFocus={(e) => {
// Prevent auto focus on the popover content
e.preventDefault();
// Keep focus on the input
}}
>
<div className="space-y-1">
{filterSearchTerm && (
<div className="px-2 py-1.5 text-xs font-medium text-muted-foreground">
Searching: @{filterSearchTerm}
</div>
)}
{availableFilters.length === 0 ? (
<div className="px-2 py-3 text-sm text-muted-foreground">
No knowledge filters available
</div>
) : (
<>
{!filterSearchTerm && (
<button
type="button"
onClick={() => onFilterSelect(null)}
className={`w-full text-left px-2 py-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
selectedFilterIndex === -1 ? "bg-muted/50" : ""
}`}
>
<span>No knowledge filter</span>
{!selectedFilter && (
<Check className="h-4 w-4 shrink-0" />
)}
</button>
)}
{availableFilters
.filter((filter) =>
filter.name
.toLowerCase()
.includes(filterSearchTerm.toLowerCase()),
)
.map((filter, index) => (
<button
key={filter.id}
type="button"
onClick={() => onFilterSelect(filter)}
className={`w-full overflow-hidden text-left px-2 py-2 gap-2 text-sm rounded hover:bg-muted/50 flex items-center justify-between ${
index === selectedFilterIndex ? "bg-muted/50" : ""
}`}
>
<div className="overflow-hidden">
<div className="font-medium truncate">
{filter.name}
</div>
{filter.description && (
<div className="text-xs text-muted-foreground truncate">
{filter.description}
</div>
)}
</div>
{selectedFilter?.id === filter.id && (
<Check className="h-4 w-4 shrink-0" />
)}
</button>
))}
{availableFilters.filter((filter) =>
filter.name
.toLowerCase()
.includes(filterSearchTerm.toLowerCase()),
).length === 0 &&
filterSearchTerm && (
<div className="px-2 py-3 text-sm text-muted-foreground">
No filters match &quot;{filterSearchTerm}&quot;
</div>
)}
</>
)}
</div>
</PopoverContent>
</Popover>
</form>
</div>
);
},
);
ChatInput.displayName = "ChatInput";

View file

@ -1,33 +1,58 @@
import { User } from "lucide-react";
import { FileText, User } from "lucide-react";
import { motion } from "motion/react";
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { useAuth } from "@/contexts/auth-context";
import { cn } from "@/lib/utils";
import { Message } from "./message";
interface UserMessageProps {
content: string;
content: string;
isCompleted?: boolean;
animate?: boolean;
files?: string;
}
export function UserMessage({ content }: UserMessageProps) {
const { user } = useAuth();
export function UserMessage({ content, isCompleted, animate = true, files }: UserMessageProps) {
const { user } = useAuth();
return (
<Message
icon={
<Avatar className="w-8 h-8 flex-shrink-0 select-none">
<AvatarImage draggable={false} src={user?.picture} alt={user?.name} />
<AvatarFallback className="text-sm bg-primary/20 text-primary">
{user?.name ? (
user.name.charAt(0).toUpperCase()
) : (
<User className="h-4 w-4" />
)}
</AvatarFallback>
</Avatar>
}
>
<p className="text-foreground whitespace-pre-wrap break-words overflow-wrap-anywhere">
{content}
</p>
</Message>
);
return (
<motion.div
initial={animate ? { opacity: 0, y: -20 } : { opacity: 1, y: 0 }}
animate={{ opacity: 1, y: 0 }}
transition={animate ? { duration: 0.4, delay: 0.2, ease: "easeOut" } : { duration: 0 }}
className={isCompleted ? "opacity-50" : ""}
>
<Message
icon={
<Avatar className="w-8 h-8 rounded-lg flex-shrink-0 select-none">
<AvatarImage draggable={false} src={user?.picture} alt={user?.name} />
<AvatarFallback
className={cn(
isCompleted ? "text-placeholder-foreground" : "text-primary",
"text-sm bg-accent/20 rounded-lg transition-colors duration-300",
)}
>
{user?.name ? user.name.charAt(0).toUpperCase() : <User className="h-4 w-4" />}
</AvatarFallback>
</Avatar>
}
>
{files && (
<p className="text-muted-foreground flex items-center gap-2 font-normal text-mmd py-1.5 whitespace-pre-wrap break-words overflow-wrap-anywhere transition-colors duration-300">
<FileText className="h-4 w-4" />
{files}
</p>
)}
<p
className={cn(
"text-foreground text-sm py-1.5 whitespace-pre-wrap break-words overflow-wrap-anywhere transition-colors duration-300",
isCompleted ? "text-placeholder-foreground" : "text-foreground",
)}
>
{content}
</p>
</Message>
</motion.div>
);
}

View file

@ -1,46 +1,55 @@
import { motion, AnimatePresence } from "motion/react";
import { AnimatePresence, motion } from "motion/react";
import { cn } from "@/lib/utils";
export default function Nudges({
nudges,
handleSuggestionClick,
nudges,
onboarding,
handleSuggestionClick,
}: {
nudges: string[];
handleSuggestionClick: (suggestion: string) => void;
nudges: string[];
onboarding?: boolean;
handleSuggestionClick: (suggestion: string) => void;
}) {
return (
<div className="flex-shrink-0 h-12 w-full overflow-hidden">
<AnimatePresence>
{nudges.length > 0 && (
<motion.div
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: 20 }}
transition={{
duration: 0.2,
ease: "easeInOut",
}}
>
<div className="relative px-6 pt-4 flex justify-center">
<div className="w-full max-w-[75%]">
<div className="flex gap-3 justify-start overflow-x-auto scrollbar-hide">
{nudges.map((suggestion: string, index: number) => (
<button
key={index}
onClick={() => handleSuggestionClick(suggestion)}
className="px-2 py-1.5 bg-muted hover:bg-muted/50 rounded-lg text-sm text-placeholder-foreground hover:text-foreground transition-colors whitespace-nowrap"
>
{suggestion}
</button>
))}
</div>
{/* Fade out gradient on the right */}
<div className="absolute right-0 top-0 bottom-0 w-8 bg-gradient-to-l from-background to-transparent pointer-events-none"></div>
</div>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
);
return (
<div className="flex-shrink-0 h-12 w-full overflow-hidden">
<AnimatePresence>
{nudges.length > 0 && (
<motion.div
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: 20 }}
transition={{
duration: 0.2,
ease: "easeInOut",
}}
>
<div
className="relative flex"
>
<div className="w-full">
<div className="flex gap-3 justify-start overflow-x-auto scrollbar-hide">
{nudges.map((suggestion: string, index: number) => (
<button
key={index}
onClick={() => handleSuggestionClick(suggestion)}
className={cn(
onboarding
? "text-foreground"
: "text-placeholder-foreground hover:text-foreground",
"bg-background border hover:bg-background/50 px-2 py-1.5 rounded-lg text-sm transition-colors whitespace-nowrap",
)}
>
{suggestion}
</button>
))}
</div>
{/* Fade out gradient on the right */}
<div className="absolute right-0 top-0 bottom-0 w-8 bg-gradient-to-l from-background to-transparent pointer-events-none"></div>
</div>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
);
}

File diff suppressed because it is too large Load diff

View file

@ -4,6 +4,7 @@ export interface Message {
timestamp: Date;
functionCalls?: FunctionCall[];
isStreaming?: boolean;
source?: "langflow" | "chat";
}
export interface FunctionCall {

View file

@ -109,15 +109,12 @@
@layer components {
.app-grid-arrangement {
--sidebar-width: 0px;
--notifications-width: 0px;
--filters-width: 0px;
--app-header-height: 53px;
--top-banner-height: 0px;
--header-height: 54px;
--sidebar-width: 280px;
@media (width >= 48rem) {
--sidebar-width: 288px;
}
&.notifications-open {
--notifications-width: 320px;
}
@ -132,7 +129,7 @@
width: 100%;
grid-template-rows:
var(--top-banner-height)
var(--app-header-height)
var(--header-height)
1fr;
grid-template-columns:
var(--sidebar-width)
@ -147,10 +144,6 @@
grid-template-rows 0.25s ease-in-out;
}
.header-arrangement {
@apply flex w-full items-center justify-between border-b border-border;
}
.header-start-display {
@apply flex items-center gap-2;
}
@ -352,6 +345,15 @@
@apply text-xs opacity-70;
}
.prose :where(strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) {
@apply text-current;
}
.prose :where(a):not(:where([class~="not-prose"],[class~="not-prose"] *))
{
@apply text-current;
}
.box-shadow-inner::after {
content: " ";
position: absolute;

View file

@ -38,7 +38,7 @@ export default function RootLayout({
return (
<html lang="en" suppressHydrationWarning>
<body
className={`${inter.variable} ${jetbrainsMono.variable} ${chivo.variable} antialiased h-lvh w-full overflow-hidden`}
className={`${inter.variable} ${jetbrainsMono.variable} ${chivo.variable} antialiased h-lvh w-full overflow-hidden bg-black`}
>
<ThemeProvider
attribute="class"

View file

@ -6,40 +6,29 @@ import { Suspense, useEffect } from "react";
import GoogleLogo from "@/components/logo/google-logo";
import Logo from "@/components/logo/logo";
import { Button } from "@/components/ui/button";
import { DotPattern } from "@/components/ui/dot-pattern";
import { useAuth } from "@/contexts/auth-context";
import { cn } from "@/lib/utils";
import { useGetSettingsQuery } from "../api/queries/useGetSettingsQuery";
function LoginPageContent() {
const { isLoading, isAuthenticated, isNoAuthMode, login } = useAuth();
const router = useRouter();
const searchParams = useSearchParams();
const { data: settings, isLoading: isSettingsLoading } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const redirect =
settings && !settings.edited
? "/onboarding"
: searchParams.get("redirect") || "/chat";
const redirect = searchParams.get("redirect") || "/chat";
// Redirect if already authenticated or in no-auth mode
useEffect(() => {
if (!isLoading && !isSettingsLoading && (isAuthenticated || isNoAuthMode)) {
if (!isLoading && (isAuthenticated || isNoAuthMode)) {
router.push(redirect);
}
}, [
isLoading,
isSettingsLoading,
isAuthenticated,
isNoAuthMode,
router,
redirect,
]);
if (isLoading || isSettingsLoading) {
if (isLoading) {
return (
<div className="min-h-screen flex items-center justify-center bg-background">
<div className="flex flex-col items-center gap-4">
@ -55,21 +44,10 @@ function LoginPageContent() {
}
return (
<div className="min-h-dvh relative flex gap-4 flex-col items-center justify-center bg-background p-4">
<DotPattern
width={24}
height={24}
cx={1}
cy={1}
cr={1}
className={cn(
"[mask-image:linear-gradient(to_bottom,white,transparent,transparent)]",
"text-input/70",
)}
/>
<div className="flex flex-col items-center justify-center gap-4 z-10">
<Logo className="fill-primary" width={32} height={28} />
<div className="flex flex-col items-center justify-center gap-8">
<div className="min-h-dvh relative flex gap-4 flex-col items-center justify-center bg-card rounded-lg m-4">
<div className="flex flex-col items-center justify-center gap-4 z-10 ">
<Logo className="fill-primary" width={50} height={40} />
<div className="flex flex-col items-center justify-center gap-16">
<h1 className="text-2xl font-medium font-chivo">Welcome to OpenRAG</h1>
<Button onClick={login} className="w-80 gap-1.5" size="lg">
<GoogleLogo className="h-4 w-4" />

View file

@ -0,0 +1,150 @@
"use client";
import { useEffect, useState } from "react";
import { StickToBottom } from "use-stick-to-bottom";
import { AssistantMessage } from "@/app/chat/components/assistant-message";
import { UserMessage } from "@/app/chat/components/user-message";
import Nudges from "@/app/chat/nudges";
import type { Message } from "@/app/chat/types";
import OnboardingCard from "@/app/onboarding/components/onboarding-card";
import { useChatStreaming } from "@/hooks/useChatStreaming";
import { OnboardingStep } from "./onboarding-step";
import OnboardingUpload from "./onboarding-upload";
/**
 * Chat-style onboarding transcript that walks the user through three steps:
 * (1) configure a model provider, (2) try a sample "nudge" question answered
 * by the streaming chat backend, and (3) upload initial data.
 *
 * Step progression is owned by the parent: this component only reports
 * completion via `handleStepComplete` and renders according to `currentStep`.
 */
export function OnboardingContent({
  handleStepComplete,
  currentStep,
}: {
  // Called to advance the parent-owned step counter by one.
  handleStepComplete: () => void;
  // Index of the step currently in progress (0-based).
  currentStep: number;
}) {
  // Backend response id of the last completed chat turn; threaded back into
  // the next request as `previousResponseId` to keep conversation context.
  const [responseId, setResponseId] = useState<string | null>(null);
  // The suggestion the user clicked in step 2; doubles as the user-message
  // text and as the "step 2 done" signal in the JSX below.
  const [selectedNudge, setSelectedNudge] = useState<string>("");
  // Final (non-streaming) assistant reply for the nudge, set on completion
  // or replaced with a canned apology on error.
  const [assistantMessage, setAssistantMessage] = useState<Message | null>(
    null,
  );
  // Loading state + rotating status lines forwarded into step 1's UI while
  // the model-provider setup is running (driven by OnboardingCard).
  const [isLoadingModels, setIsLoadingModels] = useState<boolean>(false);
  const [loadingStatus, setLoadingStatus] = useState<string[]>([]);
  // Once step 1's card has completed, stop reserving layout space for the
  // "thinking" indicator in OnboardingStep.
  const [hasStartedOnboarding, setHasStartedOnboarding] = useState<boolean>(false);
  const { streamingMessage, isLoading, sendMessage } = useChatStreaming({
    onComplete: (message, newResponseId) => {
      // Persist the finished message and remember the response id (if any)
      // for conversation continuity on subsequent sends.
      setAssistantMessage(message);
      if (newResponseId) {
        setResponseId(newResponseId);
      }
    },
    onError: (error) => {
      // Surface failures as a normal assistant message so the transcript
      // flow (and the step-3 gate below) still progresses.
      console.error("Chat error:", error);
      setAssistantMessage({
        role: "assistant",
        content:
          "Sorry, I couldn't connect to the chat service. Please try again.",
        timestamp: new Date(),
      });
    },
  });
  // Canned suggestion(s) shown in step 2. NOTE(review): recreated every
  // render; harmless but could be hoisted to module scope.
  const NUDGES = ["What is OpenRAG?"];
  const handleNudgeClick = async (nudge: string) => {
    setSelectedNudge(nudge);
    setAssistantMessage(null);
    // Delay the send by 1.5s — presumably to let the step-completion
    // animation play before streaming starts (TODO confirm).
    // NOTE(review): timer is not cleared on unmount; a send could fire
    // after the component is gone — verify whether that matters here.
    setTimeout(async () => {
      await sendMessage({
        prompt: nudge,
        previousResponseId: responseId || undefined,
      });
    }, 1500);
  };
  // Determine which message to show (streaming takes precedence)
  const displayMessage = streamingMessage || assistantMessage;
  useEffect(() => {
    // Auto-advance past step 1 once the nudge reply has fully arrived
    // (streaming finished and a message exists).
    if (currentStep === 1 && !isLoading && !!displayMessage) {
      handleStepComplete();
    }
  }, [isLoading, displayMessage, handleStepComplete, currentStep]);
  return (
    <StickToBottom
      className="flex h-full flex-1 flex-col"
      resize="smooth"
      initial="instant"
      mass={1}
    >
      <StickToBottom.Content className="flex flex-col min-h-full overflow-x-hidden px-8 py-6">
        <div className="flex flex-col place-self-center w-full space-y-6">
          {/* Step 1: model-provider setup card */}
          <OnboardingStep
            isVisible={currentStep >= 0}
            isCompleted={currentStep > 0}
            text="Let's get started by setting up your model provider."
            isLoadingModels={isLoadingModels}
            loadingStatus={loadingStatus}
            reserveSpaceForThinking={!hasStartedOnboarding}
          >
            <OnboardingCard
              onComplete={() => {
                setHasStartedOnboarding(true);
                handleStepComplete();
              }}
              setIsLoadingModels={setIsLoadingModels}
              setLoadingStatus={setLoadingStatus}
            />
          </OnboardingStep>
          {/* Step 2: suggested questions; clicking one marks the step done */}
          <OnboardingStep
            isVisible={currentStep >= 1}
            isCompleted={currentStep > 1 || !!selectedNudge}
            text="Excellent, let's move on to learning the basics."
          >
            <div className="py-2">
              <Nudges
                onboarding
                nudges={NUDGES}
                handleSuggestionClick={handleNudgeClick}
              />
            </div>
          </OnboardingStep>
          {/* User message - show when nudge is selected */}
          {currentStep >= 1 && !!selectedNudge && (
            <UserMessage
              content={selectedNudge}
              isCompleted={currentStep > 2}
            />
          )}
          {/* Assistant message - show streaming or final message */}
          {currentStep >= 1 &&
            !!selectedNudge &&
            (displayMessage || isLoading) && (
              <AssistantMessage
                content={displayMessage?.content || ""}
                functionCalls={displayMessage?.functionCalls}
                messageIndex={0}
                expandedFunctionCalls={new Set()}
                onToggle={() => {}}
                isStreaming={!!streamingMessage}
                isCompleted={currentStep > 2}
              />
            )}
          {/* Step 3: data upload — gated until the nudge reply is complete */}
          <OnboardingStep
            isVisible={currentStep >= 2 && !isLoading && !!displayMessage}
            isCompleted={currentStep > 2}
            text="Lastly, let's add your data."
            hideIcon={true}
          >
            <OnboardingUpload onComplete={handleStepComplete} />
          </OnboardingStep>
        </div>
      </StickToBottom.Content>
    </StickToBottom>
  );
}

View file

@ -1,18 +1,61 @@
import { ReactNode, useEffect, useState } from "react";
import { motion, AnimatePresence } from "motion/react";
import { AnimatePresence, motion } from "motion/react";
import { type ReactNode, useEffect, useState } from "react";
import { Message } from "@/app/chat/components/message";
import DogIcon from "@/components/logo/dog-icon";
import { AnimatedProcessingIcon } from "@/components/ui/animated-processing-icon";
import { MarkdownRenderer } from "@/components/markdown-renderer";
import { cn } from "@/lib/utils";
interface OnboardingStepProps {
text: string;
children: ReactNode;
children?: ReactNode;
isVisible: boolean;
isCompleted?: boolean;
icon?: ReactNode;
isMarkdown?: boolean;
hideIcon?: boolean;
isLoadingModels?: boolean;
loadingStatus?: string[];
reserveSpaceForThinking?: boolean;
}
export function OnboardingStep({ text, children, isVisible, isCompleted = false }: OnboardingStepProps) {
export function OnboardingStep({
text,
children,
isVisible,
isCompleted = false,
icon,
isMarkdown = false,
hideIcon = false,
isLoadingModels = false,
loadingStatus = [],
reserveSpaceForThinking = false,
}: OnboardingStepProps) {
const [displayedText, setDisplayedText] = useState("");
const [showChildren, setShowChildren] = useState(false);
const [currentStatusIndex, setCurrentStatusIndex] = useState<number>(0);
// Cycle through loading status messages once
useEffect(() => {
if (!isLoadingModels || loadingStatus.length === 0) {
setCurrentStatusIndex(0);
return;
}
const interval = setInterval(() => {
setCurrentStatusIndex((prev) => {
const nextIndex = prev + 1;
// Stop at the last message
if (nextIndex >= loadingStatus.length - 1) {
clearInterval(interval);
return loadingStatus.length - 1;
}
return nextIndex;
});
}, 1500); // Change status every 1.5 seconds
return () => clearInterval(interval);
}, [isLoadingModels, loadingStatus]);
useEffect(() => {
if (!isVisible) {
@ -21,6 +64,12 @@ export function OnboardingStep({ text, children, isVisible, isCompleted = false
return;
}
if (isCompleted) {
setDisplayedText(text);
setShowChildren(true);
return;
}
let currentIndex = 0;
setDisplayedText("");
setShowChildren(false);
@ -33,44 +82,109 @@ export function OnboardingStep({ text, children, isVisible, isCompleted = false
clearInterval(interval);
setShowChildren(true);
}
}, 10); // 10ms per character
}, 20); // 20ms per character
return () => clearInterval(interval);
}, [text, isVisible]);
}, [text, isVisible, isCompleted]);
if (!isVisible) return null;
return (
<motion.div
initial={{ opacity: 0, y: 20 }}
initial={{ opacity: 0, y: -20 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.4, ease: "easeOut" }}
transition={{ duration: 0.4, delay: 0.4, ease: "easeOut" }}
className={isCompleted ? "opacity-50" : ""}
>
<Message
icon={
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<DogIcon className="h-6 w-6 text-accent-foreground" disabled={isCompleted} />
</div>
hideIcon ? (
<div className="w-8 h-8 rounded-lg flex-shrink-0" />
) : (
icon || (
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<DogIcon
className="h-6 w-6 text-accent-foreground transition-colors duration-300"
disabled={isCompleted}
/>
</div>
)
)
}
>
<div className="space-y-4">
<p className={`text-foreground text-sm py-1.5 ${isCompleted ? "text-placeholder-foreground" : ""}`}>
{displayedText}
{!showChildren && !isCompleted && <span className="inline-block w-1 h-4 bg-primary ml-1 animate-pulse" />}
</p>
<AnimatePresence>
{showChildren && !isCompleted && (
<motion.div
initial={{ opacity: 0, y: 10 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.3, ease: "easeOut" }}
<div>
{isLoadingModels && loadingStatus.length > 0 ? (
<div className="flex flex-col gap-2 py-1.5">
<div className="flex items-center gap-2">
<div className="relative w-1.5 h-2.5">
<AnimatedProcessingIcon className="text-current shrink-0 absolute inset-0" />
</div>
<span className="text-mmd font-medium text-muted-foreground">
Thinking
</span>
</div>
<div className="overflow-hidden">
<div className="flex items-center gap-5 overflow-y-hidden relative h-6">
<div className="w-px h-6 bg-border" />
<div className="relative h-5 w-full">
<AnimatePresence mode="sync" initial={false}>
<motion.span
key={currentStatusIndex}
initial={{ y: 24, opacity: 0 }}
animate={{ y: 0, opacity: 1 }}
exit={{ y: -24, opacity: 0 }}
transition={{ duration: 0.3, ease: "easeInOut" }}
className="text-mmd font-medium text-primary absolute left-0"
>
{loadingStatus[currentStatusIndex]}
</motion.span>
</AnimatePresence>
</div>
</div>
</div>
</div>
) : isMarkdown ? (
<MarkdownRenderer
className={cn(
isCompleted
? "text-placeholder-foreground"
: "text-foreground",
"text-sm py-1.5 transition-colors duration-300",
)}
chatMessage={text}
/>
) : (
<>
<p
className={`text-foreground text-sm py-1.5 transition-colors duration-300 ${
isCompleted ? "text-placeholder-foreground" : ""
}`}
>
{children}
</motion.div>
)}
</AnimatePresence>
{displayedText}
{!showChildren && !isCompleted && (
<span className="inline-block w-1 h-3.5 bg-primary ml-1 animate-pulse" />
)}
</p>
{reserveSpaceForThinking && (
<div className="h-8" />
)}
</>
)}
{children && (
<AnimatePresence>
{((showChildren && !isCompleted) || isMarkdown) && (
<motion.div
initial={{ opacity: 0, y: -10 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.3, delay: 0.3, ease: "easeOut" }}
>
<div className="pt-2">
{children}</div>
</motion.div>
)}
</AnimatePresence>
)}
</div>
</Message>
</motion.div>

View file

@ -0,0 +1,108 @@
import { ChangeEvent, useRef, useState } from "react";
import { Button } from "@/components/ui/button";
import { uploadFileForContext } from "@/lib/upload-utils";
import { AnimatePresence, motion } from "motion/react";
import { AnimatedProviderSteps } from "@/app/onboarding/components/animated-provider-steps";
interface OnboardingUploadProps {
onComplete: () => void;
}
const OnboardingUpload = ({ onComplete }: OnboardingUploadProps) => {
const fileInputRef = useRef<HTMLInputElement>(null);
const [isUploading, setIsUploading] = useState(false);
const [currentStep, setCurrentStep] = useState<number | null>(null);
const STEP_LIST = [
"Uploading your document",
"Processing your document",
];
const resetFileInput = () => {
if (fileInputRef.current) {
fileInputRef.current.value = "";
}
};
const handleUploadClick = () => {
fileInputRef.current?.click();
};
const performUpload = async (file: File) => {
setIsUploading(true);
try {
setCurrentStep(0);
await uploadFileForContext(file);
console.log("Document uploaded successfully");
} catch (error) {
console.error("Upload failed", (error as Error).message);
} finally {
setIsUploading(false);
await new Promise(resolve => setTimeout(resolve, 1000));
setCurrentStep(STEP_LIST.length);
await new Promise(resolve => setTimeout(resolve, 500));
onComplete();
}
};
const handleFileChange = async (event: ChangeEvent<HTMLInputElement>) => {
const selectedFile = event.target.files?.[0];
if (!selectedFile) {
resetFileInput();
return;
}
try {
await performUpload(selectedFile);
} catch (error) {
console.error("Unable to prepare file for upload", (error as Error).message);
} finally {
resetFileInput();
}
};
return (
<AnimatePresence mode="wait">
{currentStep === null ? (
<motion.div
key="user-ingest"
initial={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: -24 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
>
<Button
size="sm"
variant="outline"
onClick={handleUploadClick}
disabled={isUploading}
>
{isUploading ? "Uploading..." : "Add a Document"}
</Button>
<input
ref={fileInputRef}
type="file"
onChange={handleFileChange}
className="hidden"
accept=".pdf,.doc,.docx,.txt,.md,.rtf,.odt"
/>
</motion.div>
) : (
<motion.div
key="ingest-steps"
initial={{ opacity: 0, y: 24 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
>
<AnimatedProviderSteps
currentStep={currentStep}
setCurrentStep={setCurrentStep}
steps={STEP_LIST}
/>
</motion.div>
)}
</AnimatePresence>
)
}
export default OnboardingUpload;

View file

@ -8,17 +8,17 @@ export function ProgressBar({ currentStep, totalSteps }: ProgressBarProps) {
return (
<div className="w-full">
<div className="flex items-center max-w-md mx-auto gap-3">
<div className="flex-1 h-2 bg-background rounded-full overflow-hidden">
<div className="flex items-center max-w-48 mx-auto gap-3">
<div className="flex-1 h-1 bg-background rounded-full overflow-hidden">
<div
className="h-full transition-all duration-300 ease-in-out"
style={{
width: `${progressPercentage}%`,
background: 'linear-gradient(to right, #818CF8, #F472B6)'
background: 'linear-gradient(to right, #818CF8, #22A7AF)'
}}
/>
</div>
<span className="text-sm text-muted-foreground whitespace-nowrap">
<span className="text-xs text-muted-foreground whitespace-nowrap">
{currentStep + 1}/{totalSteps}
</span>
</div>

View file

@ -1,109 +1,44 @@
"use client";
import { Suspense, useState } from "react";
import { ProtectedRoute } from "@/components/protected-route";
import { DoclingHealthBanner } from "@/components/docling-health-banner";
import { DotPattern } from "@/components/ui/dot-pattern";
import { cn } from "@/lib/utils";
import { OnboardingStep } from "./components/onboarding-step";
import { ProtectedRoute } from "@/components/protected-route";
import { OnboardingContent } from "./components/onboarding-content";
import { ProgressBar } from "./components/progress-bar";
import OnboardingCard from "../onboarding/components/onboarding-card";
const TOTAL_STEPS = 4;
function NewOnboardingPage() {
const [currentStep, setCurrentStep] = useState(0);
const [currentStep, setCurrentStep] = useState(0);
const handleStepComplete = () => {
if (currentStep < TOTAL_STEPS - 1) {
setCurrentStep(currentStep + 1);
}
};
const handleStepComplete = () => {
if (currentStep < TOTAL_STEPS - 1) {
setCurrentStep(currentStep + 1);
}
};
return (
<div className="min-h-dvh w-full flex gap-5 flex-col items-center justify-center bg-primary-foreground relative p-4">
<DoclingHealthBanner className="absolute top-0 left-0 right-0 w-full z-20" />
return (
<div className="min-h-dvh w-full flex gap-5 flex-col items-center justify-center bg-primary-foreground relative p-4">
<DoclingHealthBanner className="absolute top-0 left-0 right-0 w-full z-20" />
{/* Chat-like content area */}
<div className="flex flex-col items-center gap-5 w-full max-w-3xl z-10">
<div className="w-full h-[872px] bg-background border rounded-lg p-4 shadow-sm overflow-y-auto">
<div className="space-y-6">
<OnboardingStep
isVisible={currentStep >= 0}
isCompleted={currentStep > 0}
text="Let's get started by setting up your model provider."
>
<OnboardingCard onComplete={handleStepComplete} />
</OnboardingStep>
{/* Chat-like content area */}
<div className="flex flex-col items-center gap-5 w-full max-w-3xl z-10">
<div className="w-full h-[872px] bg-background border rounded-lg p-4 shadow-sm overflow-y-auto">
<OnboardingContent handleStepComplete={handleStepComplete} currentStep={currentStep} />
</div>
<OnboardingStep
isVisible={currentStep >= 1}
isCompleted={currentStep > 1}
text="Step 1: Configure your settings"
>
<div className="space-y-4">
<p className="text-muted-foreground">
Let's configure some basic settings for your account.
</p>
<button
onClick={handleStepComplete}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
>
Continue
</button>
</div>
</OnboardingStep>
<OnboardingStep
isVisible={currentStep >= 2}
isCompleted={currentStep > 2}
text="Step 2: Connect your model"
>
<div className="space-y-4">
<p className="text-muted-foreground">
Choose and connect your preferred AI model provider.
</p>
<button
onClick={handleStepComplete}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
>
Continue
</button>
</div>
</OnboardingStep>
<OnboardingStep
isVisible={currentStep >= 3}
isCompleted={currentStep > 3}
text="Step 3: You're all set!"
>
<div className="space-y-4">
<p className="text-muted-foreground">
Your account is ready to use. Let's start chatting!
</p>
<button
onClick={() => window.location.href = "/chat"}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
>
Go to Chat
</button>
</div>
</OnboardingStep>
</div>
</div>
<ProgressBar currentStep={currentStep} totalSteps={TOTAL_STEPS} />
</div>
</div>
);
<ProgressBar currentStep={currentStep} totalSteps={TOTAL_STEPS} />
</div>
</div>
);
}
export default function ProtectedNewOnboardingPage() {
return (
<ProtectedRoute>
<Suspense fallback={<div>Loading...</div>}>
<NewOnboardingPage />
</Suspense>
</ProtectedRoute>
);
return (
<ProtectedRoute>
<Suspense fallback={<div>Loading...</div>}>
<NewOnboardingPage />
</Suspense>
</ProtectedRoute>
);
}

View file

@ -75,20 +75,6 @@ export function AdvancedOnboarding({
/>
</LabelWrapper>
)}
{(hasLanguageModels || hasEmbeddingModels) && !updatedOnboarding && <Separator />}
{!updatedOnboarding && (
<LabelWrapper
label="Sample dataset"
description="Load sample data to chat with immediately."
id="sample-dataset"
flex
>
<Switch
checked={sampleDataset}
onCheckedChange={setSampleDataset}
/>
</LabelWrapper>
)}
</AccordionContent>
</AccordionItem>
</Accordion>

View file

@ -0,0 +1,87 @@
"use client";
import { AnimatePresence, motion } from "framer-motion";
import { CheckIcon } from "lucide-react";
import { useEffect } from "react";
import { AnimatedProcessingIcon } from "@/components/ui/animated-processing-icon";
import { cn } from "@/lib/utils";
export function AnimatedProviderSteps({
currentStep,
setCurrentStep,
steps,
}: {
currentStep: number;
setCurrentStep: (step: number) => void;
steps: string[];
}) {
useEffect(() => {
if (currentStep < steps.length - 1) {
const interval = setInterval(() => {
setCurrentStep(currentStep + 1);
}, 1500);
return () => clearInterval(interval);
}
}, [currentStep, setCurrentStep, steps]);
const isDone = currentStep >= steps.length;
return (
<div className="flex flex-col gap-2">
<div className="flex items-center gap-2">
<div
className={cn(
"transition-all duration-150 relative",
isDone ? "w-3.5 h-3.5" : "w-1.5 h-2.5",
)}
>
<CheckIcon
className={cn(
"text-accent-emerald-foreground shrink-0 w-3.5 h-3.5 absolute inset-0 transition-all duration-150",
isDone ? "opacity-100" : "opacity-0",
)}
/>
<AnimatedProcessingIcon
className={cn(
"text-current shrink-0 absolute inset-0 transition-all duration-150",
isDone ? "opacity-0" : "opacity-100",
)}
/>
</div>
<span className="text-mmd font-medium text-muted-foreground">
{isDone ? "Done" : "Thinking"}
</span>
</div>
<div className="overflow-hidden">
<AnimatePresence>
{!isDone && (
<motion.div
initial={{ opacity: 1, y: 0, height: "auto" }}
exit={{ opacity: 0, y: -24, height: 0 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
className="flex items-center gap-5 overflow-y-hidden relative h-6"
>
<div className="w-px h-6 bg-border" />
<div className="relative h-5 w-full">
<AnimatePresence mode="sync" initial={false}>
<motion.span
key={currentStep}
initial={{ y: 24, opacity: 0 }}
animate={{ y: 0, opacity: 1 }}
exit={{ y: -24, opacity: 0 }}
transition={{ duration: 0.3, ease: "easeInOut" }}
className="text-mmd font-medium text-primary absolute left-0"
>
{steps[currentStep]}
</motion.span>
</AnimatePresence>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
</div>
);
}

View file

@ -1,4 +1,4 @@
import { useState } from "react";
import { useEffect, useState } from "react";
import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper";
import IBMLogo from "@/components/logo/ibm-logo";
@ -14,10 +14,14 @@ export function IBMOnboarding({
setSettings,
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com");
const [apiKey, setApiKey] = useState("");
@ -99,6 +103,19 @@ export function IBMOnboarding({
},
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to IBM watsonx.ai", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
<div className="space-y-4">

View file

@ -7,154 +7,160 @@ import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutat
import { useGetOllamaModelsQuery } from "../../api/queries/useGetModelsQuery";
import { useModelSelection } from "../hooks/useModelSelection";
import { useUpdateSettings } from "../hooks/useUpdateSettings";
import { AdvancedOnboarding } from "./advanced";
import { ModelSelector } from "./model-selector";
export function OllamaOnboarding({
setSettings,
sampleDataset,
setSampleDataset,
setSettings,
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
const [showConnecting, setShowConnecting] = useState(false);
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
const [endpoint, setEndpoint] = useState(`http://localhost:11434`);
const [showConnecting, setShowConnecting] = useState(false);
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
// Fetch models from API when endpoint is provided (debounced)
const {
data: modelsData,
isLoading: isLoadingModels,
error: modelsError,
} = useGetOllamaModelsQuery(
debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined,
);
// Fetch models from API when endpoint is provided (debounced)
const {
data: modelsData,
isLoading: isLoadingModels,
error: modelsError,
} = useGetOllamaModelsQuery(
debouncedEndpoint ? { endpoint: debouncedEndpoint } : undefined,
);
// Use custom hook for model selection logic
const {
languageModel,
embeddingModel,
setLanguageModel,
setEmbeddingModel,
languageModels,
embeddingModels,
} = useModelSelection(modelsData);
// Use custom hook for model selection logic
const {
languageModel,
embeddingModel,
setLanguageModel,
setEmbeddingModel,
languageModels,
embeddingModels,
} = useModelSelection(modelsData);
// Handle delayed display of connecting state
useEffect(() => {
let timeoutId: NodeJS.Timeout;
// Handle delayed display of connecting state
useEffect(() => {
let timeoutId: NodeJS.Timeout;
if (debouncedEndpoint && isLoadingModels) {
timeoutId = setTimeout(() => {
setShowConnecting(true);
}, 500);
} else {
setShowConnecting(false);
}
if (debouncedEndpoint && isLoadingModels) {
timeoutId = setTimeout(() => {
setShowConnecting(true);
}, 500);
} else {
setShowConnecting(false);
}
return () => {
if (timeoutId) {
clearTimeout(timeoutId);
}
};
}, [debouncedEndpoint, isLoadingModels]);
return () => {
if (timeoutId) {
clearTimeout(timeoutId);
}
};
}, [debouncedEndpoint, isLoadingModels]);
const handleSampleDatasetChange = (dataset: boolean) => {
setSampleDataset(dataset);
};
// Update settings when values change
useUpdateSettings(
"ollama",
{
endpoint,
languageModel,
embeddingModel,
},
setSettings,
);
// Update settings when values change
useUpdateSettings(
"ollama",
{
endpoint,
languageModel,
embeddingModel,
},
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Check validation state based on models query
const hasConnectionError = debouncedEndpoint && modelsError;
const hasNoModels =
modelsData &&
!modelsData.language_models?.length &&
!modelsData.embedding_models?.length;
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to Ollama", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
<div className="space-y-4">
<div className="space-y-1">
<LabelInput
label="Ollama Base URL"
helperText="Base URL of your Ollama server"
id="api-endpoint"
required
placeholder="http://localhost:11434"
value={endpoint}
onChange={(e) => setEndpoint(e.target.value)}
/>
{showConnecting && (
<p className="text-mmd text-muted-foreground">
Connecting to Ollama server...
</p>
)}
{hasConnectionError && (
<p className="text-mmd text-accent-amber-foreground">
Cant reach Ollama at {debouncedEndpoint}. Update the base URL or
start the server.
</p>
)}
{hasNoModels && (
<p className="text-mmd text-accent-amber-foreground">
No models found. Install embedding and agent models on your Ollama
server.
</p>
)}
</div>
<LabelWrapper
label="Embedding model"
helperText="Model used for knowledge ingest and retrieval"
id="embedding-model"
required={true}
>
<ModelSelector
options={embeddingModels}
icon={<OllamaLogo className="w-4 h-4" />}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No embedding models detected. Install an embedding model to continue."
}
value={embeddingModel}
onValueChange={setEmbeddingModel}
/>
</LabelWrapper>
<LabelWrapper
label="Language model"
helperText="Model used for chat"
id="embedding-model"
required={true}
>
<ModelSelector
options={languageModels}
icon={<OllamaLogo className="w-4 h-4" />}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No language models detected. Install a language model to continue."
}
value={languageModel}
onValueChange={setLanguageModel}
/>
</LabelWrapper>
</div>
<AdvancedOnboarding
sampleDataset={sampleDataset}
setSampleDataset={handleSampleDatasetChange}
/>
</>
);
// Check validation state based on models query
const hasConnectionError = debouncedEndpoint && modelsError;
const hasNoModels =
modelsData &&
!modelsData.language_models?.length &&
!modelsData.embedding_models?.length;
return (
<div className="space-y-4">
<div className="space-y-1">
<LabelInput
label="Ollama Base URL"
helperText="Base URL of your Ollama server"
id="api-endpoint"
required
placeholder="http://localhost:11434"
value={endpoint}
onChange={(e) => setEndpoint(e.target.value)}
/>
{showConnecting && (
<p className="text-mmd text-muted-foreground">
Connecting to Ollama server...
</p>
)}
{hasConnectionError && (
<p className="text-mmd text-accent-amber-foreground">
Cant reach Ollama at {debouncedEndpoint}. Update the base URL or
start the server.
</p>
)}
{hasNoModels && (
<p className="text-mmd text-accent-amber-foreground">
No models found. Install embedding and agent models on your Ollama
server.
</p>
)}
</div>
<LabelWrapper
label="Embedding model"
helperText="Model used for knowledge ingest and retrieval"
id="embedding-model"
required={true}
>
<ModelSelector
options={embeddingModels}
icon={<OllamaLogo className="w-4 h-4" />}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No embedding models detected. Install an embedding model to continue."
}
value={embeddingModel}
onValueChange={setEmbeddingModel}
/>
</LabelWrapper>
<LabelWrapper
label="Language model"
helperText="Model used for chat"
id="embedding-model"
required={true}
>
<ModelSelector
options={languageModels}
icon={<OllamaLogo className="w-4 h-4" />}
noOptionsPlaceholder={
isLoadingModels
? "Loading models..."
: "No language models detected. Install a language model to continue."
}
value={languageModel}
onValueChange={setLanguageModel}
/>
</LabelWrapper>
</div>
);
}

View file

@ -1,182 +1,322 @@
"use client";
import { useState } from "react";
import { AnimatePresence, motion } from "framer-motion";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import {
type OnboardingVariables,
useOnboardingMutation,
type OnboardingVariables,
useOnboardingMutation,
} from "@/app/api/mutations/useOnboardingMutation";
import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery";
import { useDoclingHealth } from "@/components/docling-health-banner";
import IBMLogo from "@/components/logo/ibm-logo";
import OllamaLogo from "@/components/logo/ollama-logo";
import OpenAILogo from "@/components/logo/openai-logo";
import { AnimatedProcessingIcon } from "@/components/ui/animated-processing-icon";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardFooter,
CardHeader,
Card,
CardContent,
CardFooter,
CardHeader,
} from "@/components/ui/card";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { cn } from "@/lib/utils";
import { AnimatedProviderSteps } from "./animated-provider-steps";
import { IBMOnboarding } from "./ibm-onboarding";
import { OllamaOnboarding } from "./ollama-onboarding";
import { OpenAIOnboarding } from "./openai-onboarding";
interface OnboardingCardProps {
onComplete: () => void;
onComplete: () => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}
const OnboardingCard = ({ onComplete }: OnboardingCardProps) => {
const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true";
const { isHealthy: isDoclingHealthy } = useDoclingHealth();
const STEP_LIST = [
"Setting up your model provider",
"Defining schema",
"Configuring Langflow",
"Ingesting sample data",
];
const [modelProvider, setModelProvider] = useState<string>("openai");
const TOTAL_PROVIDER_STEPS = STEP_LIST.length;
const [sampleDataset, setSampleDataset] = useState<boolean>(true);
const OnboardingCard = ({
onComplete,
setIsLoadingModels: setIsLoadingModelsParent,
setLoadingStatus: setLoadingStatusParent,
}: OnboardingCardProps) => {
const { isHealthy: isDoclingHealthy } = useDoclingHealth();
const handleSetModelProvider = (provider: string) => {
setModelProvider(provider);
setSettings({
model_provider: provider,
embedding_model: "",
llm_model: "",
});
};
const [modelProvider, setModelProvider] = useState<string>("openai");
const [settings, setSettings] = useState<OnboardingVariables>({
model_provider: modelProvider,
embedding_model: "",
llm_model: "",
});
const [sampleDataset, setSampleDataset] = useState<boolean>(true);
// Mutations
const onboardingMutation = useOnboardingMutation({
onSuccess: (data) => {
console.log("Onboarding completed successfully", data);
onComplete();
},
onError: (error) => {
toast.error("Failed to complete onboarding", {
description: error.message,
});
},
});
const [isLoadingModels, setIsLoadingModels] = useState<boolean>(false);
const handleComplete = () => {
if (
!settings.model_provider ||
!settings.llm_model ||
!settings.embedding_model
) {
toast.error("Please complete all required fields");
return;
}
const [loadingStatus, setLoadingStatus] = useState<string[]>([]);
// Prepare onboarding data
const onboardingData: OnboardingVariables = {
model_provider: settings.model_provider,
llm_model: settings.llm_model,
embedding_model: settings.embedding_model,
sample_data: sampleDataset,
};
const [currentStatusIndex, setCurrentStatusIndex] = useState<number>(0);
// Add API key if available
if (settings.api_key) {
onboardingData.api_key = settings.api_key;
}
// Pass loading state to parent
useEffect(() => {
setIsLoadingModelsParent?.(isLoadingModels);
}, [isLoadingModels, setIsLoadingModelsParent]);
// Add endpoint if available
if (settings.endpoint) {
onboardingData.endpoint = settings.endpoint;
}
useEffect(() => {
setLoadingStatusParent?.(loadingStatus);
}, [loadingStatus, setLoadingStatusParent]);
// Add project_id if available
if (settings.project_id) {
onboardingData.project_id = settings.project_id;
}
// Cycle through loading status messages once
useEffect(() => {
if (!isLoadingModels || loadingStatus.length === 0) {
setCurrentStatusIndex(0);
return;
}
onboardingMutation.mutate(onboardingData);
};
const interval = setInterval(() => {
setCurrentStatusIndex((prev) => {
const nextIndex = prev + 1;
// Stop at the last message
if (nextIndex >= loadingStatus.length - 1) {
clearInterval(interval);
return loadingStatus.length - 1;
}
return nextIndex;
});
}, 1500); // Change status every 1.5 seconds
const isComplete = !!settings.llm_model && !!settings.embedding_model && isDoclingHealthy;
return () => clearInterval(interval);
}, [isLoadingModels, loadingStatus]);
return (
<Card className={`w-full max-w-[600px] ${updatedOnboarding ? "border-none" : ""}`}>
<Tabs
defaultValue={modelProvider}
onValueChange={handleSetModelProvider}
>
<CardHeader className={`${updatedOnboarding ? "px-0" : ""}`}>
<TabsList>
<TabsTrigger value="openai">
<OpenAILogo className="w-4 h-4" />
OpenAI
</TabsTrigger>
<TabsTrigger value="watsonx">
<IBMLogo className="w-4 h-4" />
IBM watsonx.ai
</TabsTrigger>
<TabsTrigger value="ollama">
<OllamaLogo className="w-4 h-4" />
Ollama
</TabsTrigger>
</TabsList>
</CardHeader>
<CardContent className={`${updatedOnboarding ? "px-0" : ""}`}>
<TabsContent value="openai">
<OpenAIOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
/>
</TabsContent>
<TabsContent value="watsonx">
<IBMOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
/>
</TabsContent>
<TabsContent value="ollama">
<OllamaOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
/>
</TabsContent>
</CardContent>
</Tabs>
<CardFooter className={`flex ${updatedOnboarding ? "px-0" : "justify-end"}`}>
<Tooltip>
<TooltipTrigger asChild>
<div>
<Button
size="sm"
onClick={handleComplete}
disabled={!isComplete}
loading={onboardingMutation.isPending}
>
<span className="select-none">Complete</span>
</Button>
</div>
</TooltipTrigger>
{!isComplete && (
<TooltipContent>
{!!settings.llm_model && !!settings.embedding_model && !isDoclingHealthy
? "docling-serve must be running to continue"
: "Please fill in all required fields"}
</TooltipContent>
)}
</Tooltip>
</CardFooter>
</Card>
)
}
const handleSetModelProvider = (provider: string) => {
setModelProvider(provider);
setSettings({
model_provider: provider,
embedding_model: "",
llm_model: "",
});
};
const [settings, setSettings] = useState<OnboardingVariables>({
model_provider: modelProvider,
embedding_model: "",
llm_model: "",
});
const [currentStep, setCurrentStep] = useState<number | null>(null);
// Query tasks to track completion
const { data: tasks } = useGetTasksQuery({
enabled: currentStep !== null, // Only poll when onboarding has started
refetchInterval: currentStep !== null ? 1000 : false, // Poll every 1 second during onboarding
});
// Monitor tasks and call onComplete when all tasks are done
useEffect(() => {
if (currentStep === null || !tasks) {
return;
}
// Check if there are any active tasks (pending, running, or processing)
const activeTasks = tasks.find(
(task) =>
task.status === "pending" ||
task.status === "running" ||
task.status === "processing",
);
// If no active tasks and we've started onboarding, complete it
if (
(!activeTasks || (activeTasks.processed_files ?? 0) > 0) &&
tasks.length > 0
) {
// Set to final step to show "Done"
setCurrentStep(TOTAL_PROVIDER_STEPS);
// Wait a bit before completing
setTimeout(() => {
onComplete();
}, 1000);
}
}, [tasks, currentStep, onComplete]);
// Mutations
const onboardingMutation = useOnboardingMutation({
onSuccess: (data) => {
console.log("Onboarding completed successfully", data);
setCurrentStep(0);
},
onError: (error) => {
toast.error("Failed to complete onboarding", {
description: error.message,
});
},
});
const handleComplete = () => {
if (
!settings.model_provider ||
!settings.llm_model ||
!settings.embedding_model
) {
toast.error("Please complete all required fields");
return;
}
// Prepare onboarding data
const onboardingData: OnboardingVariables = {
model_provider: settings.model_provider,
llm_model: settings.llm_model,
embedding_model: settings.embedding_model,
sample_data: sampleDataset,
};
// Add API key if available
if (settings.api_key) {
onboardingData.api_key = settings.api_key;
}
// Add endpoint if available
if (settings.endpoint) {
onboardingData.endpoint = settings.endpoint;
}
// Add project_id if available
if (settings.project_id) {
onboardingData.project_id = settings.project_id;
}
onboardingMutation.mutate(onboardingData);
setCurrentStep(0);
};
const isComplete =
!!settings.llm_model && !!settings.embedding_model && isDoclingHealthy;
return (
<AnimatePresence mode="wait">
{currentStep === null ? (
<motion.div
key="onboarding-form"
initial={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: -24 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
>
<div className={`w-full max-w-[600px] flex flex-col gap-6`}>
<Tabs
defaultValue={modelProvider}
onValueChange={handleSetModelProvider}
>
<TabsList className="mb-4">
<TabsTrigger
value="openai"
>
<div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "openai" ? "bg-white" : "bg-muted")}>
<OpenAILogo className={cn("w-4 h-4 shrink-0", modelProvider === "openai" ? "text-black" : "text-muted-foreground")} />
</div>
OpenAI
</TabsTrigger>
<TabsTrigger
value="watsonx"
>
<div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "watsonx" ? "bg-[#1063FE]" : "bg-muted")}>
<IBMLogo className={cn("w-4 h-4 shrink-0", modelProvider === "watsonx" ? "text-white" : "text-muted-foreground")} />
</div>
IBM watsonx.ai
</TabsTrigger>
<TabsTrigger
value="ollama"
>
<div className={cn("flex items-center justify-center gap-2 w-8 h-8 rounded-md", modelProvider === "ollama" ? "bg-white" : "bg-muted")}>
<OllamaLogo
className={cn(
"w-4 h-4 shrink-0",
modelProvider === "ollama" ? "text-black" : "text-muted-foreground",
)}
/>
</div>
Ollama
</TabsTrigger>
</TabsList>
<TabsContent value="openai">
<OpenAIOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
<TabsContent value="watsonx">
<IBMOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
<TabsContent value="ollama">
<OllamaOnboarding
setSettings={setSettings}
sampleDataset={sampleDataset}
setSampleDataset={setSampleDataset}
setIsLoadingModels={setIsLoadingModels}
setLoadingStatus={setLoadingStatus}
/>
</TabsContent>
</Tabs>
{!isLoadingModels && (
<Tooltip>
<TooltipTrigger asChild>
<div>
<Button
size="sm"
onClick={handleComplete}
disabled={!isComplete}
loading={onboardingMutation.isPending}
>
<span className="select-none">Complete</span>
</Button>
</div>
</TooltipTrigger>
{!isComplete && (
<TooltipContent>
{!!settings.llm_model &&
!!settings.embedding_model &&
!isDoclingHealthy
? "docling-serve must be running to continue"
: "Please fill in all required fields"}
</TooltipContent>
)}
</Tooltip>
)}
</div>
</motion.div>
) : (
<motion.div
key="provider-steps"
initial={{ opacity: 0, y: 24 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.4, ease: "easeInOut" }}
>
<AnimatedProviderSteps
currentStep={currentStep}
setCurrentStep={setCurrentStep}
steps={STEP_LIST}
/>
</motion.div>
)}
</AnimatePresence>
);
};
export default OnboardingCard;

View file

@ -1,4 +1,4 @@
import { useState } from "react";
import { useEffect, useState } from "react";
import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper";
import OpenAILogo from "@/components/logo/openai-logo";
@ -14,10 +14,14 @@ export function OpenAIOnboarding({
setSettings,
sampleDataset,
setSampleDataset,
setIsLoadingModels,
setLoadingStatus,
}: {
setSettings: (settings: OnboardingVariables) => void;
sampleDataset: boolean;
setSampleDataset: (dataset: boolean) => void;
setIsLoadingModels?: (isLoading: boolean) => void;
setLoadingStatus?: (status: string[]) => void;
}) {
const [apiKey, setApiKey] = useState("");
const [getFromEnv, setGetFromEnv] = useState(true);
@ -68,6 +72,19 @@ export function OpenAIOnboarding({
},
setSettings,
);
// Notify parent about loading state
useEffect(() => {
setIsLoadingModels?.(isLoadingModels);
// Set detailed loading status
if (isLoadingModels) {
const status = ["Connecting to OpenAI", "Fetching language models", "Fetching embedding models"];
setLoadingStatus?.(status);
} else {
setLoadingStatus?.([]);
}
}, [isLoadingModels, setIsLoadingModels, setLoadingStatus]);
return (
<>
<div className="space-y-5">

View file

@ -0,0 +1,53 @@
import { motion } from "framer-motion";
import { ANIMATION_DURATION } from "@/lib/constants";
/**
 * Conditionally reveals its children with a framer-motion animation.
 *
 * Two modes:
 * - `slide`: animates a translate transform (Y when `vertical`, else X),
 *   sliding the content fully into view when `isOpen` and off by -100% when
 *   closed.
 * - default: animates the box dimension (height when `vertical`, else
 *   width) between "auto" (open) and "0px" (closed).
 *
 * `initial` and `animate` are set to the same computed value, so changes to
 * `isOpen` animate via the `animate` prop while mount renders in place.
 */
export const AnimatedConditional = ({
  children,
  isOpen,
  className,
  slide = false,
  delay,
  vertical = false,
}: {
  children: React.ReactNode;
  isOpen: boolean;
  className?: string;
  delay?: number;
  vertical?: boolean;
  slide?: boolean;
}) => {
  // Which CSS property to animate for this mode/orientation.
  let animationProperty: string;
  if (slide) {
    animationProperty = vertical ? "translateY" : "translateX";
  } else {
    animationProperty = vertical ? "height" : "width";
  }

  // Target value for that property given the open/closed state.
  let animationValue: string;
  if (isOpen) {
    animationValue = slide ? "0px" : "auto";
  } else {
    animationValue = slide ? "-100%" : "0px";
  }

  return (
    <motion.div
      initial={{ [animationProperty]: animationValue }}
      animate={{ [animationProperty]: animationValue }}
      exit={{ [animationProperty]: 0 }}
      transition={{
        duration: ANIMATION_DURATION,
        ease: "easeOut",
        delay: delay,
      }}
      style={{
        overflow: "hidden",
        // Prevent text wrapping while a horizontal dimension is collapsing.
        whiteSpace: vertical ? "normal" : "nowrap",
      }}
      className={className}
    >
      {children}
    </motion.div>
  );
};

View file

@ -0,0 +1,203 @@
"use client";
import { motion } from "framer-motion";
import { usePathname } from "next/navigation";
import { useEffect, useState } from "react";
import {
type ChatConversation,
useGetConversationsQuery,
} from "@/app/api/queries/useGetConversationsQuery";
import type { Settings } from "@/app/api/queries/useGetSettingsQuery";
import { OnboardingContent } from "@/app/new-onboarding/components/onboarding-content";
import { ProgressBar } from "@/app/new-onboarding/components/progress-bar";
import { AnimatedConditional } from "@/components/animated-conditional";
import { Header } from "@/components/header";
import { Navigation } from "@/components/navigation";
import { useAuth } from "@/contexts/auth-context";
import { useChat } from "@/contexts/chat-context";
import {
ANIMATION_DURATION,
HEADER_HEIGHT,
ONBOARDING_STEP_KEY,
SIDEBAR_WIDTH,
TOTAL_ONBOARDING_STEPS,
} from "@/lib/constants";
import { cn } from "@/lib/utils";
/**
 * Hosts page content inside the app chrome (header + sidebar nav) and drives
 * the onboarding flow. While onboarding is active the chrome stays hidden and
 * an animated, centered card hosts the onboarding steps; once the final step
 * completes, persisted progress is cleared and the full layout slides in.
 *
 * Fix: removed the stray, meaningless "0v" class from the inner motion.div's
 * className (it is not a valid Tailwind/CSS utility).
 *
 * @param settings Backend settings; `settings.edited` decides whether the
 *                 layout is shown immediately (onboarding already done).
 * @param children The routed page content to render inside the layout.
 */
export function ChatRenderer({
  settings,
  children,
}: {
  settings: Settings | undefined;
  children: React.ReactNode;
}) {
  const pathname = usePathname();
  const { isAuthenticated, isNoAuthMode } = useAuth();
  const {
    endpoint,
    refreshTrigger,
    refreshConversations,
    startNewConversation,
  } = useChat();

  // Initialize onboarding state based on local storage and settings.
  // Guards on `window` keep these initializers safe during SSR.
  const [currentStep, setCurrentStep] = useState<number>(() => {
    if (typeof window === "undefined") return 0;
    const savedStep = localStorage.getItem(ONBOARDING_STEP_KEY);
    return savedStep !== null ? parseInt(savedStep, 10) : 0;
  });
  const [showLayout, setShowLayout] = useState<boolean>(() => {
    if (typeof window === "undefined") return false;
    const savedStep = localStorage.getItem(ONBOARDING_STEP_KEY);
    // Show layout if settings.edited is true and if no onboarding step is saved
    const isEdited = settings?.edited ?? true;
    return isEdited ? savedStep === null : false;
  });

  // Only fetch conversations on chat page
  const isOnChatPage = pathname === "/" || pathname === "/chat";
  const { data: conversations = [], isLoading: isConversationsLoading } =
    useGetConversationsQuery(endpoint, refreshTrigger, {
      enabled: isOnChatPage && (isAuthenticated || isNoAuthMode),
    }) as { data: ChatConversation[]; isLoading: boolean };

  const handleNewConversation = () => {
    refreshConversations();
    startNewConversation();
  };

  // Persist the current onboarding step whenever it changes (only while
  // onboarding is still in progress).
  useEffect(() => {
    if (typeof window !== "undefined" && !showLayout) {
      localStorage.setItem(ONBOARDING_STEP_KEY, currentStep.toString());
    }
  }, [currentStep, showLayout]);

  // Advance to the next onboarding step; on the last step, clear persisted
  // progress and reveal the full application layout.
  const handleStepComplete = () => {
    if (currentStep < TOTAL_ONBOARDING_STEPS - 1) {
      setCurrentStep(currentStep + 1);
    } else {
      // Onboarding is complete - remove from local storage and show layout
      if (typeof window !== "undefined") {
        localStorage.removeItem(ONBOARDING_STEP_KEY);
      }
      setShowLayout(true);
    }
  };

  // List of paths with smaller max-width
  const smallWidthPaths = ["/settings/connector/new"];
  const isSmallWidthPath = smallWidthPaths.includes(pathname);

  // Offsets that re-center the onboarding card in the viewport while the
  // sidebar/header chrome is hidden.
  const x = showLayout ? "0px" : `calc(-${SIDEBAR_WIDTH / 2}px + 50vw)`;
  const y = showLayout ? "0px" : `calc(-${HEADER_HEIGHT / 2}px + 50vh)`;
  const translateY = showLayout ? "0px" : `-50vh`;
  const translateX = showLayout ? "0px" : `-50vw`;

  // For all other pages, render with Langflow-styled navigation and task menu
  return (
    <>
      <AnimatedConditional
        className="[grid-area:header] bg-background border-b"
        vertical
        slide
        isOpen={showLayout}
        delay={ANIMATION_DURATION / 2}
      >
        <Header />
      </AnimatedConditional>
      {/* Sidebar Navigation */}
      {/* NOTE(review): Tailwind cannot generate classes from runtime
          interpolation (the w-[...px] below is built from SIDEBAR_WIDTH at
          runtime); confirm this width class is safelisted or emitted at
          build time, otherwise it will have no effect. */}
      <AnimatedConditional
        isOpen={showLayout}
        slide
        className={`border-r bg-background overflow-hidden [grid-area:nav] w-[${SIDEBAR_WIDTH}px]`}
      >
        <Navigation
          conversations={conversations}
          isConversationsLoading={isConversationsLoading}
          onNewConversation={handleNewConversation}
        />
      </AnimatedConditional>
      {/* Main Content */}
      <main className="overflow-hidden w-full flex items-center justify-center [grid-area:main]">
        <motion.div
          initial={{
            width: showLayout ? "100%" : "100vw",
            height: showLayout ? "100%" : "100vh",
            x: x,
            y: y,
            translateX: translateX,
            translateY: translateY,
          }}
          animate={{
            width: showLayout ? "100%" : "850px",
            borderRadius: showLayout ? "0" : "16px",
            border: showLayout ? "0" : "1px solid #27272A",
            height: showLayout ? "100%" : "800px",
            x: x,
            y: y,
            translateX: translateX,
            translateY: translateY,
          }}
          transition={{
            duration: ANIMATION_DURATION,
            ease: "easeOut",
          }}
          className={cn(
            "flex h-full w-full max-w-full max-h-full items-center justify-center overflow-hidden",
            !showLayout && "absolute",
          )}
        >
          <div
            className={cn(
              "h-full bg-background w-full",
              showLayout && !isOnChatPage && "p-6 container overflow-y-auto",
              showLayout && isSmallWidthPath && "max-w-[850px] ml-0",
              !showLayout &&
                "w-full bg-card rounded-lg shadow-2xl p-0 py-2 overflow-y-auto",
            )}
          >
            <motion.div
              initial={{
                opacity: showLayout ? 1 : 0,
              }}
              animate={{
                opacity: "100%",
              }}
              transition={{
                duration: ANIMATION_DURATION,
                ease: "easeOut",
                delay: ANIMATION_DURATION,
              }}
              className={cn("w-full h-full")}
            >
              {/* Page content stays mounted but hidden during onboarding so
                  its state survives the transition. */}
              <div className={cn("w-full h-full", !showLayout && "hidden")}>
                {children}
              </div>
              {!showLayout && (
                <OnboardingContent
                  handleStepComplete={handleStepComplete}
                  currentStep={currentStep}
                />
              )}
            </motion.div>
          </div>
        </motion.div>
        <motion.div
          initial={{ opacity: 0, y: 20 }}
          animate={{ opacity: showLayout ? 0 : 1, y: showLayout ? 20 : 0 }}
          transition={{ duration: ANIMATION_DURATION, ease: "easeOut" }}
          className={cn("absolute bottom-10 left-0 right-0")}
        >
          <ProgressBar
            currentStep={currentStep}
            totalSteps={TOTAL_ONBOARDING_STEPS}
          />
        </motion.div>
      </main>
    </>
  );
}

View file

@ -0,0 +1,60 @@
"use client";
import { Bell } from "lucide-react";
import Logo from "@/components/logo/logo";
import { UserNav } from "@/components/user-nav";
import { useTask } from "@/contexts/task-context";
import { cn } from "@/lib/utils";
/**
 * Application header: product logo/name on the left, task-notification bell
 * and user menu on the right.
 *
 * Fix: the bell is an icon-only button, so it now carries an explicit
 * `type="button"` (avoids accidental form submission) and an `aria-label`
 * so screen readers announce it.
 */
export function Header() {
  const { tasks, toggleMenu } = useTask();

  // Tasks still in flight drive the notification dot on the bell icon.
  const activeTasks = tasks.filter(
    (task) =>
      task.status === "pending" ||
      task.status === "running" ||
      task.status === "processing",
  );

  return (
    <header className={cn(`flex w-full h-full items-center justify-between`)}>
      <div className="header-start-display px-[16px]">
        {/* Logo/Title */}
        <div className="flex items-center">
          <Logo className="fill-primary" width={24} height={22} />
          <span className="text-lg font-semibold pl-2.5">OpenRAG</span>
        </div>
      </div>
      <div className="header-end-division">
        <div className="justify-end flex items-center">
          {/* Knowledge Filter Dropdown */}
          {/* <KnowledgeFilterDropdown
            selectedFilter={selectedFilter}
            onFilterSelect={setSelectedFilter}
          /> */}
          {/* GitHub Star Button */}
          {/* <GitHubStarButton repo="phact/openrag" /> */}
          {/* Discord Link */}
          {/* <DiscordLink inviteCode="EqksyE2EX9" /> */}
          {/* Task Notification Bell */}
          <button
            type="button"
            aria-label="Toggle task notifications"
            onClick={toggleMenu}
            className="relative h-8 w-8 hover:bg-muted rounded-lg flex items-center justify-center"
          >
            <Bell size={16} className="text-muted-foreground" />
            {activeTasks.length > 0 && <div className="header-notifications" />}
          </button>
          {/* Separator */}
          <div className="w-px h-6 bg-border mx-3" />
          <UserNav />
        </div>
      </div>
    </header>
  );
}

View file

@ -1,39 +1,25 @@
"use client";
import { Bell, Loader2 } from "lucide-react";
import { Loader2 } from "lucide-react";
import { usePathname } from "next/navigation";
import {
useGetConversationsQuery,
type ChatConversation,
} from "@/app/api/queries/useGetConversationsQuery";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { DoclingHealthBanner } from "@/components/docling-health-banner";
import { KnowledgeFilterPanel } from "@/components/knowledge-filter-panel";
import Logo from "@/components/logo/logo";
import { Navigation } from "@/components/navigation";
import { TaskNotificationMenu } from "@/components/task-notification-menu";
import { UserNav } from "@/components/user-nav";
import { useAuth } from "@/contexts/auth-context";
import { useChat } from "@/contexts/chat-context";
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
// import { GitHubStarButton } from "@/components/github-star-button"
// import { DiscordLink } from "@/components/discord-link"
import { useTask } from "@/contexts/task-context";
import { useDoclingHealthQuery } from "@/src/app/api/queries/useDoclingHealthQuery";
import { cn } from "@/lib/utils";
import { useDoclingHealthQuery } from "@/src/app/api/queries/useDoclingHealthQuery";
import { ChatRenderer } from "./chat-renderer";
export function LayoutWrapper({ children }: { children: React.ReactNode }) {
const pathname = usePathname();
const { tasks, isMenuOpen, toggleMenu } = useTask();
const { isMenuOpen } = useTask();
const { isPanelOpen } = useKnowledgeFilter();
const { isLoading, isAuthenticated, isNoAuthMode } = useAuth();
const {
endpoint,
refreshTrigger,
refreshConversations,
startNewConversation,
} = useChat();
const { isLoading: isSettingsLoading } = useGetSettingsQuery({
const { data: settings, isLoading: isSettingsLoading } = useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const {
@ -42,40 +28,17 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
isError,
} = useDoclingHealthQuery();
// Only fetch conversations on chat page
const isOnChatPage = pathname === "/" || pathname === "/chat";
const { data: conversations = [], isLoading: isConversationsLoading } =
useGetConversationsQuery(endpoint, refreshTrigger, {
enabled: isOnChatPage && (isAuthenticated || isNoAuthMode),
}) as { data: ChatConversation[]; isLoading: boolean };
const handleNewConversation = () => {
refreshConversations();
startNewConversation();
};
// List of paths that should not show navigation
const authPaths = ["/login", "/auth/callback", "/onboarding", "/new-onboarding"];
const authPaths = ["/login", "/auth/callback"];
const isAuthPage = authPaths.includes(pathname);
const isOnKnowledgePage = pathname.startsWith("/knowledge");
// List of paths with smaller max-width
const smallWidthPaths = ["/settings/connector/new"];
const isSmallWidthPath = smallWidthPaths.includes(pathname);
// Calculate active tasks for the bell icon
const activeTasks = tasks.filter(
task =>
task.status === "pending" ||
task.status === "running" ||
task.status === "processing"
);
const isUnhealthy = health?.status === "unhealthy" || isError;
const isBannerVisible = !isHealthLoading && isUnhealthy;
const isSettingsLoadingOrError = isSettingsLoading || !settings;
// Show loading state when backend isn't ready
if (isLoading || isSettingsLoading) {
if (isLoading || (isSettingsLoadingOrError && (isNoAuthMode || isAuthenticated))) {
return (
<div className="min-h-screen flex items-center justify-center bg-background">
<div className="flex flex-col items-center gap-4">
@ -93,88 +56,31 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
// For all other pages, render with Langflow-styled navigation and task menu
return (
<div
className={cn(
"app-grid-arrangement",
isBannerVisible && "banner-visible",
isPanelOpen && isOnKnowledgePage && !isMenuOpen && "filters-open",
isMenuOpen && "notifications-open"
)}
>
<div className="w-full [grid-area:banner]">
<DoclingHealthBanner className="w-full" />
<div className=" h-screen w-screen flex items-center justify-center">
<div
className={cn(
"app-grid-arrangement bg-black relative",
isBannerVisible && "banner-visible",
isPanelOpen && isOnKnowledgePage && !isMenuOpen && "filters-open",
isMenuOpen && "notifications-open",
)}
>
<div className={`w-full z-10 bg-background [grid-area:banner]`}>
<DoclingHealthBanner className="w-full" />
</div>
<ChatRenderer settings={settings}>{children}</ChatRenderer>
{/* Task Notifications Panel */}
<aside className="overflow-y-auto overflow-x-hidden [grid-area:notifications]">
{isMenuOpen && <TaskNotificationMenu />}
</aside>
{/* Knowledge Filter Panel */}
<aside className="overflow-y-auto overflow-x-hidden [grid-area:filters]">
{isPanelOpen && <KnowledgeFilterPanel />}
</aside>
</div>
<header className="header-arrangement bg-background [grid-area:header]">
<div className="header-start-display px-[16px]">
{/* Logo/Title */}
<div className="flex items-center">
<Logo className="fill-primary" width={24} height={22} />
<span className="text-lg font-semibold pl-2.5">OpenRAG</span>
</div>
</div>
<div className="header-end-division">
<div className="justify-end flex items-center">
{/* Knowledge Filter Dropdown */}
{/* <KnowledgeFilterDropdown
selectedFilter={selectedFilter}
onFilterSelect={setSelectedFilter}
/> */}
{/* GitHub Star Button */}
{/* <GitHubStarButton repo="phact/openrag" /> */}
{/* Discord Link */}
{/* <DiscordLink inviteCode="EqksyE2EX9" /> */}
{/* Task Notification Bell */}
<button
onClick={toggleMenu}
className="relative h-8 w-8 hover:bg-muted rounded-lg flex items-center justify-center"
>
<Bell size={16} className="text-muted-foreground" />
{activeTasks.length > 0 && (
<div className="header-notifications" />
)}
</button>
{/* Separator */}
<div className="w-px h-6 bg-border mx-3" />
<UserNav />
</div>
</div>
</header>
{/* Sidebar Navigation */}
<aside className="bg-background border-r overflow-hidden [grid-area:nav]">
<Navigation
conversations={conversations}
isConversationsLoading={isConversationsLoading}
onNewConversation={handleNewConversation}
/>
</aside>
{/* Main Content */}
<main className="overflow-y-auto [grid-area:main]">
<div
className={cn(
"p-6 h-full container",
isSmallWidthPath && "max-w-[850px] ml-0"
)}
>
{children}
</div>
</main>
{/* Task Notifications Panel */}
<aside className="overflow-y-auto overflow-x-hidden [grid-area:notifications]">
{isMenuOpen && <TaskNotificationMenu />}
</aside>
{/* Knowledge Filter Panel */}
<aside className="overflow-y-auto overflow-x-hidden [grid-area:filters]">
{isPanelOpen && <KnowledgeFilterPanel />}
</aside>
</div>
);
}

View file

@ -3,7 +3,6 @@
import { Loader2 } from "lucide-react";
import { usePathname, useRouter } from "next/navigation";
import { useEffect } from "react";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { useAuth } from "@/contexts/auth-context";
interface ProtectedRouteProps {
@ -12,10 +11,6 @@ interface ProtectedRouteProps {
export function ProtectedRoute({ children }: ProtectedRouteProps) {
const { isLoading, isAuthenticated, isNoAuthMode } = useAuth();
const { data: settings = {}, isLoading: isSettingsLoading } =
useGetSettingsQuery({
enabled: isAuthenticated || isNoAuthMode,
});
const router = useRouter();
const pathname = usePathname();
@ -31,30 +26,22 @@ export function ProtectedRoute({ children }: ProtectedRouteProps) {
);
useEffect(() => {
if (!isLoading && !isSettingsLoading && !isAuthenticated && !isNoAuthMode) {
if (!isLoading && !isAuthenticated && !isNoAuthMode) {
// Redirect to login with current path as redirect parameter
const redirectUrl = `/login?redirect=${encodeURIComponent(pathname)}`;
router.push(redirectUrl);
return;
}
if (!isLoading && !isSettingsLoading && !settings.edited) {
const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true";
router.push(updatedOnboarding ? "/new-onboarding" : "/onboarding");
}
}, [
isLoading,
isSettingsLoading,
isAuthenticated,
isNoAuthMode,
router,
pathname,
isSettingsLoading,
settings.edited,
]);
// Show loading state while checking authentication
if (isLoading || isSettingsLoading) {
if (isLoading) {
return (
<div className="flex items-center justify-center h-64">
<div className="flex flex-col items-center gap-4">

View file

@ -0,0 +1,492 @@
import { useRef, useState } from "react";
import type { FunctionCall, Message, SelectedFilters } from "@/app/chat/types";
// Configuration for the useChatStreaming hook.
interface UseChatStreamingOptions {
  // URL the chat request is POSTed to (defaults to "/api/langflow").
  endpoint?: string;
  // Invoked with the finalized assistant message and the response id
  // extracted from the stream (null if the stream never supplied one).
  onComplete?: (message: Message, responseId: string | null) => void;
  // Invoked when the stream fails for a reason other than an abort.
  onError?: (error: Error) => void;
}

// Per-call options for sendMessage; all fields are forwarded in the
// JSON request body (snake-cased where the backend expects it).
interface SendMessageOptions {
  // User prompt text.
  prompt: string;
  // Sent as `previous_response_id` to link this turn to a prior response.
  previousResponseId?: string;
  // Sent as `filters` — presumably retrieval filters; verify against backend.
  filters?: SelectedFilters;
  // Sent as `limit` (defaults to 10 in sendMessage).
  limit?: number;
  // Sent as `scoreThreshold` (defaults to 0 in sendMessage).
  scoreThreshold?: number;
}
/**
 * React hook that streams a chat response over a chunked HTTP response of
 * newline-delimited JSON, exposing the in-progress assistant message plus
 * start/abort controls.
 *
 * The parser tolerates several wire formats seen in the stream:
 * - Chat Completions-style chunks (`object === "response.chunk"` with a
 *   `delta` carrying content / function_call / tool_calls fragments),
 * - Realtime-style events (`response.output_item.added|done`,
 *   `response.output_text.delta`),
 * - OpenRAG backend chunks (`output_text`, or a bare `delta`).
 *
 * Returns { streamingMessage, isLoading, sendMessage, abortStream }.
 */
export function useChatStreaming({
  endpoint = "/api/langflow",
  onComplete,
  onError,
}: UseChatStreamingOptions = {}) {
  // The assistant message currently being streamed (null when idle).
  const [streamingMessage, setStreamingMessage] = useState<Message | null>(
    null,
  );
  const [isLoading, setIsLoading] = useState(false);
  // Controller for the in-flight request so a new send (or abortStream)
  // can cancel it.
  const streamAbortRef = useRef<AbortController | null>(null);
  // Monotonically increasing id; guards against a superseded stream
  // writing state after a newer stream has started.
  const streamIdRef = useRef(0);

  /**
   * POST the prompt and stream the assistant's reply.
   * Resolves with the final Message, or null when the stream was aborted
   * or superseded; on failure calls onError and resolves with a canned
   * error Message.
   */
  const sendMessage = async ({
    prompt,
    previousResponseId,
    filters,
    limit = 10,
    scoreThreshold = 0,
  }: SendMessageOptions) => {
    try {
      setIsLoading(true);

      // Abort any existing stream before starting a new one
      if (streamAbortRef.current) {
        streamAbortRef.current.abort();
      }
      const controller = new AbortController();
      streamAbortRef.current = controller;
      const thisStreamId = ++streamIdRef.current;

      const requestBody: {
        prompt: string;
        stream: boolean;
        previous_response_id?: string;
        filters?: SelectedFilters;
        limit?: number;
        scoreThreshold?: number;
      } = {
        prompt,
        stream: true,
        limit,
        scoreThreshold,
      };
      if (previousResponseId) {
        requestBody.previous_response_id = previousResponseId;
      }
      if (filters) {
        requestBody.filters = filters;
      }

      const response = await fetch(endpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify(requestBody),
        signal: controller.signal,
      });
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      const reader = response.body?.getReader();
      if (!reader) {
        throw new Error("No reader available");
      }

      const decoder = new TextDecoder();
      let buffer = "";
      let currentContent = "";
      const currentFunctionCalls: FunctionCall[] = [];
      let newResponseId: string | null = null;

      // Initialize streaming message
      if (!controller.signal.aborted && thisStreamId === streamIdRef.current) {
        setStreamingMessage({
          role: "assistant",
          content: "",
          timestamp: new Date(),
          isStreaming: true,
        });
      }

      try {
        while (true) {
          const { done, value } = await reader.read();
          // Stop consuming if this stream was aborted or replaced.
          if (controller.signal.aborted || thisStreamId !== streamIdRef.current)
            break;
          if (done) break;
          buffer += decoder.decode(value, { stream: true });
          // Process complete lines (JSON objects)
          const lines = buffer.split("\n");
          buffer = lines.pop() || ""; // Keep incomplete line in buffer
          for (const line of lines) {
            if (line.trim()) {
              try {
                const chunk = JSON.parse(line);
                // Extract response ID if present
                if (chunk.id) {
                  newResponseId = chunk.id;
                } else if (chunk.response_id) {
                  newResponseId = chunk.response_id;
                }
                // Handle OpenAI Chat Completions streaming format
                if (chunk.object === "response.chunk" && chunk.delta) {
                  // Handle function calls in delta
                  if (chunk.delta.function_call) {
                    if (chunk.delta.function_call.name) {
                      // A named fragment starts a new pending call.
                      const functionCall: FunctionCall = {
                        name: chunk.delta.function_call.name,
                        arguments: undefined,
                        status: "pending",
                        argumentsString:
                          chunk.delta.function_call.arguments || "",
                      };
                      currentFunctionCalls.push(functionCall);
                    } else if (chunk.delta.function_call.arguments) {
                      // Nameless fragments append arguments to the most
                      // recent call.
                      const lastFunctionCall =
                        currentFunctionCalls[currentFunctionCalls.length - 1];
                      if (lastFunctionCall) {
                        if (!lastFunctionCall.argumentsString) {
                          lastFunctionCall.argumentsString = "";
                        }
                        lastFunctionCall.argumentsString +=
                          chunk.delta.function_call.arguments;
                        // Heuristic: a "}" suggests the JSON may be complete;
                        // try to parse, ignore failures until more arrives.
                        if (lastFunctionCall.argumentsString.includes("}")) {
                          try {
                            const parsed = JSON.parse(
                              lastFunctionCall.argumentsString
                            );
                            lastFunctionCall.arguments = parsed;
                            lastFunctionCall.status = "completed";
                          } catch (e) {
                            // Arguments not yet complete
                          }
                        }
                      }
                    }
                  }
                  // Handle tool calls in delta
                  else if (
                    chunk.delta.tool_calls &&
                    Array.isArray(chunk.delta.tool_calls)
                  ) {
                    for (const toolCall of chunk.delta.tool_calls) {
                      if (toolCall.function) {
                        if (toolCall.function.name) {
                          const functionCall: FunctionCall = {
                            name: toolCall.function.name,
                            arguments: undefined,
                            status: "pending",
                            argumentsString: toolCall.function.arguments || "",
                          };
                          currentFunctionCalls.push(functionCall);
                        } else if (toolCall.function.arguments) {
                          const lastFunctionCall =
                            currentFunctionCalls[
                              currentFunctionCalls.length - 1
                            ];
                          if (lastFunctionCall) {
                            if (!lastFunctionCall.argumentsString) {
                              lastFunctionCall.argumentsString = "";
                            }
                            lastFunctionCall.argumentsString +=
                              toolCall.function.arguments;
                            if (
                              lastFunctionCall.argumentsString.includes("}")
                            ) {
                              try {
                                const parsed = JSON.parse(
                                  lastFunctionCall.argumentsString
                                );
                                lastFunctionCall.arguments = parsed;
                                lastFunctionCall.status = "completed";
                              } catch (e) {
                                // Arguments not yet complete
                              }
                            }
                          }
                        }
                      }
                    }
                  }
                  // Handle content/text in delta
                  else if (chunk.delta.content) {
                    currentContent += chunk.delta.content;
                  }
                  // Handle finish reason
                  if (chunk.delta.finish_reason) {
                    // Finalize any pending calls: parse what we have, or
                    // mark as error with the raw argument text.
                    currentFunctionCalls.forEach((fc) => {
                      if (fc.status === "pending" && fc.argumentsString) {
                        try {
                          fc.arguments = JSON.parse(fc.argumentsString);
                          fc.status = "completed";
                        } catch (e) {
                          fc.arguments = { raw: fc.argumentsString };
                          fc.status = "error";
                        }
                      }
                    });
                  }
                }
                // Handle Realtime API format - function call added
                else if (
                  chunk.type === "response.output_item.added" &&
                  chunk.item?.type === "function_call"
                ) {
                  // Prefer matching by id; otherwise adopt the most recent
                  // id-less pending call with the same name.
                  let existing = currentFunctionCalls.find(
                    (fc) => fc.id === chunk.item.id
                  );
                  if (!existing) {
                    existing = [...currentFunctionCalls]
                      .reverse()
                      .find(
                        (fc) =>
                          fc.status === "pending" &&
                          !fc.id &&
                          fc.name === (chunk.item.tool_name || chunk.item.name)
                      );
                  }
                  if (existing) {
                    existing.id = chunk.item.id;
                    existing.type = chunk.item.type;
                    existing.name =
                      chunk.item.tool_name || chunk.item.name || existing.name;
                    existing.arguments =
                      chunk.item.inputs || existing.arguments;
                  } else {
                    const functionCall: FunctionCall = {
                      name:
                        chunk.item.tool_name || chunk.item.name || "unknown",
                      arguments: chunk.item.inputs || undefined,
                      status: "pending",
                      argumentsString: "",
                      id: chunk.item.id,
                      type: chunk.item.type,
                    };
                    currentFunctionCalls.push(functionCall);
                  }
                }
                // Handle Realtime API format - tool call added
                else if (
                  chunk.type === "response.output_item.added" &&
                  chunk.item?.type?.includes("_call") &&
                  chunk.item?.type !== "function_call"
                ) {
                  let existing = currentFunctionCalls.find(
                    (fc) => fc.id === chunk.item.id
                  );
                  if (!existing) {
                    existing = [...currentFunctionCalls]
                      .reverse()
                      .find(
                        (fc) =>
                          fc.status === "pending" &&
                          !fc.id &&
                          fc.name ===
                            (chunk.item.tool_name ||
                              chunk.item.name ||
                              chunk.item.type)
                      );
                  }
                  if (existing) {
                    existing.id = chunk.item.id;
                    existing.type = chunk.item.type;
                    existing.name =
                      chunk.item.tool_name ||
                      chunk.item.name ||
                      chunk.item.type ||
                      existing.name;
                    existing.arguments =
                      chunk.item.inputs || existing.arguments;
                  } else {
                    const functionCall = {
                      name:
                        chunk.item.tool_name ||
                        chunk.item.name ||
                        chunk.item.type ||
                        "unknown",
                      arguments: chunk.item.inputs || {},
                      status: "pending" as const,
                      id: chunk.item.id,
                      type: chunk.item.type,
                    };
                    currentFunctionCalls.push(functionCall);
                  }
                }
                // Handle function call done
                else if (
                  chunk.type === "response.output_item.done" &&
                  chunk.item?.type === "function_call"
                ) {
                  const functionCall = currentFunctionCalls.find(
                    (fc) =>
                      fc.id === chunk.item.id ||
                      fc.name === chunk.item.tool_name ||
                      fc.name === chunk.item.name
                  );
                  if (functionCall) {
                    functionCall.status =
                      chunk.item.status === "completed" ? "completed" : "error";
                    functionCall.id = chunk.item.id;
                    functionCall.type = chunk.item.type;
                    functionCall.name =
                      chunk.item.tool_name ||
                      chunk.item.name ||
                      functionCall.name;
                    functionCall.arguments =
                      chunk.item.inputs || functionCall.arguments;
                    if (chunk.item.results) {
                      functionCall.result = chunk.item.results;
                    }
                  }
                }
                // Handle tool call done with results
                else if (
                  chunk.type === "response.output_item.done" &&
                  chunk.item?.type?.includes("_call") &&
                  chunk.item?.type !== "function_call"
                ) {
                  // Fuzzy match: id, name aliases, or a name derived from
                  // the item type (e.g. "search" vs "search_call").
                  const functionCall = currentFunctionCalls.find(
                    (fc) =>
                      fc.id === chunk.item.id ||
                      fc.name === chunk.item.tool_name ||
                      fc.name === chunk.item.name ||
                      fc.name === chunk.item.type ||
                      fc.name.includes(chunk.item.type.replace("_call", "")) ||
                      chunk.item.type.includes(fc.name)
                  );
                  if (functionCall) {
                    functionCall.arguments =
                      chunk.item.inputs || functionCall.arguments;
                    functionCall.status =
                      chunk.item.status === "completed" ? "completed" : "error";
                    functionCall.id = chunk.item.id;
                    functionCall.type = chunk.item.type;
                    if (chunk.item.results) {
                      functionCall.result = chunk.item.results;
                    }
                  } else {
                    const newFunctionCall = {
                      name:
                        chunk.item.tool_name ||
                        chunk.item.name ||
                        chunk.item.type ||
                        "unknown",
                      arguments: chunk.item.inputs || {},
                      status: "completed" as const,
                      id: chunk.item.id,
                      type: chunk.item.type,
                      result: chunk.item.results,
                    };
                    currentFunctionCalls.push(newFunctionCall);
                  }
                }
                // Handle text output streaming (Realtime API)
                else if (chunk.type === "response.output_text.delta") {
                  currentContent += chunk.delta || "";
                }
                // Handle OpenRAG backend format
                else if (chunk.output_text) {
                  currentContent += chunk.output_text;
                } else if (chunk.delta) {
                  if (typeof chunk.delta === "string") {
                    currentContent += chunk.delta;
                  } else if (typeof chunk.delta === "object") {
                    if (chunk.delta.content) {
                      currentContent += chunk.delta.content;
                    } else if (chunk.delta.text) {
                      currentContent += chunk.delta.text;
                    }
                  }
                }
                // Update streaming message in real-time
                if (
                  !controller.signal.aborted &&
                  thisStreamId === streamIdRef.current
                ) {
                  setStreamingMessage({
                    role: "assistant",
                    content: currentContent,
                    functionCalls:
                      currentFunctionCalls.length > 0
                        ? [...currentFunctionCalls]
                        : undefined,
                    timestamp: new Date(),
                    isStreaming: true,
                  });
                }
              } catch (parseError) {
                // A malformed line is skipped; the stream keeps going.
                console.warn("Failed to parse chunk:", line, parseError);
              }
            }
          }
        }
      } finally {
        reader.releaseLock();
      }

      // Finalize the message
      const finalMessage: Message = {
        role: "assistant",
        content: currentContent,
        functionCalls:
          currentFunctionCalls.length > 0 ? currentFunctionCalls : undefined,
        timestamp: new Date(),
        isStreaming: false,
      };
      if (!controller.signal.aborted && thisStreamId === streamIdRef.current) {
        // Clear streaming message and call onComplete with final message
        setStreamingMessage(null);
        onComplete?.(finalMessage, newResponseId);
        return finalMessage;
      }
      return null;
    } catch (error) {
      // If stream was aborted, don't handle as error
      if (streamAbortRef.current?.signal.aborted) {
        return null;
      }
      console.error("SSE Stream error:", error);
      setStreamingMessage(null);
      onError?.(error as Error);
      const errorMessage: Message = {
        role: "assistant",
        content:
          "Sorry, I couldn't connect to the chat service. Please try again.",
        timestamp: new Date(),
        isStreaming: false,
      };
      return errorMessage;
    } finally {
      setIsLoading(false);
    }
  };

  /** Cancel the in-flight stream (if any) and reset streaming state. */
  const abortStream = () => {
    if (streamAbortRef.current) {
      streamAbortRef.current.abort();
    }
    setStreamingMessage(null);
    setIsLoading(false);
  };

  return {
    streamingMessage,
    isLoading,
    sendMessage,
    abortStream,
  };
}

View file

@ -3,7 +3,7 @@
*/
export const DEFAULT_AGENT_SETTINGS = {
llm_model: "gpt-4o-mini",
system_prompt: "You are a helpful assistant that can use tools to answer questions and perform tasks."
system_prompt: "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides information about them. When asked about what is OpenRAG, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** OpenSearch is a community-driven, open-source search and analytics suite [Read more](https://opensearch.org/)\n\n**Docling** Docling simplifies document processing, parsing diverse formats for the gen AI ecosystem [Read more](https://www.docling.ai/)\""
} as const;
/**
@ -22,4 +22,17 @@ export const DEFAULT_KNOWLEDGE_SETTINGS = {
*/
export const UI_CONSTANTS = {
MAX_SYSTEM_PROMPT_CHARS: 2000,
} as const;
} as const;
export const ANIMATION_DURATION = 0.4;
export const SIDEBAR_WIDTH = 280;
export const HEADER_HEIGHT = 54;
export const TOTAL_ONBOARDING_STEPS = 3;
/**
* Local Storage Keys
*/
export const ONBOARDING_STEP_KEY = "onboarding_current_step";
export const FILES_REGEX =
/(?<=I'm uploading a document called ['"])[^'"]+\.[^.]+(?=['"]\. Here is its content:)/;

View file

@ -72,6 +72,14 @@ const config = {
height: "0",
},
},
shimmer: {
"0%": {
backgroundPosition: "200% 0",
},
"100%": {
backgroundPosition: "-200% 0",
},
},
},
animation: {
overlayShow: "overlayShow 400ms cubic-bezier(0.16, 1, 0.3, 1)",
@ -79,6 +87,7 @@ const config = {
wiggle: "wiggle 150ms ease-in-out 1",
"accordion-down": "accordion-down 0.2s ease-out",
"accordion-up": "accordion-up 0.2s ease-out",
shimmer: "shimmer 3s ease-in-out infinite",
},
colors: {
border: "hsl(var(--border))",

View file

@ -34,7 +34,7 @@ def get_conversation_thread(user_id: str, previous_response_id: str = None):
"messages": [
{
"role": "system",
"content": "You are a helpful assistant. Always use the search_tools to answer questions.",
"content": "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides information about them. When asked about what is OpenRAG, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** OpenSearch is an open-source search and analytics suite [Read more](https://opensearch.org/)\n\n**Docling** Docling is an open-source toolkit for parsing and converting documents [Read more](https://www.docling.ai/)\"",
}
],
"previous_response_id": previous_response_id, # Parent response_id for branching

View file

@ -424,13 +424,10 @@ async def onboarding(request, flows_service):
# Get current configuration
current_config = get_openrag_config()
# Check if config is NOT marked as edited (only allow onboarding if not yet configured)
# Warn if config was already edited (onboarding being re-run)
if current_config.edited:
return JSONResponse(
{
"error": "Configuration has already been edited. Use /settings endpoint for updates."
},
status_code=403,
logger.warning(
"Onboarding is being run although configuration was already edited before"
)
# Parse request body

View file

@ -99,13 +99,12 @@ async def upload_context(
# Get optional parameters
previous_response_id = form.get("previous_response_id")
endpoint = form.get("endpoint", "langflow")
jwt_token = session_manager.get_effective_jwt_token(user_id, request.state.jwt_token)
# Get user info from request state (set by auth middleware)
user = request.state.user
user_id = user.user_id if user else None
jwt_token = session_manager.get_effective_jwt_token(user_id, request.state.jwt_token)
# Process document and extract content
doc_result = await document_service.process_upload_context(upload_file, filename)