feat: adds what is openrag prompt, refactors chat design, adds scroll to bottom on chat, adds streaming support (#283)

* Changed prompts to include info about OpenRAG, changed status of As Dataframe and As Vector Store to false on OpenSearch component

* added markdown to onboarding step

* added className to markdown renderer

* changed onboarding step to not render span

* Added nudges to onboarding content

* Added onboarding style for nudges

* updated user message and assistant message designs

* updated route.ts to handle streaming messages

* created new useChatStreaming to handle streaming

* changed useChatStreaming to work with the chat page

* changed onboarding content to use default messages instead of onboarding steps, and to use the new hook to send messages

* added span to the markdown renderer on stream

* updated page to use new chat streaming hook

* disable animation on completed steps

* changed markdown renderer margins

* changed css to not display markdown links and texts on white always

* added isCompleted to assistant and user messages

* removed space between elements on onboarding step to ensure smoother animation

* removed opacity 50 on onboarding messages

* changed default api to be langflow on chat streaming

* added fade in and color transition

* added color transition

* Rendered onboarding with use-stick-to-bottom

* Added use stick to bottom on page

* fixed nudges design

* changed chat input design

* fixed nudges design

* made overflow be hidden on main

* Added overflow y auto on other pages

* Put animate on messages

* Add source to types

* Adds animate and delay props to messages
This commit is contained in:
Lucas Oliveira 2025-10-22 14:03:23 -03:00 committed by GitHub
parent c5447f6c5d
commit fcf7a302d0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 2660 additions and 2601 deletions

View file

@ -1261,7 +1261,7 @@
"display_name": "as_dataframe", "display_name": "as_dataframe",
"name": "as_dataframe", "name": "as_dataframe",
"readonly": false, "readonly": false,
"status": true, "status": false,
"tags": [ "tags": [
"as_dataframe" "as_dataframe"
] ]
@ -1280,7 +1280,7 @@
"display_name": "as_vector_store", "display_name": "as_vector_store",
"name": "as_vector_store", "name": "as_vector_store",
"readonly": false, "readonly": false,
"status": true, "status": false,
"tags": [ "tags": [
"as_vector_store" "as_vector_store"
] ]
@ -2086,7 +2086,7 @@
"trace_as_input": true, "trace_as_input": true,
"trace_as_metadata": true, "trace_as_metadata": true,
"type": "str", "type": "str",
"value": "You are a helpful assistant that can use tools to answer questions and perform tasks." "value": "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides informations about them. When asked about what is OpenRAG, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://opensearch.org/)\n\n**Docling** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.docling.ai/)\""
}, },
"tools": { "tools": {
"_input_type": "HandleInput", "_input_type": "HandleInput",

View file

@ -7,6 +7,7 @@ import CodeComponent from "./code-component";
type MarkdownRendererProps = { type MarkdownRendererProps = {
chatMessage: string; chatMessage: string;
className?: string;
}; };
const preprocessChatMessage = (text: string): string => { const preprocessChatMessage = (text: string): string => {
@ -48,7 +49,7 @@ export const cleanupTableEmptyCells = (text: string): string => {
}) })
.join("\n"); .join("\n");
}; };
export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => { export const MarkdownRenderer = ({ chatMessage, className }: MarkdownRendererProps) => {
// Process the chat message to handle <think> tags and clean up tables // Process the chat message to handle <think> tags and clean up tables
const processedChatMessage = preprocessChatMessage(chatMessage); const processedChatMessage = preprocessChatMessage(chatMessage);
@ -57,6 +58,7 @@ export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => {
className={cn( className={cn(
"markdown prose flex w-full max-w-full flex-col items-baseline text-base font-normal word-break-break-word dark:prose-invert", "markdown prose flex w-full max-w-full flex-col items-baseline text-base font-normal word-break-break-word dark:prose-invert",
!chatMessage ? "text-muted-foreground" : "text-primary", !chatMessage ? "text-muted-foreground" : "text-primary",
className,
)} )}
> >
<Markdown <Markdown
@ -65,11 +67,14 @@ export const MarkdownRenderer = ({ chatMessage }: MarkdownRendererProps) => {
urlTransform={(url) => url} urlTransform={(url) => url}
components={{ components={{
p({ node, ...props }) { p({ node, ...props }) {
return <p className="w-fit max-w-full">{props.children}</p>; return <p className="w-fit max-w-full first:mt-0 last:mb-0 my-2">{props.children}</p>;
}, },
ol({ node, ...props }) { ol({ node, ...props }) {
return <ol className="max-w-full">{props.children}</ol>; return <ol className="max-w-full">{props.children}</ol>;
}, },
strong({ node, ...props }) {
return <strong className="font-bold">{props.children}</strong>;
},
h1({ node, ...props }) { h1({ node, ...props }) {
return <h1 className="mb-6 mt-4">{props.children}</h1>; return <h1 className="mb-6 mt-4">{props.children}</h1>;
}, },

View file

@ -52,6 +52,7 @@
"sonner": "^2.0.6", "sonner": "^2.0.6",
"tailwind-merge": "^3.3.1", "tailwind-merge": "^3.3.1",
"tailwindcss-animate": "^1.0.7", "tailwindcss-animate": "^1.0.7",
"use-stick-to-bottom": "^1.1.1",
"zustand": "^5.0.8" "zustand": "^5.0.8"
}, },
"devDependencies": { "devDependencies": {
@ -10224,6 +10225,15 @@
} }
} }
}, },
"node_modules/use-stick-to-bottom": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/use-stick-to-bottom/-/use-stick-to-bottom-1.1.1.tgz",
"integrity": "sha512-JkDp0b0tSmv7HQOOpL1hT7t7QaoUBXkq045WWWOFDTlLGRzgIIyW7vyzOIJzY7L2XVIG7j1yUxeDj2LHm9Vwng==",
"license": "MIT",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/use-sync-external-store": { "node_modules/use-sync-external-store": {
"version": "1.5.0", "version": "1.5.0",
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz", "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz",

View file

@ -53,6 +53,7 @@
"sonner": "^2.0.6", "sonner": "^2.0.6",
"tailwind-merge": "^3.3.1", "tailwind-merge": "^3.3.1",
"tailwindcss-animate": "^1.0.7", "tailwindcss-animate": "^1.0.7",
"use-stick-to-bottom": "^1.1.1",
"zustand": "^5.0.8" "zustand": "^5.0.8"
}, },
"devDependencies": { "devDependencies": {

View file

@ -106,7 +106,6 @@ async function proxyRequest(
} }
const response = await fetch(backendUrl, init); const response = await fetch(backendUrl, init);
const responseBody = await response.text();
const responseHeaders = new Headers(); const responseHeaders = new Headers();
// Copy response headers // Copy response headers
@ -117,11 +116,22 @@ async function proxyRequest(
} }
} }
// For streaming responses, pass the body directly without buffering
if (response.body) {
return new NextResponse(response.body, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
});
} else {
// Fallback for non-streaming responses
const responseBody = await response.text();
return new NextResponse(responseBody, { return new NextResponse(responseBody, {
status: response.status, status: response.status,
statusText: response.statusText, statusText: response.statusText,
headers: responseHeaders, headers: responseHeaders,
}); });
}
} catch (error) { } catch (error) {
console.error('Proxy error:', error); console.error('Proxy error:', error);
return NextResponse.json( return NextResponse.json(

View file

@ -1,9 +1,11 @@
import { Bot, GitBranch } from "lucide-react"; import { GitBranch } from "lucide-react";
import { motion } from "motion/react";
import DogIcon from "@/components/logo/dog-icon";
import { MarkdownRenderer } from "@/components/markdown-renderer"; import { MarkdownRenderer } from "@/components/markdown-renderer";
import { cn } from "@/lib/utils";
import type { FunctionCall } from "../types";
import { FunctionCalls } from "./function-calls"; import { FunctionCalls } from "./function-calls";
import { Message } from "./message"; import { Message } from "./message";
import type { FunctionCall } from "../types";
import DogIcon from "@/components/logo/dog-icon";
interface AssistantMessageProps { interface AssistantMessageProps {
content: string; content: string;
@ -14,6 +16,9 @@ interface AssistantMessageProps {
isStreaming?: boolean; isStreaming?: boolean;
showForkButton?: boolean; showForkButton?: boolean;
onFork?: (e: React.MouseEvent) => void; onFork?: (e: React.MouseEvent) => void;
isCompleted?: boolean;
animate?: boolean;
delay?: number;
} }
export function AssistantMessage({ export function AssistantMessage({
@ -25,20 +30,31 @@ export function AssistantMessage({
isStreaming = false, isStreaming = false,
showForkButton = false, showForkButton = false,
onFork, onFork,
isCompleted = false,
animate = true,
delay = 0.2,
}: AssistantMessageProps) { }: AssistantMessageProps) {
const updatedOnboarding = process.env.UPDATED_ONBOARDING === "true";
const IconComponent = updatedOnboarding ? DogIcon : Bot;
return ( return (
<motion.div
initial={animate ? { opacity: 0, y: -20 } : { opacity: 1, y: 0 }}
animate={{ opacity: 1, y: 0 }}
transition={animate ? { duration: 0.4, delay: delay, ease: "easeOut" } : { duration: 0 }}
className={isCompleted ? "opacity-50" : ""}
>
<Message <Message
icon={ icon={
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none"> <div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<IconComponent className="h-4 w-4 text-accent-foreground" /> <DogIcon
className="h-6 w-6 transition-colors duration-300"
disabled={isCompleted}
/>
</div> </div>
} }
actions={ actions={
showForkButton && onFork ? ( showForkButton && onFork ? (
<button <button
type="button"
onClick={onFork} onClick={onFork}
className="opacity-0 group-hover:opacity-100 transition-opacity p-1 hover:bg-accent rounded text-muted-foreground hover:text-foreground" className="opacity-0 group-hover:opacity-100 transition-opacity p-1 hover:bg-accent rounded text-muted-foreground hover:text-foreground"
title="Fork conversation from here" title="Fork conversation from here"
@ -54,10 +70,18 @@ export function AssistantMessage({
expandedFunctionCalls={expandedFunctionCalls} expandedFunctionCalls={expandedFunctionCalls}
onToggle={onToggle} onToggle={onToggle}
/> />
<MarkdownRenderer chatMessage={content} /> <div className="relative">
{isStreaming && ( <MarkdownRenderer
<span className="inline-block w-2 h-4 bg-blue-400 ml-1 animate-pulse"></span> className={cn("text-sm py-1.5 transition-colors duration-300", isCompleted ? "text-placeholder-foreground" : "text-foreground")}
)} chatMessage={
isStreaming
? content +
' <span class="inline-block w-1 h-4 bg-primary ml-1 animate-pulse"></span>'
: content
}
/>
</div>
</Message> </Message>
</motion.div>
); );
} }

View file

@ -1,6 +1,7 @@
import { Check, Funnel, Loader2, Plus, X } from "lucide-react"; import { Check, Funnel, Loader2, Plus, X } from "lucide-react";
import TextareaAutosize from "react-textarea-autosize";
import { forwardRef, useImperativeHandle, useRef } from "react"; import { forwardRef, useImperativeHandle, useRef } from "react";
import TextareaAutosize from "react-textarea-autosize";
import type { FilterColor } from "@/components/filter-icon-popover";
import { filterAccentClasses } from "@/components/knowledge-filter-panel"; import { filterAccentClasses } from "@/components/knowledge-filter-panel";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { import {
@ -9,7 +10,6 @@ import {
PopoverContent, PopoverContent,
} from "@/components/ui/popover"; } from "@/components/ui/popover";
import type { KnowledgeFilterData } from "../types"; import type { KnowledgeFilterData } from "../types";
import { FilterColor } from "@/components/filter-icon-popover";
export interface ChatInputHandle { export interface ChatInputHandle {
focusInput: () => void; focusInput: () => void;
@ -41,7 +41,8 @@ interface ChatInputProps {
setIsFilterDropdownOpen: (open: boolean) => void; setIsFilterDropdownOpen: (open: boolean) => void;
} }
export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>(( export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>(
(
{ {
input, input,
loading, loading,
@ -66,7 +67,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
setIsFilterHighlighted, setIsFilterHighlighted,
setIsFilterDropdownOpen, setIsFilterDropdownOpen,
}, },
ref ref,
) => { ) => {
const inputRef = useRef<HTMLTextAreaElement>(null); const inputRef = useRef<HTMLTextAreaElement>(null);
const fileInputRef = useRef<HTMLInputElement>(null); const fileInputRef = useRef<HTMLInputElement>(null);
@ -81,7 +82,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
})); }));
return ( return (
<div className="pb-8 pt-4 flex px-6"> <div className="pb-8 flex px-6">
<div className="w-full"> <div className="w-full">
<form onSubmit={onSubmit} className="relative"> <form onSubmit={onSubmit} className="relative">
<div className="relative w-full bg-muted/20 rounded-lg border border-border/50 focus-within:ring-1 focus-within:ring-ring"> <div className="relative w-full bg-muted/20 rounded-lg border border-border/50 focus-within:ring-1 focus-within:ring-ring">
@ -144,7 +145,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
variant="outline" variant="outline"
size="iconSm" size="iconSm"
className="absolute bottom-3 left-3 h-8 w-8 p-0 rounded-full hover:bg-muted/50" className="absolute bottom-3 left-3 h-8 w-8 p-0 rounded-full hover:bg-muted/50"
onMouseDown={e => { onMouseDown={(e) => {
e.preventDefault(); e.preventDefault();
}} }}
onClick={onAtClick} onClick={onAtClick}
@ -154,7 +155,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
</Button> </Button>
<Popover <Popover
open={isFilterDropdownOpen} open={isFilterDropdownOpen}
onOpenChange={open => { onOpenChange={(open) => {
setIsFilterDropdownOpen(open); setIsFilterDropdownOpen(open);
}} }}
> >
@ -179,7 +180,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
align="start" align="start"
sideOffset={6} sideOffset={6}
alignOffset={-18} alignOffset={-18}
onOpenAutoFocus={e => { onOpenAutoFocus={(e) => {
// Prevent auto focus on the popover content // Prevent auto focus on the popover content
e.preventDefault(); e.preventDefault();
// Keep focus on the input // Keep focus on the input
@ -212,10 +213,10 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
</button> </button>
)} )}
{availableFilters {availableFilters
.filter(filter => .filter((filter) =>
filter.name filter.name
.toLowerCase() .toLowerCase()
.includes(filterSearchTerm.toLowerCase()) .includes(filterSearchTerm.toLowerCase()),
) )
.map((filter, index) => ( .map((filter, index) => (
<button <button
@ -241,10 +242,10 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
)} )}
</button> </button>
))} ))}
{availableFilters.filter(filter => {availableFilters.filter((filter) =>
filter.name filter.name
.toLowerCase() .toLowerCase()
.includes(filterSearchTerm.toLowerCase()) .includes(filterSearchTerm.toLowerCase()),
).length === 0 && ).length === 0 &&
filterSearchTerm && ( filterSearchTerm && (
<div className="px-2 py-3 text-sm text-muted-foreground"> <div className="px-2 py-3 text-sm text-muted-foreground">
@ -277,6 +278,7 @@ export const ChatInput = forwardRef<ChatInputHandle, ChatInputProps>((
</div> </div>
</div> </div>
); );
}); },
);
ChatInput.displayName = "ChatInput"; ChatInput.displayName = "ChatInput";

View file

@ -1,33 +1,52 @@
import { User } from "lucide-react"; import { User } from "lucide-react";
import { motion } from "motion/react";
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { useAuth } from "@/contexts/auth-context"; import { useAuth } from "@/contexts/auth-context";
import { cn } from "@/lib/utils";
import { Message } from "./message"; import { Message } from "./message";
interface UserMessageProps { interface UserMessageProps {
content: string; content: string;
isCompleted?: boolean;
animate?: boolean;
} }
export function UserMessage({ content }: UserMessageProps) { export function UserMessage({ content, isCompleted, animate = true }: UserMessageProps) {
const { user } = useAuth(); const { user } = useAuth();
console.log("animate", animate);
return ( return (
<motion.div
initial={animate ? { opacity: 0, y: -20 } : { opacity: 1, y: 0 }}
animate={{ opacity: 1, y: 0 }}
transition={animate ? { duration: 0.4, delay: 0.2, ease: "easeOut" } : { duration: 0 }}
className={isCompleted ? "opacity-50" : ""}
>
<Message <Message
icon={ icon={
<Avatar className="w-8 h-8 flex-shrink-0 select-none"> <Avatar className="w-8 h-8 rounded-lg flex-shrink-0 select-none">
<AvatarImage draggable={false} src={user?.picture} alt={user?.name} /> <AvatarImage draggable={false} src={user?.picture} alt={user?.name} />
<AvatarFallback className="text-sm bg-primary/20 text-primary"> <AvatarFallback
{user?.name ? ( className={cn(
user.name.charAt(0).toUpperCase() isCompleted ? "text-placeholder-foreground" : "text-primary",
) : ( "text-sm bg-accent/20 rounded-lg transition-colors duration-300",
<User className="h-4 w-4" />
)} )}
>
{user?.name ? user.name.charAt(0).toUpperCase() : <User className="h-4 w-4" />}
</AvatarFallback> </AvatarFallback>
</Avatar> </Avatar>
} }
> >
<p className="text-foreground whitespace-pre-wrap break-words overflow-wrap-anywhere"> <p
className={cn(
"text-foreground text-sm py-1.5 whitespace-pre-wrap break-words overflow-wrap-anywhere transition-colors duration-300",
isCompleted ? "text-placeholder-foreground" : "text-foreground",
)}
>
{content} {content}
</p> </p>
</Message> </Message>
</motion.div>
); );
} }

View file

@ -1,11 +1,13 @@
import { motion, AnimatePresence } from "motion/react"; import { AnimatePresence, motion } from "motion/react";
import { cn } from "@/lib/utils";
export default function Nudges({ export default function Nudges({
nudges, nudges,
onboarding,
handleSuggestionClick, handleSuggestionClick,
}: { }: {
nudges: string[]; nudges: string[];
onboarding?: boolean;
handleSuggestionClick: (suggestion: string) => void; handleSuggestionClick: (suggestion: string) => void;
}) { }) {
return ( return (
@ -21,14 +23,21 @@ export default function Nudges({
ease: "easeInOut", ease: "easeInOut",
}} }}
> >
<div className="relative px-6 pt-4 flex justify-center"> <div
<div className="w-full max-w-[75%]"> className="relative flex"
>
<div className="w-full">
<div className="flex gap-3 justify-start overflow-x-auto scrollbar-hide"> <div className="flex gap-3 justify-start overflow-x-auto scrollbar-hide">
{nudges.map((suggestion: string, index: number) => ( {nudges.map((suggestion: string, index: number) => (
<button <button
key={index} key={index}
onClick={() => handleSuggestionClick(suggestion)} onClick={() => handleSuggestionClick(suggestion)}
className="px-2 py-1.5 bg-muted hover:bg-muted/50 rounded-lg text-sm text-placeholder-foreground hover:text-foreground transition-colors whitespace-nowrap" className={cn(
onboarding
? "text-foreground"
: "text-placeholder-foreground hover:text-foreground",
"bg-background border hover:bg-background/50 px-2 py-1.5 rounded-lg text-sm transition-colors whitespace-nowrap",
)}
> >
{suggestion} {suggestion}
</button> </button>

File diff suppressed because it is too large Load diff

View file

@ -4,6 +4,7 @@ export interface Message {
timestamp: Date; timestamp: Date;
functionCalls?: FunctionCall[]; functionCalls?: FunctionCall[];
isStreaming?: boolean; isStreaming?: boolean;
source?: "langflow" | "chat";
} }
export interface FunctionCall { export interface FunctionCall {

View file

@ -345,6 +345,15 @@
@apply text-xs opacity-70; @apply text-xs opacity-70;
} }
.prose :where(strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) {
@apply text-current;
}
.prose :where(a):not(:where([class~="not-prose"],[class~="not-prose"] *))
{
@apply text-current;
}
.box-shadow-inner::after { .box-shadow-inner::after {
content: " "; content: " ";
position: absolute; position: absolute;

View file

@ -1,6 +1,13 @@
"use client"; "use client";
import { useState } from "react";
import { StickToBottom } from "use-stick-to-bottom";
import { AssistantMessage } from "@/app/chat/components/assistant-message";
import { UserMessage } from "@/app/chat/components/user-message";
import Nudges from "@/app/chat/nudges";
import type { Message } from "@/app/chat/types";
import OnboardingCard from "@/app/onboarding/components/onboarding-card"; import OnboardingCard from "@/app/onboarding/components/onboarding-card";
import { useChatStreaming } from "@/hooks/useChatStreaming";
import { OnboardingStep } from "./onboarding-step"; import { OnboardingStep } from "./onboarding-step";
export function OnboardingContent({ export function OnboardingContent({
@ -10,8 +17,55 @@ export function OnboardingContent({
handleStepComplete: () => void; handleStepComplete: () => void;
currentStep: number; currentStep: number;
}) { }) {
const [responseId, setResponseId] = useState<string | null>(null);
const [selectedNudge, setSelectedNudge] = useState<string>("");
const [assistantMessage, setAssistantMessage] = useState<Message | null>(
null,
);
const { streamingMessage, isLoading, sendMessage } = useChatStreaming({
onComplete: (message, newResponseId) => {
setAssistantMessage(message);
if (newResponseId) {
setResponseId(newResponseId);
}
},
onError: (error) => {
console.error("Chat error:", error);
setAssistantMessage({
role: "assistant",
content:
"Sorry, I couldn't connect to the chat service. Please try again.",
timestamp: new Date(),
});
},
});
const NUDGES = ["What is OpenRAG?"];
const handleNudgeClick = async (nudge: string) => {
setSelectedNudge(nudge);
setAssistantMessage(null);
setTimeout(async () => {
await sendMessage({
prompt: nudge,
previousResponseId: responseId || undefined,
});
}, 1500);
};
// Determine which message to show (streaming takes precedence)
const displayMessage = streamingMessage || assistantMessage;
return ( return (
<div className="space-y-6"> <StickToBottom
className="flex h-full flex-1 flex-col"
resize="smooth"
initial="instant"
mass={1}
>
<StickToBottom.Content className="flex flex-col min-h-full overflow-x-hidden px-8 py-6">
<div className="flex flex-col place-self-center w-full space-y-6">
<OnboardingStep <OnboardingStep
isVisible={currentStep >= 0} isVisible={currentStep >= 0}
isCompleted={currentStep > 0} isCompleted={currentStep > 0}
@ -22,21 +76,53 @@ export function OnboardingContent({
<OnboardingStep <OnboardingStep
isVisible={currentStep >= 1} isVisible={currentStep >= 1}
isCompleted={currentStep > 1} isCompleted={currentStep > 1 || !!selectedNudge}
text="Step 1: Configure your settings" text="Excellent, let's move on to learning the basics."
> >
<div className="space-y-4"> <div className="py-2">
<p className="text-muted-foreground"> <Nudges
Let's configure some basic settings for your account. onboarding
</p> nudges={NUDGES}
handleSuggestionClick={handleNudgeClick}
/>
</div>
</OnboardingStep>
{/* User message - show when nudge is selected */}
{currentStep >= 1 && !!selectedNudge && (
<UserMessage
content={selectedNudge}
isCompleted={currentStep > 1}
/>
)}
{/* Assistant message - show streaming or final message */}
{currentStep >= 1 &&
!!selectedNudge &&
(displayMessage || isLoading) && (
<>
<AssistantMessage
content={displayMessage?.content || ""}
functionCalls={displayMessage?.functionCalls}
messageIndex={0}
expandedFunctionCalls={new Set()}
onToggle={() => {}}
isStreaming={!!streamingMessage}
isCompleted={currentStep > 1}
/>
{!isLoading && displayMessage && currentStep === 1 && (
<div className="mt-4">
<button <button
type="button"
onClick={handleStepComplete} onClick={handleStepComplete}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90" className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
> >
Continue Continue
</button> </button>
</div> </div>
</OnboardingStep> )}
</>
)}
<OnboardingStep <OnboardingStep
isVisible={currentStep >= 2} isVisible={currentStep >= 2}
@ -48,6 +134,7 @@ export function OnboardingContent({
Choose and connect your preferred AI model provider. Choose and connect your preferred AI model provider.
</p> </p>
<button <button
type="button"
onClick={handleStepComplete} onClick={handleStepComplete}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90" className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
> >
@ -66,6 +153,7 @@ export function OnboardingContent({
Your account is ready to use. Let's start chatting! Your account is ready to use. Let's start chatting!
</p> </p>
<button <button
type="button"
onClick={handleStepComplete} onClick={handleStepComplete}
className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90" className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90"
> >
@ -74,5 +162,7 @@ export function OnboardingContent({
</div> </div>
</OnboardingStep> </OnboardingStep>
</div> </div>
</StickToBottom.Content>
</StickToBottom>
); );
} }

View file

@ -2,12 +2,16 @@ import { AnimatePresence, motion } from "motion/react";
import { type ReactNode, useEffect, useState } from "react"; import { type ReactNode, useEffect, useState } from "react";
import { Message } from "@/app/chat/components/message"; import { Message } from "@/app/chat/components/message";
import DogIcon from "@/components/logo/dog-icon"; import DogIcon from "@/components/logo/dog-icon";
import { MarkdownRenderer } from "@/components/markdown-renderer";
import { cn } from "@/lib/utils";
interface OnboardingStepProps { interface OnboardingStepProps {
text: string; text: string;
children: ReactNode; children?: ReactNode;
isVisible: boolean; isVisible: boolean;
isCompleted?: boolean; isCompleted?: boolean;
icon?: ReactNode;
isMarkdown?: boolean;
} }
export function OnboardingStep({ export function OnboardingStep({
@ -15,6 +19,8 @@ export function OnboardingStep({
children, children,
isVisible, isVisible,
isCompleted = false, isCompleted = false,
icon,
isMarkdown = false,
}: OnboardingStepProps) { }: OnboardingStepProps) {
const [displayedText, setDisplayedText] = useState(""); const [displayedText, setDisplayedText] = useState("");
const [showChildren, setShowChildren] = useState(false); const [showChildren, setShowChildren] = useState(false);
@ -26,6 +32,12 @@ export function OnboardingStep({
return; return;
} }
if (isCompleted) {
setDisplayedText(text);
setShowChildren(true);
return;
}
let currentIndex = 0; let currentIndex = 0;
setDisplayedText(""); setDisplayedText("");
setShowChildren(false); setShowChildren(false);
@ -41,7 +53,7 @@ export function OnboardingStep({
}, 20); // 20ms per character }, 20); // 20ms per character
return () => clearInterval(interval); return () => clearInterval(interval);
}, [text, isVisible]); }, [text, isVisible, isCompleted]);
if (!isVisible) return null; if (!isVisible) return null;
@ -54,35 +66,54 @@ export function OnboardingStep({
> >
<Message <Message
icon={ icon={
icon || (
<div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none"> <div className="w-8 h-8 rounded-lg bg-accent/20 flex items-center justify-center flex-shrink-0 select-none">
<DogIcon <DogIcon
className="h-6 w-6 text-accent-foreground" className="h-6 w-6 text-accent-foreground transition-colors duration-300"
disabled={isCompleted} disabled={isCompleted}
/> />
</div> </div>
)
} }
> >
<div className="space-y-4"> <div>
{isMarkdown ? (
<MarkdownRenderer
className={cn(
isCompleted
? "text-placeholder-foreground"
: "text-foreground",
"text-sm py-1.5 transition-colors duration-300",
)}
chatMessage={text}
/>
) : (
<p <p
className={`text-foreground text-sm py-1.5 ${isCompleted ? "text-placeholder-foreground" : ""}`} className={`text-foreground text-sm py-1.5 transition-colors duration-300 ${
isCompleted ? "text-placeholder-foreground" : ""
}`}
> >
{displayedText} {displayedText}
{!showChildren && !isCompleted && ( {!showChildren && !isCompleted && (
<span className="inline-block w-1 h-4 bg-primary ml-1 animate-pulse" /> <span className="inline-block w-1 h-3.5 bg-primary ml-1 animate-pulse" />
)} )}
</p> </p>
)}
{children && (
<AnimatePresence> <AnimatePresence>
{showChildren && !isCompleted && ( {((showChildren && !isCompleted) || isMarkdown) && (
<motion.div <motion.div
initial={{ opacity: 0, y: -10 }} initial={{ opacity: 0, y: -10 }}
animate={{ opacity: 1, y: 0 }} animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, height: 0 }} exit={{ opacity: 0, height: 0 }}
transition={{ duration: 0.3, delay: 0.3, ease: "easeOut" }} transition={{ duration: 0.3, delay: 0.3, ease: "easeOut" }}
> >
{children} <div className="pt-2">
{children}</div>
</motion.div> </motion.div>
)} )}
</AnimatePresence> </AnimatePresence>
)}
</div> </div>
</Message> </Message>
</motion.div> </motion.div>

View file

@ -121,7 +121,7 @@ export function ChatRenderer({
</AnimatedConditional> </AnimatedConditional>
{/* Main Content */} {/* Main Content */}
<main className="overflow-visible w-full flex items-center justify-center [grid-area:main]"> <main className="overflow-hidden w-full flex items-center justify-center [grid-area:main]">
<motion.div <motion.div
initial={{ initial={{
width: showLayout ? "100%" : "100vw", width: showLayout ? "100%" : "100vw",
@ -152,11 +152,11 @@ export function ChatRenderer({
> >
<div <div
className={cn( className={cn(
"h-full bg-background", "h-full bg-background w-full",
showLayout && "p-6 container", showLayout && !isOnChatPage && "p-6 container overflow-y-auto",
showLayout && isSmallWidthPath && "max-w-[850px] ml-0", showLayout && isSmallWidthPath && "max-w-[850px] ml-0",
!showLayout && !showLayout &&
"w-full bg-card rounded-lg shadow-2xl p-8 overflow-y-auto", "w-full bg-card rounded-lg shadow-2xl p-0 py-2 overflow-y-auto",
)} )}
> >
<motion.div <motion.div

View file

@ -0,0 +1,492 @@
import { useRef, useState } from "react";
import type { FunctionCall, Message, SelectedFilters } from "@/app/chat/types";
interface UseChatStreamingOptions {
endpoint?: string;
onComplete?: (message: Message, responseId: string | null) => void;
onError?: (error: Error) => void;
}
interface SendMessageOptions {
prompt: string;
previousResponseId?: string;
filters?: SelectedFilters;
limit?: number;
scoreThreshold?: number;
}
/**
 * React hook that streams a chat completion from `endpoint` and exposes the
 * partially-received assistant message while it arrives.
 *
 * The response body is consumed as newline-delimited JSON chunks. Three chunk
 * shapes are recognized:
 *   - an OpenAI-style `{ object: "response.chunk", delta: {...} }` shape with
 *     `function_call` / `tool_calls` / `content` deltas,
 *   - a Realtime-API-style event shape (`response.output_item.added/done`,
 *     `response.output_text.delta`),
 *   - a plain OpenRAG backend shape (`output_text`, or a bare `delta`).
 *
 * Only one stream is active at a time: starting a new stream aborts the
 * previous fetch, and a monotonically increasing stream id guards a stale
 * stream from writing state after it has been superseded.
 */
export function useChatStreaming({
  endpoint = "/api/langflow",
  onComplete,
  onError,
}: UseChatStreamingOptions = {}) {
  // Assistant message as received so far; null when no stream is active.
  const [streamingMessage, setStreamingMessage] = useState<Message | null>(
    null,
  );
  const [isLoading, setIsLoading] = useState(false);
  // Controller for the in-flight fetch; replaced on every sendMessage call.
  const streamAbortRef = useRef<AbortController | null>(null);
  // Monotonic counter: each sendMessage bumps it, marking older streams stale.
  const streamIdRef = useRef(0);
  /**
   * POST `prompt` to `endpoint` and consume the streamed response.
   * Resolves with the finalized assistant Message, an error-placeholder
   * Message on failure, or null when the stream was aborted/superseded.
   */
  const sendMessage = async ({
    prompt,
    previousResponseId,
    filters,
    limit = 10,
    scoreThreshold = 0,
  }: SendMessageOptions) => {
    try {
      setIsLoading(true);
      // Abort any existing stream before starting a new one
      if (streamAbortRef.current) {
        streamAbortRef.current.abort();
      }
      const controller = new AbortController();
      streamAbortRef.current = controller;
      // Capture this stream's id; any later call bumps the counter and makes
      // the comparisons below fail, so this stream stops touching state.
      const thisStreamId = ++streamIdRef.current;
      const requestBody: {
        prompt: string;
        stream: boolean;
        previous_response_id?: string;
        filters?: SelectedFilters;
        limit?: number;
        scoreThreshold?: number;
      } = {
        prompt,
        stream: true,
        limit,
        scoreThreshold,
      };
      if (previousResponseId) {
        requestBody.previous_response_id = previousResponseId;
      }
      if (filters) {
        requestBody.filters = filters;
      }
      const response = await fetch(endpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify(requestBody),
        signal: controller.signal,
      });
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      const reader = response.body?.getReader();
      if (!reader) {
        throw new Error("No reader available");
      }
      const decoder = new TextDecoder();
      let buffer = ""; // holds an incomplete trailing line between reads
      let currentContent = ""; // accumulated assistant text
      const currentFunctionCalls: FunctionCall[] = []; // tool/function calls seen so far
      let newResponseId: string | null = null; // response id reported by the backend
      // Initialize streaming message
      if (!controller.signal.aborted && thisStreamId === streamIdRef.current) {
        setStreamingMessage({
          role: "assistant",
          content: "",
          timestamp: new Date(),
          isStreaming: true,
        });
      }
      try {
        while (true) {
          const { done, value } = await reader.read();
          // Bail out silently if we were aborted or superseded mid-read.
          if (controller.signal.aborted || thisStreamId !== streamIdRef.current)
            break;
          if (done) break;
          buffer += decoder.decode(value, { stream: true });
          // Process complete lines (JSON objects)
          const lines = buffer.split("\n");
          buffer = lines.pop() || ""; // Keep incomplete line in buffer
          for (const line of lines) {
            if (line.trim()) {
              try {
                const chunk = JSON.parse(line);
                // Extract response ID if present
                if (chunk.id) {
                  newResponseId = chunk.id;
                } else if (chunk.response_id) {
                  newResponseId = chunk.response_id;
                }
                // Handle OpenAI Chat Completions streaming format
                if (chunk.object === "response.chunk" && chunk.delta) {
                  // Handle function calls in delta: a chunk carrying a name
                  // starts a new call; argument-only chunks append to the
                  // most recently started call.
                  if (chunk.delta.function_call) {
                    if (chunk.delta.function_call.name) {
                      const functionCall: FunctionCall = {
                        name: chunk.delta.function_call.name,
                        arguments: undefined,
                        status: "pending",
                        argumentsString:
                          chunk.delta.function_call.arguments || "",
                      };
                      currentFunctionCalls.push(functionCall);
                    } else if (chunk.delta.function_call.arguments) {
                      const lastFunctionCall =
                        currentFunctionCalls[currentFunctionCalls.length - 1];
                      if (lastFunctionCall) {
                        if (!lastFunctionCall.argumentsString) {
                          lastFunctionCall.argumentsString = "";
                        }
                        lastFunctionCall.argumentsString +=
                          chunk.delta.function_call.arguments;
                        // Heuristic: once a closing brace is seen, try to
                        // parse; incomplete JSON just waits for more chunks.
                        if (lastFunctionCall.argumentsString.includes("}")) {
                          try {
                            const parsed = JSON.parse(
                              lastFunctionCall.argumentsString
                            );
                            lastFunctionCall.arguments = parsed;
                            lastFunctionCall.status = "completed";
                          } catch (e) {
                            // Arguments not yet complete
                          }
                        }
                      }
                    }
                  }
                  // Handle tool calls in delta (same accumulate-and-try-parse
                  // scheme as function_call above, per tool entry)
                  else if (
                    chunk.delta.tool_calls &&
                    Array.isArray(chunk.delta.tool_calls)
                  ) {
                    for (const toolCall of chunk.delta.tool_calls) {
                      if (toolCall.function) {
                        if (toolCall.function.name) {
                          const functionCall: FunctionCall = {
                            name: toolCall.function.name,
                            arguments: undefined,
                            status: "pending",
                            argumentsString: toolCall.function.arguments || "",
                          };
                          currentFunctionCalls.push(functionCall);
                        } else if (toolCall.function.arguments) {
                          const lastFunctionCall =
                            currentFunctionCalls[
                              currentFunctionCalls.length - 1
                            ];
                          if (lastFunctionCall) {
                            if (!lastFunctionCall.argumentsString) {
                              lastFunctionCall.argumentsString = "";
                            }
                            lastFunctionCall.argumentsString +=
                              toolCall.function.arguments;
                            if (
                              lastFunctionCall.argumentsString.includes("}")
                            ) {
                              try {
                                const parsed = JSON.parse(
                                  lastFunctionCall.argumentsString
                                );
                                lastFunctionCall.arguments = parsed;
                                lastFunctionCall.status = "completed";
                              } catch (e) {
                                // Arguments not yet complete
                              }
                            }
                          }
                        }
                      }
                    }
                  }
                  // Handle content/text in delta
                  else if (chunk.delta.content) {
                    currentContent += chunk.delta.content;
                  }
                  // Handle finish reason: settle any still-pending calls.
                  // NOTE(review): finish_reason is read off `delta` here,
                  // unlike OpenAI's choice-level placement — presumably the
                  // backend re-emits it there; confirm against the proxy.
                  if (chunk.delta.finish_reason) {
                    currentFunctionCalls.forEach((fc) => {
                      if (fc.status === "pending" && fc.argumentsString) {
                        try {
                          fc.arguments = JSON.parse(fc.argumentsString);
                          fc.status = "completed";
                        } catch (e) {
                          fc.arguments = { raw: fc.argumentsString };
                          fc.status = "error";
                        }
                      }
                    });
                  }
                }
                // Handle Realtime API format - function call added.
                // Prefer matching by id; otherwise adopt the most recent
                // pending, id-less call with the same name.
                else if (
                  chunk.type === "response.output_item.added" &&
                  chunk.item?.type === "function_call"
                ) {
                  let existing = currentFunctionCalls.find(
                    (fc) => fc.id === chunk.item.id
                  );
                  if (!existing) {
                    existing = [...currentFunctionCalls]
                      .reverse()
                      .find(
                        (fc) =>
                          fc.status === "pending" &&
                          !fc.id &&
                          fc.name === (chunk.item.tool_name || chunk.item.name)
                      );
                  }
                  if (existing) {
                    existing.id = chunk.item.id;
                    existing.type = chunk.item.type;
                    existing.name =
                      chunk.item.tool_name || chunk.item.name || existing.name;
                    existing.arguments =
                      chunk.item.inputs || existing.arguments;
                  } else {
                    const functionCall: FunctionCall = {
                      name:
                        chunk.item.tool_name || chunk.item.name || "unknown",
                      arguments: chunk.item.inputs || undefined,
                      status: "pending",
                      argumentsString: "",
                      id: chunk.item.id,
                      type: chunk.item.type,
                    };
                    currentFunctionCalls.push(functionCall);
                  }
                }
                // Handle Realtime API format - tool call added (any other
                // "*_call" item type); same match-or-create logic as above.
                else if (
                  chunk.type === "response.output_item.added" &&
                  chunk.item?.type?.includes("_call") &&
                  chunk.item?.type !== "function_call"
                ) {
                  let existing = currentFunctionCalls.find(
                    (fc) => fc.id === chunk.item.id
                  );
                  if (!existing) {
                    existing = [...currentFunctionCalls]
                      .reverse()
                      .find(
                        (fc) =>
                          fc.status === "pending" &&
                          !fc.id &&
                          fc.name ===
                            (chunk.item.tool_name ||
                              chunk.item.name ||
                              chunk.item.type)
                      );
                  }
                  if (existing) {
                    existing.id = chunk.item.id;
                    existing.type = chunk.item.type;
                    existing.name =
                      chunk.item.tool_name ||
                      chunk.item.name ||
                      chunk.item.type ||
                      existing.name;
                    existing.arguments =
                      chunk.item.inputs || existing.arguments;
                  } else {
                    const functionCall = {
                      name:
                        chunk.item.tool_name ||
                        chunk.item.name ||
                        chunk.item.type ||
                        "unknown",
                      arguments: chunk.item.inputs || {},
                      status: "pending" as const,
                      id: chunk.item.id,
                      type: chunk.item.type,
                    };
                    currentFunctionCalls.push(functionCall);
                  }
                }
                // Handle function call done: mark status and attach results.
                else if (
                  chunk.type === "response.output_item.done" &&
                  chunk.item?.type === "function_call"
                ) {
                  const functionCall = currentFunctionCalls.find(
                    (fc) =>
                      fc.id === chunk.item.id ||
                      fc.name === chunk.item.tool_name ||
                      fc.name === chunk.item.name
                  );
                  if (functionCall) {
                    functionCall.status =
                      chunk.item.status === "completed" ? "completed" : "error";
                    functionCall.id = chunk.item.id;
                    functionCall.type = chunk.item.type;
                    functionCall.name =
                      chunk.item.tool_name ||
                      chunk.item.name ||
                      functionCall.name;
                    functionCall.arguments =
                      chunk.item.inputs || functionCall.arguments;
                    if (chunk.item.results) {
                      functionCall.result = chunk.item.results;
                    }
                  }
                }
                // Handle tool call done with results; matching is fuzzier
                // here (name/type substring checks) and falls back to
                // recording a brand-new completed call.
                else if (
                  chunk.type === "response.output_item.done" &&
                  chunk.item?.type?.includes("_call") &&
                  chunk.item?.type !== "function_call"
                ) {
                  const functionCall = currentFunctionCalls.find(
                    (fc) =>
                      fc.id === chunk.item.id ||
                      fc.name === chunk.item.tool_name ||
                      fc.name === chunk.item.name ||
                      fc.name === chunk.item.type ||
                      fc.name.includes(chunk.item.type.replace("_call", "")) ||
                      chunk.item.type.includes(fc.name)
                  );
                  if (functionCall) {
                    functionCall.arguments =
                      chunk.item.inputs || functionCall.arguments;
                    functionCall.status =
                      chunk.item.status === "completed" ? "completed" : "error";
                    functionCall.id = chunk.item.id;
                    functionCall.type = chunk.item.type;
                    if (chunk.item.results) {
                      functionCall.result = chunk.item.results;
                    }
                  } else {
                    const newFunctionCall = {
                      name:
                        chunk.item.tool_name ||
                        chunk.item.name ||
                        chunk.item.type ||
                        "unknown",
                      arguments: chunk.item.inputs || {},
                      status: "completed" as const,
                      id: chunk.item.id,
                      type: chunk.item.type,
                      result: chunk.item.results,
                    };
                    currentFunctionCalls.push(newFunctionCall);
                  }
                }
                // Handle text output streaming (Realtime API)
                else if (chunk.type === "response.output_text.delta") {
                  currentContent += chunk.delta || "";
                }
                // Handle OpenRAG backend format
                else if (chunk.output_text) {
                  currentContent += chunk.output_text;
                } else if (chunk.delta) {
                  if (typeof chunk.delta === "string") {
                    currentContent += chunk.delta;
                  } else if (typeof chunk.delta === "object") {
                    if (chunk.delta.content) {
                      currentContent += chunk.delta.content;
                    } else if (chunk.delta.text) {
                      currentContent += chunk.delta.text;
                    }
                  }
                }
                // Update streaming message in real-time
                if (
                  !controller.signal.aborted &&
                  thisStreamId === streamIdRef.current
                ) {
                  setStreamingMessage({
                    role: "assistant",
                    content: currentContent,
                    functionCalls:
                      currentFunctionCalls.length > 0
                        ? [...currentFunctionCalls]
                        : undefined,
                    timestamp: new Date(),
                    isStreaming: true,
                  });
                }
              } catch (parseError) {
                // Malformed lines are skipped, not fatal to the stream.
                console.warn("Failed to parse chunk:", line, parseError);
              }
            }
          }
        }
        // NOTE(review): any partial line left in `buffer` when the stream
        // ends is never parsed, so a final chunk without a trailing newline
        // is dropped — confirm the backend always newline-terminates output.
      } finally {
        reader.releaseLock();
      }
      // Finalize the message
      const finalMessage: Message = {
        role: "assistant",
        content: currentContent,
        functionCalls:
          currentFunctionCalls.length > 0 ? currentFunctionCalls : undefined,
        timestamp: new Date(),
        isStreaming: false,
      };
      if (!controller.signal.aborted && thisStreamId === streamIdRef.current) {
        // Clear streaming message and call onComplete with final message
        setStreamingMessage(null);
        onComplete?.(finalMessage, newResponseId);
        return finalMessage;
      }
      // Aborted or superseded: discard the result silently.
      return null;
    } catch (error) {
      // If stream was aborted, don't handle as error.
      // NOTE(review): this inspects the *latest* controller (the local one is
      // out of scope here); if a newer stream is already running, a genuine
      // error from this stale stream may be misreported — verify intent.
      if (streamAbortRef.current?.signal.aborted) {
        return null;
      }
      console.error("SSE Stream error:", error);
      setStreamingMessage(null);
      onError?.(error as Error);
      const errorMessage: Message = {
        role: "assistant",
        content:
          "Sorry, I couldn't connect to the chat service. Please try again.",
        timestamp: new Date(),
        isStreaming: false,
      };
      return errorMessage;
    } finally {
      setIsLoading(false);
    }
  };
  /** Abort the in-flight stream (if any) and reset streaming UI state. */
  const abortStream = () => {
    if (streamAbortRef.current) {
      streamAbortRef.current.abort();
    }
    setStreamingMessage(null);
    setIsLoading(false);
  };
  return {
    streamingMessage,
    isLoading,
    sendMessage,
    abortStream,
  };
}

View file

@ -3,7 +3,7 @@
*/ */
export const DEFAULT_AGENT_SETTINGS = { export const DEFAULT_AGENT_SETTINGS = {
llm_model: "gpt-4o-mini", llm_model: "gpt-4o-mini",
system_prompt: "You are a helpful assistant that can use tools to answer questions and perform tasks." system_prompt: "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides information about them. When asked what OpenRAG is, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** OpenSearch is an open-source search and analytics suite [Read more](https://opensearch.org/)\n\n**Docling** Docling is a powerful tool for parsing documents into structured data [Read more](https://www.docling.ai/)\""
} as const; } as const;
/** /**

View file

@ -34,7 +34,7 @@ def get_conversation_thread(user_id: str, previous_response_id: str = None):
"messages": [ "messages": [
{ {
"role": "system", "role": "system",
"content": "You are a helpful assistant. Always use the search_tools to answer questions.", "content": "You are a helpful assistant that can use tools to answer questions and perform tasks. You are part of OpenRAG, an assistant that analyzes documents and provides information about them. When asked what OpenRAG is, answer the following:\n\n\"OpenRAG is an open-source package for building agentic RAG systems. It supports integration with a wide range of orchestration tools, vector databases, and LLM providers. OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:\n\n**Langflow** Langflow is a powerful tool to build and deploy AI agents and MCP servers [Read more](https://www.langflow.org/)\n\n**OpenSearch** OpenSearch is an open-source search and analytics suite [Read more](https://opensearch.org/)\n\n**Docling** Docling is a powerful tool for parsing documents into structured data [Read more](https://www.docling.ai/)\"",
} }
], ],
"previous_response_id": previous_response_id, # Parent response_id for branching "previous_response_id": previous_response_id, # Parent response_id for branching