Merge branch 'main' into feature/PRTL-3997

commit e1982256df

27 changed files with 1197 additions and 438 deletions

@@ -10,9 +10,13 @@
"cssVariables": true,
"prefix": ""
},
"iconLibrary": "lucide",
"aliases": {
"components": "components",
"utils": "lib/utils",
"ui": "components/ui"
},
"registries": {
"@magicui": "https://magicui.design/r/{name}.json"
}
}
}
58 frontend/components/delete-session-modal.tsx Normal file

@@ -0,0 +1,58 @@
"use client";

import { AlertTriangle } from "lucide-react";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";

interface DeleteSessionModalProps {
isOpen: boolean;
onClose: () => void;
onConfirm: () => void;
sessionTitle: string;
isDeleting?: boolean;
}

export function DeleteSessionModal({
isOpen,
onClose,
onConfirm,
sessionTitle,
isDeleting = false,
}: DeleteSessionModalProps) {
return (
<Dialog open={isOpen} onOpenChange={onClose}>
<DialogContent>
<DialogHeader>
<DialogTitle className="flex items-center gap-2">
<AlertTriangle className="h-5 w-5 text-destructive" />
Delete Conversation
</DialogTitle>
<DialogDescription>
Are you sure you want to delete "{sessionTitle}"? This
action cannot be undone and will permanently remove the conversation
and all its messages.
</DialogDescription>
</DialogHeader>
<DialogFooter>
<Button variant="outline" onClick={onClose} disabled={isDeleting}>
Cancel
</Button>
<Button
variant="destructive"
onClick={onConfirm}
disabled={isDeleting}
>
{isDeleting ? "Deleting..." : "Delete"}
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}
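
For orientation, a minimal sketch of how this modal is wired from a parent. It mirrors the usage added to navigation.tsx further down in this diff; the surrounding component and handler names are placeholders, not part of the commit.

// Illustrative wiring only; see navigation.tsx below for the real integration.
import { useState } from "react";
import { DeleteSessionModal } from "@/components/delete-session-modal";

function ConversationRowActions({
  title,
  onConfirmDelete,
  isDeleting,
}: {
  title: string;
  onConfirmDelete: () => void;
  isDeleting: boolean;
}) {
  const [open, setOpen] = useState(false);

  return (
    <>
      <button type="button" onClick={() => setOpen(true)}>
        Delete
      </button>
      <DeleteSessionModal
        isOpen={open}
        onClose={() => setOpen(false)}
        onConfirm={onConfirmDelete}
        sessionTitle={title}
        isDeleting={isDeleting}
      />
    </>
  );
}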

@@ -11,7 +11,7 @@ export default function IBMLogo(props: React.SVGProps<SVGSVGElement>) {
<title>IBM Logo</title>
<path
d="M15.696 10.9901C15.7213 10.9901 15.7356 10.979 15.7356 10.9552V10.9313C15.7356 10.9076 15.7213 10.8964 15.696 10.8964H15.6359V10.9901H15.696ZM15.6359 11.1649H15.5552V10.8329H15.7055C15.7799 10.8329 15.8179 10.8773 15.8179 10.9378C15.8179 10.9901 15.7942 11.0235 15.7577 11.0378L15.8321 11.1649H15.7436L15.6818 11.0504H15.6359V11.1649ZM15.9255 11.0171V10.9759C15.9255 10.8424 15.821 10.7376 15.6833 10.7376C15.5456 10.7376 15.4412 10.8424 15.4412 10.9759V11.0171C15.4412 11.1505 15.5456 11.2554 15.6833 11.2554C15.821 11.2554 15.9255 11.1505 15.9255 11.0171ZM15.3668 10.9964C15.3668 10.8107 15.5077 10.6693 15.6833 10.6693C15.859 10.6693 16 10.8107 16 10.9964C16 11.1823 15.859 11.3237 15.6833 11.3237C15.5077 11.3237 15.3668 11.1823 15.3668 10.9964ZM10.8069 5.74885L10.6627 5.33301H8.28904V5.74885H10.8069ZM11.0821 6.54285L10.9379 6.12691H8.28904V6.54285H11.0821ZM12.8481 11.3067H14.9203V10.8908H12.8481V11.3067ZM12.8481 10.5126H14.9203V10.0968H12.8481V10.5126ZM12.8481 9.71873H14.0914V9.3028H12.8481V9.71873ZM12.8481 8.92474H14.0914V8.50889H12.8481V8.92474ZM12.8481 8.13084H14.0914V7.7149H11.7212L11.6047 8.05102L11.4882 7.7149H9.11794V8.13084H10.3613V7.74863L10.4951 8.13084H12.7143L12.8481 7.74863V8.13084ZM14.0914 6.921H11.9964L11.8522 7.33675H14.0914V6.921ZM9.11794 8.92474H10.3613V8.50889H9.11794V8.92474ZM9.11794 9.71873H10.3613V9.3028H9.11794V9.71873ZM8.28904 10.5126H10.3613V10.0968H8.28904V10.5126ZM8.28904 11.3067H10.3613V10.8908H8.28904V11.3067ZM12.5466 5.33301L12.4025 5.74885H14.9203V5.33301H12.5466ZM12.1273 6.54285H14.9203V6.12691H12.2714L12.1273 6.54285ZM9.11794 7.33675H11.3572L11.213 6.921H9.11794V7.33675ZM10.7727 8.92474H12.4366L12.5821 8.50889H10.6272L10.7727 8.92474ZM11.0505 9.71873H12.1588L12.3042 9.3028H10.9051L11.0505 9.71873ZM11.3283 10.5126H11.881L12.0265 10.0969H11.1828L11.3283 10.5126ZM11.604 11.3067L11.7487 10.8908H11.4606L11.604 11.3067ZM3.31561 11.3026L6.36754 11.3067C6.78195 11.3067 7.15365 11.1491 7.43506 10.8908H3.31561V11.3026ZM6.55592 9.3028V9.71873H7.94994C7.94994 9.57477 7.93029 9.43551 7.89456 9.3028H6.55592ZM4.14452 9.71873H5.38783V9.3028H4.14452V9.71873ZM6.55592 7.33675H7.89456C7.93029 7.20422 7.94994 7.06486 7.94994 6.921H6.55592V7.33675ZM4.14452 7.33675H5.38783V6.9209H4.14452V7.33675ZM6.36754 5.33301H3.31561V5.74885H7.43506C7.15365 5.49061 6.77892 5.33301 6.36754 5.33301ZM7.73778 6.12691H3.31561V6.54285H7.90448C7.86839 6.39502 7.81172 6.25539 7.73778 6.12691ZM4.14452 7.7149V8.13084H7.39152C7.5292 8.01333 7.64621 7.87268 7.73732 7.7149H4.14452ZM7.39152 8.50889H4.14452V8.92474H7.73732C7.64621 8.76695 7.5292 8.62631 7.39152 8.50889ZM3.31561 10.5126H7.73778C7.81172 10.3843 7.86839 10.2447 7.90448 10.0969H3.31561V10.5126ZM0 5.74885H2.90121V5.33301H0V5.74885ZM0 6.54285H2.90121V6.12691H0V6.54285ZM0.828996 7.33684H2.0723V6.921H0.828996V7.33684ZM0.828996 8.13084H2.0723V7.7149H0.828996V8.13084ZM0.828996 8.92474H2.0723V8.50889H0.828996V8.92474ZM0.828996 9.71873H2.0723V9.3028H0.828996V9.71873ZM0 10.5126H2.90121V10.0968H0V10.5126ZM0 11.3067H2.90121V10.8908H0V11.3067Z"
fill="#A1A1AA"
fill="currentColor"
/>
</svg>
);

@@ -23,7 +23,7 @@ export default function OpenAILogo(props: React.SVGProps<SVGSVGElement>) {
<g mask="url(#mask0_2162_638)">
<path
d="M6.52523 5.82397V4.30397C6.52523 4.17595 6.57329 4.07991 6.68523 4.01599L9.74132 2.25599C10.1573 2.016 10.6533 1.90406 11.1653 1.90406C13.0852 1.90406 14.3013 3.39209 14.3013 4.97602C14.3013 5.088 14.3013 5.21601 14.2853 5.34403L11.1172 3.48799C10.9253 3.37605 10.7332 3.37605 10.5412 3.48799L6.52523 5.82397ZM13.6612 11.744V8.11194C13.6612 7.88789 13.5652 7.7279 13.3732 7.61592L9.35724 5.27993L10.6692 4.52788C10.7812 4.46396 10.8772 4.46396 10.9892 4.52788L14.0453 6.28788C14.9254 6.79995 15.5173 7.88789 15.5173 8.94382C15.5173 10.1598 14.7973 11.2798 13.6612 11.7439V11.744ZM5.58124 8.54404L4.26924 7.77608C4.1573 7.71216 4.10925 7.61609 4.10925 7.48807V3.9681C4.10925 2.25616 5.42125 0.960064 7.19729 0.960064C7.86938 0.960064 8.49325 1.18412 9.02138 1.5841L5.86938 3.40816C5.67744 3.5201 5.58141 3.6801 5.58141 3.90418V8.54417L5.58124 8.54404ZM8.40528 10.176L6.52523 9.12002V6.88011L8.40528 5.82414L10.2852 6.88011V9.12002L8.40528 10.176ZM9.61327 15.0401C8.94122 15.0401 8.31735 14.816 7.78921 14.4161L10.9412 12.592C11.1331 12.48 11.2292 12.32 11.2292 12.096V7.45596L12.5573 8.22392C12.6692 8.28784 12.7172 8.38388 12.7172 8.51193V12.0319C12.7172 13.7438 11.3892 15.0399 9.61327 15.0399V15.0401ZM5.82123 11.4721L2.76514 9.71212C1.88507 9.20002 1.29315 8.11211 1.29315 7.05614C1.29315 5.82414 2.02916 4.72016 3.16509 4.25611V7.9041C3.16509 8.12815 3.26116 8.28814 3.4531 8.40012L7.45319 10.72L6.14119 11.4721C6.02925 11.536 5.93318 11.536 5.82123 11.4721ZM5.64533 14.0961C3.83731 14.0961 2.50928 12.7361 2.50928 11.0561C2.50928 10.928 2.52532 10.8 2.54122 10.672L5.69322 12.4961C5.88516 12.608 6.07726 12.608 6.2692 12.4961L10.2852 10.1762V11.6962C10.2852 11.8242 10.2372 11.9202 10.1252 11.9841L7.06914 13.7441C6.65312 13.9841 6.15709 14.0961 5.64517 14.0961H5.64533ZM9.61327 16C11.5493 16 13.1652 14.624 13.5334 12.8C15.3253 12.3359 16.4773 10.6559 16.4773 8.94399C16.4773 7.82393 15.9974 6.73602 15.1334 5.95199C15.2134 5.61596 15.2614 5.27994 15.2614 4.94407C15.2614 2.65611 13.4053 0.943991 11.2613 0.943991C10.8294 0.943991 10.4134 1.00792 9.99735 1.152C9.27724 0.44797 8.28523 0 7.19729 0C5.26129 0 3.64537 1.37592 3.27724 3.19998C1.48526 3.66402 0.333252 5.34403 0.333252 7.05598C0.333252 8.17603 0.8132 9.26395 1.67723 10.048C1.59723 10.384 1.54921 10.72 1.54921 11.0559C1.54921 13.3439 3.40525 15.056 5.54926 15.056C5.98119 15.056 6.39722 14.9921 6.81324 14.848C7.53318 15.552 8.52519 16 9.61327 16Z"
fill="white"
fill="currentColor"
/>
</g>
</svg>

@@ -1,8 +1,12 @@
"use client"
"use client";

import { Navigation } from "@/components/navigation";
import { ModeToggle } from "@/components/mode-toggle";
import { usePathname } from "next/navigation";
import { useGetConversationsQuery } from "@/app/api/queries/useGetConversationsQuery";
import { KnowledgeFilterDropdown } from "@/components/knowledge-filter-dropdown";
import { ModeToggle } from "@/components/mode-toggle";
import { Navigation } from "@/components/navigation";
import { useAuth } from "@/contexts/auth-context";
import { useChat } from "@/contexts/chat-context";
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";

interface NavigationLayoutProps {

@@ -11,7 +15,35 @@ interface NavigationLayoutProps {

export function NavigationLayout({ children }: NavigationLayoutProps) {
const { selectedFilter, setSelectedFilter } = useKnowledgeFilter();

const pathname = usePathname();
const { isAuthenticated, isNoAuthMode } = useAuth();
const {
endpoint,
refreshTrigger,
refreshConversations,
startNewConversation,
} = useChat();

// Only fetch conversations on chat page
const isOnChatPage = pathname === "/" || pathname === "/chat";
const { data: conversations = [], isLoading: isConversationsLoading } =
useGetConversationsQuery(endpoint, refreshTrigger, {
enabled: isOnChatPage && (isAuthenticated || isNoAuthMode),
});

const handleNewConversation = () => {
refreshConversations();
startNewConversation();
};

return (
<div className="h-full relative">
<div className="hidden h-full md:flex md:w-72 md:flex-col md:fixed md:inset-y-0 z-[80] border-r border-border/40">
<Navigation />
<Navigation
conversations={conversations}
isConversationsLoading={isConversationsLoading}
onNewConversation={handleNewConversation}
/>
</div>
<main className="md:pl-72">
<div className="flex flex-col min-h-screen">

@@ -31,7 +59,7 @@ export function NavigationLayout({ children }: NavigationLayoutProps) {
{/* Search component could go here */}
</div>
<nav className="flex items-center space-x-2">
<KnowledgeFilterDropdown
<KnowledgeFilterDropdown
selectedFilter={selectedFilter}
onFilterSelect={setSelectedFilter}
/>

@@ -41,12 +69,10 @@ export function NavigationLayout({ children }: NavigationLayoutProps) {
</div>
</header>
<div className="flex-1">
<div className="container py-6 lg:py-8">
{children}
</div>
<div className="container py-6 lg:py-8">{children}</div>
</div>
</div>
</main>
</div>
);
}
}
@ -1,24 +1,35 @@
|
|||
"use client";
|
||||
|
||||
import { useChat } from "@/contexts/chat-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import {
|
||||
EllipsisVertical,
|
||||
FileText,
|
||||
Library,
|
||||
MessageSquare,
|
||||
MoreHorizontal,
|
||||
Plus,
|
||||
Settings2,
|
||||
Trash2,
|
||||
} from "lucide-react";
|
||||
import Link from "next/link";
|
||||
import { usePathname } from "next/navigation";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
|
||||
import { EndpointType } from "@/contexts/chat-context";
|
||||
import { useLoadingStore } from "@/stores/loadingStore";
|
||||
import { KnowledgeFilterList } from "./knowledge-filter-list";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { useDeleteSessionMutation } from "@/app/api/queries/useDeleteSessionMutation";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import { type EndpointType, useChat } from "@/contexts/chat-context";
|
||||
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useLoadingStore } from "@/stores/loadingStore";
|
||||
import { DeleteSessionModal } from "./delete-session-modal";
|
||||
import { KnowledgeFilterList } from "./knowledge-filter-list";
|
||||
|
||||
interface RawConversation {
|
||||
// Re-export the types for backward compatibility
|
||||
export interface RawConversation {
|
||||
response_id: string;
|
||||
title: string;
|
||||
endpoint: string;
|
||||
|
|
@ -35,7 +46,7 @@ interface RawConversation {
|
|||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface ChatConversation {
|
||||
export interface ChatConversation {
|
||||
response_id: string;
|
||||
title: string;
|
||||
endpoint: EndpointType;
|
||||
|
|
@ -52,11 +63,20 @@ interface ChatConversation {
|
|||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export function Navigation() {
|
||||
interface NavigationProps {
|
||||
conversations?: ChatConversation[];
|
||||
isConversationsLoading?: boolean;
|
||||
onNewConversation?: () => void;
|
||||
}
|
||||
|
||||
export function Navigation({
|
||||
conversations = [],
|
||||
isConversationsLoading = false,
|
||||
onNewConversation,
|
||||
}: NavigationProps = {}) {
|
||||
const pathname = usePathname();
|
||||
const {
|
||||
endpoint,
|
||||
refreshTrigger,
|
||||
loadConversation,
|
||||
currentConversationId,
|
||||
setCurrentConversationId,
|
||||
|
|
@ -70,18 +90,64 @@ export function Navigation() {
|
|||
|
||||
const { loading } = useLoadingStore();
|
||||
|
||||
const [conversations, setConversations] = useState<ChatConversation[]>([]);
|
||||
const [loadingConversations, setLoadingConversations] = useState(false);
|
||||
const [loadingNewConversation, setLoadingNewConversation] = useState(false);
|
||||
const [previousConversationCount, setPreviousConversationCount] = useState(0);
|
||||
const [deleteModalOpen, setDeleteModalOpen] = useState(false);
|
||||
const [conversationToDelete, setConversationToDelete] =
|
||||
useState<ChatConversation | null>(null);
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
const { selectedFilter, setSelectedFilter } = useKnowledgeFilter();
|
||||
|
||||
// Delete session mutation
|
||||
const deleteSessionMutation = useDeleteSessionMutation({
|
||||
onSuccess: () => {
|
||||
toast.success("Conversation deleted successfully");
|
||||
|
||||
// If we deleted the current conversation, select another one
|
||||
if (
|
||||
conversationToDelete &&
|
||||
currentConversationId === conversationToDelete.response_id
|
||||
) {
|
||||
// Filter out the deleted conversation and find the next one
|
||||
const remainingConversations = conversations.filter(
|
||||
(conv) => conv.response_id !== conversationToDelete.response_id,
|
||||
);
|
||||
|
||||
if (remainingConversations.length > 0) {
|
||||
// Load the first available conversation (most recent)
|
||||
loadConversation(remainingConversations[0]);
|
||||
} else {
|
||||
// No conversations left, start a new one
|
||||
setCurrentConversationId(null);
|
||||
if (onNewConversation) {
|
||||
onNewConversation();
|
||||
} else {
|
||||
refreshConversations();
|
||||
startNewConversation();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setDeleteModalOpen(false);
|
||||
setConversationToDelete(null);
|
||||
},
|
||||
onError: (error) => {
|
||||
toast.error(`Failed to delete conversation: ${error.message}`);
|
||||
},
|
||||
});
|
||||
|
||||
const handleNewConversation = () => {
|
||||
setLoadingNewConversation(true);
|
||||
refreshConversations();
|
||||
startNewConversation();
|
||||
|
||||
// Use the prop callback if provided, otherwise use the context method
|
||||
if (onNewConversation) {
|
||||
onNewConversation();
|
||||
} else {
|
||||
refreshConversations();
|
||||
startNewConversation();
|
||||
}
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
window.dispatchEvent(new CustomEvent("newConversation"));
|
||||
}
|
||||
|
|
@ -98,7 +164,7 @@ export function Navigation() {
|
|||
window.dispatchEvent(
|
||||
new CustomEvent("fileUploadStart", {
|
||||
detail: { filename: file.name },
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
try {
|
||||
|
|
@ -122,7 +188,7 @@ export function Navigation() {
|
|||
filename: file.name,
|
||||
error: "Failed to process document",
|
||||
},
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
// Trigger loading end event
|
||||
|
|
@ -142,7 +208,7 @@ export function Navigation() {
|
|||
window.dispatchEvent(
|
||||
new CustomEvent("fileUploaded", {
|
||||
detail: { file, result },
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
// Trigger loading end event
|
||||
|
|
@ -156,7 +222,7 @@ export function Navigation() {
|
|||
window.dispatchEvent(
|
||||
new CustomEvent("fileUploadError", {
|
||||
detail: { filename: file.name, error: "Failed to process document" },
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
|
@ -176,6 +242,41 @@ export function Navigation() {
|
|||
}
|
||||
};
|
||||
|
||||
const handleDeleteConversation = (
|
||||
conversation: ChatConversation,
|
||||
event?: React.MouseEvent,
|
||||
) => {
|
||||
if (event) {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
}
|
||||
setConversationToDelete(conversation);
|
||||
setDeleteModalOpen(true);
|
||||
};
|
||||
|
||||
const handleContextMenuAction = (
|
||||
action: string,
|
||||
conversation: ChatConversation,
|
||||
) => {
|
||||
switch (action) {
|
||||
case "delete":
|
||||
handleDeleteConversation(conversation);
|
||||
break;
|
||||
// Add more actions here in the future (rename, duplicate, etc.)
|
||||
default:
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
const confirmDeleteConversation = () => {
|
||||
if (conversationToDelete) {
|
||||
deleteSessionMutation.mutate({
|
||||
sessionId: conversationToDelete.response_id,
|
||||
endpoint: endpoint,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const routes = [
|
||||
{
|
||||
label: "Chat",
|
||||
|
|
@ -200,91 +301,6 @@ export function Navigation() {
|
|||
const isOnChatPage = pathname === "/" || pathname === "/chat";
|
||||
const isOnKnowledgePage = pathname.startsWith("/knowledge");
|
||||
|
||||
const createDefaultPlaceholder = useCallback(() => {
|
||||
return {
|
||||
response_id: "new-conversation-" + Date.now(),
|
||||
title: "New conversation",
|
||||
endpoint: endpoint,
|
||||
messages: [
|
||||
{
|
||||
role: "assistant",
|
||||
content: "How can I assist?",
|
||||
timestamp: new Date().toISOString(),
|
||||
},
|
||||
],
|
||||
created_at: new Date().toISOString(),
|
||||
last_activity: new Date().toISOString(),
|
||||
total_messages: 1,
|
||||
} as ChatConversation;
|
||||
}, [endpoint]);
|
||||
|
||||
const fetchConversations = useCallback(async () => {
|
||||
setLoadingConversations(true);
|
||||
try {
|
||||
// Fetch from the selected endpoint only
|
||||
const apiEndpoint =
|
||||
endpoint === "chat" ? "/api/chat/history" : "/api/langflow/history";
|
||||
|
||||
const response = await fetch(apiEndpoint);
|
||||
if (response.ok) {
|
||||
const history = await response.json();
|
||||
const rawConversations = history.conversations || [];
|
||||
|
||||
// Cast conversations to proper type and ensure endpoint is correct
|
||||
const conversations: ChatConversation[] = rawConversations.map(
|
||||
(conv: RawConversation) => ({
|
||||
...conv,
|
||||
endpoint: conv.endpoint as EndpointType,
|
||||
})
|
||||
);
|
||||
|
||||
// Sort conversations by last activity (most recent first)
|
||||
conversations.sort((a: ChatConversation, b: ChatConversation) => {
|
||||
const aTime = new Date(
|
||||
a.last_activity || a.created_at || 0
|
||||
).getTime();
|
||||
const bTime = new Date(
|
||||
b.last_activity || b.created_at || 0
|
||||
).getTime();
|
||||
return bTime - aTime;
|
||||
});
|
||||
|
||||
setConversations(conversations);
|
||||
|
||||
// If no conversations exist and no placeholder is shown, create a default placeholder
|
||||
if (conversations.length === 0 && !placeholderConversation) {
|
||||
setPlaceholderConversation(createDefaultPlaceholder());
|
||||
}
|
||||
} else {
|
||||
setConversations([]);
|
||||
|
||||
// Also create placeholder when request fails and no conversations exist
|
||||
if (!placeholderConversation) {
|
||||
setPlaceholderConversation(createDefaultPlaceholder());
|
||||
}
|
||||
}
|
||||
|
||||
// Conversation documents are now managed in chat context
|
||||
} catch (error) {
|
||||
console.error(`Failed to fetch ${endpoint} conversations:`, error);
|
||||
setConversations([]);
|
||||
} finally {
|
||||
setLoadingConversations(false);
|
||||
}
|
||||
}, [
|
||||
endpoint,
|
||||
placeholderConversation,
|
||||
setPlaceholderConversation,
|
||||
createDefaultPlaceholder,
|
||||
]);
|
||||
|
||||
// Fetch chat conversations when on chat page, endpoint changes, or refresh is triggered
|
||||
useEffect(() => {
|
||||
if (isOnChatPage) {
|
||||
fetchConversations();
|
||||
}
|
||||
}, [isOnChatPage, endpoint, refreshTrigger, fetchConversations]);
|
||||
|
||||
// Clear placeholder when conversation count increases (new conversation was created)
|
||||
useEffect(() => {
|
||||
const currentCount = conversations.length;
|
||||
|
|
@ -326,7 +342,7 @@ export function Navigation() {
|
|||
"text-sm group flex p-3 w-full justify-start font-medium cursor-pointer hover:bg-accent hover:text-accent-foreground rounded-lg transition-all",
|
||||
route.active
|
||||
? "bg-accent text-accent-foreground shadow-sm"
|
||||
: "text-foreground hover:text-accent-foreground"
|
||||
: "text-foreground hover:text-accent-foreground",
|
||||
)}
|
||||
>
|
||||
<div className="flex items-center flex-1">
|
||||
|
|
@ -335,7 +351,7 @@ export function Navigation() {
|
|||
"h-4 w-4 mr-3 shrink-0",
|
||||
route.active
|
||||
? "text-accent-foreground"
|
||||
: "text-muted-foreground group-hover:text-foreground"
|
||||
: "text-muted-foreground group-hover:text-foreground",
|
||||
)}
|
||||
/>
|
||||
{route.label}
|
||||
|
|
@ -366,6 +382,7 @@ export function Navigation() {
|
|||
Conversations
|
||||
</h3>
|
||||
<button
|
||||
type="button"
|
||||
className="p-1 hover:bg-accent rounded"
|
||||
onClick={handleNewConversation}
|
||||
title="Start new conversation"
|
||||
|
|
@ -379,7 +396,7 @@ export function Navigation() {
|
|||
<div className="px-3 flex-1 min-h-0 flex flex-col">
|
||||
{/* Conversations List - grows naturally, doesn't fill all space */}
|
||||
<div className="flex-shrink-0 overflow-y-auto scrollbar-hide space-y-1 max-h-full">
|
||||
{loadingNewConversation ? (
|
||||
{loadingNewConversation || isConversationsLoading ? (
|
||||
<div className="text-sm text-muted-foreground p-2">
|
||||
Loading...
|
||||
</div>
|
||||
|
|
@ -387,8 +404,9 @@ export function Navigation() {
|
|||
<>
|
||||
{/* Show placeholder conversation if it exists */}
|
||||
{placeholderConversation && (
|
||||
<div
|
||||
className="p-2 rounded-lg bg-accent/50 border border-dashed border-accent cursor-pointer group"
|
||||
<button
|
||||
type="button"
|
||||
className="w-full p-2 rounded-lg bg-accent/50 border border-dashed border-accent cursor-pointer group text-left"
|
||||
onClick={() => {
|
||||
// Don't load placeholder as a real conversation, just focus the input
|
||||
if (typeof window !== "undefined") {
|
||||
|
|
@ -402,7 +420,7 @@ export function Navigation() {
|
|||
<div className="text-xs text-muted-foreground">
|
||||
Start typing to begin...
|
||||
</div>
|
||||
</div>
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* Show regular conversations */}
|
||||
|
|
@ -412,9 +430,10 @@ export function Navigation() {
|
|||
</div>
|
||||
) : (
|
||||
conversations.map((conversation) => (
|
||||
<div
|
||||
<button
|
||||
key={conversation.response_id}
|
||||
className={`p-2 rounded-lg group ${
|
||||
type="button"
|
||||
className={`w-full px-3 pr-2 h-11 rounded-lg group relative text-left ${
|
||||
loading
|
||||
? "opacity-50 cursor-not-allowed"
|
||||
: "hover:bg-accent cursor-pointer"
|
||||
|
|
@ -428,21 +447,53 @@ export function Navigation() {
|
|||
loadConversation(conversation);
|
||||
refreshConversations();
|
||||
}}
|
||||
disabled={loading}
|
||||
>
|
||||
<div className="text-sm font-medium text-foreground mb-1 truncate">
|
||||
{conversation.title}
|
||||
</div>
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{conversation.total_messages} messages
|
||||
</div>
|
||||
{conversation.last_activity && (
|
||||
<div className="text-xs text-muted-foreground">
|
||||
{new Date(
|
||||
conversation.last_activity
|
||||
).toLocaleDateString()}
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="text-sm font-medium text-foreground truncate">
|
||||
{conversation.title}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
type="button"
|
||||
className="opacity-0 group-hover:opacity-100 data-[state=open]:opacity-100 data-[state=open]:text-foreground transition-opacity p-1 hover:bg-accent rounded text-muted-foreground hover:text-foreground ml-2 flex-shrink-0"
|
||||
title="More options"
|
||||
disabled={
|
||||
loading || deleteSessionMutation.isPending
|
||||
}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
>
|
||||
<EllipsisVertical className="h-4 w-4" />
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
side="bottom"
|
||||
align="end"
|
||||
className="w-48"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleContextMenuAction(
|
||||
"delete",
|
||||
conversation,
|
||||
);
|
||||
}}
|
||||
className="cursor-pointer text-destructive focus:text-destructive"
|
||||
>
|
||||
<Trash2 className="mr-2 h-4 w-4" />
|
||||
Delete conversation
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
</button>
|
||||
))
|
||||
)}
|
||||
</>
|
||||
|
|
@ -456,6 +507,7 @@ export function Navigation() {
|
|||
Conversation knowledge
|
||||
</h3>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleFilePickerClick}
|
||||
className="p-1 hover:bg-accent rounded"
|
||||
disabled={loading}
|
||||
|
|
@ -476,9 +528,9 @@ export function Navigation() {
|
|||
No documents yet
|
||||
</div>
|
||||
) : (
|
||||
conversationDocs.map((doc, index) => (
|
||||
conversationDocs.map((doc) => (
|
||||
<div
|
||||
key={index}
|
||||
key={`${doc.filename}-${doc.uploadTime.getTime()}`}
|
||||
className="p-2 rounded-lg hover:bg-accent cursor-pointer group flex items-center"
|
||||
>
|
||||
<FileText className="h-4 w-4 mr-2 text-muted-foreground flex-shrink-0" />
|
||||
|
|
@ -495,6 +547,18 @@ export function Navigation() {
|
|||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Delete Session Modal */}
|
||||
<DeleteSessionModal
|
||||
isOpen={deleteModalOpen}
|
||||
onClose={() => {
|
||||
setDeleteModalOpen(false);
|
||||
setConversationToDelete(null);
|
||||
}}
|
||||
onConfirm={confirmDeleteConversation}
|
||||
sessionTitle={conversationToDelete?.title || ""}
|
||||
isDeleting={deleteSessionMutation.isPending}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
158 frontend/components/ui/dot-pattern.tsx Normal file
|
|
@ -0,0 +1,158 @@
|
|||
"use client";
|
||||
|
||||
import { motion } from "motion/react";
|
||||
import type React from "react";
|
||||
import { useEffect, useId, useRef, useState } from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
/**
|
||||
* DotPattern Component Props
|
||||
*
|
||||
* @param {number} [width=16] - The horizontal spacing between dots
|
||||
* @param {number} [height=16] - The vertical spacing between dots
|
||||
* @param {number} [x=0] - The x-offset of the entire pattern
|
||||
* @param {number} [y=0] - The y-offset of the entire pattern
|
||||
* @param {number} [cx=1] - The x-offset of individual dots
|
||||
* @param {number} [cy=1] - The y-offset of individual dots
|
||||
* @param {number} [cr=1] - The radius of each dot
|
||||
* @param {string} [className] - Additional CSS classes to apply to the SVG container
|
||||
* @param {boolean} [glow=false] - Whether dots should have a glowing animation effect
|
||||
*/
|
||||
interface DotPatternProps extends React.SVGProps<SVGSVGElement> {
|
||||
width?: number;
|
||||
height?: number;
|
||||
x?: number;
|
||||
y?: number;
|
||||
cx?: number;
|
||||
cy?: number;
|
||||
cr?: number;
|
||||
className?: string;
|
||||
glow?: boolean;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* DotPattern Component
|
||||
*
|
||||
* A React component that creates an animated or static dot pattern background using SVG.
|
||||
* The pattern automatically adjusts to fill its container and can optionally display glowing dots.
|
||||
*
|
||||
* @component
|
||||
*
|
||||
* @see DotPatternProps for the props interface.
|
||||
*
|
||||
* @example
|
||||
* // Basic usage
|
||||
* <DotPattern />
|
||||
*
|
||||
* // With glowing effect and custom spacing
|
||||
* <DotPattern
|
||||
* width={20}
|
||||
* height={20}
|
||||
* glow={true}
|
||||
* className="opacity-50"
|
||||
* />
|
||||
*
|
||||
* @notes
|
||||
* - The component is client-side only ("use client")
|
||||
* - Automatically responds to container size changes
|
||||
* - When glow is enabled, dots will animate with random delays and durations
|
||||
* - Uses Motion for animations
|
||||
* - Dots color can be controlled via the text color utility classes
|
||||
*/
|
||||
|
||||
export function DotPattern({
|
||||
width = 16,
|
||||
height = 16,
|
||||
x = 0,
|
||||
y = 0,
|
||||
cx = 1,
|
||||
cy = 1,
|
||||
cr = 1,
|
||||
className,
|
||||
glow = false,
|
||||
...props
|
||||
}: DotPatternProps) {
|
||||
const id = useId();
|
||||
const containerRef = useRef<SVGSVGElement>(null);
|
||||
const [dimensions, setDimensions] = useState({ width: 0, height: 0 });
|
||||
|
||||
useEffect(() => {
|
||||
const updateDimensions = () => {
|
||||
if (containerRef.current) {
|
||||
const { width, height } = containerRef.current.getBoundingClientRect();
|
||||
setDimensions({ width, height });
|
||||
}
|
||||
};
|
||||
|
||||
updateDimensions();
|
||||
window.addEventListener("resize", updateDimensions);
|
||||
return () => window.removeEventListener("resize", updateDimensions);
|
||||
}, []);
|
||||
|
||||
const dots = Array.from(
|
||||
{
|
||||
length:
|
||||
Math.ceil(dimensions.width / width) *
|
||||
Math.ceil(dimensions.height / height),
|
||||
},
|
||||
(_, i) => {
|
||||
const col = i % Math.ceil(dimensions.width / width);
|
||||
const row = Math.floor(i / Math.ceil(dimensions.width / width));
|
||||
return {
|
||||
x: col * width + cx,
|
||||
y: row * height + cy,
|
||||
delay: Math.random() * 5,
|
||||
duration: Math.random() * 3 + 2,
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
return (
|
||||
<svg
|
||||
ref={containerRef}
|
||||
aria-hidden="true"
|
||||
className={cn(
|
||||
"pointer-events-none absolute inset-0 h-full w-full text-neutral-400/80",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<defs>
|
||||
<radialGradient id={`${id}-gradient`}>
|
||||
<stop offset="0%" stopColor="currentColor" stopOpacity="1" />
|
||||
<stop offset="100%" stopColor="currentColor" stopOpacity="0" />
|
||||
</radialGradient>
|
||||
</defs>
|
||||
{dots.map((dot, index) => (
|
||||
<motion.circle
|
||||
key={`${dot.x}-${dot.y}`}
|
||||
cx={dot.x}
|
||||
cy={dot.y}
|
||||
r={cr}
|
||||
fill={glow ? `url(#${id}-gradient)` : "currentColor"}
|
||||
initial={glow ? { opacity: 0.4, scale: 1 } : {}}
|
||||
animate={
|
||||
glow
|
||||
? {
|
||||
opacity: [0.4, 1, 0.4],
|
||||
scale: [1, 1.5, 1],
|
||||
}
|
||||
: {}
|
||||
}
|
||||
transition={
|
||||
glow
|
||||
? {
|
||||
duration: dot.duration,
|
||||
repeat: Infinity,
|
||||
repeatType: "reverse",
|
||||
delay: dot.delay,
|
||||
ease: "easeInOut",
|
||||
}
|
||||
: {}
|
||||
}
|
||||
/>
|
||||
))}
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
import { Eye, EyeOff } from "lucide-react";
|
||||
import * as React from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
|
|
@ -12,6 +13,11 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
|
|||
const [hasValue, setHasValue] = React.useState(
|
||||
Boolean(props.value || props.defaultValue),
|
||||
);
|
||||
const [showPassword, setShowPassword] = React.useState(false);
|
||||
|
||||
const handleTogglePassword = () => {
|
||||
setShowPassword(!showPassword);
|
||||
};
|
||||
|
||||
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
setHasValue(e.target.value.length > 0);
|
||||
|
|
@ -23,8 +29,8 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
|
|||
return (
|
||||
<label
|
||||
className={cn(
|
||||
"relative block h-fit w-full text-sm",
|
||||
icon ? className : ""
|
||||
"relative block h-fit w-full text-sm group",
|
||||
icon ? className : "",
|
||||
)}
|
||||
>
|
||||
{icon && (
|
||||
|
|
@ -34,17 +40,32 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
|
|||
)}
|
||||
<input
|
||||
autoComplete="off"
|
||||
type={type}
|
||||
type={type === "password" && showPassword ? "text" : type}
|
||||
placeholder={placeholder}
|
||||
className={cn(
|
||||
"primary-input !placeholder-transparent",
|
||||
icon && "pl-9",
|
||||
icon ? inputClassName : className
|
||||
type === "password" && "!pr-8",
|
||||
icon ? inputClassName : className,
|
||||
)}
|
||||
ref={ref}
|
||||
{...props}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
{type === "password" && (
|
||||
<button
|
||||
type="button"
|
||||
className="absolute top-1/2 opacity-0 group-hover:opacity-100 hover:text-primary transition-all right-3 transform -translate-y-1/2 text-sm text-muted-foreground"
|
||||
onMouseDown={(e) => e.preventDefault()}
|
||||
onMouseUp={handleTogglePassword}
|
||||
>
|
||||
{showPassword ? (
|
||||
<Eye className="w-4" />
|
||||
) : (
|
||||
<EyeOff className="w-4" />
|
||||
)}
|
||||
</button>
|
||||
)}
|
||||
<span
|
||||
className={cn(
|
||||
"pointer-events-none absolute top-1/2 -translate-y-1/2 pl-px text-placeholder-foreground font-mono",
|
||||
|
|
@ -56,7 +77,7 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
|
|||
</span>
|
||||
</label>
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
Input.displayName = "Input";
|
||||
|
|
|
|||
Binary file not shown.
Before Width: | Height: | Size: 269 KiB

57 frontend/src/app/api/queries/useDeleteSessionMutation.ts Normal file

@@ -0,0 +1,57 @@
import {
type MutationOptions,
useMutation,
useQueryClient,
} from "@tanstack/react-query";
import type { EndpointType } from "@/contexts/chat-context";

interface DeleteSessionParams {
sessionId: string;
endpoint: EndpointType;
}

interface DeleteSessionResponse {
success: boolean;
message: string;
}

export const useDeleteSessionMutation = (
options?: Omit<
MutationOptions<DeleteSessionResponse, Error, DeleteSessionParams>,
"mutationFn"
>,
) => {
const queryClient = useQueryClient();

return useMutation<DeleteSessionResponse, Error, DeleteSessionParams>({
mutationFn: async ({ sessionId }: DeleteSessionParams) => {
const response = await fetch(`/api/sessions/${sessionId}`, {
method: "DELETE",
});

if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(
errorData.error || `Failed to delete session: ${response.status}`,
);
}

return response.json();
},
onSettled: (_data, _error, variables) => {
// Invalidate conversations query to refresh the list
// Use a slight delay to ensure the success callback completes first
setTimeout(() => {
queryClient.invalidateQueries({
queryKey: ["conversations", variables.endpoint],
});

// Also invalidate any specific conversation queries
queryClient.invalidateQueries({
queryKey: ["conversations"],
});
}, 0);
},
...options,
});
};
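
For orientation, a minimal sketch of how this hook is consumed; it mirrors the call pattern added to navigation.tsx earlier in this diff, and the wrapper function here is illustrative rather than part of the commit.

// Illustrative consumer; only the hook API above is taken from the diff.
import { toast } from "sonner";
import { useDeleteSessionMutation } from "@/app/api/queries/useDeleteSessionMutation";
import type { EndpointType } from "@/contexts/chat-context";

export function useDeleteConversation(endpoint: EndpointType) {
  const deleteSessionMutation = useDeleteSessionMutation({
    onSuccess: () => toast.success("Conversation deleted successfully"),
    onError: (error) =>
      toast.error(`Failed to delete conversation: ${error.message}`),
  });

  // sessionId is the response_id of the conversation selected in the UI.
  return (sessionId: string) =>
    deleteSessionMutation.mutate({ sessionId, endpoint });
}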
105 frontend/src/app/api/queries/useGetConversationsQuery.ts Normal file

@@ -0,0 +1,105 @@
import {
type UseQueryOptions,
useQuery,
useQueryClient,
} from "@tanstack/react-query";
import type { EndpointType } from "@/contexts/chat-context";

export interface RawConversation {
response_id: string;
title: string;
endpoint: string;
messages: Array<{
role: string;
content: string;
timestamp?: string;
response_id?: string;
}>;
created_at?: string;
last_activity?: string;
previous_response_id?: string;
total_messages: number;
[key: string]: unknown;
}

export interface ChatConversation {
response_id: string;
title: string;
endpoint: EndpointType;
messages: Array<{
role: string;
content: string;
timestamp?: string;
response_id?: string;
}>;
created_at?: string;
last_activity?: string;
previous_response_id?: string;
total_messages: number;
[key: string]: unknown;
}

export interface ConversationHistoryResponse {
conversations: RawConversation[];
[key: string]: unknown;
}

export const useGetConversationsQuery = (
endpoint: EndpointType,
refreshTrigger?: number,
options?: Omit<UseQueryOptions, "queryKey" | "queryFn">,
) => {
const queryClient = useQueryClient();

async function getConversations(): Promise<ChatConversation[]> {
try {
// Fetch from the selected endpoint only
const apiEndpoint =
endpoint === "chat" ? "/api/chat/history" : "/api/langflow/history";

const response = await fetch(apiEndpoint);

if (!response.ok) {
console.error(`Failed to fetch conversations: ${response.status}`);
return [];
}

const history: ConversationHistoryResponse = await response.json();
const rawConversations = history.conversations || [];

// Cast conversations to proper type and ensure endpoint is correct
const conversations: ChatConversation[] = rawConversations.map(
(conv: RawConversation) => ({
...conv,
endpoint: conv.endpoint as EndpointType,
}),
);

// Sort conversations by last activity (most recent first)
conversations.sort((a: ChatConversation, b: ChatConversation) => {
const aTime = new Date(a.last_activity || a.created_at || 0).getTime();
const bTime = new Date(b.last_activity || b.created_at || 0).getTime();
return bTime - aTime;
});

return conversations;
} catch (error) {
console.error(`Failed to fetch ${endpoint} conversations:`, error);
return [];
}
}

const queryResult = useQuery(
{
queryKey: ["conversations", endpoint, refreshTrigger],
placeholderData: (prev) => prev,
queryFn: getConversations,
staleTime: 0, // Always consider data stale to ensure fresh data on trigger changes
gcTime: 5 * 60 * 1000, // Keep in cache for 5 minutes
...options,
},
queryClient,
);

return queryResult;
};

@@ -90,7 +90,6 @@ export const useGetOllamaModelsQuery = (
queryKey: ["models", "ollama", params],
queryFn: getOllamaModels,
retry: 2,
enabled: !!params?.endpoint, // Only run if endpoint is provided
staleTime: 0, // Always fetch fresh data
gcTime: 0, // Don't cache results
...options,

@@ -6,7 +6,9 @@ import { Suspense, useEffect } from "react";
import GoogleLogo from "@/components/logo/google-logo";
import Logo from "@/components/logo/logo";
import { Button } from "@/components/ui/button";
import { DotPattern } from "@/components/ui/dot-pattern";
import { useAuth } from "@/contexts/auth-context";
import { cn } from "@/lib/utils";
import { useGetSettingsQuery } from "../api/queries/useGetSettingsQuery";

function LoginPageContent() {

@@ -53,15 +55,19 @@ function LoginPageContent() {
}

return (
<div
className="min-h-dvh relative flex gap-4 flex-col items-center justify-center bg-background p-4"
style={{
backgroundImage: "url('/images/background.png')",
backgroundSize: "cover",
backgroundPosition: "center",
}}
>
<div className="flex flex-col items-center justify-center gap-4">
<div className="min-h-dvh relative flex gap-4 flex-col items-center justify-center bg-background p-4">
<DotPattern
width={24}
height={24}
cx={1}
cy={1}
cr={1}
className={cn(
"[mask-image:linear-gradient(to_bottom,white,transparent,transparent)]",
"text-input/70",
)}
/>
<div className="flex flex-col items-center justify-center gap-4 z-10">
<Logo className="fill-primary" width={32} height={28} />
<h1 className="text-2xl font-medium font-chivo">Welcome to OpenRAG</h1>
<p className="text-sm text-muted-foreground">

@@ -72,7 +78,7 @@ function LoginPageContent() {
Continue with Google
</Button>
</div>
<div className="flex items-center justify-center gap-2 absolute bottom-6 text-xs text-muted-foreground">
<div className="flex items-center justify-center gap-2 absolute bottom-6 text-xs text-muted-foreground z-10">
<p className="text-accent-emerald-foreground">Systems Operational</p>•
<p>Privacy Policy</p>
</div>

@@ -47,8 +47,7 @@ export function AdvancedOnboarding({
{hasEmbeddingModels && (
<LabelWrapper
label="Embedding model"
description="It’s recommended that you use XYZ, ABC, or DEF models for best performance."
helperText="The embedding model for your Ollama server."
helperText="Model used for knowledge ingest and retrieval"
id="embedding-model"
required={true}
>

@@ -63,8 +62,7 @@ export function AdvancedOnboarding({
{hasLanguageModels && (
<LabelWrapper
label="Language model"
description="It’s recommended that you use XYZ, ABC, or DEF models for best performance."
helperText="The embedding model for your Ollama server."
helperText="Model used for chat"
id="embedding-model"
required={true}
>

@@ -79,7 +77,7 @@ export function AdvancedOnboarding({
{(hasLanguageModels || hasEmbeddingModels) && <Separator />}
<LabelWrapper
label="Sample dataset"
description="Ingest two small PDFs"
description="Load 2 sample PDFs to chat with data immediately."
id="sample-dataset"
flex
>
@ -1,5 +1,6 @@
|
|||
import { useState } from "react";
|
||||
import { LabelInput } from "@/components/label-input";
|
||||
import { LabelWrapper } from "@/components/label-wrapper";
|
||||
import IBMLogo from "@/components/logo/ibm-logo";
|
||||
import { useDebouncedValue } from "@/lib/debounce";
|
||||
import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation";
|
||||
|
|
@ -7,6 +8,7 @@ import { useGetIBMModelsQuery } from "../../api/queries/useGetModelsQuery";
|
|||
import { useModelSelection } from "../hooks/useModelSelection";
|
||||
import { useUpdateSettings } from "../hooks/useUpdateSettings";
|
||||
import { AdvancedOnboarding } from "./advanced";
|
||||
import { ModelSelector } from "./model-selector";
|
||||
|
||||
export function IBMOnboarding({
|
||||
setSettings,
|
||||
|
|
@ -17,10 +19,42 @@ export function IBMOnboarding({
|
|||
sampleDataset: boolean;
|
||||
setSampleDataset: (dataset: boolean) => void;
|
||||
}) {
|
||||
const [endpoint, setEndpoint] = useState("");
|
||||
const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com");
|
||||
const [apiKey, setApiKey] = useState("");
|
||||
const [projectId, setProjectId] = useState("");
|
||||
|
||||
const options = [
|
||||
{
|
||||
value: "https://us-south.ml.cloud.ibm.com",
|
||||
label: "https://us-south.ml.cloud.ibm.com",
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
value: "https://eu-de.ml.cloud.ibm.com",
|
||||
label: "https://eu-de.ml.cloud.ibm.com",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
value: "https://eu-gb.ml.cloud.ibm.com",
|
||||
label: "https://eu-gb.ml.cloud.ibm.com",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
value: "https://au-syd.ml.cloud.ibm.com",
|
||||
label: "https://au-syd.ml.cloud.ibm.com",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
value: "https://jp-tok.ml.cloud.ibm.com",
|
||||
label: "https://jp-tok.ml.cloud.ibm.com",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
value: "https://ca-tor.ml.cloud.ibm.com",
|
||||
label: "https://ca-tor.ml.cloud.ibm.com",
|
||||
default: false,
|
||||
},
|
||||
];
|
||||
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
|
||||
const debouncedApiKey = useDebouncedValue(apiKey, 500);
|
||||
const debouncedProjectId = useDebouncedValue(projectId, 500);
|
||||
|
|
@ -68,19 +102,26 @@ export function IBMOnboarding({
|
|||
return (
|
||||
<>
|
||||
<div className="space-y-4">
|
||||
<LabelInput
|
||||
<LabelWrapper
|
||||
label="watsonx.ai API Endpoint"
|
||||
helperText="The API endpoint for your watsonx.ai account."
|
||||
helperText="Base URL of the API"
|
||||
id="api-endpoint"
|
||||
required
|
||||
placeholder="https://us-south.ml.cloud.ibm.com"
|
||||
value={endpoint}
|
||||
onChange={(e) => setEndpoint(e.target.value)}
|
||||
/>
|
||||
>
|
||||
<ModelSelector
|
||||
options={options}
|
||||
value={endpoint}
|
||||
onValueChange={setEndpoint}
|
||||
searchPlaceholder="Search endpoint..."
|
||||
noOptionsPlaceholder="No endpoints available"
|
||||
placeholder="Select endpoint..."
|
||||
/>
|
||||
</LabelWrapper>
|
||||
<LabelInput
|
||||
label="IBM API key"
|
||||
helperText="The API key for your watsonx.ai account."
|
||||
id="api-key"
|
||||
type="password"
|
||||
required
|
||||
placeholder="your-api-key"
|
||||
value={apiKey}
|
||||
|
|
@ -102,16 +143,9 @@ export function IBMOnboarding({
|
|||
)}
|
||||
{modelsError && (
|
||||
<p className="text-mmd text-accent-amber-foreground">
|
||||
Invalid configuration or connection failed
|
||||
Connection failed. Check your configuration.
|
||||
</p>
|
||||
)}
|
||||
{modelsData &&
|
||||
(modelsData.language_models?.length > 0 ||
|
||||
modelsData.embedding_models?.length > 0) && (
|
||||
<p className="text-mmd text-accent-emerald-foreground">
|
||||
Configuration is valid
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
<AdvancedOnboarding
|
||||
icon={<IBMLogo className="w-4 h-4" />}
|
||||
|
|
|
|||
|
|
@@ -21,6 +21,9 @@ export function ModelSelector({
value,
onValueChange,
icon,
placeholder = "Select model...",
searchPlaceholder = "Search model...",
noOptionsPlaceholder = "No models available",
}: {
options: {
value: string;

@@ -29,6 +32,9 @@ export function ModelSelector({
}[];
value: string;
icon?: React.ReactNode;
placeholder?: string;
searchPlaceholder?: string;
noOptionsPlaceholder?: string;
onValueChange: (value: string) => void;
}) {
const [open, setOpen] = useState(false);

@@ -50,7 +56,7 @@ export function ModelSelector({
>
{value ? (
<div className="flex items-center gap-2">
<div className="w-4 h-4">{icon}</div>
{icon && <div className="w-4 h-4">{icon}</div>}
{options.find((framework) => framework.value === value)?.label}
{options.find((framework) => framework.value === value)
?.default && (

@@ -60,18 +66,18 @@ export function ModelSelector({
)}
</div>
) : options.length === 0 ? (
"No models available"
noOptionsPlaceholder
) : (
"Select model..."
placeholder
)}
<ChevronsUpDownIcon className="ml-2 h-4 w-4 shrink-0 opacity-50" />
</Button>
</PopoverTrigger>
<PopoverContent align="start" className="w-[400px] p-0">
<Command>
<CommandInput placeholder="Search model..." />
<CommandInput placeholder={searchPlaceholder} />
<CommandList>
<CommandEmpty>No model found.</CommandEmpty>
<CommandEmpty>{noOptionsPlaceholder}</CommandEmpty>
<CommandGroup>
{options.map((option) => (
<CommandItem
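
For orientation, a brief sketch of how the new placeholder props are exercised; it mirrors the watsonx.ai endpoint selector added to ibm-onboarding.tsx above, with an abbreviated, illustrative option list.

// Illustrative consumer; option values are taken from the ibm-onboarding.tsx diff.
import { useState } from "react";
import { ModelSelector } from "./model-selector";

export function EndpointSelector() {
  const [endpoint, setEndpoint] = useState("https://us-south.ml.cloud.ibm.com");

  const options = [
    {
      value: "https://us-south.ml.cloud.ibm.com",
      label: "https://us-south.ml.cloud.ibm.com",
      default: true,
    },
    {
      value: "https://eu-de.ml.cloud.ibm.com",
      label: "https://eu-de.ml.cloud.ibm.com",
      default: false,
    },
  ];

  return (
    <ModelSelector
      options={options}
      value={endpoint}
      onValueChange={setEndpoint}
      placeholder="Select endpoint..."
      searchPlaceholder="Search endpoint..."
      noOptionsPlaceholder="No endpoints available"
    />
  );
}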
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { useState } from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { LabelInput } from "@/components/label-input";
|
||||
import { LabelWrapper } from "@/components/label-wrapper";
|
||||
import OllamaLogo from "@/components/logo/ollama-logo";
|
||||
|
|
@ -19,7 +19,8 @@ export function OllamaOnboarding({
|
|||
sampleDataset: boolean;
|
||||
setSampleDataset: (dataset: boolean) => void;
|
||||
}) {
|
||||
const [endpoint, setEndpoint] = useState("");
|
||||
const [endpoint, setEndpoint] = useState("http://localhost:11434");
|
||||
const [showConnecting, setShowConnecting] = useState(false);
|
||||
const debouncedEndpoint = useDebouncedValue(endpoint, 500);
|
||||
|
||||
// Fetch models from API when endpoint is provided (debounced)
|
||||
|
|
@ -41,6 +42,25 @@ export function OllamaOnboarding({
|
|||
embeddingModels,
|
||||
} = useModelSelection(modelsData);
|
||||
|
||||
// Handle delayed display of connecting state
|
||||
useEffect(() => {
|
||||
let timeoutId: NodeJS.Timeout;
|
||||
|
||||
if (debouncedEndpoint && isLoadingModels) {
|
||||
timeoutId = setTimeout(() => {
|
||||
setShowConnecting(true);
|
||||
}, 500);
|
||||
} else {
|
||||
setShowConnecting(false);
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
};
|
||||
}, [debouncedEndpoint, isLoadingModels]);
|
||||
|
||||
const handleSampleDatasetChange = (dataset: boolean) => {
|
||||
setSampleDataset(dataset);
|
||||
};
|
||||
|
|
@ -57,74 +77,75 @@ export function OllamaOnboarding({
|
|||
);
|
||||
|
||||
// Check validation state based on models query
|
||||
const isConnecting = debouncedEndpoint && isLoadingModels;
|
||||
const hasConnectionError = debouncedEndpoint && modelsError;
|
||||
const hasNoModels =
|
||||
modelsData &&
|
||||
!modelsData.language_models?.length &&
|
||||
!modelsData.embedding_models?.length;
|
||||
const isValidConnection =
|
||||
modelsData &&
|
||||
(modelsData.language_models?.length > 0 ||
|
||||
modelsData.embedding_models?.length > 0);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-1">
|
||||
<LabelInput
|
||||
label="Ollama Endpoint"
|
||||
helperText="The endpoint for your Ollama server."
|
||||
label="Ollama Base URL"
|
||||
helperText="Base URL of your Ollama server"
|
||||
id="api-endpoint"
|
||||
required
|
||||
placeholder="http://localhost:11434"
|
||||
value={endpoint}
|
||||
onChange={(e) => setEndpoint(e.target.value)}
|
||||
/>
|
||||
{isConnecting && (
|
||||
{showConnecting && (
|
||||
<p className="text-mmd text-muted-foreground">
|
||||
Connecting to Ollama server...
|
||||
</p>
|
||||
)}
|
||||
{hasConnectionError && (
|
||||
<p className="text-mmd text-accent-amber-foreground">
|
||||
Can’t reach Ollama at {debouncedEndpoint}. Update the endpoint or
|
||||
Can’t reach Ollama at {debouncedEndpoint}. Update the base URL or
|
||||
start the server.
|
||||
</p>
|
||||
)}
|
||||
{hasNoModels && (
|
||||
<p className="text-mmd text-accent-amber-foreground">
|
||||
No models found. Please install some models on your Ollama server.
|
||||
</p>
|
||||
)}
|
||||
{isValidConnection && (
|
||||
<p className="text-mmd text-accent-emerald-foreground">
|
||||
Connected successfully
|
||||
No models found. Install embedding and agent models on your Ollama
|
||||
server.
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
<LabelWrapper
|
||||
label="Embedding model"
|
||||
helperText="The embedding model for your Ollama server."
|
||||
helperText="Model used for knowledge ingest and retrieval"
|
||||
id="embedding-model"
|
||||
required={true}
|
||||
>
|
||||
<ModelSelector
|
||||
options={embeddingModels}
|
||||
icon={<OllamaLogo className="w-4 h-4" />}
|
||||
noOptionsPlaceholder={
|
||||
isLoadingModels
|
||||
? "Loading models..."
|
||||
: "No embedding models detected. Install an embedding model to continue."
|
||||
}
|
||||
value={embeddingModel}
|
||||
onValueChange={setEmbeddingModel}
|
||||
/>
|
||||
</LabelWrapper>
|
||||
<LabelWrapper
|
||||
label="Language model"
|
||||
helperText="The embedding model for your Ollama server."
|
||||
helperText="Model used for chat"
|
||||
id="embedding-model"
|
||||
required={true}
|
||||
>
|
||||
<ModelSelector
|
||||
options={languageModels}
|
||||
icon={<OllamaLogo className="w-4 h-4" />}
|
||||
noOptionsPlaceholder={
|
||||
isLoadingModels
|
||||
? "Loading models..."
|
||||
: "No language models detected. Install a language model to continue."
|
||||
}
|
||||
value={languageModel}
|
||||
onValueChange={setLanguageModel}
|
||||
/>
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,8 @@
import { useState } from "react";
import { LabelInput } from "@/components/label-input";
import { LabelWrapper } from "@/components/label-wrapper";
import OpenAILogo from "@/components/logo/openai-logo";
import { Switch } from "@/components/ui/switch";
import { useDebouncedValue } from "@/lib/debounce";
import type { OnboardingVariables } from "../../api/mutations/useOnboardingMutation";
import { useGetOpenAIModelsQuery } from "../../api/queries/useGetModelsQuery";

@@ -18,6 +20,7 @@ export function OpenAIOnboarding({
  setSampleDataset: (dataset: boolean) => void;
}) {
  const [apiKey, setApiKey] = useState("");
  const [getFromEnv, setGetFromEnv] = useState(true);
  const debouncedApiKey = useDebouncedValue(apiKey, 500);

  // Fetch models from API when API key is provided

@@ -26,7 +29,12 @@
    isLoading: isLoadingModels,
    error: modelsError,
  } = useGetOpenAIModelsQuery(
    debouncedApiKey ? { apiKey: debouncedApiKey } : undefined,
    getFromEnv
      ? { apiKey: "" }
      : debouncedApiKey
        ? { apiKey: debouncedApiKey }
        : undefined,
    { enabled: debouncedApiKey !== "" || getFromEnv },
  );
  // Use custom hook for model selection logic
  const {

@@ -41,6 +49,15 @@
    setSampleDataset(dataset);
  };

  const handleGetFromEnvChange = (fromEnv: boolean) => {
    setGetFromEnv(fromEnv);
    if (fromEnv) {
      setApiKey("");
    }
    setLanguageModel("");
    setEmbeddingModel("");
  };

  // Update settings when values change
  useUpdateSettings(
    "openai",

@@ -53,33 +70,41 @@
  );
  return (
    <>
      <div className="space-y-1">
        <LabelInput
          label="OpenAI API key"
          helperText="The API key for your OpenAI account."
          id="api-key"
          required
          placeholder="sk-..."
          value={apiKey}
          onChange={(e) => setApiKey(e.target.value)}
        />
        {isLoadingModels && (
          <p className="text-mmd text-muted-foreground">
            Validating API key...
          </p>
      <div className="space-y-5">
        <LabelWrapper
          label="Get API key from environment variable"
          id="get-api-key"
          flex
        >
          <Switch
            checked={getFromEnv}
            onCheckedChange={handleGetFromEnvChange}
          />
        </LabelWrapper>
        {!getFromEnv && (
          <div className="space-y-1">
            <LabelInput
              label="OpenAI API key"
              helperText="The API key for your OpenAI account."
              id="api-key"
              type="password"
              required
              placeholder="sk-..."
              value={apiKey}
              onChange={(e) => setApiKey(e.target.value)}
            />
            {isLoadingModels && (
              <p className="text-mmd text-muted-foreground">
                Validating API key...
              </p>
            )}
            {modelsError && (
              <p className="text-mmd text-accent-amber-foreground">
                Invalid OpenAI API key. Verify or replace the key.
              </p>
            )}
          </div>
        )}
        {modelsError && (
          <p className="text-mmd text-accent-amber-foreground">
            Invalid API key
          </p>
        )}
        {modelsData &&
          (modelsData.language_models?.length > 0 ||
            modelsData.embedding_models?.length > 0) && (
            <p className="text-mmd text-accent-emerald-foreground">
              API Key is valid
            </p>
          )}
      </div>
      <AdvancedOnboarding
        icon={<OpenAILogo className="w-4 h-4" />}
@@ -4,8 +4,8 @@ import { useRouter } from "next/navigation";
import { Suspense, useEffect, useState } from "react";
import { toast } from "sonner";
import {
  type OnboardingVariables,
  useOnboardingMutation,
} from "@/app/api/mutations/useOnboardingMutation";
import IBMLogo from "@/components/logo/ibm-logo";
import OllamaLogo from "@/components/logo/ollama-logo";

@@ -13,198 +13,208 @@ import OpenAILogo from "@/components/logo/openai-logo";
import { ProtectedRoute } from "@/components/protected-route";
import { Button } from "@/components/ui/button";
import {
  Card,
  CardContent,
  CardFooter,
  CardHeader,
} from "@/components/ui/card";
import { DotPattern } from "@/components/ui/dot-pattern";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import { cn } from "@/lib/utils";
import { useGetSettingsQuery } from "../api/queries/useGetSettingsQuery";
import { IBMOnboarding } from "./components/ibm-onboarding";
import { OllamaOnboarding } from "./components/ollama-onboarding";
import { OpenAIOnboarding } from "./components/openai-onboarding";

function OnboardingPage() {
  const { data: settingsDb, isLoading: isSettingsLoading } =
    useGetSettingsQuery();

  const redirect = "/";

  const router = useRouter();

  // Redirect if already authenticated or in no-auth mode
  useEffect(() => {
    if (!isSettingsLoading && settingsDb && settingsDb.edited) {
      router.push(redirect);
    }
  }, [isSettingsLoading, settingsDb, router]);

  const [modelProvider, setModelProvider] = useState<string>("openai");

  const [sampleDataset, setSampleDataset] = useState<boolean>(true);

  const handleSetModelProvider = (provider: string) => {
    setModelProvider(provider);
    setSettings({
      model_provider: provider,
      embedding_model: "",
      llm_model: "",
    });
  };

  const [settings, setSettings] = useState<OnboardingVariables>({
    model_provider: modelProvider,
    embedding_model: "",
    llm_model: "",
  });

  // Mutations
  const onboardingMutation = useOnboardingMutation({
    onSuccess: (data) => {
      toast.success("Onboarding completed successfully!");
      console.log("Onboarding completed successfully", data);
      router.push(redirect);
    },
    onError: (error) => {
      toast.error("Failed to complete onboarding", {
        description: error.message,
      });
    },
  });

  const handleComplete = () => {
    if (
      !settings.model_provider ||
      !settings.llm_model ||
      !settings.embedding_model
    ) {
      toast.error("Please complete all required fields");
      return;
    }

    // Prepare onboarding data
    const onboardingData: OnboardingVariables = {
      model_provider: settings.model_provider,
      llm_model: settings.llm_model,
      embedding_model: settings.embedding_model,
      sample_data: sampleDataset,
    };

    // Add API key if available
    if (settings.api_key) {
      onboardingData.api_key = settings.api_key;
    }

    // Add endpoint if available
    if (settings.endpoint) {
      onboardingData.endpoint = settings.endpoint;
    }

    // Add project_id if available
    if (settings.project_id) {
      onboardingData.project_id = settings.project_id;
    }

    onboardingMutation.mutate(onboardingData);
  };

  const isComplete = !!settings.llm_model && !!settings.embedding_model;
  return (
    <div
      className="min-h-dvh w-full flex gap-5 flex-col items-center justify-center bg-background p-4"
      style={{
        backgroundImage: "url('/images/background.png')",
        backgroundSize: "cover",
        backgroundPosition: "center",
      }}
    >
      <div className="flex flex-col items-center gap-5 min-h-[550px] w-full">
        <div className="flex flex-col items-center justify-center gap-4">
          <h1 className="text-2xl font-medium font-chivo">
            Configure your models
          </h1>
          <p className="text-sm text-muted-foreground">[description of task]</p>
        </div>
        <Card className="w-full max-w-[580px]">
          <Tabs
            defaultValue={modelProvider}
            onValueChange={handleSetModelProvider}
          >
            <CardHeader>
              <TabsList>
                <TabsTrigger value="openai">
                  <OpenAILogo className="w-4 h-4" />
                  OpenAI
                </TabsTrigger>
                <TabsTrigger value="watsonx">
                  <IBMLogo className="w-4 h-4" />
                  IBM
                </TabsTrigger>
                <TabsTrigger value="ollama">
                  <OllamaLogo className="w-4 h-4" />
                  Ollama
                </TabsTrigger>
              </TabsList>
            </CardHeader>
            <CardContent>
              <TabsContent value="openai">
                <OpenAIOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
              <TabsContent value="watsonx">
                <IBMOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
              <TabsContent value="ollama">
                <OllamaOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
            </CardContent>
          </Tabs>
          <CardFooter className="flex justify-end">
            <Tooltip>
              <TooltipTrigger asChild>
                <Button
                  size="sm"
                  onClick={handleComplete}
                  disabled={!isComplete}
                  loading={onboardingMutation.isPending}
                >
                  Complete
                </Button>
              </TooltipTrigger>
              <TooltipContent>
                {!isComplete ? "Please fill in all required fields" : ""}
              </TooltipContent>
            </Tooltip>
          </CardFooter>
        </Card>
      </div>
    </div>
  );
  return (
    <div className="min-h-dvh w-full flex gap-5 flex-col items-center justify-center bg-background relative p-4">
      <DotPattern
        width={24}
        height={24}
        cx={1}
        cy={1}
        cr={1}
        className={cn(
          "[mask-image:linear-gradient(to_bottom,white,transparent,transparent)]",
          "text-input/70",
        )}
      />

      <div className="flex flex-col items-center gap-5 min-h-[550px] w-full z-10">
        <div className="flex flex-col items-center justify-center gap-4">
          <h1 className="text-2xl font-medium font-chivo">
            Connect a model provider
          </h1>
        </div>
        <Card className="w-full max-w-[580px]">
          <Tabs
            defaultValue={modelProvider}
            onValueChange={handleSetModelProvider}
          >
            <CardHeader>
              <TabsList>
                <TabsTrigger value="openai">
                  <OpenAILogo className="w-4 h-4" />
                  OpenAI
                </TabsTrigger>
                <TabsTrigger value="watsonx">
                  <IBMLogo className="w-4 h-4" />
                  IBM
                </TabsTrigger>
                <TabsTrigger value="ollama">
                  <OllamaLogo className="w-4 h-4" />
                  Ollama
                </TabsTrigger>
              </TabsList>
            </CardHeader>
            <CardContent>
              <TabsContent value="openai">
                <OpenAIOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
              <TabsContent value="watsonx">
                <IBMOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
              <TabsContent value="ollama">
                <OllamaOnboarding
                  setSettings={setSettings}
                  sampleDataset={sampleDataset}
                  setSampleDataset={setSampleDataset}
                />
              </TabsContent>
            </CardContent>
          </Tabs>
          <CardFooter className="flex justify-end">
            <Tooltip>
              <TooltipTrigger asChild>
                <div>
                  <Button
                    size="sm"
                    onClick={handleComplete}
                    disabled={!isComplete}
                    loading={onboardingMutation.isPending}
                  >
                    Complete
                  </Button>
                </div>
              </TooltipTrigger>
              {!isComplete && (
                <TooltipContent>
                  Please fill in all required fields
                </TooltipContent>
              )}
            </Tooltip>
          </CardFooter>
        </Card>
      </div>
    </div>
  );
}

export default function ProtectedOnboardingPage() {
  return (
    <ProtectedRoute>
      <Suspense fallback={<div>Loading onboarding...</div>}>
        <OnboardingPage />
      </Suspense>
    </ProtectedRoute>
  );
}
@@ -2,28 +2,48 @@

import { Bell, Loader2 } from "lucide-react";
import { usePathname } from "next/navigation";
import { useGetConversationsQuery } from "@/app/api/queries/useGetConversationsQuery";
import { useGetSettingsQuery } from "@/app/api/queries/useGetSettingsQuery";
import { KnowledgeFilterPanel } from "@/components/knowledge-filter-panel";
import Logo from "@/components/logo/logo";
import { Navigation } from "@/components/navigation";
import { TaskNotificationMenu } from "@/components/task-notification-menu";
import { Button } from "@/components/ui/button";
import { UserNav } from "@/components/user-nav";
import { useAuth } from "@/contexts/auth-context";
import { useChat } from "@/contexts/chat-context";
import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context";
// import { GitHubStarButton } from "@/components/github-star-button"
// import { DiscordLink } from "@/components/discord-link"
import { useTask } from "@/contexts/task-context";
import Logo from "@/components/logo/logo";

export function LayoutWrapper({ children }: { children: React.ReactNode }) {
  const pathname = usePathname();
  const { tasks, isMenuOpen, toggleMenu } = useTask();
  const { isPanelOpen } = useKnowledgeFilter();
  const { isLoading, isAuthenticated, isNoAuthMode } = useAuth();
  const {
    endpoint,
    refreshTrigger,
    refreshConversations,
    startNewConversation,
  } = useChat();
  const { isLoading: isSettingsLoading, data: settings } = useGetSettingsQuery({
    enabled: isAuthenticated || isNoAuthMode,
  });

  // Only fetch conversations on chat page
  const isOnChatPage = pathname === "/" || pathname === "/chat";
  const { data: conversations = [], isLoading: isConversationsLoading } =
    useGetConversationsQuery(endpoint, refreshTrigger, {
      enabled: isOnChatPage && (isAuthenticated || isNoAuthMode),
    });

  const handleNewConversation = () => {
    refreshConversations();
    startNewConversation();
  };

  // List of paths that should not show navigation
  const authPaths = ["/login", "/auth/callback", "/onboarding"];
  const isAuthPage = authPaths.includes(pathname);

@@ -33,7 +53,7 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
    (task) =>
      task.status === "pending" ||
      task.status === "running" ||
      task.status === "processing"
      task.status === "processing",
  );

  // Show loading state when backend isn't ready

@@ -99,7 +119,11 @@ export function LayoutWrapper({ children }: { children: React.ReactNode }) {
        </div>
      </header>
      <div className="side-bar-arrangement bg-background fixed left-0 top-[53px] bottom-0 md:flex hidden">
        <Navigation />
        <Navigation
          conversations={conversations}
          isConversationsLoading={isConversationsLoading}
          onNewConversation={handleNewConversation}
        />
      </div>
      <main
        className={`md:pl-72 transition-all duration-300 overflow-y-auto h-[calc(100vh-53px)] ${
34 src/agent.py

@@ -636,6 +636,34 @@ async def async_langflow_chat_stream(
        logger.debug(
            f"Stored langflow conversation thread for user {user_id} with response_id: {response_id}"
        )


def delete_user_conversation(user_id: str, response_id: str) -> bool:
    """Delete a conversation for a user from both memory and persistent storage"""
    deleted = False

    try:
        # Delete from in-memory storage
        if user_id in active_conversations and response_id in active_conversations[user_id]:
            del active_conversations[user_id][response_id]
            logger.debug(f"Deleted conversation {response_id} from memory for user {user_id}")
            deleted = True

        # Delete from persistent storage
        conversation_deleted = conversation_persistence.delete_conversation_thread(user_id, response_id)
        if conversation_deleted:
            logger.debug(f"Deleted conversation {response_id} from persistent storage for user {user_id}")
            deleted = True

        # Release session ownership
        try:
            from services.session_ownership_service import session_ownership_service
            session_ownership_service.release_session(user_id, response_id)
            logger.debug(f"Released session ownership for {response_id} for user {user_id}")
        except Exception as e:
            logger.warning(f"Failed to release session ownership: {e}")

        return deleted
    except Exception as e:
        logger.error(f"Error deleting conversation {response_id} for user {user_id}: {e}")
        return False
@@ -155,3 +155,27 @@ async def langflow_history_endpoint(request: Request, chat_service, session_mana
        return JSONResponse(
            {"error": f"Failed to get langflow history: {str(e)}"}, status_code=500
        )


async def delete_session_endpoint(request: Request, chat_service, session_manager):
    """Delete a chat session"""
    user = request.state.user
    user_id = user.user_id
    session_id = request.path_params["session_id"]

    try:
        # Delete from both local storage and Langflow
        result = await chat_service.delete_session(user_id, session_id)

        if result.get("success"):
            return JSONResponse({"message": "Session deleted successfully"})
        else:
            return JSONResponse(
                {"error": result.get("error", "Failed to delete session")},
                status_code=500
            )
    except Exception as e:
        logger.error(f"Error deleting session: {e}")
        return JSONResponse(
            {"error": f"Failed to delete session: {str(e)}"}, status_code=500
        )
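For reference, a minimal client sketch of the new endpoint; it is registered further down in src/main.py as DELETE /sessions/{session_id} behind require_auth. The base URL and the auth header below are assumptions for illustration; only the path, method, and the success/error JSON shapes come from this change.

import httpx

# Sketch only: base URL and credentials depend on the deployment.
AUTH_HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth mechanism

async def delete_chat_session(session_id: str) -> bool:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.delete(f"/sessions/{session_id}", headers=AUTH_HEADERS)
        # 200 -> {"message": "Session deleted successfully"}
        # 500 -> {"error": "..."}
        return resp.status_code == 200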
@@ -47,9 +47,6 @@ def get_docling_preset_configs():
    }


async def get_settings(request, session_manager):
    """Get application settings"""
    try:

@@ -207,7 +204,9 @@ async def update_settings(request, session_manager):
        try:
            flows_service = _get_flows_service()
            await flows_service.update_chat_flow_model(body["llm_model"])
            logger.info(f"Successfully updated chat flow model to '{body['llm_model']}'")
            logger.info(
                f"Successfully updated chat flow model to '{body['llm_model']}'"
            )
        except Exception as e:
            logger.error(f"Failed to update chat flow model: {str(e)}")
            # Don't fail the entire settings update if flow update fails

@@ -220,7 +219,9 @@ async def update_settings(request, session_manager):
        # Also update the chat flow with the new system prompt
        try:
            flows_service = _get_flows_service()
            await flows_service.update_chat_flow_system_prompt(body["system_prompt"])
            await flows_service.update_chat_flow_system_prompt(
                body["system_prompt"]
            )
            logger.info(f"Successfully updated chat flow system prompt")
        except Exception as e:
            logger.error(f"Failed to update chat flow system prompt: {str(e)}")

@@ -243,8 +244,12 @@ async def update_settings(request, session_manager):
        # Also update the ingest flow with the new embedding model
        try:
            flows_service = _get_flows_service()
            await flows_service.update_ingest_flow_embedding_model(body["embedding_model"].strip())
            logger.info(f"Successfully updated ingest flow embedding model to '{body['embedding_model'].strip()}'")
            await flows_service.update_ingest_flow_embedding_model(
                body["embedding_model"].strip()
            )
            logger.info(
                f"Successfully updated ingest flow embedding model to '{body['embedding_model'].strip()}'"
            )
        except Exception as e:
            logger.error(f"Failed to update ingest flow embedding model: {str(e)}")
            # Don't fail the entire settings update if flow update fails

@@ -266,8 +271,12 @@ async def update_settings(request, session_manager):
        # Also update the flow with the new docling preset
        try:
            flows_service = _get_flows_service()
            await flows_service.update_flow_docling_preset(body["doclingPresets"], preset_configs[body["doclingPresets"]])
            logger.info(f"Successfully updated docling preset in flow to '{body['doclingPresets']}'")
            await flows_service.update_flow_docling_preset(
                body["doclingPresets"], preset_configs[body["doclingPresets"]]
            )
            logger.info(
                f"Successfully updated docling preset in flow to '{body['doclingPresets']}'"
            )
        except Exception as e:
            logger.error(f"Failed to update docling preset in flow: {str(e)}")
            # Don't fail the entire settings update if flow update fails

@@ -285,7 +294,9 @@ async def update_settings(request, session_manager):
        try:
            flows_service = _get_flows_service()
            await flows_service.update_ingest_flow_chunk_size(body["chunk_size"])
            logger.info(f"Successfully updated ingest flow chunk size to {body['chunk_size']}")
            logger.info(
                f"Successfully updated ingest flow chunk size to {body['chunk_size']}"
            )
        except Exception as e:
            logger.error(f"Failed to update ingest flow chunk size: {str(e)}")
            # Don't fail the entire settings update if flow update fails

@@ -303,8 +314,12 @@ async def update_settings(request, session_manager):
        # Also update the ingest flow with the new chunk overlap
        try:
            flows_service = _get_flows_service()
            await flows_service.update_ingest_flow_chunk_overlap(body["chunk_overlap"])
            logger.info(f"Successfully updated ingest flow chunk overlap to {body['chunk_overlap']}")
            await flows_service.update_ingest_flow_chunk_overlap(
                body["chunk_overlap"]
            )
            logger.info(
                f"Successfully updated ingest flow chunk overlap to {body['chunk_overlap']}"
            )
        except Exception as e:
            logger.error(f"Failed to update ingest flow chunk overlap: {str(e)}")
            # Don't fail the entire settings update if flow update fails

@@ -588,11 +603,10 @@ async def onboarding(request, flows_service):
        )


def _get_flows_service():
    """Helper function to get flows service instance"""
    from services.flows_service import FlowsService

    return FlowsService()


@@ -605,8 +619,7 @@ async def update_docling_preset(request, session_manager):
        # Validate preset parameter
        if "preset" not in body:
            return JSONResponse(
                {"error": "preset parameter is required"},
                status_code=400
                {"error": "preset parameter is required"}, status_code=400
            )

        preset = body["preset"]

@@ -615,8 +628,10 @@ async def update_docling_preset(request, session_manager):
        if preset not in preset_configs:
            valid_presets = list(preset_configs.keys())
            return JSONResponse(
                {"error": f"Invalid preset '{preset}'. Valid presets: {', '.join(valid_presets)}"},
                status_code=400
                {
                    "error": f"Invalid preset '{preset}'. Valid presets: {', '.join(valid_presets)}"
                },
                status_code=400,
            )

        # Get the preset configuration

@@ -628,16 +643,16 @@ async def update_docling_preset(request, session_manager):

        logger.info(f"Successfully updated docling preset to '{preset}' in ingest flow")

        return JSONResponse({
            "message": f"Successfully updated docling preset to '{preset}'",
            "preset": preset,
            "preset_config": preset_config
        })
        return JSONResponse(
            {
                "message": f"Successfully updated docling preset to '{preset}'",
                "preset": preset,
                "preset_config": preset_config,
            }
        )

    except Exception as e:
        logger.error("Failed to update docling preset", error=str(e))
        return JSONResponse(
            {"error": f"Failed to update docling preset: {str(e)}"},
            status_code=500
            {"error": f"Failed to update docling preset: {str(e)}"}, status_code=500
        )
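A rough request sketch for the preset update handler above: it expects a JSON body with a "preset" key and answers 400 with the list of valid presets when the name is unknown. The URL below is a placeholder, since the exact route path bound to update_docling_preset is not shown in this hunk (only that it is registered with methods=["PATCH"]).

import httpx

# Placeholder path; substitute the actual route bound to settings.update_docling_preset.
DOCLING_PRESET_URL = "http://localhost:8000/<docling-preset-route>"

def set_docling_preset(preset: str) -> tuple[int, dict]:
    resp = httpx.patch(DOCLING_PRESET_URL, json={"preset": preset})
    # 200 -> {"message": ..., "preset": ..., "preset_config": {...}}
    # 400 -> {"error": "Invalid preset '...'. Valid presets: ..."}
    return resp.status_code, resp.json()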
30 src/main.py

@@ -392,8 +392,6 @@ async def startup_tasks(services):
    """Startup tasks"""
    logger.info("Starting startup tasks")
    await init_index()
    # Sample data ingestion is now handled by the onboarding endpoint when sample_data=True
    logger.info("Sample data ingestion moved to onboarding endpoint")


async def initialize_services():

@@ -786,6 +784,18 @@ async def create_app():
            ),
            methods=["GET"],
        ),
        # Session deletion endpoint
        Route(
            "/sessions/{session_id}",
            require_auth(services["session_manager"])(
                partial(
                    chat.delete_session_endpoint,
                    chat_service=services["chat_service"],
                    session_manager=services["session_manager"],
                )
            ),
            methods=["DELETE"],
        ),
        # Authentication endpoints
        Route(
            "/auth/init",

@@ -927,7 +937,8 @@ async def create_app():
            "/settings",
            require_auth(services["session_manager"])(
                partial(
                    settings.update_settings, session_manager=services["session_manager"]
                    settings.update_settings,
                    session_manager=services["session_manager"],
                )
            ),
            methods=["POST"],

@@ -939,7 +950,7 @@ async def create_app():
                partial(
                    models.get_openai_models,
                    models_service=services["models_service"],
                    session_manager=services["session_manager"]
                    session_manager=services["session_manager"],
                )
            ),
            methods=["GET"],

@@ -950,7 +961,7 @@ async def create_app():
                partial(
                    models.get_ollama_models,
                    models_service=services["models_service"],
                    session_manager=services["session_manager"]
                    session_manager=services["session_manager"],
                )
            ),
            methods=["GET"],

@@ -961,7 +972,7 @@ async def create_app():
                partial(
                    models.get_ibm_models,
                    models_service=services["models_service"],
                    session_manager=services["session_manager"]
                    session_manager=services["session_manager"],
                )
            ),
            methods=["GET", "POST"],

@@ -970,10 +981,7 @@ async def create_app():
        Route(
            "/onboarding",
            require_auth(services["session_manager"])(
                partial(
                    settings.onboarding,
                    flows_service=services["flows_service"]
                )
                partial(settings.onboarding, flows_service=services["flows_service"])
            ),
            methods=["POST"],
        ),

@@ -983,7 +991,7 @@ async def create_app():
            require_auth(services["session_manager"])(
                partial(
                    settings.update_docling_preset,
                    session_manager=services["session_manager"]
                    session_manager=services["session_manager"],
                )
            ),
            methods=["PATCH"],
@@ -484,3 +484,55 @@ class ChatService:
            "total_conversations": len(all_conversations),
        }

    async def delete_session(self, user_id: str, session_id: str):
        """Delete a session from both local storage and Langflow"""
        try:
            # Delete from local conversation storage
            from agent import delete_user_conversation
            local_deleted = delete_user_conversation(user_id, session_id)

            # Delete from Langflow using the monitor API
            langflow_deleted = await self._delete_langflow_session(session_id)

            success = local_deleted or langflow_deleted
            error_msg = None

            if not success:
                error_msg = "Session not found in local storage or Langflow"

            return {
                "success": success,
                "local_deleted": local_deleted,
                "langflow_deleted": langflow_deleted,
                "error": error_msg
            }

        except Exception as e:
            logger.error(f"Error deleting session {session_id} for user {user_id}: {e}")
            return {
                "success": False,
                "error": str(e)
            }

    async def _delete_langflow_session(self, session_id: str):
        """Delete a session from Langflow using the monitor API"""
        try:
            response = await clients.langflow_request(
                "DELETE",
                f"/api/v1/monitor/messages/session/{session_id}"
            )

            if response.status_code == 200 or response.status_code == 204:
                logger.info(f"Successfully deleted session {session_id} from Langflow")
                return True
            else:
                logger.warning(
                    f"Failed to delete session {session_id} from Langflow: "
                    f"{response.status_code} - {response.text}"
                )
                return False

        except Exception as e:
            logger.error(f"Error deleting session {session_id} from Langflow: {e}")
            return False
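The endpoint only checks result.get("success"), but delete_session also reports which store the session was actually removed from. A small caller sketch (the raising behaviour is illustrative, not part of this change):

async def remove_session(chat_service, user_id: str, session_id: str) -> dict:
    result = await chat_service.delete_session(user_id, session_id)
    # e.g. {"success": True, "local_deleted": True, "langflow_deleted": False, "error": None}
    if not result["success"]:
        raise RuntimeError(result.get("error") or "Session deletion failed")
    return result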
@@ -86,12 +86,14 @@ class ConversationPersistenceService:
        user_conversations = self.get_user_conversations(user_id)
        return user_conversations.get(response_id, {})

    def delete_conversation_thread(self, user_id: str, response_id: str):
    def delete_conversation_thread(self, user_id: str, response_id: str) -> bool:
        """Delete a specific conversation thread"""
        if user_id in self._conversations and response_id in self._conversations[user_id]:
            del self._conversations[user_id][response_id]
            self._save_conversations()
            logger.debug(f"Deleted conversation {response_id} for user {user_id}")
            return True
        return False

    def clear_user_conversations(self, user_id: str):
        """Clear all conversations for a user"""
@@ -74,6 +74,20 @@ class SessionOwnershipService:
        """Filter a list of sessions to only include those owned by the user"""
        user_sessions = self.get_user_sessions(user_id)
        return [session for session in session_ids if session in user_sessions]

    def release_session(self, user_id: str, session_id: str) -> bool:
        """Release a session from a user (delete ownership record)"""
        if session_id in self.ownership_data:
            # Verify the user owns this session before deleting
            if self.ownership_data[session_id].get("user_id") == user_id:
                del self.ownership_data[session_id]
                self._save_ownership_data()
                logger.debug(f"Released session {session_id} from user {user_id}")
                return True
            else:
                logger.warning(f"User {user_id} tried to release session {session_id} they don't own")
                return False
        return False

    def get_ownership_stats(self) -> Dict[str, any]:
        """Get statistics about session ownership"""
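For context, release_session assumes ownership_data is keyed by session id and that each record carries the owning user. A sketch of the assumed shape (any fields beyond user_id are guesses):

# Assumed shape of SessionOwnershipService.ownership_data (illustrative only):
ownership_data = {
    "session-123": {"user_id": "user-a"},  # released only when user-a asks
    "session-456": {"user_id": "user-b"},
}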