diff --git a/frontend/components/confirmation-dialog.tsx b/frontend/components/confirmation-dialog.tsx
new file mode 100644
index 00000000..1ff8a370
--- /dev/null
+++ b/frontend/components/confirmation-dialog.tsx
@@ -0,0 +1,84 @@
+"use client";
+
+import React from "react";
+import {
+  Dialog,
+  DialogContent,
+  DialogDescription,
+  DialogFooter,
+  DialogHeader,
+  DialogTitle,
+} from "./ui/dialog";
+import { Button } from "./ui/button";
+import { AlertTriangle } from "lucide-react";
+
+interface ConfirmationDialogProps {
+  open: boolean;
+  onOpenChange: (open: boolean) => void;
+  title?: string;
+  description?: string;
+  confirmText?: string;
+  cancelText?: string;
+  onConfirm: () => void | Promise<void>;
+  isLoading?: boolean;
+  variant?: "destructive" | "default";
+}
+
+export const DeleteConfirmationDialog: React.FC<ConfirmationDialogProps> = ({
+  open,
+  onOpenChange,
+  title = "Are you sure?",
+  description = "This action cannot be undone.",
+  confirmText = "Confirm",
+  cancelText = "Cancel",
+  onConfirm,
+  isLoading = false,
+  variant = "destructive",
+}) => {
+  const handleConfirm = async () => {
+    try {
+      await onConfirm();
+    } finally {
+      // Only close if not in loading state (let the parent handle this)
+      if (!isLoading) {
+        onOpenChange(false);
+      }
+    }
+  };
+
+  return (
+
+
+
+ {variant === "destructive" && ( + + )} + {title} +
+ {description} +
+ + + + + +
+
+ ); +}; diff --git a/frontend/components/knowledge-actions-dropdown.tsx b/frontend/components/knowledge-actions-dropdown.tsx index 59d38e47..f73ffe25 100644 --- a/frontend/components/knowledge-actions-dropdown.tsx +++ b/frontend/components/knowledge-actions-dropdown.tsx @@ -1,5 +1,6 @@ "use client"; +import { useState } from "react"; import { DropdownMenu, DropdownMenuContent, @@ -8,20 +9,72 @@ import { } from "@/components/ui/dropdown-menu"; import { EllipsisVertical } from "lucide-react"; import { Button } from "./ui/button"; +import { DeleteConfirmationDialog } from "./confirmation-dialog"; +import { useDeleteDocument } from "@/app/api/mutations/useDeleteDocument"; +import { toast } from "sonner"; -export function KnowledgeActionsDropdown() { - return ( - - - - - - - Delete - - - - ); +interface KnowledgeActionsDropdownProps { + filename: string; } + +export const KnowledgeActionsDropdown = ({ + filename, +}: KnowledgeActionsDropdownProps) => { + const [showDeleteDialog, setShowDeleteDialog] = useState(false); + const deleteDocumentMutation = useDeleteDocument(); + + const handleDelete = async () => { + try { + await deleteDocumentMutation.mutateAsync({ filename }); + toast.success(`Successfully deleted "${filename}"`); + setShowDeleteDialog(false); + } catch (error) { + toast.error( + error instanceof Error ? error.message : "Failed to delete document" + ); + } + }; + + return ( + <> + + + + + + {/* //TODO: Implement rename and sync */} + {/* alert("Not implemented")} + > + Rename + + alert("Not implemented")} + > + Sync + */} + setShowDeleteDialog(true)} + > + Delete + + + + + + + ); +}; diff --git a/frontend/components/knowledge-dropdown.tsx b/frontend/components/knowledge-dropdown.tsx index 88c61190..ee49fc3a 100644 --- a/frontend/components/knowledge-dropdown.tsx +++ b/frontend/components/knowledge-dropdown.tsx @@ -1,10 +1,10 @@ "use client"; -import { useQueryClient } from "@tanstack/react-query"; import { ChevronDown, Cloud, FolderOpen, + Loader2, PlugZap, Plus, Upload, @@ -45,6 +45,7 @@ export function KnowledgeDropdown({ const [folderLoading, setFolderLoading] = useState(false); const [s3Loading, setS3Loading] = useState(false); const [fileUploading, setFileUploading] = useState(false); + const [isNavigatingToCloud, setIsNavigatingToCloud] = useState(false); const [cloudConnectors, setCloudConnectors] = useState<{ [key: string]: { name: string; @@ -56,12 +57,6 @@ export function KnowledgeDropdown({ const fileInputRef = useRef(null); const dropdownRef = useRef(null); - const queryClient = useQueryClient(); - - const refetchSearch = () => { - queryClient.invalidateQueries({ queryKey: ["search"] }); - }; - // Check AWS availability and cloud connectors on mount useEffect(() => { const checkAvailability = async () => { @@ -108,7 +103,7 @@ export function KnowledgeDropdown({ const connections = statusData.connections || []; const activeConnection = connections.find( (conn: { is_active: boolean; connection_id: string }) => - conn.is_active, + conn.is_active ); const isConnected = activeConnection !== undefined; @@ -118,7 +113,7 @@ export function KnowledgeDropdown({ // Check token availability try { const tokenRes = await fetch( - `/api/connectors/${type}/token?connection_id=${activeConnection.connection_id}`, + `/api/connectors/${type}/token?connection_id=${activeConnection.connection_id}` ); if (tokenRes.ok) { const tokenData = await tokenRes.json(); @@ -179,7 +174,7 @@ export function KnowledgeDropdown({ window.dispatchEvent( new CustomEvent("fileUploadStart", { detail: { 
filename: files[0].name }, - }), + }) ); try { @@ -191,21 +186,38 @@ export function KnowledgeDropdown({ method: "POST", body: formData, }); + const uploadIngestJson = await uploadIngestRes.json(); + if (!uploadIngestRes.ok) { throw new Error( - uploadIngestJson?.error || "Upload and ingest failed", + uploadIngestJson?.error || "Upload and ingest failed" ); } - // Extract results from the unified response - const fileId = uploadIngestJson?.upload?.id; - const filePath = uploadIngestJson?.upload?.path; + // Extract results from the response - handle both unified and simple formats + const fileId = uploadIngestJson?.upload?.id || uploadIngestJson?.id; + const filePath = + uploadIngestJson?.upload?.path || + uploadIngestJson?.path || + "uploaded"; const runJson = uploadIngestJson?.ingestion; const deleteResult = uploadIngestJson?.deletion; - if (!fileId || !filePath) { - throw new Error("Upload successful but no file id/path returned"); + if (!fileId) { + throw new Error("Upload successful but no file id returned"); + } + + // Check if ingestion actually succeeded + if ( + runJson && + runJson.status !== "COMPLETED" && + runJson.status !== "SUCCESS" + ) { + const errorMsg = runJson.error || "Ingestion pipeline failed"; + throw new Error( + `Ingestion failed: ${errorMsg}. Try setting DISABLE_INGEST_WITH_LANGFLOW=true if you're experiencing Langflow component issues.` + ); } // Log deletion status if provided @@ -213,12 +225,12 @@ export function KnowledgeDropdown({ if (deleteResult.status === "deleted") { console.log( "File successfully cleaned up from Langflow:", - deleteResult.file_id, + deleteResult.file_id ); } else if (deleteResult.status === "delete_failed") { console.warn( "Failed to cleanup file from Langflow:", - deleteResult.error, + deleteResult.error ); } } @@ -236,8 +248,9 @@ export function KnowledgeDropdown({ unified: true, }, }, - }), + }) ); + // Trigger search refresh after successful ingestion window.dispatchEvent(new CustomEvent("knowledgeUpdated")); } catch (error) { @@ -247,12 +260,12 @@ export function KnowledgeDropdown({ filename: files[0].name, error: error instanceof Error ? 
error.message : "Upload failed", }, - }), + }) ); } finally { window.dispatchEvent(new CustomEvent("fileUploadComplete")); setFileUploading(false); - refetchSearch(); + // Don't call refetchSearch() here - the knowledgeUpdated event will handle it } } @@ -289,9 +302,15 @@ export function KnowledgeDropdown({ addTask(taskId); setFolderPath(""); // Trigger search refresh after successful folder processing starts + console.log( + "Folder upload successful, dispatching knowledgeUpdated event" + ); window.dispatchEvent(new CustomEvent("knowledgeUpdated")); } else if (response.ok) { setFolderPath(""); + console.log( + "Folder upload successful (direct), dispatching knowledgeUpdated event" + ); window.dispatchEvent(new CustomEvent("knowledgeUpdated")); } else { console.error("Folder upload failed:", result.error); @@ -305,7 +324,7 @@ export function KnowledgeDropdown({ console.error("Folder upload error:", error); } finally { setFolderLoading(false); - refetchSearch(); + // Don't call refetchSearch() here - the knowledgeUpdated event will handle it } }; @@ -336,6 +355,7 @@ export function KnowledgeDropdown({ addTask(taskId); setBucketUrl("s3://"); // Trigger search refresh after successful S3 processing starts + console.log("S3 upload successful, dispatching knowledgeUpdated event"); window.dispatchEvent(new CustomEvent("knowledgeUpdated")); } else { console.error("S3 upload failed:", result.error); @@ -349,7 +369,7 @@ export function KnowledgeDropdown({ console.error("S3 upload error:", error); } finally { setS3Loading(false); - refetchSearch(); + // Don't call refetchSearch() here - the knowledgeUpdated event will handle it } }; @@ -358,10 +378,17 @@ export function KnowledgeDropdown({ .map(([type, info]) => ({ label: info.name, icon: PlugZap, - onClick: () => { + onClick: async () => { setIsOpen(false); if (info.connected && info.hasToken) { - router.push(`/upload/${type}`); + setIsNavigatingToCloud(true); + try { + router.push(`/upload/${type}`); + // Keep loading state for a short time to show feedback + setTimeout(() => setIsNavigatingToCloud(false), 1000); + } catch { + setIsNavigatingToCloud(false); + } } else { router.push("/settings"); } @@ -403,14 +430,16 @@ export function KnowledgeDropdown({ ...cloudConnectorItems, ]; + // Comprehensive loading state + const isLoading = + fileUploading || folderLoading || s3Loading || isNavigatingToCloud; + return ( <>
-        {isOpen && (
+        {isOpen && !isLoading && (
{menuItems.map((item, index) => ( @@ -469,7 +522,7 @@ export function KnowledgeDropdown({ "w-full px-3 py-2 text-left text-sm hover:bg-accent hover:text-accent-foreground", "disabled" in item && item.disabled && - "opacity-50 cursor-not-allowed hover:bg-transparent hover:text-current", + "opacity-50 cursor-not-allowed hover:bg-transparent hover:text-current" )} > {item.label} @@ -508,7 +561,7 @@ export function KnowledgeDropdown({ type="text" placeholder="/path/to/documents" value={folderPath} - onChange={(e) => setFolderPath(e.target.value)} + onChange={e => setFolderPath(e.target.value)} />
@@ -550,7 +603,7 @@ export function KnowledgeDropdown({ type="text" placeholder="s3://bucket/path" value={bucketUrl} - onChange={(e) => setBucketUrl(e.target.value)} + onChange={e => setBucketUrl(e.target.value)} />
diff --git a/frontend/components/ui/input.tsx b/frontend/components/ui/input.tsx
index 3dc0b5f0..1eea9079 100644
--- a/frontend/components/ui/input.tsx
+++ b/frontend/components/ui/input.tsx
@@ -1,7 +1,8 @@
 import * as React from "react";
 import { cn } from "@/lib/utils";
 
-export interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
+export interface InputProps
+  extends React.InputHTMLAttributes<HTMLInputElement> {
   icon?: React.ReactNode;
   inputClassName?: string;
 }
@@ -9,7 +10,12 @@ export interface InputProps extends React.InputHTMLAttributes<HTMLInputElement>
 const Input = React.forwardRef<HTMLInputElement, InputProps>(
   ({ className, inputClassName, icon, type, placeholder, ...props }, ref) => {
     return (
-
- ) + ); } if (error || !connector) { return (
-
- +
-

Provider Not Available

+

+ Provider Not Available +

{error}

-
- ) + ); } if (connector.status !== "connected") { return (
-
- +
-

{connector.name} Not Connected

+

+ {connector.name} Not Connected +

- You need to connect your {connector.name} account before you can select files. + You need to connect your {connector.name} account before you can + select files.

-
- ) + ); } if (!connector.hasAccessToken) { return (
-
- +
-

Access Token Required

+

+ Access Token Required +

- {connector.accessTokenError || `Unable to get access token for ${connector.name}. Try reconnecting your account.`} + {connector.accessTokenError || + `Unable to get access token for ${connector.name}. Try reconnecting your account.`}

-
- ) + ); } return (
-

Add Cloud Knowledge

@@ -337,7 +367,7 @@ export default function UploadProviderPage() { accessToken={accessToken || undefined} /> )} - + {(connector.type === "onedrive" || connector.type === "sharepoint") && ( 0 && (
-
)} - + {/* Success toast notification */} - setShowSuccessToast(false)} duration={20000} />
-  )
-}
\ No newline at end of file
+  );
+}
diff --git a/frontend/src/components/AgGrid/agGridStyles.css b/frontend/src/components/AgGrid/agGridStyles.css
index b595e18c..590046c2 100644
--- a/frontend/src/components/AgGrid/agGridStyles.css
+++ b/frontend/src/components/AgGrid/agGridStyles.css
@@ -11,11 +11,30 @@ body {
   --ag-wrapper-border: none;
   --ag-font-family: var(--font-sans);
 
+  /* Checkbox styling */
+  --ag-checkbox-background-color: hsl(var(--background));
+  --ag-checkbox-border-color: hsl(var(--border));
+  --ag-checkbox-checked-color: hsl(var(--primary));
+  --ag-checkbox-unchecked-color: transparent;
+
   .ag-header {
     border-bottom: 1px solid hsl(var(--border));
     margin-bottom: 0.5rem;
   }
-  .ag-row {
-    cursor: pointer;
+
+  /* Make sure checkboxes are visible */
+  .ag-selection-checkbox,
+  .ag-header-select-all {
+    opacity: 1 !important;
+  }
+
+  .ag-checkbox-input-wrapper {
+    border: 1px solid hsl(var(--border));
+    background-color: hsl(var(--background));
+  }
+
+  .ag-checkbox-input-wrapper.ag-checked {
+    background-color: hsl(var(--primary));
+    border-color: hsl(var(--primary));
   }
 }
diff --git a/frontend/src/components/AgGrid/registerAgGridModules.ts b/frontend/src/components/AgGrid/registerAgGridModules.ts
index da2c5280..6f3f7bc2 100644
--- a/frontend/src/components/AgGrid/registerAgGridModules.ts
+++ b/frontend/src/components/AgGrid/registerAgGridModules.ts
@@ -11,6 +11,7 @@ import {
   DateFilterModule,
   EventApiModule,
   GridStateModule,
+  RowSelectionModule,
 } from 'ag-grid-community';
 
 // Importing necessary modules from ag-grid-community
@@ -27,6 +28,7 @@ import {
   DateFilterModule,
   EventApiModule,
   GridStateModule,
+  RowSelectionModule,
   // The ValidationModule adds helpful console warnings/errors that can help identify bad configuration during development.
   ...(process.env.NODE_ENV !== 'production' ? [ValidationModule] : []),
 ]);
diff --git a/frontend/src/contexts/task-context.tsx b/frontend/src/contexts/task-context.tsx
index f84b0f95..c58e1d19 100644
--- a/frontend/src/contexts/task-context.tsx
+++ b/frontend/src/contexts/task-context.tsx
@@ -57,7 +57,10 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
   const queryClient = useQueryClient();
 
   const refetchSearch = () => {
-    queryClient.invalidateQueries({ queryKey: ["search"] });
+    queryClient.invalidateQueries({
+      queryKey: ["search"],
+      exact: false,
+    });
   };
 
   const fetchTasks = useCallback(async () => {
@@ -71,12 +74,12 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
       const newTasks = data.tasks || [];
 
       // Update tasks and check for status changes in the same state update
-      setTasks((prevTasks) => {
+      setTasks(prevTasks => {
         // Check for newly completed tasks to show toasts
         if (prevTasks.length > 0) {
           newTasks.forEach((newTask: Task) => {
             const oldTask = prevTasks.find(
-              (t) => t.task_id === newTask.task_id,
+              t => t.task_id === newTask.task_id
             );
             if (
               oldTask &&
@@ -92,6 +95,11 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
                 },
               });
               refetchSearch();
+              // Dispatch knowledge updated event for all knowledge-related pages
+              console.log(
+                "Task completed successfully, dispatching knowledgeUpdated event"
+              );
+              window.dispatchEvent(new CustomEvent("knowledgeUpdated"));
             } else if (
               oldTask &&
               oldTask.status !== "failed" &&
@@ -130,21 +138,19 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
         const data = await response.json();
         const newTasks = data.tasks || [];
         const foundTask = newTasks.find(
-          (task: Task) => task.task_id === taskId,
+          (task: Task) => task.task_id === taskId
         );
 
         if (foundTask) {
           // Task found! Update the tasks state
-          setTasks((prevTasks) => {
+          setTasks(prevTasks => {
             // Check if task is already in the list
-            const exists = prevTasks.some((t) => t.task_id === taskId);
+            const exists = prevTasks.some(t => t.task_id === taskId);
             if (!exists) {
               return [...prevTasks, foundTask];
             }
             // Update existing task
-            return prevTasks.map((t) =>
-              t.task_id === taskId ? foundTask : t,
-            );
+            return prevTasks.map(t => (t.task_id === taskId ? foundTask : t));
           });
           return; // Stop polling, we found it
         }
@@ -169,7 +175,7 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
   }, [fetchTasks]);
 
   const removeTask = useCallback((taskId: string) => {
-    setTasks((prev) => prev.filter((task) => task.task_id !== taskId));
+    setTasks(prev => prev.filter(task => task.task_id !== taskId));
   }, []);
 
   const cancelTask = useCallback(
@@ -196,11 +202,11 @@ export function TaskProvider({ children }: { children: React.ReactNode }) {
         });
       }
     },
-    [fetchTasks],
+    [fetchTasks]
   );
 
   const toggleMenu = useCallback(() => {
-    setIsMenuOpen((prev) => !prev);
+    setIsMenuOpen(prev => !prev);
   }, []);
 
   // Periodic polling for task updates
diff --git a/src/api/connectors.py b/src/api/connectors.py
index 7e64af83..4c9eab49 100644
--- a/src/api/connectors.py
+++ b/src/api/connectors.py
@@ -373,3 +373,5 @@ async def connector_token(request: Request, connector_service, session_manager):
     except Exception as e:
         logger.error("Error getting connector token", error=str(e))
         return JSONResponse({"error": str(e)}, status_code=500)
+
+
diff --git a/src/api/documents.py b/src/api/documents.py
new file mode 100644
index 00000000..57a4abb3
--- /dev/null
+++ b/src/api/documents.py
@@ -0,0 +1,59 @@
+from starlette.requests import Request
+from starlette.responses import JSONResponse
+from utils.logging_config import get_logger
+from config.settings import INDEX_NAME
+
+logger = get_logger(__name__)
+
+
+async def delete_documents_by_filename(request: Request, document_service, session_manager):
+    """Delete all documents with a specific filename"""
+    data = await request.json()
+    filename = data.get("filename")
+
+    if not filename:
+        return JSONResponse({"error": "filename is required"}, status_code=400)
+
+    user = request.state.user
+    jwt_token = request.state.jwt_token
+
+    try:
+        # Get user's OpenSearch client
+        opensearch_client = session_manager.get_user_opensearch_client(
+            user.user_id, jwt_token
+        )
+
+        # Delete by query to remove all chunks of this document
+        delete_query = {
+            "query": {
+                "bool": {
+                    "must": [
+                        {"term": {"filename": filename}}
+                    ]
+                }
+            }
+        }
+
+        result = await opensearch_client.delete_by_query(
+            index=INDEX_NAME,
+            body=delete_query,
+            conflicts="proceed"
+        )
+
+        deleted_count = result.get("deleted", 0)
+        logger.info(f"Deleted {deleted_count} chunks for filename {filename}", user_id=user.user_id)
+
+        return JSONResponse({
+            "success": True,
+            "deleted_chunks": deleted_count,
+            "filename": filename,
+            "message": f"All documents with filename '{filename}' deleted successfully"
+        }, status_code=200)
+
+    except Exception as e:
+        logger.error("Error deleting documents by filename", filename=filename, error=str(e))
+        error_str = str(e)
+        if "AuthenticationException" in error_str:
+            return JSONResponse({"error": "Access denied: insufficient permissions"}, status_code=403)
+        else:
+            return JSONResponse({"error": str(e)}, status_code=500)
diff --git a/src/config/settings.py b/src/config/settings.py
index 9a580c76..17000f4b 100644
--- a/src/config/settings.py
+++ b/src/config/settings.py
@@ -398,4 +398,4 @@ class AppClients:
 
 
 # Global clients instance
-clients = AppClients()
+clients = AppClients()
\ No newline at end of file
diff --git a/src/main.py b/src/main.py
index 1b50dfce..c97bb22a 100644
--- a/src/main.py
+++ b/src/main.py
@@ -30,6 +30,7 @@ from api import (
     auth,
     chat,
     connectors,
+    documents,
     flows,
     knowledge_filter,
     langflow_files,
@@ -877,6 +878,18 @@ async def create_app():
                 ),
                 methods=["POST", "GET"],
             ),
+            # Document endpoints
+            Route(
+                "/documents/delete-by-filename",
+                require_auth(services["session_manager"])(
+                    partial(
+                        documents.delete_documents_by_filename,
+                        document_service=services["document_service"],
+                        session_manager=services["session_manager"],
+                    )
+                ),
+                methods=["POST"],
+            ),
             # OIDC endpoints
             Route(
                 "/.well-known/openid-configuration",
diff --git a/src/services/document_service.py b/src/services/document_service.py
index 70a70942..949515e3 100644
--- a/src/services/document_service.py
+++ b/src/services/document_service.py
@@ -435,3 +435,4 @@ class DocumentService:
 
         if upload_task.processed_files >= upload_task.total_files:
             upload_task.status = TaskStatus.COMPLETED
+
diff --git a/src/session_manager.py b/src/session_manager.py
index 6eef5c70..6b2023d5 100644
--- a/src/session_manager.py
+++ b/src/session_manager.py
@@ -232,4 +232,4 @@ class SessionManager:
     def _create_anonymous_jwt(self) -> str:
         """Create JWT token for anonymous user in no-auth mode"""
         anonymous_user = AnonymousUser()
-        return self.create_jwt_token(anonymous_user)
+        return self.create_jwt_token(anonymous_user)
\ No newline at end of file
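
Note (not part of the patch): the useDeleteDocument hook imported by knowledge-actions-dropdown.tsx is referenced but its implementation is not included above. A minimal sketch of what such a hook could look like follows, assuming a Next.js API route at /api/documents/delete-by-filename that proxies the new backend POST /documents/delete-by-filename endpoint, and assuming the same ["search"] query key invalidated elsewhere in this patch; treat the route path, response type, and invalidation as illustrative assumptions rather than the actual implementation.

// Hypothetical sketch of app/api/mutations/useDeleteDocument.ts
import { useMutation, useQueryClient } from "@tanstack/react-query";

interface DeleteDocumentInput {
  filename: string;
}

// Shape mirrors the JSON returned by src/api/documents.py
interface DeleteDocumentResult {
  success: boolean;
  deleted_chunks: number;
  filename: string;
  message: string;
}

export function useDeleteDocument() {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: async ({ filename }: DeleteDocumentInput) => {
      // Assumed frontend proxy route that forwards to the backend endpoint
      // registered in src/main.py as POST /documents/delete-by-filename.
      const res = await fetch("/api/documents/delete-by-filename", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ filename }),
      });
      const json = await res.json();
      if (!res.ok) {
        throw new Error(json?.error || "Failed to delete document");
      }
      return json as DeleteDocumentResult;
    },
    onSuccess: () => {
      // Keep knowledge views in sync, mirroring the invalidation in task-context.tsx.
      queryClient.invalidateQueries({ queryKey: ["search"], exact: false });
    },
  });
}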