fix(export all rows): use cursor pagination if possible (#40536)
Exporting all rows (in CSV, SQL, or JSON format) currently uses offset pagination, which can cause performance problems if the table is large. There is also a correctness problem if the table is being actively updated while the export happens, because the relative row offsets can shift between queries.

Now that composite filters are available in postgres-meta, we can switch to cursor pagination on the primary key (or any non-null unique key) wherever possible. Where this is not possible, the user is shown a confirmation dialog explaining the possible performance impact.

---------

Co-authored-by: Ali Waseem <waseema393@gmail.com>
Co-authored-by: Joshen Lim <joshenlimek@gmail.com>
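For context, a minimal sketch contrasting the two pagination strategies (not the dashboard's actual implementation, which builds these queries through pg-meta's query builder; the `public.orders` table and its `id` primary key are hypothetical):

```typescript
// Offset pagination: the database must scan and discard `offset` rows on
// every page, so page N costs O(N * pageSize). Concurrent inserts/deletes
// also shift row positions between queries, so an export can duplicate or
// skip rows.
const offsetPageSql = (pageSize: number, page: number) =>
  `select * from public.orders order by id limit ${pageSize} offset ${page * pageSize}`

// Cursor (keyset) pagination: each page resumes strictly after the last row
// of the previous page by filtering on a non-null unique key, so every page
// is a cheap index range scan and the sequence is stable under concurrent
// writes. For a composite key, Postgres row comparison does the same job:
//   where (region, id) > ($1, $2) order by region, id limit $3
const keysetPageSql = (pageSize: number, lastId: number | null) =>
  lastId === null
    ? `select * from public.orders order by id limit ${pageSize}`
    : `select * from public.orders where id > ${lastId} order by id limit ${pageSize}`
```

The cursor comparison is only total and deterministic over a non-null unique key (or a set of non-null columns covered by a unique index), which is why the change below falls back to a confirmation dialog when no such key exists.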
@@ -5,13 +5,13 @@ import { CalculatedColumn, CellKeyboardEvent } from 'react-data-grid'
import type { Filter, SavedState } from 'components/grid/types'
import { Entity, isTableLike } from 'data/table-editor/table-editor-types'
+import { useSearchParams } from 'next/navigation'
+import { parseAsBoolean, parseAsNativeArrayOf, parseAsString, useQueryStates } from 'nuqs'
import { copyToClipboard } from 'ui'
import { FilterOperatorOptions } from './components/header/filter/Filter.constants'
import { STORAGE_KEY_PREFIX } from './constants'
import type { Sort, SupaColumn, SupaTable } from './types'
import { formatClipboardValue } from './utils/common'
-import { parseAsNativeArrayOf, parseAsBoolean, parseAsString, useQueryStates } from 'nuqs'
-import { useSearchParams } from 'next/navigation'

export const LOAD_TAB_FROM_CACHE_PARAM = 'loadFromCache'

@@ -67,6 +67,7 @@ export function filtersToUrlParams(filters: Filter[]) {
export function parseSupaTable(table: Entity): SupaTable {
  const columns = table.columns
  const primaryKeys = isTableLike(table) ? table.primary_keys : []
+  const uniqueIndexes = isTableLike(table) ? table.unique_indexes : []
  const relationships = isTableLike(table) ? table.relationships : []

  const supaColumns: SupaColumn[] = columns.map((column) => {
@@ -116,8 +117,14 @@ export function parseSupaTable(table: Entity): SupaTable {
    name: table.name,
    comment: table.comment,
    schema: table.schema,
    type: table.entity_type,
    columns: supaColumns,
    estimateRowCount: isTableLike(table) ? table.live_rows_estimate : 0,
+    primaryKey: primaryKeys?.length > 0 ? primaryKeys.map((col) => col.name) : undefined,
+    uniqueIndexes:
+      !!uniqueIndexes && uniqueIndexes.length > 0
+        ? uniqueIndexes.map(({ columns }) => columns)
+        : undefined,
  }
}

@@ -68,9 +68,9 @@ export const ExportDialog = ({
  const queryChains = !table ? undefined : getAllTableRowsSql({ table, sorts, filters })
  const query = !!queryChains
    ? ignoreRoleImpersonation
-      ? queryChains.toSql()
+      ? queryChains.sql.toSql()
      : wrapWithRoleImpersonation(
-          queryChains.toSql(),
+          queryChains.sql.toSql(),
          roleImpersonationState as RoleImpersonationState
        )
    : ''

@@ -1,5 +1,4 @@
import { PermissionAction } from '@supabase/shared-types/out/constants'
-import saveAs from 'file-saver'
import { ArrowUp, ChevronDown, FileText, Trash } from 'lucide-react'
import Link from 'next/link'
import { ReactNode, useState } from 'react'
@@ -10,9 +9,14 @@ import { useTableFilter } from 'components/grid/hooks/useTableFilter'
import { useTableSort } from 'components/grid/hooks/useTableSort'
import { GridHeaderActions } from 'components/interfaces/TableGridEditor/GridHeaderActions'
import { formatTableRowsToSQL } from 'components/interfaces/TableGridEditor/TableEntity.utils'
+import {
+  useExportAllRowsAsCsv,
+  useExportAllRowsAsJson,
+  useExportAllRowsAsSql,
+} from 'components/layouts/TableEditorLayout/ExportAllRows'
import { ButtonTooltip } from 'components/ui/ButtonTooltip'
import { useTableRowsCountQuery } from 'data/table-rows/table-rows-count-query'
-import { fetchAllTableRows, useTableRowsQuery } from 'data/table-rows/table-rows-query'
+import { useTableRowsQuery } from 'data/table-rows/table-rows-query'
import { useSendEventMutation } from 'data/telemetry/send-event-mutation'
import { useAsyncCheckPermissions } from 'hooks/misc/useCheckPermissions'
import { useSelectedOrganizationQuery } from 'hooks/misc/useSelectedOrganization'
@@ -34,12 +38,12 @@ import {
  DropdownMenuItem,
  DropdownMenuTrigger,
  Separator,
  SonnerProgress,
} from 'ui'
import { ExportDialog } from './ExportDialog'
import { FilterPopover } from './filter/FilterPopover'
import { formatRowsForCSV } from './Header.utils'
import { SortPopover } from './sort/SortPopover'

// [Joshen] CSV exports require this guard as a fail-safe if the table is
// just too large for a browser to keep all the rows in memory before
// exporting. Either that or export as multiple CSV sheets with max n rows each
@@ -312,170 +316,82 @@ const RowHeader = ({ tableQueriesEnabled = true }: RowHeaderProps) => {
    toast.success('Copied rows to clipboard')
  }

+  const exportParams = snap.allRowsSelected
+    ? ({ type: 'fetch_all', filters, sorts } as const)
+    : ({
+        type: 'provided_rows',
+        table: snap.table,
+        rows: allRows.filter((x) => snap.selectedRows.has(x.idx)),
+      } as const)
+
+  const { exportCsv, confirmationModal: exportCsvConfirmationModal } = useExportAllRowsAsCsv(
+    project
+      ? {
+          enabled: true,
+          projectRef: project.ref,
+          connectionString: project?.connectionString ?? null,
+          entity: snap.table,
+          totalRows,
+          ...exportParams,
+        }
+      : { enabled: false }
+  )
  const onRowsExportCSV = async () => {
    setIsExporting(true)

    if (snap.allRowsSelected && totalRows > MAX_EXPORT_ROW_COUNT) {
      toast.error(
        <div className="prose text-sm text-foreground">{MAX_EXPORT_ROW_COUNT_MESSAGE}</div>
      )
      return setIsExporting(false)
    }

    if (!project) {
      toast.error('Project is required')
      return setIsExporting(false)
    }

-    const toastId = snap.allRowsSelected
-      ? toast(
-          <SonnerProgress progress={0} message={`Exporting all rows from ${snap.table.name}`} />,
-          {
-            closeButton: false,
-            duration: Infinity,
-          }
-        )
-      : toast.loading(
-          `Exporting ${snap.selectedRows.size} row${snap.selectedRows.size > 1 ? 's' : ''} from ${snap.table.name}`
-        )
+    exportCsv()

-    const rows = snap.allRowsSelected
-      ? await fetchAllTableRows({
-          projectRef: project.ref,
-          connectionString: project.connectionString,
-          table: snap.table,
-          filters,
-          sorts,
-          roleImpersonationState: roleImpersonationState as RoleImpersonationState,
-          progressCallback: (value: number) => {
-            const progress = Math.min((value / totalRows) * 100, 100)
-            toast(
-              <SonnerProgress
-                progress={progress}
-                message={`Exporting all rows from ${snap.table.name}`}
-              />,
-              {
-                id: toastId,
-                closeButton: false,
-                duration: Infinity,
-              }
-            )
-          },
-        })
-      : allRows.filter((x) => snap.selectedRows.has(x.idx))
-
-    if (rows.length === 0) {
-      toast.dismiss(toastId)
-      toast.error('Export failed, please try exporting again')
-      setIsExporting(false)
-      return
-    }
-
-    const csv = formatRowsForCSV({
-      rows,
-      columns: snap.table!.columns.map((column) => column.name),
-    })
-    const csvData = new Blob([csv], { type: 'text/csv;charset=utf-8;' })
-    toast.success(`Downloaded ${rows.length} rows as CSV`, {
-      id: toastId,
-      closeButton: true,
-      duration: 4000,
-    })
-    saveAs(csvData, `${snap.table!.name}_rows.csv`)
    setIsExporting(false)
  }

+  const { exportSql, confirmationModal: exportSqlConfirmationModal } = useExportAllRowsAsSql(
+    project
+      ? {
+          enabled: true,
+          projectRef: project.ref,
+          connectionString: project?.connectionString ?? null,
+          entity: snap.table,
+          ...exportParams,
+        }
+      : { enabled: false }
+  )
  const onRowsExportSQL = async () => {
    setIsExporting(true)

    if (snap.allRowsSelected && totalRows > MAX_EXPORT_ROW_COUNT) {
      toast.error(
        <div className="prose text-sm text-foreground">{MAX_EXPORT_ROW_COUNT_MESSAGE}</div>
      )
      return setIsExporting(false)
    }

    if (!project) {
      toast.error('Project is required')
      return setIsExporting(false)
    }

    if (snap.allRowsSelected && totalRows === 0) {
      toast.error('Export failed, please try exporting again')
      return setIsExporting(false)
    }
+    exportSql()

-    const toastId = snap.allRowsSelected
-      ? toast(
-          <SonnerProgress progress={0} message={`Exporting all rows from ${snap.table.name}`} />,
-          {
-            closeButton: false,
-            duration: Infinity,
-          }
-        )
-      : toast.loading(
-          `Exporting ${snap.selectedRows.size} row${snap.selectedRows.size > 1 ? 's' : ''} from ${snap.table.name}`
-        )
-
-    const rows = snap.allRowsSelected
-      ? await fetchAllTableRows({
-          projectRef: project.ref,
-          connectionString: project.connectionString,
-          table: snap.table,
-          filters,
-          sorts,
-          roleImpersonationState: roleImpersonationState as RoleImpersonationState,
-          progressCallback: (value: number) => {
-            const progress = Math.min((value / totalRows) * 100, 100)
-            toast(
-              <SonnerProgress
-                progress={progress}
-                message={`Exporting all rows from ${snap.table.name}`}
-              />,
-              {
-                id: toastId,
-                closeButton: false,
-                duration: Infinity,
-              }
-            )
-          },
-        })
-      : allRows.filter((x) => snap.selectedRows.has(x.idx))
-
-    if (rows.length === 0) {
-      toast.error('Export failed, please try exporting again')
-      setIsExporting(false)
-      return
-    }
-
-    const sqlStatements = formatTableRowsToSQL(snap.table, rows)
-    const sqlData = new Blob([sqlStatements], { type: 'text/sql;charset=utf-8;' })
-    toast.success(`Downloading ${rows.length} rows as SQL`, {
-      id: toastId,
-      closeButton: true,
-      duration: 4000,
-    })
-    saveAs(sqlData, `${snap.table!.name}_rows.sql`)
    setIsExporting(false)
  }

+  const { exportJson, confirmationModal: exportJsonConfirmationModal } = useExportAllRowsAsJson(
+    project
+      ? {
+          enabled: true,
+          projectRef: project.ref,
+          connectionString: project?.connectionString ?? null,
+          entity: snap.table,
+          ...exportParams,
+        }
+      : { enabled: false }
+  )
  const onRowsExportJSON = async () => {
    if (!project) {
      return toast.error('Project is required')
    }

    setIsExporting(true)
-    const toastId = toast.loading(
-      `Exporting ${snap.selectedRows.size} row${snap.selectedRows.size > 1 ? 's' : ''} from ${snap.table.name}`
-    )
-    const rows = allRows.filter((x) => snap.selectedRows.has(x.idx))
-    const sqlData = new Blob([JSON.stringify(rows)], { type: 'text/sql;charset=utf-8;' })
-    toast.success(`Downloading ${rows.length} rows as JSON`, {
-      id: toastId,
-      closeButton: true,
-      duration: 4000,
-    })
-    saveAs(sqlData, `${snap.table!.name}_rows.json`)

+    exportJson()

    setIsExporting(false)
  }
@@ -600,6 +516,10 @@ const RowHeader = ({ tableQueriesEnabled = true }: RowHeaderProps) => {
        open={showExportModal}
        onOpenChange={() => setShowExportModal(false)}
      />

+      {exportCsvConfirmationModal}
+      {exportSqlConfirmationModal}
+      {exportJsonConfirmationModal}
    </>
  )
}

@@ -1,5 +1,6 @@
-import { GridForeignKey } from './base'
+import type { ENTITY_TYPE } from 'data/entity-types/entity-type-constants'
import type { Dictionary } from 'types'
+import { GridForeignKey } from './base'

export interface SupaColumn {
  readonly dataType: string
@@ -20,11 +21,14 @@ export interface SupaColumn {

export interface SupaTable {
  readonly id: number
+  readonly type: ENTITY_TYPE
  readonly columns: SupaColumn[]
  readonly name: string
  readonly schema?: string | null
  readonly comment?: string | null
  readonly estimateRowCount: number
+  readonly primaryKey?: string[]
+  readonly uniqueIndexes?: string[][]
}

export interface SupaRow extends Dictionary<any> {

@@ -1,10 +1,13 @@
-import { describe, it, expect } from 'vitest'
+import type { SupaTable } from 'components/grid/types'
+import { ENTITY_TYPE } from 'data/entity-types/entity-type-constants'
+import { describe, expect, it } from 'vitest'
import { formatTableRowsToSQL } from './TableEntity.utils'

describe('TableEntity.utils: formatTableRowsToSQL', () => {
  it('should format rows into a single SQL INSERT statement', () => {
-    const table = {
+    const table: SupaTable = {
      id: 1,
+      type: ENTITY_TYPE.TABLE,
      columns: [
        { name: 'id', dataType: 'bigint', format: 'int8', position: 0 },
        { name: 'name', dataType: 'text', format: 'text', position: 1 },
@@ -26,8 +29,9 @@ describe('TableEntity.utils: formatTableRowsToSQL', () => {
  })

  it('should not stringify null values', () => {
-    const table = {
+    const table: SupaTable = {
      id: 1,
+      type: ENTITY_TYPE.TABLE,
      columns: [
        { name: 'id', dataType: 'bigint', format: 'int8', position: 0 },
        { name: 'name', dataType: 'text', format: 'text', position: 1 },
@@ -49,8 +53,9 @@ describe('TableEntity.utils: formatTableRowsToSQL', () => {
  })

  it('should handle PG JSON and array columns', () => {
-    const table = {
+    const table: SupaTable = {
      id: 1,
+      type: ENTITY_TYPE.TABLE,
      columns: [
        { name: 'id', dataType: 'bigint', format: 'int8', position: 0 },
        { name: 'name', dataType: 'text', format: 'text', position: 1 },
@@ -79,13 +84,14 @@ describe('TableEntity.utils: formatTableRowsToSQL', () => {
      },
    ]
    const result = formatTableRowsToSQL(table, rows)
-    const expected = `INSERT INTO "public"."demo" ("id", "name", "tags", "metadata") VALUES ('2', 'Person 1', '{"tag-a","tag-c"}', '{"version": 1}'), ('3', 'ONeil', '{"tag-a"}', '{"version": 1, "name": "O''Neil"}');`
+    const expected = `INSERT INTO "public"."demo" ("id", "name", "tags", "metadata") VALUES ('2', 'Person 1', ARRAY["tag-a","tag-c"], '{"version": 1}'), ('3', 'ONeil', ARRAY["tag-a"], '{"version": 1, "name": "O''Neil"}');`
    expect(result).toBe(expected)
  })

  it('should return an empty string for empty rows', () => {
-    const table = {
+    const table: SupaTable = {
      id: 1,
+      type: ENTITY_TYPE.TABLE,
      columns: [
        { name: 'id', dataType: 'bigint', format: 'int8', position: 0 },
        { name: 'name', dataType: 'text', format: 'text', position: 1 },
@@ -100,8 +106,9 @@ describe('TableEntity.utils: formatTableRowsToSQL', () => {
  })

  it('should remove the idx property', () => {
-    const table = {
+    const table: SupaTable = {
      id: 1,
+      type: ENTITY_TYPE.TABLE,
      columns: [
        { name: 'id', dataType: 'bigint', format: 'int8', position: 0 },
        { name: 'name', dataType: 'text', format: 'text', position: 1 },

@@ -45,7 +45,8 @@ export const formatTableRowsToSQL = (table: SupaTable, rows: any[]) => {
      if (val === null) {
        return 'null'
      } else if (dataType === 'ARRAY') {
-        return `'${JSON.stringify(val).replace('[', '{').replace(/.$/, '}')}'`
+        const array = Array.isArray(val) ? val : JSON.parse(val as string)
+        return `${formatArrayForSql(array as unknown[])}`
      } else if (format?.includes('json')) {
        return `${JSON.stringify(val).replace(/\\"/g, '"').replace(/'/g, "''").replace('"', "'").replace(/.$/, "'")}`
      } else if (
@@ -65,3 +66,60 @@ export const formatTableRowsToSQL = (table: SupaTable, rows: any[]) => {

  return `INSERT INTO "${table.schema}"."${table.name}" (${columns}) VALUES ${valuesSets};`
}

+/**
+ * Generate a random tag for dollar-quoting of SQL strings
+ *
+ * @return A random tag in the format `$tag$`
+ */
+const generateRandomTag = (): `$${string}$` => {
+  const inner = Math.random().toString(36).substring(2, 15)
+  // Ensure the tag starts with a character not a digit to avoid conflicts with
+  // Postgres parameter syntax
+  return `$x${inner}$`
+}
+
+/**
+ * Wrap a string in dollar-quote tags, ensuring the tag does not appear in the string
+ *
+ * @throws Error if unable to generate a unique tag after multiple attempts
+ */
+const safeDollarQuote = (str: string): string => {
+  let tag = generateRandomTag()
+
+  let attempts = 0
+  const maxAttempts = 100
+  while (str.includes(tag)) {
+    if (attempts >= maxAttempts) {
+      throw new Error('Unable to generate a unique dollar-quote tag after multiple attempts.')
+    }
+
+    attempts++
+    tag = generateRandomTag()
+  }
+  return `${tag}${str}${tag}`
+}
+
+const formatArrayForSql = (arr: unknown[]): string => {
+  let result = 'ARRAY['
+
+  arr.forEach((item, index) => {
+    if (Array.isArray(item)) {
+      result += formatArrayForSql(item)
+    } else if (typeof item === 'string') {
+      result += `"${item.replace(/"/g, '""')}"`
+    } else if (!!item && typeof item === 'object') {
+      result += `${safeDollarQuote(JSON.stringify(item))}::json`
+    } else {
+      result += `${item}`
+    }
+
+    if (index < arr.length - 1) {
+      result += ','
+    }
+  })
+
+  result += ']'
+
+  return result
+}

@@ -1,19 +1,12 @@
-import saveAs from 'file-saver'
import { Copy, Download, Edit, Lock, MoreVertical, Trash } from 'lucide-react'
import Link from 'next/link'
-import Papa from 'papaparse'
-import { type CSSProperties } from 'react'
import { toast } from 'sonner'

-import { IS_PLATFORM, useParams } from 'common'
-import {
-  MAX_EXPORT_ROW_COUNT,
-  MAX_EXPORT_ROW_COUNT_MESSAGE,
-} from 'components/grid/components/header/Header'
-import { LOAD_TAB_FROM_CACHE_PARAM, parseSupaTable } from 'components/grid/SupabaseGrid.utils'
-import {
-  formatTableRowsToSQL,
-  getEntityLintDetails,
-} from 'components/interfaces/TableGridEditor/TableEntity.utils'
+import { useParams } from 'common'
+import { useTableFilter } from 'components/grid/hooks/useTableFilter'
+import { LOAD_TAB_FROM_CACHE_PARAM } from 'components/grid/SupabaseGrid.utils'
+import { getEntityLintDetails } from 'components/interfaces/TableGridEditor/TableEntity.utils'
import { EntityTypeIcon } from 'components/ui/EntityTypeIcon'
import { InlineLink } from 'components/ui/InlineLink'
import { getTableDefinition } from 'data/database/table-definition-query'
@@ -21,13 +14,14 @@ import { ENTITY_TYPE } from 'data/entity-types/entity-type-constants'
import { Entity } from 'data/entity-types/entity-types-infinite-query'
import { useProjectLintsQuery } from 'data/lint/lint-query'
import { EditorTablePageLink } from 'data/prefetchers/project.$ref.editor.$id'
import { getTableEditor } from 'data/table-editor/table-editor-query'
import { isTableLike } from 'data/table-editor/table-editor-types'
-import { fetchAllTableRows } from 'data/table-rows/table-rows-query'
+import { useTableRowsCountQuery } from 'data/table-rows/table-rows-count-query'
import { useQuerySchemaState } from 'hooks/misc/useSchemaQueryState'
import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject'
import { formatSql } from 'lib/formatSql'
+import type { CSSProperties } from 'react'
+import {
+  useRoleImpersonationStateSnapshot,
+  type RoleImpersonationState,
+} from 'state/role-impersonation-state'
import { useTableEditorStateSnapshot } from 'state/table-editor'
import { createTabId, useTabsStateSnapshot } from 'state/tabs'
import {
@@ -48,6 +42,7 @@ import {
  TooltipTrigger,
  TreeViewItemVariant,
} from 'ui'
+import { useExportAllRowsAsCsv, useExportAllRowsAsSql } from './ExportAllRows'

export interface EntityListItemProps {
  id: number | string
@@ -85,6 +80,23 @@ const EntityListItem = ({
  const isActive = Number(id) === entity.id
  const canEdit = isActive && !isLocked

+  const { filters } = useTableFilter()
+  const roleImpersonationState = useRoleImpersonationStateSnapshot()
+  const { data: countData } = useTableRowsCountQuery(
+    {
+      projectRef,
+      connectionString: project?.connectionString,
+      tableId: entity.id,
+      filters,
+      enforceExactCount: false,
+      roleImpersonationState: roleImpersonationState as RoleImpersonationState,
+    },
+    {
+      enabled: isTableLikeEntityListItem(entity) && isActive,
+    }
+  )
+  const rowCount = countData?.count
+
  const { data: lints = [] } = useProjectLintsQuery({
    projectRef: project?.ref,
  })
@@ -132,112 +144,23 @@ const EntityListItem = ({
    return text.charAt(0).toUpperCase() + text.slice(1)
  }

-  const exportTableAsCSV = async () => {
-    if (IS_PLATFORM && !project?.connectionString) {
-      return console.error('Connection string is required')
-    }
-    const toastId = toast.loading(`Exporting ${entity.name} as CSV...`)
+  const { exportCsv, confirmationModal: exportCsvConfirmationModal } = useExportAllRowsAsCsv({
+    enabled: true,
+    projectRef,
+    connectionString: project?.connectionString ?? null,
+    entity,
+    type: 'fetch_all',
+    totalRows: rowCount,
+  })

-    try {
-      const table = await getTableEditor({
-        id: entity.id,
-        projectRef,
-        connectionString: project?.connectionString,
-      })
-      if (isTableLike(table) && table.live_rows_estimate > MAX_EXPORT_ROW_COUNT) {
-        return toast.error(
-          <div className="text-foreground prose text-sm">{MAX_EXPORT_ROW_COUNT_MESSAGE}</div>,
-          { id: toastId }
-        )
-      }
-
-      const supaTable = table && parseSupaTable(table)
-
-      if (!supaTable) {
-        return toast.error(`Failed to export table: ${entity.name}`, { id: toastId })
-      }
-
-      const rows = await fetchAllTableRows({
-        projectRef,
-        connectionString: project?.connectionString,
-        table: supaTable,
-      })
-      const formattedRows = rows.map((row) => {
-        const formattedRow = row
-        Object.keys(row).map((column) => {
-          if (typeof row[column] === 'object' && row[column] !== null)
-            formattedRow[column] = JSON.stringify(formattedRow[column])
-        })
-        return formattedRow
-      })
-
-      if (formattedRows.length > 0) {
-        const csv = Papa.unparse(formattedRows, {
-          columns: supaTable.columns.map((column) => column.name),
-        })
-        const csvData = new Blob([csv], { type: 'text/csv;charset=utf-8;' })
-        saveAs(csvData, `${entity!.name}_rows.csv`)
-      }
-
-      toast.success(`Successfully exported ${entity.name} as CSV`, { id: toastId })
-    } catch (error: any) {
-      toast.error(`Failed to export table: ${error.message}`, { id: toastId })
-    }
-  }
-
-  const exportTableAsSQL = async () => {
-    if (IS_PLATFORM && !project?.connectionString) {
-      return console.error('Connection string is required')
-    }
-    const toastId = toast.loading(`Exporting ${entity.name} as SQL...`)
-
-    try {
-      const table = await getTableEditor({
-        id: entity.id,
-        projectRef,
-        connectionString: project?.connectionString,
-      })
-
-      if (isTableLike(table) && table.live_rows_estimate > MAX_EXPORT_ROW_COUNT) {
-        return toast.error(
-          <div className="text-foreground prose text-sm">{MAX_EXPORT_ROW_COUNT_MESSAGE}</div>,
-          { id: toastId }
-        )
-      }
-
-      const supaTable = table && parseSupaTable(table)
-
-      if (!supaTable) {
-        return toast.error(`Failed to export table: ${entity.name}`, { id: toastId })
-      }
-
-      const rows = await fetchAllTableRows({
-        projectRef,
-        connectionString: project?.connectionString,
-        table: supaTable,
-      })
-
-      const formattedRows = rows.map((row) => {
-        const formattedRow = { ...row }
-        Object.keys(row).forEach((column) => {
-          if (typeof row[column] === 'object' && row[column] !== null) {
-            formattedRow[column] = JSON.stringify(row[column])
-          }
-        })
-        return formattedRow
-      })
-
-      if (formattedRows.length > 0) {
-        const sqlStatements = formatTableRowsToSQL(supaTable, formattedRows)
-        const sqlData = new Blob([sqlStatements], { type: 'text/sql;charset=utf-8;' })
-        saveAs(sqlData, `${entity!.name}_rows.sql`)
-      }
-
-      toast.success(`Successfully exported ${entity.name} as SQL`, { id: toastId })
-    } catch (error: any) {
-      toast.error(`Failed to export table: ${error.message}`, { id: toastId })
-    }
-  }
+  const { exportSql, confirmationModal: exportSqlConfirmationModal } = useExportAllRowsAsSql({
+    enabled: true,
+    projectRef,
+    connectionString: project?.connectionString ?? null,
+    entity,
+    type: 'fetch_all',
+    totalRows: rowCount,
+  })

  return (
    <EditorTablePageLink
@@ -400,7 +323,7 @@ const EntityListItem = ({
          className="space-x-2"
          onClick={(e) => {
            e.stopPropagation()
-            exportTableAsCSV()
+            exportCsv()
          }}
        >
          <span>Export table as CSV</span>
@@ -410,7 +333,7 @@ const EntityListItem = ({
          className="gap-x-2"
          onClick={(e) => {
            e.stopPropagation()
-            exportTableAsSQL()
+            exportSql()
          }}
        >
          <span>Export table as SQL</span>
@@ -446,6 +369,8 @@ const EntityListItem = ({
        </DropdownMenu>
      )}
      </>
+      {exportCsvConfirmationModal}
+      {exportSqlConfirmationModal}
    </EditorTablePageLink>
  )
}

@@ -0,0 +1,93 @@
const normalizeCauseAsError = (cause: unknown): Error | undefined => {
  if (!cause) return undefined

  if (cause instanceof Error) {
    return cause
  }

  if (typeof cause === 'object' && 'message' in cause) {
    return new Error(String(cause.message))
  }

  return new Error(String(cause))
}

export class ExportAllRowsErrorFamily extends Error {
  cause?: Error

  constructor(message: string, options: { cause?: unknown } = {}) {
    super(message, options)
  }
}

export class NoConnectionStringError extends ExportAllRowsErrorFamily {
  constructor() {
    super('No connection string provided for database connection.')
    this.name = 'NoConnectionStringError'
  }
}

export class TableDetailsFetchError extends ExportAllRowsErrorFamily {
  constructor(tableName: string, _cause?: unknown) {
    const cause = normalizeCauseAsError(_cause)
    super(`Failed to fetch table details from the database for table ${tableName}.`, { cause })
    this.name = 'TableDetailsFetchError'
  }
}

export class NoTableError extends ExportAllRowsErrorFamily {
  constructor(tableName: string) {
    super(`The specified table "${tableName}" does not exist in the database.`)
    this.name = 'NoTableError'
  }
}

export class NoRowsToExportError extends ExportAllRowsErrorFamily {
  constructor(tableName: string) {
    super(`There are no rows to export from the table "${tableName}".`)
    this.name = 'NoRowsToExportError'
  }
}

export class TableTooLargeError extends ExportAllRowsErrorFamily {
  constructor(tableName: string, rowCount: number, maxAllowed: number) {
    super(
      `The table "${tableName}" has ${rowCount} rows, which exceeds the maximum allowed limit of ${maxAllowed} rows for export.`
    )
    this.name = 'TableTooLargeError'
  }
}

export class FetchRowsError extends ExportAllRowsErrorFamily {
  constructor(tableName: string, _cause?: unknown) {
    const cause = normalizeCauseAsError(_cause)
    super(`An error occurred while fetching rows from the table "${tableName}".`, { cause })
    this.name = 'FetchRowsError'
  }
}

export class OutputConversionError extends ExportAllRowsErrorFamily {
  constructor(_cause?: unknown) {
    const cause = normalizeCauseAsError(_cause)
    super('Failed to convert the fetched rows into the desired output format.', {
      cause,
    })
    this.name = 'OutputConversionError'
  }
}

export class BlobCreationError extends ExportAllRowsErrorFamily {
  constructor(_cause?: unknown) {
    const cause = normalizeCauseAsError(_cause)
    super('An error occurred while creating a Blob for the exported data.', { cause })
    this.name = 'BlobCreationError'
  }
}

export class DownloadSaveError extends ExportAllRowsErrorFamily {
  constructor(_cause?: unknown) {
    const cause = normalizeCauseAsError(_cause)
    super('An error occurred while saving the exported data to a file.', { cause })
    this.name = 'DownloadSaveError'
  }
}
@@ -0,0 +1,104 @@
import { useCallback, useRef, type ReactNode } from 'react'
import { toast } from 'sonner'
import { SonnerProgress } from 'ui'

export const useProgressToasts = () => {
  const toastIdsRef = useRef(new Map<number, string | number>())

  const startProgressTracker = useCallback(
    ({
      id,
      name,
      trackPercentage = false,
    }: {
      id: number
      name: string
      trackPercentage?: boolean
    }) => {
      if (toastIdsRef.current.has(id)) return

      if (trackPercentage) {
        toastIdsRef.current.set(
          id,
          toast(<SonnerProgress progress={0} message={`Exporting ${name}...`} />, {
            closeButton: false,
            duration: Infinity,
          })
        )
      } else {
        toastIdsRef.current.set(id, toast.loading(`Exporting ${name}...`))
      }
    },
    []
  )

  const trackPercentageProgress = useCallback(
    ({
      id,
      name,
      value,
      totalRows,
    }: {
      id: number
      name: string
      value: number
      totalRows: number
    }) => {
      const savedToastId = toastIdsRef.current.get(id)

      const progress = Math.min((value / totalRows) * 100, 100)
      const newToastId = toast(
        <SonnerProgress progress={progress} message={`Exporting ${name}...`} />,
        {
          id: savedToastId,
          closeButton: false,
          duration: Infinity,
        }
      )

      if (!savedToastId) toastIdsRef.current.set(id, newToastId)
    },
    []
  )

  const stopTrackerWithError = useCallback(
    (id: number, name: string, customMessage?: ReactNode) => {
      const savedToastId = toastIdsRef.current.get(id)
      if (savedToastId) {
        toast.dismiss(savedToastId)
        toastIdsRef.current.delete(id)
      }

      toast.error(customMessage ?? `There was an error exporting ${name}`)
    },
    []
  )

  const dismissTrackerSilently = useCallback((id: number) => {
    const savedToastId = toastIdsRef.current.get(id)
    if (savedToastId) {
      toast.dismiss(savedToastId)
      toastIdsRef.current.delete(id)
    }
  }, [])

  const markTrackerComplete = useCallback((id: number, totalRows: number) => {
    const savedToastId = toastIdsRef.current.get(id)
    const deleteSavedToastId = () => toastIdsRef.current.delete(id)

    toast.success(`Successfully exported ${totalRows} rows`, {
      id: savedToastId,
      duration: 4000,
      onAutoClose: deleteSavedToastId,
      onDismiss: deleteSavedToastId,
    })
  }, [])

  return {
    startProgressTracker,
    trackPercentageProgress,
    stopTrackerWithError,
    dismissTrackerSilently,
    markTrackerComplete,
  }
}
@@ -0,0 +1,436 @@
import saveAs from 'file-saver'
import Papa from 'papaparse'
import { useCallback, useState, type ReactNode } from 'react'

import { useQueryClient, type QueryClient } from '@tanstack/react-query'
import { IS_PLATFORM } from 'common'
import {
  MAX_EXPORT_ROW_COUNT,
  MAX_EXPORT_ROW_COUNT_MESSAGE,
} from 'components/grid/components/header/Header'
import { parseSupaTable } from 'components/grid/SupabaseGrid.utils'
import type { Filter, Sort, SupaTable } from 'components/grid/types'
import { formatTableRowsToSQL } from 'components/interfaces/TableGridEditor/TableEntity.utils'
import { ENTITY_TYPE } from 'data/entity-types/entity-type-constants'
import type { Entity } from 'data/entity-types/entity-types-infinite-query'
import { tableEditorKeys } from 'data/table-editor/keys'
import { getTableEditor, type TableEditorData } from 'data/table-editor/table-editor-query'
import { isTableLike } from 'data/table-editor/table-editor-types'
import { fetchAllTableRows } from 'data/table-rows/table-rows-query'
import { useStaticEffectEvent } from 'hooks/useStaticEffectEvent'
import type { RoleImpersonationState } from 'lib/role-impersonation'
import { ConfirmationModal } from 'ui-patterns/Dialogs/ConfirmationModal'
import {
  BlobCreationError,
  DownloadSaveError,
  FetchRowsError,
  NoConnectionStringError,
  NoRowsToExportError,
  NoTableError,
  OutputConversionError,
  TableDetailsFetchError,
  TableTooLargeError,
  type ExportAllRowsErrorFamily,
} from './ExportAllRows.errors'
import { useProgressToasts } from './ExportAllRows.progress'

type OutputCallbacks = {
  convertToOutputFormat: (formattedRows: Record<string, unknown>[], table: SupaTable) => string
  convertToBlob: (str: string) => Blob
  save: (blob: Blob, table: SupaTable) => void
}

type FetchAllRowsParams = {
  queryClient: QueryClient
  projectRef: string
  connectionString: string | null
  entity: Pick<Entity, 'id' | 'name' | 'type'>
  bypassConfirmation: boolean
  filters?: Filter[]
  sorts?: Sort[]
  roleImpersonationState?: RoleImpersonationState
  startCallback?: () => void
  progressCallback?: (progress: number) => void
} & OutputCallbacks

type FetchAllRowsReturn =
  | { status: 'require_confirmation'; reason: string }
  | { status: 'error'; error: ExportAllRowsErrorFamily }
  | { status: 'success'; rowsExported: number }

const fetchAllRows = async ({
  queryClient,
  projectRef,
  connectionString,
  entity,
  bypassConfirmation,
  filters,
  sorts,
  roleImpersonationState,
  startCallback,
  progressCallback,
  convertToOutputFormat,
  convertToBlob,
  save,
}: FetchAllRowsParams): Promise<FetchAllRowsReturn> => {
  if (IS_PLATFORM && !connectionString) {
    return { status: 'error', error: new NoConnectionStringError() }
  }

  let table: TableEditorData | undefined
  try {
    table = await queryClient.ensureQueryData({
      // Query is the same even if connectionString changes
      // eslint-disable-next-line @tanstack/query/exhaustive-deps
      queryKey: tableEditorKeys.tableEditor(projectRef, entity.id),
      queryFn: ({ signal }) =>
        getTableEditor({ projectRef, connectionString, id: entity.id }, signal),
    })
  } catch (error: unknown) {
    return { status: 'error', error: new TableDetailsFetchError(entity.name, error) }
  }

  if (!table) {
    return { status: 'error', error: new NoTableError(entity.name) }
  }

  const type = table.entity_type
  if (type === ENTITY_TYPE.VIEW && !bypassConfirmation) {
    return {
      status: 'require_confirmation',
      reason: `Exporting a view may cause consistency issues or performance issues on very large views. If possible, we recommend exporting the underlying table instead.`,
    }
  } else if (type === ENTITY_TYPE.MATERIALIZED_VIEW && !bypassConfirmation) {
    return {
      status: 'require_confirmation',
      reason: `Exporting a materialized view may cause performance issues on very large views. If possible, we recommend exporting the underlying table instead.`,
    }
  } else if (type === ENTITY_TYPE.FOREIGN_TABLE && !bypassConfirmation) {
    return {
      status: 'require_confirmation',
      reason: `Exporting a foreign table may cause consistency issues or performance issues on very large tables.`,
    }
  }

  if (isTableLike(table) && table.live_rows_estimate > MAX_EXPORT_ROW_COUNT) {
    return {
      status: 'error',
      error: new TableTooLargeError(table.name, table.live_rows_estimate, MAX_EXPORT_ROW_COUNT),
    }
  }

  const supaTable = parseSupaTable(table)

  const primaryKey = supaTable.primaryKey
  if (!primaryKey && !bypassConfirmation) {
    return {
      status: 'require_confirmation',
      reason: `This table does not have a primary key defined, which may cause performance issues when exporting very large tables.`,
    }
  }

  startCallback?.()

  let rows: Record<string, unknown>[]
  try {
    rows = await fetchAllTableRows({
      projectRef,
      connectionString,
      table: supaTable,
      filters,
      sorts,
      roleImpersonationState,
      progressCallback,
    })
  } catch (error: unknown) {
    return { status: 'error', error: new FetchRowsError(supaTable.name, error) }
  }

  if (rows.length === 0) {
    return { status: 'error', error: new NoRowsToExportError(entity.name) }
  }
  const formattedRows = formatRowsForExport(rows, supaTable)

  return convertAndDownload(formattedRows, supaTable, {
    convertToOutputFormat,
    convertToBlob,
    save,
  })
}

const formatRowsForExport = (rows: Record<string, unknown>[], table: SupaTable) => {
  return rows.map((row) => {
    const formattedRow = { ...row }
    Object.keys(row).map((column) => {
      if (column === 'idx' && !table.columns.some((col) => col.name === 'idx')) {
        // When we fetch this data from the database, we automatically add an
        // 'idx' column if none exists. We shouldn't export this column since
        // it's not actually part of the user's table.
        delete formattedRow[column]
        return
      }

      if (typeof row[column] === 'object' && row[column] !== null)
        formattedRow[column] = JSON.stringify(formattedRow[column])
    })
    return formattedRow
  })
}

const convertAndDownload = (
  formattedRows: Record<string, unknown>[],
  table: SupaTable,
  callbacks: OutputCallbacks
):
  | { status: 'error'; error: ExportAllRowsErrorFamily }
  | { status: 'success'; rowsExported: number } => {
  let output: string
  try {
    output = callbacks.convertToOutputFormat(formattedRows, table)
  } catch (error: unknown) {
    return { status: 'error', error: new OutputConversionError(error) }
  }
  let data: Blob
  try {
    data = callbacks.convertToBlob(output)
  } catch (error: unknown) {
    return { status: 'error', error: new BlobCreationError(error) }
  }
  try {
    callbacks.save(data, table)
  } catch (error: unknown) {
    return { status: 'error', error: new DownloadSaveError(error) }
  }

  return {
    status: 'success',
    rowsExported: formattedRows.length,
  }
}

type UseExportAllRowsParams =
  | { enabled: false }
  | ({
      enabled: true
      projectRef: string
      connectionString: string | null
      entity: Pick<Entity, 'id' | 'name' | 'type'>
      /**
       * If known, the total number of rows that will be exported.
       * This is used to show progress percentage during export.
       */
      totalRows?: number
    } & (
      | {
          /**
           * Rows need to be fetched from the database.
           */
          type: 'fetch_all'
          filters?: Filter[]
          sorts?: Sort[]
          roleImpersonationState?: RoleImpersonationState
        }
      | {
          /**
           * Rows are already available and provided directly.
           */
          type: 'provided_rows'
          table: SupaTable
          rows: Record<string, unknown>[]
        }
    ))

type UseExportAllRowsReturn = {
  exportInDesiredFormat: () => Promise<void>
  confirmationModal: ReactNode | null
}

export const useExportAllRowsGeneric = (
  params: UseExportAllRowsParams & OutputCallbacks
): UseExportAllRowsReturn => {
  const queryClient = useQueryClient()
  const {
    startProgressTracker,
    trackPercentageProgress,
    stopTrackerWithError,
    dismissTrackerSilently,
    markTrackerComplete,
  } = useProgressToasts()

  const { convertToOutputFormat, convertToBlob, save } = params

  const [confirmationMessage, setConfirmationMessage] = useState<string | null>(null)

  const exportInternal = useStaticEffectEvent(
    async ({ bypassConfirmation }: { bypassConfirmation: boolean }): Promise<void> => {
      if (!params.enabled) return

      const { projectRef, connectionString, entity, totalRows } = params

      const exportResult =
        params.type === 'provided_rows'
          ? convertAndDownload(formatRowsForExport(params.rows, params.table), params.table, {
              convertToOutputFormat,
              convertToBlob,
              save,
            })
          : await fetchAllRows({
              queryClient,
              projectRef: projectRef,
              connectionString: connectionString,
              entity: entity,
              bypassConfirmation,
              filters: params.filters,
              sorts: params.sorts,
              roleImpersonationState: params.roleImpersonationState,
              startCallback: () => {
                startProgressTracker({
                  id: entity.id,
                  name: entity.name,
                  trackPercentage: totalRows !== undefined,
                })
              },
              progressCallback: totalRows
                ? (value: number) =>
                    trackPercentageProgress({
                      id: entity.id,
                      name: entity.name,
                      totalRows: totalRows,
                      value,
                    })
                : undefined,
              convertToOutputFormat,
              convertToBlob,
              save,
            })

      if (exportResult.status === 'error') {
        const error = exportResult.error
        if (error instanceof NoRowsToExportError) {
          return stopTrackerWithError(
            entity.id,
            entity.name,
            `The table ${entity.name} has no rows to export.`
          )
        }
        if (error instanceof TableTooLargeError) {
          return stopTrackerWithError(entity.id, entity.name, MAX_EXPORT_ROW_COUNT_MESSAGE)
        }
        console.error(
          `Export All Rows > Error: %s%s%s`,
          error.message,
          error.cause?.message ? `\n${error.cause.message}` : '',
          error.cause?.stack ? `:\n${error.cause.stack}` : ''
        )
        return stopTrackerWithError(entity.id, entity.name)
      }

      if (exportResult.status === 'require_confirmation') {
        return setConfirmationMessage(exportResult.reason)
      }

      markTrackerComplete(entity.id, exportResult.rowsExported)
    }
  )

  const exportInDesiredFormat = useCallback(
    () => exportInternal({ bypassConfirmation: false }),
    [exportInternal]
  )

  const onConfirmExport = () => {
    exportInternal({
      bypassConfirmation: true,
    })
    setConfirmationMessage(null)
  }
  const onCancelExport = () => {
    if (!params.enabled) return

    dismissTrackerSilently(params.entity.id)
    setConfirmationMessage(null)
  }

  return {
    exportInDesiredFormat,
    confirmationModal: confirmationMessage ? (
      <ConfirmationModal
        title="Confirm to export data"
        visible={true}
        onCancel={onCancelExport}
        onConfirm={onConfirmExport}
        alert={{
          base: { className: '[&>div>div>h5]:font-normal border-x-0 border-t-0 rounded-none mb-0' },
          title: confirmationMessage,
        }}
      />
    ) : null,
  }
}

type UseExportAllRowsAsCsvReturn = {
  exportCsv: () => Promise<void>
  confirmationModal: ReactNode | null
}

export const useExportAllRowsAsCsv = (
  params: UseExportAllRowsParams
): UseExportAllRowsAsCsvReturn => {
  const { exportInDesiredFormat: exportCsv, confirmationModal } = useExportAllRowsGeneric({
    ...params,
    convertToOutputFormat: (formattedRows, table) =>
      Papa.unparse(formattedRows, {
        columns: table.columns.map((col) => col.name),
      }),
    convertToBlob: (csv) => new Blob([csv], { type: 'text/csv;charset=utf-8;' }),
    save: (csvData, table) => saveAs(csvData, `${table.name}_rows.csv`),
  })

  return {
    exportCsv,
    confirmationModal,
  }
}

type UseExportAllRowsAsSqlReturn = {
  exportSql: () => Promise<void>
  confirmationModal: ReactNode | null
}

export const useExportAllRowsAsSql = (
  params: UseExportAllRowsParams
): UseExportAllRowsAsSqlReturn => {
  const { exportInDesiredFormat: exportSql, confirmationModal } = useExportAllRowsGeneric({
    ...params,
    convertToOutputFormat: (formattedRows, table) => formatTableRowsToSQL(table, formattedRows),
    convertToBlob: (sqlStatements) =>
      new Blob([sqlStatements], { type: 'text/sql;charset=utf-8;' }),
    save: (sqlData, table) => saveAs(sqlData, `${table.name}_rows.sql`),
  })

  return {
    exportSql,
    confirmationModal,
  }
}

type UseExportAllRowsAsJsonReturn = {
  exportJson: () => Promise<void>
  confirmationModal: ReactNode | null
}

export const useExportAllRowsAsJson = (
  params: UseExportAllRowsParams
): UseExportAllRowsAsJsonReturn => {
  const { exportInDesiredFormat: exportJson, confirmationModal } = useExportAllRowsGeneric({
    ...params,
    convertToOutputFormat: (formattedRows) => JSON.stringify(formattedRows),
    convertToBlob: (jsonStr) => new Blob([jsonStr], { type: 'application/json;charset=utf-8;' }),
    save: (jsonData, table) => saveAs(jsonData, `${table.name}_rows.json`),
  })

  return {
    exportJson,
    confirmationModal,
  }
}
@@ -5,7 +5,7 @@ export function getTableEditorSql(id?: number) {

  return minify(/* SQL */ `
    with base_table_info as (
      select
        c.oid::int8 as id,
        nc.nspname as schema,
        c.relname as name,
@@ -36,7 +36,7 @@ export function getTableEditorSql(id?: number) {
        )
    ),
    table_stats as (
      select
        b.id,
        case
          when b.relreplident = 'd' then 'DEFAULT'
@@ -52,23 +52,58 @@ export function getTableEditorSql(id?: number) {
      where b.relkind in ('r', 'p')
    ),
    primary_keys as (
      select
        i.indrelid as table_id,
-        jsonb_agg(jsonb_build_object(
-          'schema', n.nspname,
-          'table_name', c.relname,
-          'table_id', i.indrelid::int8,
-          'name', a.attname
-        )) as primary_keys
+        jsonb_agg(
+          jsonb_build_object(
+            'schema', n.nspname,
+            'table_name', c.relname,
+            'table_id', i.indrelid::int8,
+            'name', a.attname
+          )
+          order by array_position(i.indkey, a.attnum)
+        ) as primary_keys
      from pg_index i
      join pg_class c on i.indrelid = c.oid
-      join pg_attribute a on (a.attrelid = c.oid and a.attnum = any(i.indkey))
      join pg_namespace n on c.relnamespace = n.oid
+      join pg_attribute a on a.attrelid = c.oid and a.attnum = any(i.indkey)
      where i.indisprimary
      group by i.indrelid
    ),
+    index_cols as (
+      select
+        i.indrelid as table_id,
+        i.indkey,
+        array_agg(
+          a.attname
+          order by array_position(i.indkey, a.attnum)
+        ) as columns
+      from pg_index i
+      join pg_class c on i.indrelid = c.oid
+      join pg_attribute a on a.attrelid = c.oid
+        and a.attnum = any(i.indkey)
+      where i.indisunique
+        and i.indisprimary = false
+      group by i.indrelid, i.indkey
+    ),
+    unique_indexes as (
+      select
+        ic.table_id,
+        jsonb_agg(
+          jsonb_build_object(
+            'schema', n.nspname,
+            'table_name', c.relname,
+            'table_id', ic.table_id::int8,
+            'columns', ic.columns
+          )
+        ) as unique_indexes
+      from index_cols ic
+      join pg_class c on c.oid = ic.table_id
+      join pg_namespace n on n.oid = c.relnamespace
+      group by ic.table_id
+    ),
    relationships as (
      select
        c.conrelid as source_id,
        c.confrelid as target_id,
        jsonb_build_object(
@@ -93,7 +128,7 @@ export function getTableEditorSql(id?: number) {
      where c.contype = 'f'
    ),
    columns as (
      select
        a.attrelid as table_id,
        jsonb_agg(jsonb_build_object(
          'id', (a.attrelid || '.' || a.attnum),
@@ -102,19 +137,19 @@ export function getTableEditorSql(id?: number) {
          'table', c.relname,
          'ordinal_position', a.attnum,
          'name', a.attname,
          'default_value', case
            when a.atthasdef then pg_get_expr(ad.adbin, ad.adrelid)
            else null
          end,
          'data_type', case
            when t.typtype = 'd' then
              case
                when bt.typelem <> 0::oid and bt.typlen = -1 then 'ARRAY'
                when nbt.nspname = 'pg_catalog' then format_type(t.typbasetype, null)
                else 'USER-DEFINED'
              end
            else
              case
                when t.typelem <> 0::oid and t.typlen = -1 then 'ARRAY'
                when nt.nspname = 'pg_catalog' then format_type(a.atttypid, null)
                else 'USER-DEFINED'
@@ -138,7 +173,7 @@ export function getTableEditorSql(id?: number) {
          'is_generated', a.attgenerated in ('s'),
          'is_nullable', not (a.attnotnull or t.typtype = 'd' and t.typnotnull),
          'is_updatable', (
            b.relkind in ('r', 'p') or
            (b.relkind in ('v', 'f') and pg_column_is_updatable(b.id, a.attnum, false))
          ),
          'is_unique', uniques.table_id is not null,
@@ -164,7 +199,7 @@ export function getTableEditorSql(id?: number) {
      left join pg_type bt on (t.typtype = 'd' and t.typbasetype = bt.oid)
      left join pg_namespace nbt on bt.typnamespace = nbt.oid
      left join (
        select
          conrelid as table_id,
          conkey[1] as ordinal_position
        from pg_catalog.pg_constraint
@@ -183,13 +218,13 @@ export function getTableEditorSql(id?: number) {
        from pg_constraint
        where contype = 'c' and cardinality(conkey) = 1
        order by conrelid, conkey[1], oid asc
      ) as check_constraints on check_constraints.table_id = a.attrelid
        and check_constraints.ordinal_position = a.attnum
      where a.attnum > 0
        and not a.attisdropped
      group by a.attrelid
    )
    select
      case b.relkind
        when 'r' then jsonb_build_object(
          'entity_type', b.relkind,
@@ -205,10 +240,11 @@ export function getTableEditorSql(id?: number) {
          'dead_rows_estimate', ts.dead_rows_estimate,
          'comment', b.comment,
          'primary_keys', coalesce(pk.primary_keys, '[]'::jsonb),
+          'unique_indexes', coalesce(ui.unique_indexes, '[]'::jsonb),
          'relationships', coalesce(
            (select jsonb_agg(r.rel_info)
              from relationships r
              where r.source_id = b.id or r.target_id = b.id),
            '[]'::jsonb
          ),
          'columns', coalesce(c.columns, '[]'::jsonb)
@@ -227,10 +263,11 @@ export function getTableEditorSql(id?: number) {
          'dead_rows_estimate', ts.dead_rows_estimate,
          'comment', b.comment,
          'primary_keys', coalesce(pk.primary_keys, '[]'::jsonb),
+          'unique_indexes', coalesce(ui.unique_indexes, '[]'::jsonb),
          'relationships', coalesce(
            (select jsonb_agg(r.rel_info)
              from relationships r
              where r.source_id = b.id or r.target_id = b.id),
            '[]'::jsonb
          ),
          'columns', coalesce(c.columns, '[]'::jsonb)
@@ -268,6 +305,7 @@ export function getTableEditorSql(id?: number) {
    from base_table_info b
    left join table_stats ts on b.id = ts.id
    left join primary_keys pk on b.id = pk.table_id
+    left join unique_indexes ui on b.id = ui.table_id
    left join columns c on b.id = c.table_id;
  `)
}

@@ -13,16 +13,25 @@ interface TableRelationship extends PostgresRelationship {
  update_action: 'a' | 'r' | 'c' | 'n' | 'd'
}

+interface TableUniqueIndex {
+  schema: string
+  table_name: string
+  table_id: number
+  columns: string[]
+}
+
export interface Table extends PostgresTable {
  entity_type: ENTITY_TYPE.TABLE
  columns: PostgresColumn[]
  relationships: TableRelationship[]
+  unique_indexes?: TableUniqueIndex[]
}

export interface PartitionedTable extends PostgresTable {
  entity_type: ENTITY_TYPE.PARTITIONED_TABLE
  columns: PostgresColumn[]
  relationships: TableRelationship[]
+  unique_indexes?: TableUniqueIndex[]
}

export interface View extends PostgresView {

@@ -1,10 +1,11 @@
-import { Query } from '@supabase/pg-meta/src/query'
+import { Query, type QueryFilter } from '@supabase/pg-meta/src/query'
 import { getTableRowsSql } from '@supabase/pg-meta/src/query/table-row-query'
 import { useQuery, useQueryClient, type QueryClient } from '@tanstack/react-query'
 
 import { IS_PLATFORM } from 'common'
 import { parseSupaTable } from 'components/grid/SupabaseGrid.utils'
 import { Filter, Sort, SupaRow, SupaTable } from 'components/grid/types'
+import { ENTITY_TYPE } from 'data/entity-types/entity-type-constants'
 import { prefetchTableEditor } from 'data/table-editor/table-editor-query'
 import { isMsSqlForeignTable } from 'data/table-editor/table-editor-types'
 import {
@@ -27,15 +28,40 @@ export interface GetTableRowsArgs {
   roleImpersonationState?: RoleImpersonationState
 }
 
-// return the primary key columns if exists, otherwise return the first column to use as a default sort
-const getDefaultOrderByColumns = (table: SupaTable) => {
-  const primaryKeyColumns = table.columns.filter((col) => col?.isPrimaryKey).map((col) => col.name)
-  if (primaryKeyColumns.length === 0) {
-    const eligibleColumnsForSorting = table.columns.filter((x) => !x.dataType.includes('json'))
-    if (eligibleColumnsForSorting.length > 0) return [eligibleColumnsForSorting[0]?.name]
-    else return []
-  } else {
-    return primaryKeyColumns
-  }
-}
+/**
+ * Get the preferred columns for sorting of a table.
+ *
+ * Use the primary key if it exists, otherwise use a unique index with
+ * non-nullable columns. If all else fails, fall back to any sortable column.
+ */
+const getPreferredOrderByColumns = (
+  table: SupaTable
+): { cursorPaginationEligible: string[][]; cursorPaginationNonEligible: string[] } => {
+  const cursorPaginationEligible: string[][] = []
+  const cursorPaginationNonEligible: string[] = []
+
+  const primaryKeyColumns = table.primaryKey
+  if (primaryKeyColumns) {
+    cursorPaginationEligible.push(primaryKeyColumns)
+  }
+
+  const uniqueIndexes = table.uniqueIndexes
+  const cursorFriendlyUniqueIndexes = uniqueIndexes?.filter((index) => {
+    return index.every((columnName) => {
+      const column = table.columns.find((column) => column.name === columnName)
+      return !!column && !column.isNullable
+    })
+  })
+  if (cursorFriendlyUniqueIndexes) {
+    cursorPaginationEligible.push(...cursorFriendlyUniqueIndexes)
+  }
+
+  const eligibleColumnsForSorting = table.columns.filter((x) => !x.dataType.includes('json'))
+  cursorPaginationNonEligible.push(...eligibleColumnsForSorting.map((col) => col.name))
+
+  return {
+    cursorPaginationEligible,
+    cursorPaginationNonEligible,
+  }
+}
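A quick sketch of how the split plays out (table shape invented): the primary key and any all-non-nullable unique index are cursor-eligible, while a unique index containing a nullable column is not, because NULLs compare as unknown in a keyset predicate and rows could be silently skipped.

    // Invented table shape, for illustration only.
    const table = {
      primaryKey: ['id'],
      uniqueIndexes: [['email'], ['org_id', 'nickname']], // nickname is nullable
      columns: [
        { name: 'id', isNullable: false, dataType: 'int8' },
        { name: 'email', isNullable: false, dataType: 'text' },
        { name: 'org_id', isNullable: false, dataType: 'int8' },
        { name: 'nickname', isNullable: true, dataType: 'text' },
        { name: 'payload', isNullable: true, dataType: 'jsonb' },
      ],
    } as unknown as SupaTable

    const { cursorPaginationEligible, cursorPaginationNonEligible } =
      getPreferredOrderByColumns(table)
    // cursorPaginationEligible    -> [['id'], ['email']]
    // cursorPaginationNonEligible -> ['id', 'email', 'org_id', 'nickname'] (jsonb excluded)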
@@ -88,6 +114,11 @@ export async function executeWithRetry<T>(
   throw new Error('Max retries reached without success')
 }
 
+const checkIfCtidAvailable = (table: SupaTable): boolean =>
+  table.type === ENTITY_TYPE.TABLE ||
+  table.type === ENTITY_TYPE.PARTITIONED_TABLE ||
+  table.type === ENTITY_TYPE.MATERIALIZED_VIEW
+
 export const getAllTableRowsSql = ({
   table,
   filters = [],
@@ -96,7 +127,7 @@ export const getAllTableRowsSql = ({
   table: SupaTable
   filters?: Filter[]
   sorts?: Sort[]
-}) => {
+}): { sql: QueryFilter; cursorColumns: string[] | false } => {
   const query = new Query()
 
   const arrayBasedColumns = table.columns
@@ -116,36 +147,54 @@ export const getAllTableRowsSql = ({
     queryChains = queryChains.filter(filter.column, filter.operator, value)
   })
 
-  // Always enforce deterministic ordering for pagination/export
-  const primaryKeys = getDefaultOrderByColumns(table)
+  let cursorColumns: string[] | false = false
+  const { cursorPaginationEligible, cursorPaginationNonEligible } =
+    getPreferredOrderByColumns(table)
+
+  const hasCtid = checkIfCtidAvailable(table)
 
   if (sorts.length === 0) {
-    if (primaryKeys.length > 0) {
-      primaryKeys.forEach((col) => {
+    if (cursorPaginationEligible.length > 0) {
+      cursorColumns = cursorPaginationEligible[0]
+      cursorPaginationEligible[0].forEach((col) => {
         queryChains = queryChains.order(table.name, col)
       })
+      // Cursor paginated columns do not require ctid fallback as they
+      // guarantee uniqueness
+    } else if (cursorPaginationNonEligible.length > 0) {
+      queryChains = queryChains.order(table.name, cursorPaginationNonEligible[0])
+      if (hasCtid) {
+        queryChains = queryChains.order(table.name, 'ctid')
+      }
+    } else {
+      if (hasCtid) {
+        queryChains = queryChains.order(table.name, 'ctid')
+      }
     }
   } else {
     sorts.forEach((sort) => {
       queryChains = queryChains.order(sort.table, sort.column, sort.ascending, sort.nullsFirst)
     })
 
-    // Add primary keys as tie-breakers so page order doesn't shuffle
-    if (primaryKeys.length > 0) {
+    // Add tie-breakers so page order doesn't shuffle
+    const tieBreaker = cursorPaginationEligible[0]
+    if (tieBreaker) {
       const sortedColumns = new Set(
         sorts.filter((s) => s.table === table.name).map((s) => s.column)
       )
-      primaryKeys
-        .filter((pk) => !sortedColumns.has(pk))
-        .forEach((pk) => {
-          queryChains = queryChains.order(table.name, pk)
+      tieBreaker
+        .filter((col) => !sortedColumns.has(col))
+        .forEach((col) => {
+          queryChains = queryChains.order(table.name, col)
         })
+    } else {
+      if (hasCtid) {
+        queryChains = queryChains.order(table.name, 'ctid')
+      }
     }
   }
 
-  // Final tie-breaker: use system column ctid to guarantee a stable, unique order
-  queryChains = queryChains.order(table.name, 'ctid')
-
-  return queryChains
+  return { sql: queryChains, cursorColumns }
 }
 
 // TODO: fetchAllTableRows is used for CSV export, but since it doesn't actually truncate anything, (compare to getTableRows)
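The `cursorColumns` value returned above drives a keyset (cursor) predicate in the fetch loop below: each page is requested with a row-value comparison against the last row of the previous page, which Postgres can satisfy directly from the index instead of scanning and discarding OFFSET rows. Where no unique non-null key exists, the ordering falls back to the first sortable column plus the `ctid` system column (only available on heap relations — tables, partitioned tables, materialized views — which is what checkIfCtidAvailable guards), and the loop falls back to offset pagination. A minimal sketch of the generated predicate (identifiers and values invented):

    // Hypothetical sketch of the SQL the composite filter produces.
    // For cursorColumns = ['org_id', 'id'] and a last-seen row { org_id: 7, id: 42 }:
    const keysetSketchSql = /* SQL */ `
      select *
      from public.projects            -- invented table
      where (org_id, id) > (7, 42)    -- row-value comparison, index-friendly
      order by org_id, id
      limit 500
    `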
@@ -177,35 +226,79 @@ export const fetchAllTableRows = async ({
   }
 
   const rows: any[] = []
-  const queryChains = getAllTableRowsSql({ table, sorts, filters })
+  const { sql: queryChains, cursorColumns } = getAllTableRowsSql({
+    table,
+    sorts,
+    filters,
+  })
 
   const rowsPerPage = 500
   const THROTTLE_DELAY = 500
 
-  let page = -1
-  while (true) {
-    page += 1
-    const from = page * rowsPerPage
-    const to = (page + 1) * rowsPerPage - 1
-    const query = wrapWithRoleImpersonation(
-      queryChains.range(from, to).toSql(),
-      roleImpersonationState
-    )
-
-    try {
-      const { result } = await executeWithRetry(async () =>
-        executeSql({ projectRef, connectionString, sql: query })
-      )
-      rows.push(...result)
-      progressCallback?.(rows.length)
-
-      if (result.length < rowsPerPage) break
-
-      await sleep(THROTTLE_DELAY)
-    } catch (error) {
-      throw new Error(
-        `Error fetching all table rows: ${error instanceof Error ? error.message : 'Unknown error'}`
-      )
-    }
-  }
+  if (cursorColumns) {
+    let cursor: Record<string, any> | null = null
+    while (true) {
+      let queryChainsWithCursor = queryChains.clone()
+
+      if (cursor) {
+        queryChainsWithCursor = queryChainsWithCursor.filter(
+          cursorColumns,
+          '>',
+          cursorColumns.map((col) => cursor![col])
+        )
+      }
+      const query = wrapWithRoleImpersonation(
+        queryChainsWithCursor.range(0, rowsPerPage - 1).toSql(),
+        roleImpersonationState
+      )
+
+      try {
+        const { result } = await executeWithRetry(async () =>
+          executeSql({ projectRef, connectionString, sql: query })
+        )
+        rows.push(...result)
+        progressCallback?.(rows.length)
+
+        cursor = {}
+        for (const col of cursorColumns) {
+          cursor[col] = result[result.length - 1]?.[col]
+        }
+
+        if (result.length < rowsPerPage) break
+
+        await sleep(THROTTLE_DELAY)
+      } catch (error) {
+        throw new Error(
+          `Error fetching all table rows: ${error instanceof Error ? error.message : 'Unknown error'}`
+        )
+      }
+    }
+  } else {
+    let page = -1
+    while (true) {
+      page += 1
+      const from = page * rowsPerPage
+      const to = (page + 1) * rowsPerPage - 1
+      const query = wrapWithRoleImpersonation(
+        queryChains.range(from, to).toSql(),
+        roleImpersonationState
+      )
+
+      try {
+        const { result } = await executeWithRetry(async () =>
+          executeSql({ projectRef, connectionString, sql: query })
+        )
+        rows.push(...result)
+        progressCallback?.(rows.length)
+
+        if (result.length < rowsPerPage) break
+
+        await sleep(THROTTLE_DELAY)
+      } catch (error) {
+        throw new Error(
+          `Error fetching all table rows: ${error instanceof Error ? error.message : 'Unknown error'}`
+        )
+      }
+    }
+  }
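A sketch of a call site (values invented; the function's return statement sits outside this hunk, so the assumption here is that it resolves with the accumulated rows):

    // Hypothetical call; projectRef and connectionString are invented placeholders.
    const allRows = await fetchAllTableRows({
      table: supaTable,
      sorts: [],
      filters: [],
      projectRef: 'my-project-ref',
      connectionString: process.env.DB_URL,
      roleImpersonationState,
      progressCallback: (count: number) => {
        console.log(`exported ${count} rows so far`)
      },
    })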
apps/studio/hooks/useStaticEffectEvent.ts (new file, +15 lines)
@@ -0,0 +1,15 @@
|
||||
import { useCallback, useLayoutEffect, useRef } from 'react'
|
||||
|
||||
export const useStaticEffectEvent = <Callback extends Function>(callback: Callback) => {
|
||||
const callbackRef = useRef(callback)
|
||||
|
||||
useLayoutEffect(() => {
|
||||
callbackRef.current = callback
|
||||
})
|
||||
|
||||
const eventFn = useCallback((...args: any) => {
|
||||
return callbackRef.current(...args)
|
||||
}, [])
|
||||
|
||||
return eventFn as unknown as Callback
|
||||
}
|
||||
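This hook follows the familiar "latest ref" pattern (similar in spirit to React's proposed useEffectEvent): the returned wrapper keeps a stable identity across renders while its body always reads the newest closure, so it can be handed to long-lived consumers — such as an export progress callback — without retriggering effects. A usage sketch (component invented):

    import { useEffect, useState } from 'react'

    // Hypothetical component using the new hook.
    function ExportProgress({ runExport }: { runExport: (onRows: (n: number) => void) => void }) {
      const [count, setCount] = useState(0)

      // Stable identity; always invokes the latest setCount closure.
      const onRows = useStaticEffectEvent((n: number) => setCount(n))

      useEffect(() => {
        runExport(onRows) // effect does not re-run on every parent render
      }, [runExport, onRows])

      return <span>{count} rows exported</span>
    }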
@@ -662,7 +662,7 @@ testRunner('table editor', () => {
     const downloadJsonPath = await downloadJson.path()
     const jsonContent = fs.readFileSync(downloadJsonPath, 'utf-8')
     expect(jsonContent).toBe(
-      `[{"idx":0,"id":4,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 4 to export"},{"idx":1,"id":5,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 5 to export"},{"idx":2,"id":6,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 6 to export"}]`
+      `[{"id":4,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 4 to export"},{"id":5,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 5 to export"},{"id":6,"created_at":"2025-01-01 12:00:00+00","pw_column":"value 6 to export"}]`
     )
     await page.waitForTimeout(1000) // wait for event processing to complete
     fs.unlinkSync(downloadJsonPath)
@@ -44,6 +44,29 @@ export class QueryFilter implements IQueryFilter, IQueryModifier {
     return this._getQueryModifier().range(from, to)
   }
 
+  clone(): QueryFilter {
+    const clonedData = structuredClone({
+      table: this.table,
+      action: this.action,
+      actionValue: this.actionValue,
+      actionOptions: this.actionOptions,
+      filters: this.filters,
+      sorts: this.sorts,
+    })
+
+    const cloned = new QueryFilter(
+      clonedData.table,
+      clonedData.action,
+      clonedData.actionValue,
+      clonedData.actionOptions
+    )
+
+    cloned.filters = clonedData.filters
+    cloned.sorts = clonedData.sorts
+
+    return cloned
+  }
+
   toSql(options?: { isCTE: boolean; isFinal: boolean }) {
     return this._getQueryModifier().toSql(options)
   }
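clone() is what lets the cursor loop above derive a fresh per-page query from the shared base chain: structuredClone deep-copies the filters and sorts arrays, so the cursor filter appended for one page can never leak into the next. A usage sketch (lastSeenId is an invented placeholder):

    // Hypothetical sketch: derive independent per-page queries from one base chain.
    const { sql: base } = getAllTableRowsSql({ table, sorts: [], filters: [] })

    const firstPageSql = base.clone().range(0, 499).toSql()
    const nextPageSql = base
      .clone()
      .filter(['id'], '>', [lastSeenId]) // composite-filter form used by the cursor loop
      .range(0, 499)
      .toSql()
    // `base` itself is untouched and reusable.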
@@ -91,7 +91,12 @@ export const ConfirmationModal = forwardRef<
         }
       }}
     >
-      <DialogContent ref={ref} className="p-0 gap-0 pb-5 !block" size={size}>
+      <DialogContent
+        aria-describedby={undefined}
+        ref={ref}
+        className="p-0 gap-0 pb-5 !block"
+        size={size}
+      >
         <DialogHeader className={cn('border-b')} padding={'small'}>
           <DialogTitle>{title}</DialogTitle>
           {description && <DialogDescription>{description}</DialogDescription>}
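A note on `aria-describedby={undefined}` (an inference, not stated in the diff): Radix UI's dialog primitive warns at runtime when a `DialogContent` has no associated `Description`, and passing `aria-describedby={undefined}` explicitly is Radix's documented way to opt out of that warning for dialogs whose description is optional, as the conditional `DialogDescription` here makes it.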