feat(plugin-import-export): add debug logging option (#12705)

Germán Jabloñski
2025-06-09 11:23:36 -03:00
committed by GitHub
parent 7b21270959
commit 678bfc7d93
4 changed files with 69 additions and 5 deletions
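The new `debug` flag is exposed on the plugin config (see the types change in the last file of this diff) and threaded through both export paths. A minimal sketch of enabling it, assuming the plugin is exported as `importExportPlugin` from `@payloadcms/plugin-import-export` and that a `posts` collection exists:

```ts
// Sketch only: the import name, package path, and 'posts' slug are assumptions,
// not part of this commit.
import { importExportPlugin } from '@payloadcms/plugin-import-export'

export const plugins = [
  importExportPlugin({
    collections: ['posts'], // collections exposed for export
    debug: true, // new option: log each step of the export process
  }),
]
```

With the flag set, the steps below (find arguments, batch processing, file generation, export creation or update) are reported through `req.payload.logger.info`.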

View File

@@ -1,3 +1,4 @@
/* eslint-disable perfectionist/sort-objects */
import type { PaginatedDocs, PayloadRequest, Sort, User, Where } from 'payload'
import { stringify } from 'csv-stringify/sync'
@@ -10,6 +11,10 @@ import { getSelect } from './getSelect.js'
type Export = {
collectionSlug: string
/**
* If true, enables debug logging
*/
debug?: boolean
drafts?: 'no' | 'yes'
exportsCollection: string
fields?: string[]
@@ -42,6 +47,7 @@ export const createExport = async (args: CreateExportArgs) => {
id,
name: nameArg,
collectionSlug,
debug = false,
drafts,
exportsCollection,
fields,
@@ -54,6 +60,17 @@ export const createExport = async (args: CreateExportArgs) => {
req: { locale: localeArg, payload },
req,
} = args
if (debug) {
req.payload.logger.info({
message: 'Starting export process with args:',
collectionSlug,
drafts,
fields,
format,
})
}
const locale = localeInput ?? localeArg
const collectionConfig = payload.config.collections.find(({ slug }) => slug === collectionSlug)
if (!collectionConfig) {
@@ -63,6 +80,10 @@ export const createExport = async (args: CreateExportArgs) => {
const name = `${nameArg ?? `${getFilename()}-${collectionSlug}`}.${format}`
const isCSV = format === 'csv'
if (debug) {
req.payload.logger.info({ message: 'Export configuration:', name, isCSV, locale })
}
const findArgs = {
collection: collectionSlug,
depth: 0,
@@ -77,9 +98,16 @@ export const createExport = async (args: CreateExportArgs) => {
where,
}
if (debug) {
req.payload.logger.info({ message: 'Find arguments:', findArgs })
}
let result: PaginatedDocs = { hasNextPage: true } as PaginatedDocs
if (download) {
if (debug) {
req.payload.logger.info('Starting download stream')
}
const encoder = new TextEncoder()
const stream = new Readable({
async read() {
@@ -87,12 +115,20 @@ export const createExport = async (args: CreateExportArgs) => {
let isFirstBatch = true
while (result.docs.length > 0) {
if (debug) {
req.payload.logger.info(
`Processing batch ${findArgs.page + 1} with ${result.docs.length} documents`,
)
}
const csvInput = result.docs.map((doc) => flattenObject({ doc, fields }))
const csvString = stringify(csvInput, { header: isFirstBatch })
this.push(encoder.encode(csvString))
isFirstBatch = false
if (!result.hasNextPage) {
if (debug) {
req.payload.logger.info('Stream complete - no more pages')
}
this.push(null) // End the stream
break
}
@@ -111,6 +147,9 @@ export const createExport = async (args: CreateExportArgs) => {
})
}
if (debug) {
req.payload.logger.info('Starting file generation')
}
const outputData: string[] = []
let isFirstBatch = true
@@ -118,6 +157,12 @@ export const createExport = async (args: CreateExportArgs) => {
findArgs.page += 1
result = await payload.find(findArgs)
if (debug) {
req.payload.logger.info(
`Processing batch ${findArgs.page} with ${result.docs.length} documents`,
)
}
if (isCSV) {
const csvInput = result.docs.map((doc) => flattenObject({ doc, fields }))
outputData.push(stringify(csvInput, { header: isFirstBatch }))
@@ -129,8 +174,14 @@ export const createExport = async (args: CreateExportArgs) => {
}
const buffer = Buffer.from(format === 'json' ? `[${outputData.join(',')}]` : outputData.join(''))
if (debug) {
req.payload.logger.info(`${format} file generation complete`)
}
if (!id) {
if (debug) {
req.payload.logger.info('Creating new export file')
}
req.file = {
name,
data: buffer,
@@ -138,6 +189,9 @@ export const createExport = async (args: CreateExportArgs) => {
size: buffer.length,
}
} else {
if (debug) {
req.payload.logger.info(`Updating existing export with id: ${id}`)
}
await req.payload.update({
id,
collection: exportsCollection,
@@ -151,4 +205,7 @@ export const createExport = async (args: CreateExportArgs) => {
user,
})
}
if (debug) {
req.payload.logger.info('Export process completed successfully')
}
}

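Every log call added above sits behind the same `if (debug)` guard. Purely as an illustration (not part of this commit), the pattern can be captured in a small helper that becomes a no-op when the flag is off:

```ts
import type { PayloadRequest } from 'payload'

// Hypothetical helper, shown only to illustrate the guard pattern used in createExport.
export const makeDebugLog = (req: PayloadRequest, debug: boolean) => {
  return (entry: object | string): void => {
    if (debug) {
      // Payload's logger is pino-based, so it accepts an object or a message string.
      req.payload.logger.info(entry)
    }
  }
}

// Usage sketch inside createExport:
// const debugLog = makeDebugLog(req, debug)
// debugLog({ message: 'Starting export process with args:', collectionSlug, drafts, fields, format })
```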
View File

@@ -1,10 +1,10 @@
-import type { PayloadHandler } from 'payload'
+import type { PayloadRequest } from 'payload'
import { APIError } from 'payload'
import { createExport } from './createExport.js'
-export const download: PayloadHandler = async (req) => {
+export const download = async (req: PayloadRequest, debug = false) => {
let body
if (typeof req?.json === 'function') {
body = await req.json()
@@ -20,7 +20,7 @@ export const download: PayloadHandler = async (req) => {
return createExport({
download: true,
-input: body.data,
+input: { ...body.data, debug },
req,
}) as Promise<Response>
}

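With `debug` now forwarded by the endpoint, triggering a download emits the batch-by-batch log lines added above. A usage sketch, assuming the default `/api` REST prefix and an exports collection with the slug `exports` (neither is stated in this diff):

```ts
// Sketch: requesting a streamed CSV export from client code. The URL, slugs,
// and field values below are example assumptions.
const res = await fetch('/api/exports/download', {
  method: 'POST',
  credentials: 'include',
  headers: { 'Content-Type': 'application/json' },
  // The handler reads body.data and spreads it into createExport's input.
  body: JSON.stringify({
    data: {
      collectionSlug: 'posts',
      format: 'csv',
    },
  }),
})
const csv = await res.text()
```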
View File

@@ -36,7 +36,9 @@ export const getExportCollection = ({
disableDuplicate: true,
endpoints: [
{
-handler: download,
+handler: (req) => {
+  return download(req, pluginConfig.debug)
+},
method: 'post',
path: '/download',
},
@@ -63,7 +65,8 @@ export const getExportCollection = ({
return
}
const { user } = req
-await createExport({ input: { ...args.data, user }, req })
+const debug = pluginConfig.debug
+await createExport({ input: { ...args.data, debug, user }, req })
})
} else {
afterChange.push(async ({ doc, operation, req }) => {

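On the non-streaming path, `pluginConfig.debug` is read inside the hook above and passed into `createExport` together with the submitted data, so exports created as documents get the same logging. A sketch of kicking one off through the Local API (the `exports` slug and the field names are assumptions):

```ts
import type { Payload } from 'payload'

// Sketch: creating an export document via the Local API. With debug: true in the
// plugin config, the hook shown above forwards the flag into createExport.
export const runExport = async (payload: Payload): Promise<void> => {
  await payload.create({
    collection: 'exports', // assumed slug of the exports collection
    data: {
      collectionSlug: 'posts', // collection to export (example)
      format: 'csv',
    },
  })
}
```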
View File

@@ -11,6 +11,10 @@ export type ImportExportPluginConfig = {
* Defaults to all collections
*/
collections?: string[]
/**
* If true, enables debug logging
*/
debug?: boolean
/**
* Enable to force the export to run synchronously
*/