fix(plugin-import-export): csv export column order (#12258)

### What?
The order of fields, when specified for the createExport function, was
not used when constructing the data. Now the field order will be used.

### Why?
This is important for building CSV data for consumption in other systems.

### How?
Adds logic to handle ordering the field values assigned to the export
data prior to building the CSV.
This commit is contained in:
Dan Ribbens
2025-04-29 12:28:16 -07:00
committed by GitHub
parent 8fee0163b5
commit 47a1eee765
3 changed files with 90 additions and 18 deletions

View File

@@ -87,7 +87,7 @@ export const createExport = async (args: CreateExportArgs) => {
let isFirstBatch = true let isFirstBatch = true
while (result.docs.length > 0) { while (result.docs.length > 0) {
const csvInput = result.docs.map((doc) => flattenObject(doc)) const csvInput = result.docs.map((doc) => flattenObject({ doc, fields }))
const csvString = stringify(csvInput, { header: isFirstBatch }) const csvString = stringify(csvInput, { header: isFirstBatch })
this.push(encoder.encode(csvString)) this.push(encoder.encode(csvString))
isFirstBatch = false isFirstBatch = false
@@ -119,7 +119,7 @@ export const createExport = async (args: CreateExportArgs) => {
result = await payload.find(findArgs) result = await payload.find(findArgs)
if (isCSV) { if (isCSV) {
const csvInput = result.docs.map((doc) => flattenObject(doc)) const csvInput = result.docs.map((doc) => flattenObject({ doc, fields }))
outputData.push(stringify(csvInput, { header: isFirstBatch })) outputData.push(stringify(csvInput, { header: isFirstBatch }))
isFirstBatch = false isFirstBatch = false
} else { } else {

View File

@@ -1,23 +1,61 @@
export const flattenObject = (obj: any, prefix: string = ''): Record<string, unknown> => { import type { Document } from 'payload'
type Args = {
doc: Document
fields?: string[]
prefix?: string
}
export const flattenObject = ({ doc, fields, prefix }: Args): Record<string, unknown> => {
const result: Record<string, unknown> = {} const result: Record<string, unknown> = {}
Object.entries(obj).forEach(([key, value]) => { const flatten = (doc: Document, prefix?: string) => {
const newKey = prefix ? `${prefix}_${key}` : key Object.entries(doc).forEach(([key, value]) => {
const newKey = prefix ? `${prefix}_${key}` : key
if (Array.isArray(value)) { if (Array.isArray(value)) {
value.forEach((item, index) => { value.forEach((item, index) => {
if (typeof item === 'object' && item !== null) { if (typeof item === 'object' && item !== null) {
Object.assign(result, flattenObject(item, `${newKey}_${index}`)) flatten(item, `${newKey}_${index}`)
} else { } else {
result[`${newKey}_${index}`] = item result[`${newKey}_${index}`] = item
} }
}) })
} else if (typeof value === 'object' && value !== null) { } else if (typeof value === 'object' && value !== null) {
Object.assign(result, flattenObject(value, newKey)) flatten(value, newKey)
} else { } else {
result[newKey] = value result[newKey] = value
}
})
}
flatten(doc, prefix)
if (fields) {
const orderedResult: Record<string, unknown> = {}
const fieldToRegex = (field: string): RegExp => {
const parts = field.split('.').map((part) => `${part}(?:_\\d+)?`)
const pattern = `^${parts.join('_')}`
return new RegExp(pattern)
} }
})
fields.forEach((field) => {
if (result[field.replace(/\./g, '_')]) {
const sanitizedField = field.replace(/\./g, '_')
orderedResult[sanitizedField] = result[sanitizedField]
} else {
const regex = fieldToRegex(field)
Object.keys(result).forEach((key) => {
if (regex.test(key)) {
orderedResult[key] = result[key]
}
})
}
})
return orderedResult
}
return result return result
} }

View File

@@ -1,5 +1,6 @@
import type { CollectionSlug, Payload } from 'payload' import type { CollectionSlug, Payload } from 'payload'
import fs from 'fs'
import path from 'path' import path from 'path'
import { fileURLToPath } from 'url' import { fileURLToPath } from 'url'
@@ -221,6 +222,39 @@ describe('@payloadcms/plugin-import-export', () => {
expect(data[0].array_1_field2).toStrictEqual('baz') expect(data[0].array_1_field2).toStrictEqual('baz')
}) })
it('should create a CSV file with columns matching the order of the fields array', async () => {
const fields = ['id', 'group.value', 'group.array.field1', 'title', 'createdAt', 'updatedAt']
const doc = await payload.create({
collection: 'exports',
user,
data: {
collectionSlug: 'pages',
fields,
format: 'csv',
where: {
title: { contains: 'Title ' },
},
},
})
const exportDoc = await payload.findByID({
collection: 'exports',
id: doc.id,
})
expect(exportDoc.filename).toBeDefined()
const expectedPath = path.join(dirname, './uploads', exportDoc.filename as string)
const buffer = fs.readFileSync(expectedPath)
const str = buffer.toString()
// Assert that the header row matches the fields array
expect(str.indexOf('id')).toBeLessThan(str.indexOf('title'))
expect(str.indexOf('group_value')).toBeLessThan(str.indexOf('title'))
expect(str.indexOf('group_value')).toBeLessThan(str.indexOf('group_array'))
expect(str.indexOf('title')).toBeLessThan(str.indexOf('createdAt'))
expect(str.indexOf('createdAt')).toBeLessThan(str.indexOf('updatedAt'))
})
it('should create a file for collection csv from array.subfield', async () => { it('should create a file for collection csv from array.subfield', async () => {
let doc = await payload.create({ let doc = await payload.create({
collection: 'exports', collection: 'exports',