fix(plugin-import-export): json preview and downloads preserve nesting and exclude disabled fields (#13210)
### What?

Improves both the JSON preview and export functionality in the import-export plugin:

- Preserves proper nesting of object and array fields (e.g., groups, tabs, arrays)
- Excludes any fields explicitly marked as `disabled` via `custom.plugin-import-export`
- Ensures downloaded files use proper JSON formatting when `format` is `json` (no CSV-style flattening)

### Why?

Previously:

- The JSON preview flattened all fields to a single level and included disabled fields.
- Exported files with `format: json` were still CSV-style data encoded as `.json`, rather than real JSON.

### How?

- Refactored `/preview-data` JSON handling to preserve the original document shape.
- Applied `removeDisabledFields` to clean nested fields using dot-notation paths (sketched below).
- Updated `createExport` to skip `flattenObject` for JSON formats, using a nested JSON filter instead.
- Fixed streaming and buffered export paths to output valid JSON arrays when `format` is `json` (see the streaming sketch below).
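The heart of the streaming change is emitting one valid JSON array across paginated batches instead of CSV-flattened rows. A minimal sketch of that pattern, assuming a Node `Readable` and a hypothetical `fetchPage` helper standing in for the plugin's paginated `payload.find` calls (not the plugin's actual API):

```ts
import { Readable } from 'node:stream'

type Page = { docs: Record<string, unknown>[]; hasNextPage: boolean }

// fetchPage is a hypothetical stand-in for payload.find({ ...findArgs, page })
export const streamJSONArray = (fetchPage: (page: number) => Promise<Page>): Readable => {
  let page = 1
  let isFirstBatch = true

  return new Readable({
    async read() {
      const result = await fetchPage(page)

      if (result.docs.length === 0) {
        // Close the array even if the first page is already empty
        this.push(isFirstBatch ? '[]' : ']')
        this.push(null)
        return
      }

      const batch = result.docs.map((doc) => JSON.stringify(doc)).join(',')
      // Open the array on the first batch; continue with a comma afterwards
      this.push(isFirstBatch ? `[${batch}` : `,${batch}`)
      isFirstBatch = false

      if (!result.hasNextPage) {
        this.push(']')
        this.push(null)
        return
      }

      page += 1
    },
  })
}
```

Opening the array with the first batch and prefixing every later batch with a comma keeps memory usage flat while still producing one well-formed JSON document.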
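The disabled-field cleanup operates on dot-notation paths such as `group.value` or `array.field1`, descending into arrays of objects along the way. A standalone sketch of that behavior with a made-up document and paths (the plugin's actual utility, `removeDisabledFields`, is added in the diff below and additionally deep-clones before deleting):

```ts
// Sketch: remove fields named by dot-notation paths, descending into arrays.
const removeByDotPath = (obj: any, path: string): void => {
  const [head, ...rest] = path.split('.')
  if (obj == null || typeof obj !== 'object') return
  if (Array.isArray(obj)) {
    // Apply the same remaining path to every array item
    for (const item of obj) removeByDotPath(item, path)
    return
  }
  if (rest.length === 0) {
    delete obj[head]
    return
  }
  removeByDotPath(obj[head], rest.join('.'))
}

// Made-up example document and paths:
const doc = {
  group: { value: 'secret', keep: 'ok' },
  array: [{ field1: 'a', field2: 'b' }, { field1: 'c' }],
}
removeByDotPath(doc, 'group.value')
removeByDotPath(doc, 'array.field1')
// doc is now { group: { keep: 'ok' }, array: [{ field2: 'b' }, {}] }
```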
@@ -68,6 +68,7 @@ export const Preview = () => {
       collectionSlug,
       draft,
       fields,
+      format,
       limit,
       locale,
       sort,
@@ -115,8 +116,13 @@ export const Preview = () => {

     const fieldKeys =
       Array.isArray(fields) && fields.length > 0
-        ? selectedKeys // strictly only what was selected
-        : [...selectedKeys, ...defaultMetaFields.filter((key) => allKeys.includes(key))]
+        ? selectedKeys // strictly use selected fields only
+        : [
+            ...selectedKeys,
+            ...defaultMetaFields.filter(
+              (key) => allKeys.includes(key) && !selectedKeys.includes(key),
+            ),
+          ]

     // Build columns based on flattened keys
     const newColumns: Column[] = fieldKeys.map((key) => ({
@@ -158,6 +164,7 @@ export const Preview = () => {
       disabledFieldRegexes,
       draft,
       fields,
+      format,
       i18n,
       limit,
       locale,
@@ -114,7 +114,7 @@ export const createExport = async (args: CreateExportArgs) => {

   const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex)

-  const filterDisabled = (row: Record<string, unknown>): Record<string, unknown> => {
+  const filterDisabledCSV = (row: Record<string, unknown>): Record<string, unknown> => {
     const filtered: Record<string, unknown> = {}

     for (const [key, value] of Object.entries(row)) {
@@ -127,35 +127,62 @@ export const createExport = async (args: CreateExportArgs) => {
     return filtered
   }

+  const filterDisabledJSON = (doc: any, parentPath = ''): any => {
+    if (Array.isArray(doc)) {
+      return doc.map((item) => filterDisabledJSON(item, parentPath))
+    }
+
+    if (typeof doc !== 'object' || doc === null) {
+      return doc
+    }
+
+    const filtered: Record<string, any> = {}
+    for (const [key, value] of Object.entries(doc)) {
+      const currentPath = parentPath ? `${parentPath}.${key}` : key
+
+      // Only remove if this exact path is disabled
+      const isDisabled = disabledFields.includes(currentPath)
+
+      if (!isDisabled) {
+        filtered[key] = filterDisabledJSON(value, currentPath)
+      }
+    }
+
+    return filtered
+  }
+
   if (download) {
     if (debug) {
       req.payload.logger.debug('Pre-scanning all columns before streaming')
     }

-    const allColumnsSet = new Set<string>()
     const allColumns: string[] = []
-    let scanPage = 1
-    let hasMore = true

-    while (hasMore) {
-      const result = await payload.find({ ...findArgs, page: scanPage })
+    if (isCSV) {
+      const allColumnsSet = new Set<string>()
+      let scanPage = 1
+      let hasMore = true

-      result.docs.forEach((doc) => {
-        const flat = filterDisabled(flattenObject({ doc, fields, toCSVFunctions }))
-        Object.keys(flat).forEach((key) => {
-          if (!allColumnsSet.has(key)) {
-            allColumnsSet.add(key)
-            allColumns.push(key)
-          }
-        })
-      })
+      while (hasMore) {
+        const result = await payload.find({ ...findArgs, page: scanPage })

-      hasMore = result.hasNextPage
-      scanPage += 1
-    }
+        result.docs.forEach((doc) => {
+          const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions }))
+          Object.keys(flat).forEach((key) => {
+            if (!allColumnsSet.has(key)) {
+              allColumnsSet.add(key)
+              allColumns.push(key)
+            }
+          })
+        })

-    if (debug) {
-      req.payload.logger.debug(`Discovered ${allColumns.length} columns`)
+        hasMore = result.hasNextPage
+        scanPage += 1
+      }
+
+      if (debug) {
+        req.payload.logger.debug(`Discovered ${allColumns.length} columns`)
+      }
     }

     const encoder = new TextEncoder()
@@ -171,28 +198,48 @@ export const createExport = async (args: CreateExportArgs) => {
         }

         if (result.docs.length === 0) {
+          // Close JSON array properly if JSON
+          if (!isCSV) {
+            this.push(encoder.encode(']'))
+          }
           this.push(null)
           return
         }

-        const batchRows = result.docs.map((doc) =>
-          filterDisabled(flattenObject({ doc, fields, toCSVFunctions })),
-        )
+        if (isCSV) {
+          // --- CSV Streaming ---
+          const batchRows = result.docs.map((doc) =>
+            filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })),
+          )

-        const paddedRows = batchRows.map((row) => {
-          const fullRow: Record<string, unknown> = {}
-          for (const col of allColumns) {
-            fullRow[col] = row[col] ?? ''
-          }
-          return fullRow
-        })
+          const paddedRows = batchRows.map((row) => {
+            const fullRow: Record<string, unknown> = {}
+            for (const col of allColumns) {
+              fullRow[col] = row[col] ?? ''
+            }
+            return fullRow
+          })

-        const csvString = stringify(paddedRows, {
-          header: isFirstBatch,
-          columns: allColumns,
-        })
+          const csvString = stringify(paddedRows, {
+            header: isFirstBatch,
+            columns: allColumns,
+          })

-        this.push(encoder.encode(csvString))
+          this.push(encoder.encode(csvString))
+        } else {
+          // --- JSON Streaming ---
+          const batchRows = result.docs.map((doc) => filterDisabledJSON(doc))
+
+          // Convert each filtered/flattened row into JSON string
+          const batchJSON = batchRows.map((row) => JSON.stringify(row)).join(',')
+
+          if (isFirstBatch) {
+            this.push(encoder.encode('[' + batchJSON))
+          } else {
+            this.push(encoder.encode(',' + batchJSON))
+          }
+        }
+
         isFirstBatch = false
         streamPage += 1
@@ -200,6 +247,9 @@ export const createExport = async (args: CreateExportArgs) => {
         if (debug) {
           req.payload.logger.debug('Stream complete - no more pages')
         }
+        if (!isCSV) {
+          this.push(encoder.encode(']'))
+        }
         this.push(null) // End the stream
       }
     },
@@ -239,7 +289,7 @@ export const createExport = async (args: CreateExportArgs) => {

     if (isCSV) {
       const batchRows = result.docs.map((doc) =>
-        filterDisabled(flattenObject({ doc, fields, toCSVFunctions })),
+        filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })),
       )

       // Track discovered column keys
@@ -254,8 +304,8 @@ export const createExport = async (args: CreateExportArgs) => {

       rows.push(...batchRows)
     } else {
-      const jsonInput = result.docs.map((doc) => JSON.stringify(doc))
-      outputData.push(jsonInput.join(',\n'))
+      const batchRows = result.docs.map((doc) => filterDisabledJSON(doc))
+      outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n'))
     }

     hasNextPage = result.hasNextPage
@@ -13,6 +13,9 @@ import { getExportCollection } from './getExportCollection.js'
 import { translations } from './translations/index.js'
 import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js'
 import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js'
+import { getValueAtPath } from './utilities/getvalueAtPath.js'
+import { removeDisabledFields } from './utilities/removeDisabledFields.js'
+import { setNestedValue } from './utilities/setNestedValue.js'

 export const importExportPlugin =
   (pluginConfig: ImportExportPluginConfig) =>
@@ -91,6 +94,7 @@ export const importExportPlugin =
       collectionSlug: string
       draft?: 'no' | 'yes'
       fields?: string[]
+      format?: 'csv' | 'json'
       limit?: number
       locale?: string
       sort?: any
@@ -120,29 +124,58 @@ export const importExportPlugin =
         where,
       })

+      const isCSV = req?.data?.format === 'csv'
       const docs = result.docs

-      const toCSVFunctions = getCustomFieldFunctions({
-        fields: collection.config.fields as FlattenedField[],
-      })
+      let transformed: Record<string, unknown>[] = []

-      const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[])
+      if (isCSV) {
+        const toCSVFunctions = getCustomFieldFunctions({
+          fields: collection.config.fields as FlattenedField[],
+        })

-      const transformed = docs.map((doc) => {
-        const row = flattenObject({
-          doc,
-          fields,
-          toCSVFunctions,
-        })
+        const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[])

-        for (const key of possibleKeys) {
-          if (!(key in row)) {
-            row[key] = null
-          }
-        }
+        transformed = docs.map((doc) => {
+          const row = flattenObject({
+            doc,
+            fields,
+            toCSVFunctions,
+          })

-        return row
-      })
+          for (const key of possibleKeys) {
+            if (!(key in row)) {
+              row[key] = null
+            }
+          }
+
+          return row
+        })
+      } else {
+        const disabledFields =
+          collection.config.admin.custom?.['plugin-import-export']?.disabledFields
+
+        transformed = docs.map((doc) => {
+          let output: Record<string, unknown> = { ...doc }
+
+          // Remove disabled fields first
+          output = removeDisabledFields(output, disabledFields)
+
+          // Then trim to selected fields only (if fields are provided)
+          if (Array.isArray(fields) && fields.length > 0) {
+            const trimmed: Record<string, unknown> = {}
+
+            for (const key of fields) {
+              const value = getValueAtPath(output, key)
+              setNestedValue(trimmed, key, value ?? null)
+            }
+
+            output = trimmed
+          }
+
+          return output
+        })
+      }

       return Response.json({
         docs: transformed,
@@ -22,21 +22,18 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix
     'plugin-import-export' in field.custom &&
     field.custom['plugin-import-export']?.toCSV

-  if (!('name' in field) || typeof field.name !== 'string' || fieldHasToCSVFunction) {
-    return
-  }
-
-  const name = prefix ? `${prefix}_${field.name}` : field.name
+  const name = 'name' in field && typeof field.name === 'string' ? field.name : undefined
+  const fullKey = name && prefix ? `${prefix}_${name}` : (name ?? prefix)

   switch (field.type) {
     case 'array': {
-      const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${name}_0`)
+      const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${fullKey}_0`)
       keys.push(...subKeys)
       break
     }
     case 'blocks': {
       field.blocks.forEach((block) => {
-        const blockPrefix = `${name}_0_${block.slug}`
+        const blockPrefix = `${fullKey}_0_${block.slug}`
         keys.push(`${blockPrefix}_blockType`)
         keys.push(`${blockPrefix}_id`)
         keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix))
@@ -46,45 +43,42 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix
     case 'collapsible':
     case 'group':
     case 'row':
-      keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], name))
+      keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], fullKey))
       break
     case 'relationship':
       if (field.hasMany) {
        if (Array.isArray(field.relationTo)) {
          // hasMany polymorphic
-          keys.push(`${name}_0_relationTo`, `${name}_0_id`)
+          keys.push(`${fullKey}_0_relationTo`, `${fullKey}_0_id`)
        } else {
          // hasMany monomorphic
-          keys.push(`${name}_0`)
+          keys.push(`${fullKey}_0`)
        }
      } else {
        if (Array.isArray(field.relationTo)) {
          // hasOne polymorphic
-          keys.push(`${name}_relationTo`, `${name}_id`)
+          keys.push(`${fullKey}_relationTo`, `${fullKey}_id`)
        } else {
          // hasOne monomorphic
-          keys.push(name)
+          keys.push(fullKey)
        }
      }
      break
     case 'tabs':
-      if (field.tabs) {
-        field.tabs.forEach((tab) => {
-          if (tab.name) {
-            const tabPrefix = prefix ? `${prefix}_${tab.name}` : tab.name
-            keys.push(...getFlattenedFieldKeys(tab.fields, tabPrefix))
-          } else {
-            keys.push(...getFlattenedFieldKeys(tab.fields, prefix))
-          }
-        })
-      }
+      field.tabs?.forEach((tab) => {
+        const tabPrefix = tab.name ? `${fullKey}_${tab.name}` : fullKey
+        keys.push(...getFlattenedFieldKeys(tab.fields || [], tabPrefix))
+      })
       break
     default:
+      if (!name || fieldHasToCSVFunction) {
+        break
+      }
       if ('hasMany' in field && field.hasMany) {
         // Push placeholder for first index
-        keys.push(`${name}_0`)
+        keys.push(`${fullKey}_0`)
       } else {
-        keys.push(name)
+        keys.push(fullKey)
       }
       break
   }
@@ -0,0 +1,59 @@
+/**
+ * Safely retrieves a deeply nested value from an object using a dot-notation path.
+ *
+ * Supports:
+ * - Indexed array access (e.g., "array.0.field1")
+ * - Polymorphic blocks or keyed unions (e.g., "blocks.0.hero.title"), where the block key
+ *   (e.g., "hero") maps to a nested object inside the block item.
+ *
+ *
+ * @param obj - The input object to traverse.
+ * @param path - A dot-separated string representing the path to retrieve.
+ * @returns The value at the specified path, or undefined if not found.
+ */
+export const getValueAtPath = (obj: unknown, path: string): unknown => {
+  if (!obj || typeof obj !== 'object') {
+    return undefined
+  }
+
+  const parts = path.split('.')
+  let current: any = obj
+
+  for (const part of parts) {
+    if (current == null) {
+      return undefined
+    }
+
+    // If the path part is a number, treat it as an array index
+    if (!isNaN(Number(part))) {
+      current = current[Number(part)]
+      continue
+    }
+
+    // Special case: if current is an array of blocks like [{ hero: { title: '...' } }]
+    // and the path is "blocks.0.hero.title", then `part` would be "hero"
+    if (Array.isArray(current)) {
+      const idx = Number(parts[parts.indexOf(part) - 1])
+      const blockItem = current[idx]
+
+      if (typeof blockItem === 'object') {
+        const keys = Object.keys(blockItem)
+
+        // Find the key (e.g., "hero") that maps to an object
+        const matchingBlock = keys.find(
+          (key) => blockItem[key] && typeof blockItem[key] === 'object',
+        )
+
+        if (matchingBlock && part === matchingBlock) {
+          current = blockItem[matchingBlock]
+          continue
+        }
+      }
+    }
+
+    // Fallback to plain object key access
+    current = current[part]
+  }
+
+  return current
+}
@@ -0,0 +1,80 @@
+/**
+ * Recursively removes fields from a deeply nested object based on dot-notation paths.
+ *
+ * This utility supports removing:
+ * - Nested fields in plain objects (e.g., "group.value")
+ * - Fields inside arrays of objects (e.g., "group.array.field1")
+ *
+ * It safely traverses both object and array structures and avoids mutating the original input.
+ *
+ * @param obj - The original object to clean.
+ * @param disabled - An array of dot-separated paths indicating which fields to remove.
+ * @returns A deep clone of the original object with specified fields removed.
+ */
+
+export const removeDisabledFields = (
+  obj: Record<string, unknown>,
+  disabled: string[] = [],
+): Record<string, unknown> => {
+  if (!disabled.length) {
+    return obj
+  }
+
+  const clone = structuredClone(obj)
+
+  // Process each disabled path independently
+  for (const path of disabled) {
+    const parts = path.split('.')
+
+    /**
+     * Recursively walks the object tree according to the dot path,
+     * and deletes the field once the full path is reached.
+     *
+     * @param target - The current object or array being traversed
+     * @param i - The index of the current path part
+     */
+    const removeRecursively = (target: any, i = 0): void => {
+      if (target == null) {
+        return
+      }
+
+      const key = parts[i]
+
+      // If at the final part of the path, perform the deletion
+      if (i === parts.length - 1) {
+        // If the current level is an array, delete the key from each item
+        if (Array.isArray(target)) {
+          for (const item of target) {
+            if (item && typeof item === 'object' && key !== undefined) {
+              delete item[key as keyof typeof item]
+            }
+          }
+        } else if (typeof target === 'object' && key !== undefined) {
+          delete target[key]
+        }
+        return
+      }
+
+      if (key === undefined) {
+        return
+      }
+
+      // Traverse to the next level in the path
+      const next = target[key]
+
+      if (Array.isArray(next)) {
+        // If the next value is an array, recurse into each item
+        for (const item of next) {
+          removeRecursively(item, i + 1)
+        }
+      } else {
+        // Otherwise, continue down the object path
+        removeRecursively(next, i + 1)
+      }
+    }
+
+    removeRecursively(clone)
+  }
+
+  return clone
+}
@@ -0,0 +1,65 @@
+/**
+ * Sets a value deeply into a nested object or array, based on a dot-notation path.
+ *
+ * This function:
+ * - Supports array indexing (e.g., "array.0.field1")
+ * - Creates intermediate arrays/objects as needed
+ * - Mutates the target object directly
+ *
+ * @example
+ * const obj = {}
+ * setNestedValue(obj, 'group.array.0.field1', 'hello')
+ * // Result: { group: { array: [ { field1: 'hello' } ] } }
+ *
+ * @param obj - The target object to mutate.
+ * @param path - A dot-separated string path indicating where to assign the value.
+ * @param value - The value to set at the specified path.
+ */
+
+export const setNestedValue = (
+  obj: Record<string, unknown>,
+  path: string,
+  value: unknown,
+): void => {
+  const parts = path.split('.')
+  let current: any = obj
+
+  for (let i = 0; i < parts.length; i++) {
+    const part = parts[i]
+    const isLast = i === parts.length - 1
+    const isIndex = !Number.isNaN(Number(part))
+
+    if (isIndex) {
+      const index = Number(part)
+
+      // Ensure the current target is an array
+      if (!Array.isArray(current)) {
+        current = []
+      }
+
+      // Ensure the array slot is initialized
+      if (!current[index]) {
+        current[index] = {}
+      }
+
+      if (isLast) {
+        current[index] = value
+      } else {
+        current = current[index] as Record<string, unknown>
+      }
+    } else {
+      // Ensure the object key exists
+      if (isLast) {
+        if (typeof part === 'string') {
+          current[part] = value
+        }
+      } else {
+        if (typeof current[part as string] !== 'object' || current[part as string] === null) {
+          current[part as string] = {}
+        }
+
+        current = current[part as string] as Record<string, unknown>
+      }
+    }
+  }
+}
@@ -61,6 +61,11 @@ export const Pages: CollectionConfig = {
           name: 'value',
           type: 'text',
           defaultValue: 'group value',
+          // custom: {
+          //   'plugin-import-export': {
+          //     disabled: true,
+          //   },
+          // },
         },
         {
           name: 'ignore',
@@ -216,5 +221,20 @@ export const Pages: CollectionConfig = {
       relationTo: ['users', 'posts'],
       hasMany: true,
     },
+    {
+      type: 'collapsible',
+      label: 'Collapsible Field',
+      fields: [
+        {
+          name: 'textFieldInCollapsible',
+          type: 'text',
+          // custom: {
+          //   'plugin-import-export': {
+          //     disabled: true,
+          //   },
+          // },
+        },
+      ],
+    },
   ],
 }
@@ -467,6 +467,29 @@ describe('@payloadcms/plugin-import-export', () => {
     expect(data[0].title).toStrictEqual('JSON 0')
   })

+  it('should download an existing export JSON file', async () => {
+    const response = await restClient.POST('/exports/download', {
+      body: JSON.stringify({
+        data: {
+          collectionSlug: 'pages',
+          fields: ['id', 'title'],
+          format: 'json',
+          sort: 'title',
+        },
+      }),
+      headers: { 'Content-Type': 'application/json' },
+    })
+
+    expect(response.status).toBe(200)
+    expect(response.headers.get('content-type')).toMatch(/application\/json/)
+
+    const data = await response.json()
+
+    expect(Array.isArray(data)).toBe(true)
+    expect(['string', 'number']).toContain(typeof data[0].id)
+    expect(typeof data[0].title).toBe('string')
+  })
+
   it('should create an export with every field when no fields are defined', async () => {
     let doc = await payload.create({
       collection: 'exports',
@@ -242,6 +242,7 @@ export interface Page {
         }
       )[]
     | null;
+  textFieldInCollapsible?: string | null;
   updatedAt: string;
   createdAt: string;
   _status?: ('draft' | 'published') | null;
@@ -579,6 +580,7 @@ export interface PagesSelect<T extends boolean = true> {
   excerpt?: T;
   hasOnePolymorphic?: T;
   hasManyPolymorphic?: T;
+  textFieldInCollapsible?: T;
   updatedAt?: T;
   createdAt?: T;
   _status?: T;