From 7cd682c66af5f9e61fbacf6e368b8988c13a829b Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 16 Jul 2025 09:45:02 -0700 Subject: [PATCH 01/91] perf(drizzle): further optimize postgres row updates (#13184) This is a follow-up to https://github.com/payloadcms/payload/pull/13060. There are a bunch of other db adapter methods that use `upsertRow` for updates: `updateGlobal`, `updateGlobalVersion`, `updateJobs`, `updateMany`, `updateVersion`. The previous PR had the logic for using the optimized row updating logic inside the `updateOne` adapter. This PR moves that logic to the original `upsertRow` function. Benefits: - all the other db methods will benefit from this massive optimization as well. This will be especially relevant for optimizing postgres job queue initial updates - we should be able to close https://github.com/payloadcms/payload/pull/11865 after another follow-up PR - easier to read db adapter methods due to less code. --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210803039809810 --- packages/drizzle/src/updateOne.ts | 119 +-- packages/drizzle/src/upsertRow/index.ts | 775 +++++++++--------- .../upsertRow/shouldUseOptimizedUpsertRow.ts | 52 ++ test/database/int.spec.ts | 67 +- 4 files changed, 518 insertions(+), 495 deletions(-) create mode 100644 packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts diff --git a/packages/drizzle/src/updateOne.ts b/packages/drizzle/src/updateOne.ts index 3bd37e4682..8fddd9378f 100644 --- a/packages/drizzle/src/updateOne.ts +++ b/packages/drizzle/src/updateOne.ts @@ -1,67 +1,15 @@ import type { LibSQLDatabase } from 'drizzle-orm/libsql' -import type { FlattenedField, UpdateOne } from 'payload' +import type { UpdateOne } from 'payload' -import { eq } from 'drizzle-orm' import toSnakeCase from 'to-snake-case' import type { DrizzleAdapter } from './types.js' -import { buildFindManyArgs } from './find/buildFindManyArgs.js' import { buildQuery } from './queries/buildQuery.js' import { selectDistinct } from './queries/selectDistinct.js' -import { transform } from './transform/read/index.js' -import { transformForWrite } from './transform/write/index.js' import { upsertRow } from './upsertRow/index.js' import { getTransaction } from './utilities/getTransaction.js' -/** - * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call. - * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships. 
- */ -const shouldUseUpsertRow = ({ - data, - fields, -}: { - data: Record - fields: FlattenedField[] -}) => { - for (const key in data) { - const value = data[key] - const field = fields.find((each) => each.name === key) - - if (!field) { - continue - } - - if ( - field.type === 'array' || - field.type === 'blocks' || - ((field.type === 'text' || - field.type === 'relationship' || - field.type === 'upload' || - field.type === 'select' || - field.type === 'number') && - field.hasMany) || - ((field.type === 'relationship' || field.type === 'upload') && - Array.isArray(field.relationTo)) || - field.localized - ) { - return true - } - - if ( - (field.type === 'group' || field.type === 'tab') && - value && - typeof value === 'object' && - shouldUseUpsertRow({ data: value as Record, fields: field.flattenedFields }) - ) { - return true - } - } - - return false -} - export const updateOne: UpdateOne = async function updateOne( this: DrizzleAdapter, { @@ -126,72 +74,23 @@ export const updateOne: UpdateOne = async function updateOne( return null } - if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) { - const result = await upsertRow({ - id: idToUpdate, - adapter: this, - data, - db, - fields: collection.flattenedFields, - ignoreResult: returning === false, - joinQuery, - operation: 'update', - req, - select, - tableName, - }) - - if (returning === false) { - return null - } - - return result - } - - const { row } = transformForWrite({ + const result = await upsertRow({ + id: idToUpdate, adapter: this, data, - enableAtomicWrites: true, + db, fields: collection.flattenedFields, + ignoreResult: returning === false, + joinQuery, + operation: 'update', + req, + select, tableName, }) - const drizzle = db as LibSQLDatabase - await drizzle - .update(this.tables[tableName]) - .set(row) - // TODO: we can skip fetching idToUpdate here with using the incoming where - .where(eq(this.tables[tableName].id, idToUpdate)) - if (returning === false) { return null } - const findManyArgs = buildFindManyArgs({ - adapter: this, - depth: 0, - fields: collection.flattenedFields, - joinQuery: false, - select, - tableName, - }) - - findManyArgs.where = eq(this.tables[tableName].id, idToUpdate) - - const doc = await db.query[tableName].findFirst(findManyArgs) - - // ////////////////////////////////// - // TRANSFORM DATA - // ////////////////////////////////// - - const result = transform({ - adapter: this, - config: this.payload.config, - data: doc, - fields: collection.flattenedFields, - joinQuery: false, - tableName, - }) - return result } diff --git a/packages/drizzle/src/upsertRow/index.ts b/packages/drizzle/src/upsertRow/index.ts index ad10c5fd14..72f89435ec 100644 --- a/packages/drizzle/src/upsertRow/index.ts +++ b/packages/drizzle/src/upsertRow/index.ts @@ -1,3 +1,4 @@ +import type { LibSQLDatabase } from 'drizzle-orm/libsql' import type { TypeWithID } from 'payload' import { eq } from 'drizzle-orm' @@ -12,13 +13,14 @@ import { transformForWrite } from '../transform/write/index.js' import { deleteExistingArrayRows } from './deleteExistingArrayRows.js' import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js' import { insertArrays } from './insertArrays.js' +import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js' /** * If `id` is provided, it will update the row with that ID. * If `where` is provided, it will update the row that matches the `where` * If neither `id` nor `where` is provided, it will create a new row. 
* - * This function replaces the entire row and does not support partial updates. + * adapter function replaces the entire row and does not support partial updates. */ export const upsertRow = async | TypeWithID>({ id, @@ -39,429 +41,446 @@ export const upsertRow = async | TypeWithID>( upsertTarget, where, }: Args): Promise => { - // Split out the incoming data into the corresponding: - // base row, locales, relationships, blocks, and arrays - const rowToInsert = transformForWrite({ - adapter, - data, - enableAtomicWrites: false, - fields, - path, - tableName, - }) - - // First, we insert the main row - let insertedRow: Record - - try { - if (operation === 'update') { - const target = upsertTarget || adapter.tables[tableName].id - - if (id) { - rowToInsert.row.id = id - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target }, - tableName, - values: rowToInsert.row, - }) - } else { - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target, where }, - tableName, - values: rowToInsert.row, - }) - } - } else { - if (adapter.allowIDOnCreate && data.id) { - rowToInsert.row.id = data.id - } - ;[insertedRow] = await adapter.insert({ - db, - tableName, - values: rowToInsert.row, - }) - } - - const localesToInsert: Record[] = [] - const relationsToInsert: Record[] = [] - const textsToInsert: Record[] = [] - const numbersToInsert: Record[] = [] - const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} - const selectsToInsert: { [selectTableName: string]: Record[] } = {} - - // If there are locale rows with data, add the parent and locale to each - if (Object.keys(rowToInsert.locales).length > 0) { - Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { - localeRow._parentID = insertedRow.id - localeRow._locale = locale - localesToInsert.push(localeRow) - }) - } - - // If there are relationships, add parent to each - if (rowToInsert.relationships.length > 0) { - rowToInsert.relationships.forEach((relation) => { - relation.parent = insertedRow.id - relationsToInsert.push(relation) - }) - } - - // If there are texts, add parent to each - if (rowToInsert.texts.length > 0) { - rowToInsert.texts.forEach((textRow) => { - textRow.parent = insertedRow.id - textsToInsert.push(textRow) - }) - } - - // If there are numbers, add parent to each - if (rowToInsert.numbers.length > 0) { - rowToInsert.numbers.forEach((numberRow) => { - numberRow.parent = insertedRow.id - numbersToInsert.push(numberRow) - }) - } - - // If there are selects, add parent to each, and then - // store by table name and rows - if (Object.keys(rowToInsert.selects).length > 0) { - Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { - selectsToInsert[selectTableName] = [] - - selectRows.forEach((row) => { - if (typeof row.parent === 'undefined') { - row.parent = insertedRow.id - } - - selectsToInsert[selectTableName].push(row) - }) - }) - } - - // If there are blocks, add parent to each, and then - // store by table name and rows - Object.keys(rowToInsert.blocks).forEach((tableName) => { - rowToInsert.blocks[tableName].forEach((blockRow) => { - blockRow.row._parentID = insertedRow.id - if (!blocksToInsert[tableName]) { - blocksToInsert[tableName] = [] - } - if (blockRow.row.uuid) { - delete blockRow.row.uuid - } - blocksToInsert[tableName].push(blockRow) - }) + let insertedRow: Record = { id } + if (id && shouldUseOptimizedUpsertRow({ data, fields })) { + const { row } = transformForWrite({ + 
adapter, + data, + enableAtomicWrites: true, + fields, + tableName, }) - // ////////////////////////////////// - // INSERT LOCALES - // ////////////////////////////////// + const drizzle = db as LibSQLDatabase - if (localesToInsert.length > 0) { - const localeTableName = `${tableName}${adapter.localesSuffix}` - const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + await drizzle + .update(adapter.tables[tableName]) + .set(row) + // TODO: we can skip fetching idToUpdate here with using the incoming where + .where(eq(adapter.tables[tableName].id, id)) + } else { + // Split out the incoming data into the corresponding: + // base row, locales, relationships, blocks, and arrays + const rowToInsert = transformForWrite({ + adapter, + data, + enableAtomicWrites: false, + fields, + path, + tableName, + }) + // First, we insert the main row + try { if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: localeTableName, - where: eq(localeTable._parentID, insertedRow.id), - }) - } + const target = upsertTarget || adapter.tables[tableName].id - await adapter.insert({ - db, - tableName: localeTableName, - values: localesToInsert, - }) - } - - // ////////////////////////////////// - // INSERT RELATIONSHIPS - // ////////////////////////////////// - - const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], - tableName: relationshipsTableName, - }) - } - - if (relationsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: relationshipsTableName, - values: relationsToInsert, - }) - } - - // ////////////////////////////////// - // INSERT hasMany TEXTS - // ////////////////////////////////// - - const textsTableName = `${tableName}_texts` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...textsToInsert, ...rowToInsert.textsToDelete], - tableName: textsTableName, - }) - } - - if (textsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: textsTableName, - values: textsToInsert, - }) - } - - // ////////////////////////////////// - // INSERT hasMany NUMBERS - // ////////////////////////////////// - - const numbersTableName = `${tableName}_numbers` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], - tableName: numbersTableName, - }) - } - - if (numbersToInsert.length > 0) { - await adapter.insert({ - db, - tableName: numbersTableName, - values: numbersToInsert, - }) - } - - // ////////////////////////////////// - // INSERT BLOCKS - // ////////////////////////////////// - - const insertedBlockRows: Record[]> = {} - - if (operation === 'update') { - for (const tableName of rowToInsert.blocksToDelete) { - const blockTable = adapter.tables[tableName] - await adapter.deleteWhere({ + if (id) { + rowToInsert.row.id = id + ;[insertedRow] = await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target }, + tableName, + values: rowToInsert.row, + }) + } else { + ;[insertedRow] = 
await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target, where }, + tableName, + values: rowToInsert.row, + }) + } + } else { + if (adapter.allowIDOnCreate && data.id) { + rowToInsert.row.id = data.id + } + ;[insertedRow] = await adapter.insert({ db, tableName, - where: eq(blockTable._parentID, insertedRow.id), + values: rowToInsert.row, }) } - } - // When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. - const arraysBlocksUUIDMap: Record = {} + const localesToInsert: Record[] = [] + const relationsToInsert: Record[] = [] + const textsToInsert: Record[] = [] + const numbersToInsert: Record[] = [] + const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} + const selectsToInsert: { [selectTableName: string]: Record[] } = {} - for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { - insertedBlockRows[tableName] = await adapter.insert({ - db, - tableName, - values: blockRows.map(({ row }) => row), - }) + // If there are locale rows with data, add the parent and locale to each + if (Object.keys(rowToInsert.locales).length > 0) { + Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { + localeRow._parentID = insertedRow.id + localeRow._locale = locale + localesToInsert.push(localeRow) + }) + } - insertedBlockRows[tableName].forEach((row, i) => { - blockRows[i].row = row - if ( - typeof row._uuid === 'string' && - (typeof row.id === 'string' || typeof row.id === 'number') - ) { - arraysBlocksUUIDMap[row._uuid] = row.id - } - }) + // If there are relationships, add parent to each + if (rowToInsert.relationships.length > 0) { + rowToInsert.relationships.forEach((relation) => { + relation.parent = insertedRow.id + relationsToInsert.push(relation) + }) + } - const blockLocaleIndexMap: number[] = [] + // If there are texts, add parent to each + if (rowToInsert.texts.length > 0) { + rowToInsert.texts.forEach((textRow) => { + textRow.parent = insertedRow.id + textsToInsert.push(textRow) + }) + } - const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { - if (Object.entries(blockRow.locales).length > 0) { - Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { - if (Object.keys(blockLocaleData).length > 0) { - blockLocaleData._parentID = blockRow.row.id - blockLocaleData._locale = blockLocale - acc.push(blockLocaleData) - blockLocaleIndexMap.push(i) + // If there are numbers, add parent to each + if (rowToInsert.numbers.length > 0) { + rowToInsert.numbers.forEach((numberRow) => { + numberRow.parent = insertedRow.id + numbersToInsert.push(numberRow) + }) + } + + // If there are selects, add parent to each, and then + // store by table name and rows + if (Object.keys(rowToInsert.selects).length > 0) { + Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { + selectsToInsert[selectTableName] = [] + + selectRows.forEach((row) => { + if (typeof row.parent === 'undefined') { + row.parent = insertedRow.id } + + selectsToInsert[selectTableName].push(row) + }) + }) + } + + // If there are blocks, add parent to each, and then + // store by table name and rows + Object.keys(rowToInsert.blocks).forEach((tableName) => { + rowToInsert.blocks[tableName].forEach((blockRow) => { + blockRow.row._parentID = insertedRow.id + if (!blocksToInsert[tableName]) { + blocksToInsert[tableName] = [] + } + if (blockRow.row.uuid) { + delete 
blockRow.row.uuid + } + blocksToInsert[tableName].push(blockRow) + }) + }) + + // ////////////////////////////////// + // INSERT LOCALES + // ////////////////////////////////// + + if (localesToInsert.length > 0) { + const localeTableName = `${tableName}${adapter.localesSuffix}` + const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: localeTableName, + where: eq(localeTable._parentID, insertedRow.id), }) } - return acc - }, []) - - if (blockLocaleRowsToInsert.length > 0) { await adapter.insert({ db, - tableName: `${tableName}${adapter.localesSuffix}`, - values: blockLocaleRowsToInsert, + tableName: localeTableName, + values: localesToInsert, }) } + // ////////////////////////////////// + // INSERT RELATIONSHIPS + // ////////////////////////////////// + + const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], + tableName: relationshipsTableName, + }) + } + + if (relationsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: relationshipsTableName, + values: relationsToInsert, + }) + } + + // ////////////////////////////////// + // INSERT hasMany TEXTS + // ////////////////////////////////// + + const textsTableName = `${tableName}_texts` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...textsToInsert, ...rowToInsert.textsToDelete], + tableName: textsTableName, + }) + } + + if (textsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: textsTableName, + values: textsToInsert, + }) + } + + // ////////////////////////////////// + // INSERT hasMany NUMBERS + // ////////////////////////////////// + + const numbersTableName = `${tableName}_numbers` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], + tableName: numbersTableName, + }) + } + + if (numbersToInsert.length > 0) { + await adapter.insert({ + db, + tableName: numbersTableName, + values: numbersToInsert, + }) + } + + // ////////////////////////////////// + // INSERT BLOCKS + // ////////////////////////////////// + + const insertedBlockRows: Record[]> = {} + + if (operation === 'update') { + for (const tableName of rowToInsert.blocksToDelete) { + const blockTable = adapter.tables[tableName] + await adapter.deleteWhere({ + db, + tableName, + where: eq(blockTable._parentID, insertedRow.id), + }) + } + } + + // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. 
+ const arraysBlocksUUIDMap: Record = {} + + for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { + insertedBlockRows[tableName] = await adapter.insert({ + db, + tableName, + values: blockRows.map(({ row }) => row), + }) + + insertedBlockRows[tableName].forEach((row, i) => { + blockRows[i].row = row + if ( + typeof row._uuid === 'string' && + (typeof row.id === 'string' || typeof row.id === 'number') + ) { + arraysBlocksUUIDMap[row._uuid] = row.id + } + }) + + const blockLocaleIndexMap: number[] = [] + + const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { + if (Object.entries(blockRow.locales).length > 0) { + Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { + if (Object.keys(blockLocaleData).length > 0) { + blockLocaleData._parentID = blockRow.row.id + blockLocaleData._locale = blockLocale + acc.push(blockLocaleData) + blockLocaleIndexMap.push(i) + } + }) + } + + return acc + }, []) + + if (blockLocaleRowsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: `${tableName}${adapter.localesSuffix}`, + values: blockLocaleRowsToInsert, + }) + } + + await insertArrays({ + adapter, + arrays: blockRows.map(({ arrays }) => arrays), + db, + parentRows: insertedBlockRows[tableName], + uuidMap: arraysBlocksUUIDMap, + }) + } + + // ////////////////////////////////// + // INSERT ARRAYS RECURSIVELY + // ////////////////////////////////// + + if (operation === 'update') { + for (const arrayTableName of Object.keys(rowToInsert.arrays)) { + await deleteExistingArrayRows({ + adapter, + db, + parentID: insertedRow.id, + tableName: arrayTableName, + }) + } + } + await insertArrays({ adapter, - arrays: blockRows.map(({ arrays }) => arrays), + arrays: [rowToInsert.arrays], db, - parentRows: insertedBlockRows[tableName], + parentRows: [insertedRow], uuidMap: arraysBlocksUUIDMap, }) - } - // ////////////////////////////////// - // INSERT ARRAYS RECURSIVELY - // ////////////////////////////////// + // ////////////////////////////////// + // INSERT hasMany SELECTS + // ////////////////////////////////// - if (operation === 'update') { - for (const arrayTableName of Object.keys(rowToInsert.arrays)) { - await deleteExistingArrayRows({ - adapter, - db, - parentID: insertedRow.id, - tableName: arrayTableName, - }) - } - } + for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { + const selectTable = adapter.tables[selectTableName] + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: selectTableName, + where: eq(selectTable.parent, insertedRow.id), + }) + } - await insertArrays({ - adapter, - arrays: [rowToInsert.arrays], - db, - parentRows: [insertedRow], - uuidMap: arraysBlocksUUIDMap, - }) + if (Object.keys(arraysBlocksUUIDMap).length > 0) { + tableRows.forEach((row: any) => { + if (row.parent in arraysBlocksUUIDMap) { + row.parent = arraysBlocksUUIDMap[row.parent] + } + }) + } - // ////////////////////////////////// - // INSERT hasMany SELECTS - // ////////////////////////////////// - - for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { - const selectTable = adapter.tables[selectTableName] - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: selectTableName, - where: eq(selectTable.parent, insertedRow.id), - }) + if (tableRows.length) { + await adapter.insert({ + db, + tableName: selectTableName, + values: tableRows, + }) + } } - if (Object.keys(arraysBlocksUUIDMap).length > 0) { - tableRows.forEach((row: any) => { - if (row.parent 
in arraysBlocksUUIDMap) { - row.parent = arraysBlocksUUIDMap[row.parent] + // ////////////////////////////////// + // Error Handling + // ////////////////////////////////// + } catch (caughtError) { + // Unique constraint violation error + // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite + + let error = caughtError + if (typeof caughtError === 'object' && 'cause' in caughtError) { + error = caughtError.cause + } + + if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + let fieldName: null | string = null + // We need to try and find the right constraint for the field but if we can't we fallback to a generic message + if (error.code === '23505') { + // For PostgreSQL, we can try to extract the field name from the error constraint + if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { + fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] + } else { + const replacement = `${tableName}_` + + if (error.constraint.includes(replacement)) { + const replacedConstraint = error.constraint.replace(replacement, '') + + if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { + fieldName = adapter.fieldConstraints[tableName][replacedConstraint] + } + } } - }) - } - if (tableRows.length) { - await adapter.insert({ - db, - tableName: selectTableName, - values: tableRows, - }) - } - } + if (!fieldName) { + // Last case scenario we extract the key and value from the detail on the error + const detail = error.detail + const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ + const match: string[] = detail.match(regex) - // ////////////////////////////////// - // Error Handling - // ////////////////////////////////// - } catch (caughtError) { - // Unique constraint violation error - // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite + if (match && match[1]) { + const key = match[1] - let error = caughtError - if (typeof caughtError === 'object' && 'cause' in caughtError) { - error = caughtError.cause - } + fieldName = key + } + } + } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + /** + * For SQLite, we can try to extract the field name from the error message + * The message typically looks like: + * "UNIQUE constraint failed: table_name.field_name" + */ + const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ + const match: string[] = error.message.match(regex) - if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - let fieldName: null | string = null - // We need to try and find the right constraint for the field but if we can't we fallback to a generic message - if (error.code === '23505') { - // For PostgreSQL, we can try to extract the field name from the error constraint - if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { - fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] - } else { - const replacement = `${tableName}_` + if (match && match[2]) { + if (adapter.fieldConstraints[tableName]) { + fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] + } - if (error.constraint.includes(replacement)) { - const replacedConstraint = error.constraint.replace(replacement, '') - - if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { - fieldName = adapter.fieldConstraints[tableName][replacedConstraint] + if (!fieldName) { + fieldName = match[2] } } } - if (!fieldName) { - // Last case scenario we extract the key and value from the detail on the error - const detail = 
error.detail - const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ - const match: string[] = detail.match(regex) - - if (match && match[1]) { - const key = match[1] - - fieldName = key - } - } - } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - /** - * For SQLite, we can try to extract the field name from the error message - * The message typically looks like: - * "UNIQUE constraint failed: table_name.field_name" - */ - const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ - const match: string[] = error.message.match(regex) - - if (match && match[2]) { - if (adapter.fieldConstraints[tableName]) { - fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] - } - - if (!fieldName) { - fieldName = match[2] - } - } + throw new ValidationError( + { + id, + errors: [ + { + message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', + path: fieldName, + }, + ], + req, + }, + req?.t, + ) + } else { + throw error } - - throw new ValidationError( - { - id, - errors: [ - { - message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: fieldName, - }, - ], - req, - }, - req?.t, - ) - } else { - throw error } } diff --git a/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts b/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts new file mode 100644 index 0000000000..096d22a5cf --- /dev/null +++ b/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts @@ -0,0 +1,52 @@ +import type { FlattenedField } from 'payload' + +/** + * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call. + * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships. + */ +export const shouldUseOptimizedUpsertRow = ({ + data, + fields, +}: { + data: Record + fields: FlattenedField[] +}) => { + for (const key in data) { + const value = data[key] + const field = fields.find((each) => each.name === key) + + if (!field) { + continue + } + + if ( + field.type === 'array' || + field.type === 'blocks' || + ((field.type === 'text' || + field.type === 'relationship' || + field.type === 'upload' || + field.type === 'select' || + field.type === 'number') && + field.hasMany) || + ((field.type === 'relationship' || field.type === 'upload') && + Array.isArray(field.relationTo)) || + field.localized + ) { + return false + } + + if ( + (field.type === 'group' || field.type === 'tab') && + value && + typeof value === 'object' && + !shouldUseOptimizedUpsertRow({ + data: value as Record, + fields: field.flattenedFields, + }) + ) { + return false + } + } + + return true +} diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index ecaf364acb..9bd4ae5418 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -1,7 +1,13 @@ import type { MongooseAdapter } from '@payloadcms/db-mongodb' import type { PostgresAdapter } from '@payloadcms/db-postgres/types' import type { NextRESTClient } from 'helpers/NextRESTClient.js' -import type { Payload, PayloadRequest, TypeWithID, ValidationError } from 'payload' +import type { + DataFromCollectionSlug, + Payload, + PayloadRequest, + TypeWithID, + ValidationError, +} from 'payload' import { migrateRelationshipsV2_V3, @@ -2807,7 +2813,7 @@ describe('database', () => { } }) - it('should update simple', async () => { + it('should use optimized updateOne', async () => { const post = await payload.create({ collection: 'posts', data: { @@ -2818,7 +2824,7 @@ describe('database', 
() => { arrayWithIDs: [{ text: 'some text' }], }, }) - const res = await payload.db.updateOne({ + const res = (await payload.db.updateOne({ where: { id: { equals: post.id } }, data: { title: 'hello updated', @@ -2826,14 +2832,61 @@ describe('database', () => { tab: { text: 'in tab updated' }, }, collection: 'posts', - }) + })) as unknown as DataFromCollectionSlug<'posts'> expect(res.title).toBe('hello updated') expect(res.text).toBe('other text (should not be nuked)') - expect(res.group.text).toBe('in group updated') - expect(res.tab.text).toBe('in tab updated') + expect(res.group?.text).toBe('in group updated') + expect(res.tab?.text).toBe('in tab updated') expect(res.arrayWithIDs).toHaveLength(1) - expect(res.arrayWithIDs[0].text).toBe('some text') + expect(res.arrayWithIDs?.[0]?.text).toBe('some text') + }) + + it('should use optimized updateMany', async () => { + const post1 = await payload.create({ + collection: 'posts', + data: { + text: 'other text (should not be nuked)', + title: 'hello', + group: { text: 'in group' }, + tab: { text: 'in tab' }, + arrayWithIDs: [{ text: 'some text' }], + }, + }) + const post2 = await payload.create({ + collection: 'posts', + data: { + text: 'other text 2 (should not be nuked)', + title: 'hello', + group: { text: 'in group' }, + tab: { text: 'in tab' }, + arrayWithIDs: [{ text: 'some text' }], + }, + }) + + const res = (await payload.db.updateMany({ + where: { id: { in: [post1.id, post2.id] } }, + data: { + title: 'hello updated', + group: { text: 'in group updated' }, + tab: { text: 'in tab updated' }, + }, + collection: 'posts', + })) as unknown as Array> + + expect(res).toHaveLength(2) + const resPost1 = res?.find((r) => r.id === post1.id) + const resPost2 = res?.find((r) => r.id === post2.id) + expect(resPost1?.text).toBe('other text (should not be nuked)') + expect(resPost2?.text).toBe('other text 2 (should not be nuked)') + + for (const post of res) { + expect(post.title).toBe('hello updated') + expect(post.group?.text).toBe('in group updated') + expect(post.tab?.text).toBe('in tab updated') + expect(post.arrayWithIDs).toHaveLength(1) + expect(post.arrayWithIDs?.[0]?.text).toBe('some text') + } }) it('should allow incremental number update', async () => { From e6da384a439cc68ce1216c491ae2c3df9ae571c7 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Wed, 16 Jul 2025 12:56:42 -0400 Subject: [PATCH 02/91] ci: disable bundle analysis for forks (#13198) The bundle analysis action requires comment permissions which are not available to PRs from forks. This PR disables bundle analysis until we can implement this in a separate workflow as shown in [the docs here](https://github.com/exoego/esbuild-bundle-analyzer?tab=readme-ov-file#github-action-setup-for-public-repositories). 
--- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9ca829bcbe..bf9fe7d359 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -718,6 +718,8 @@ jobs: DO_NOT_TRACK: 1 # Disable Turbopack telemetry - name: Analyze esbuild bundle size + # Temporarily disable this for community PRs until this can be implemented in a separate workflow + if: github.event.pull_request.head.repo.fork == false uses: exoego/esbuild-bundle-analyzer@v1 with: metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json' From 41cff6d436d2a4ee0920caacdbfa60f6ed1397da Mon Sep 17 00:00:00 2001 From: Elliott W Date: Thu, 17 Jul 2025 01:02:43 +0545 Subject: [PATCH 03/91] fix(db-mongodb): improve compatibility with Firestore database (#12763) ### What? Adds four more arguments to the `mongooseAdapter`: ```typescript useJoinAggregations?: boolean /* The big one */ useAlternativeDropDatabase?: boolean useBigIntForNumberIDs?: boolean usePipelineInSortLookup?: boolean ``` Also export a new `compatabilityOptions` object from `@payloadcms/db-mongodb` where each key is a mongo-compatible database and the value is the recommended `mongooseAdapter` settings for compatability. ### Why? When using firestore and visiting `/admin/collections/media/payload-folders`, we get: ``` MongoServerError: invalid field(s) in lookup: [let, pipeline], only lookup(from, localField, foreignField, as) is supported ``` Firestore doesn't support the full MongoDB aggregation API used by Payload which gets used when building aggregations for populating join fields. There are several other compatability issues with Firestore: - The invalid `pipeline` property is used in the `$lookup` aggregation in `buildSortParams` - Firestore only supports number IDs of type `Long`, but Mongoose converts custom ID fields of type number to `Double` - Firestore does not support the `dropDatabase` command - Firestore does not support the `createIndex` command (not addressed in this PR) ### How? ```typescript useJoinAggregations?: boolean /* The big one */ ``` When this is `false` we skip the `buildJoinAggregation()` pipeline and resolve the join fields through multiple queries. This can potentially be used with AWS DocumentDB and Azure Cosmos DB to support join fields, but I have not tested with either of these databases. ```typescript useAlternativeDropDatabase?: boolean ``` When `true`, monkey-patch (replace) the `dropDatabase` function so that it calls `collection.deleteMany({})` on every collection instead of sending a single `dropDatabase` command to the database ```typescript useBigIntForNumberIDs?: boolean ``` When `true`, use `mongoose.Schema.Types.BigInt` for custom ID fields of type `number` which converts to a firestore `Long` behind the scenes ```typescript usePipelineInSortLookup?: boolean ``` When `false`, modify the sortAggregation pipeline in `buildSortParams()` so that we don't use the `pipeline` property in the `$lookup` aggregation. Results in slightly worse performance when sorting by relationship properties. ### Limitations This PR does not add support for transactions or creating indexes in firestore. ### Fixes Fixed a bug (and added a test) where you weren't able to sort by multiple properties on a relationship field. ### Future work 1. 
Firestore supports simple `$lookup` aggregations but other databases might not. Could add a `useSortAggregations` property which can be used to disable aggregations in sorting. --------- Co-authored-by: Claude Co-authored-by: Sasha <64744993+r1tsuu@users.noreply.github.com> --- .github/workflows/main.yml | 1 + docs/database/mongodb.mdx | 50 +- package.json | 1 + packages/db-mongodb/src/connect.ts | 19 + packages/db-mongodb/src/find.ts | 11 + packages/db-mongodb/src/findOne.ts | 11 + packages/db-mongodb/src/index.ts | 41 ++ packages/db-mongodb/src/models/buildSchema.ts | 13 +- .../db-mongodb/src/queries/buildSortParam.ts | 58 +- packages/db-mongodb/src/queryDrafts.ts | 12 + .../src/utilities/aggregatePaginate.ts | 6 +- .../src/utilities/buildJoinAggregation.ts | 3 + .../src/utilities/compatabilityOptions.ts | 25 + .../db-mongodb/src/utilities/resolveJoins.ts | 647 ++++++++++++++++++ .../db-mongodb/src/utilities/transform.ts | 5 + test/generateDatabaseAdapter.ts | 19 + test/generateDatabaseSchema.ts | 2 +- test/helpers/isMongoose.ts | 5 +- test/helpers/startMemoryDB.ts | 6 +- test/relationships/int.spec.ts | 43 +- 20 files changed, 938 insertions(+), 40 deletions(-) create mode 100644 packages/db-mongodb/src/utilities/compatabilityOptions.ts create mode 100644 packages/db-mongodb/src/utilities/resolveJoins.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bf9fe7d359..60b6ac9655 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -153,6 +153,7 @@ jobs: matrix: database: - mongodb + - firestore - postgres - postgres-custom-schema - postgres-uuid diff --git a/docs/database/mongodb.mdx b/docs/database/mongodb.mdx index 16958cd1c6..26a139bae3 100644 --- a/docs/database/mongodb.mdx +++ b/docs/database/mongodb.mdx @@ -30,18 +30,22 @@ export default buildConfig({ ## Options -| Option | Description | -| -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. | -| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. | -| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. | -| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false | -| `migrationDir` | Customize the directory that migrations are stored. | -| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. | -| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. 
For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). | -| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. | -| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. | -| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. | +| Option | Description | +| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. | +| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. | +| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. | +| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false | +| `migrationDir` | Customize the directory that migrations are stored. | +| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. | +| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). | +| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. | +| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. | +| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. | +| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. 
| +| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. | +| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. | +| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. | ## Access to Mongoose models @@ -56,9 +60,21 @@ You can access Mongoose models as follows: ## Using other MongoDB implementations -Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db): +You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated): -- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two and more documents in different collections, which is a common case when using Payload (via hooks). -- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`. -- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future. -- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB. +```ts +import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb' + +export default buildConfig({ + db: mongooseAdapter({ + url: process.env.DATABASE_URI, + // For example, if you're using firestore: + ...compatabilityOptions.firestore, + }), +}) +``` + +We export compatability options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations: + +- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks). +- Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`. 
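
As a concrete illustration of how these presets compose with the rest of the config (a minimal sketch, not taken from this PR — it assumes Azure Cosmos DB and a `DATABASE_URI` environment variable), the Cosmos DB preset can be spread into `mongooseAdapter` while the root `indexSortableFields` flag noted in the limitations above is set separately:

```ts
import { buildConfig } from 'payload'
import { compatabilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'

export default buildConfig({
  // Cosmos DB limitation noted above: sortable fields must be indexed up front.
  indexSortableFields: true,
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
    // Spreads { transactionOptions: false, useJoinAggregations: false, usePipelineInSortLookup: false }
    ...compatabilityOptions.cosmosdb,
  }),
})
```

Individual options can still be overridden by listing them after the spread, since later object properties win.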
diff --git a/package.json b/package.json index d00e17bd8d..7bd509f311 100644 --- a/package.json +++ b/package.json @@ -112,6 +112,7 @@ "test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod", "test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo", "test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", + "test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:types": "tstyche", diff --git a/packages/db-mongodb/src/connect.ts b/packages/db-mongodb/src/connect.ts index 6210bde286..ba2c9c4db3 100644 --- a/packages/db-mongodb/src/connect.ts +++ b/packages/db-mongodb/src/connect.ts @@ -36,6 +36,25 @@ export const connect: Connect = async function connect( try { this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection + if (this.useAlternativeDropDatabase) { + if (this.connection.db) { + // Firestore doesn't support dropDatabase, so we monkey patch + // dropDatabase to delete all documents from all collections instead + this.connection.db.dropDatabase = async function (): Promise { + const existingCollections = await this.listCollections().toArray() + await Promise.all( + existingCollections.map(async (collectionInfo) => { + const collection = this.collection(collectionInfo.name) + await collection.deleteMany({}) + }), + ) + return true + } + this.connection.dropDatabase = async function () { + await this.db?.dropDatabase() + } + } + } // If we are running a replica set with MongoDB Memory Server, // wait until the replica set elects a primary before proceeding diff --git a/packages/db-mongodb/src/find.ts b/packages/db-mongodb/src/find.ts index 938940c513..6f1124e503 100644 --- a/packages/db-mongodb/src/find.ts +++ b/packages/db-mongodb/src/find.ts @@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const find: Find = async function find( @@ -155,6 +156,16 @@ export const find: Find = async function find( result = await Model.paginate(query, paginationOptions) } + if (!this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: result.docs as Record[], + joins, + locale, + }) + } + transform({ adapter: this, data: result.docs, diff --git a/packages/db-mongodb/src/findOne.ts b/packages/db-mongodb/src/findOne.ts index 0ffe97b108..cf6edb34f0 100644 --- 
a/packages/db-mongodb/src/findOne.ts +++ b/packages/db-mongodb/src/findOne.ts @@ -10,6 +10,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const findOne: FindOne = async function findOne( @@ -67,6 +68,16 @@ export const findOne: FindOne = async function findOne( doc = await Model.findOne(query, {}, options) } + if (doc && !this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: [doc] as Record[], + joins, + locale, + }) + } + if (!doc) { return null } diff --git a/packages/db-mongodb/src/index.ts b/packages/db-mongodb/src/index.ts index de2dc1c862..f2f152533a 100644 --- a/packages/db-mongodb/src/index.ts +++ b/packages/db-mongodb/src/index.ts @@ -143,6 +143,29 @@ export interface Args { /** The URL to connect to MongoDB or false to start payload and prevent connecting */ url: false | string + + /** + * Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. + * Payload only uses `dropDatabase` for testing purposes. + * @default false + */ + useAlternativeDropDatabase?: boolean + /** + * Set to `true` to use `BigInt` for custom ID fields of type `'number'`. + * Useful for databases that don't support `double` or `int32` IDs. + * @default false + */ + useBigIntForNumberIDs?: boolean + /** + * Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. + * @default true + */ + useJoinAggregations?: boolean + /** + * Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. + * @default true + */ + usePipelineInSortLookup?: boolean } export type MongooseAdapter = { @@ -159,6 +182,10 @@ export type MongooseAdapter = { up: (args: MigrateUpArgs) => Promise }[] sessions: Record + useAlternativeDropDatabase: boolean + useBigIntForNumberIDs: boolean + useJoinAggregations: boolean + usePipelineInSortLookup: boolean versions: { [slug: string]: CollectionModel } @@ -194,6 +221,10 @@ declare module 'payload' { updateVersion: ( args: { options?: QueryOptions } & UpdateVersionArgs, ) => Promise> + useAlternativeDropDatabase: boolean + useBigIntForNumberIDs: boolean + useJoinAggregations: boolean + usePipelineInSortLookup: boolean versions: { [slug: string]: CollectionModel } @@ -214,6 +245,10 @@ export function mongooseAdapter({ prodMigrations, transactionOptions = {}, url, + useAlternativeDropDatabase = false, + useBigIntForNumberIDs = false, + useJoinAggregations = true, + usePipelineInSortLookup = true, }: Args): DatabaseAdapterObj { function adapter({ payload }: { payload: Payload }) { const migrationDir = findMigrationDir(migrationDirArg) @@ -279,6 +314,10 @@ export function mongooseAdapter({ updateOne, updateVersion, upsert, + useAlternativeDropDatabase, + useBigIntForNumberIDs, + useJoinAggregations, + usePipelineInSortLookup, }) } @@ -290,6 +329,8 @@ export function mongooseAdapter({ } } +export { compatabilityOptions } from './utilities/compatabilityOptions.js' + /** * Attempt to find migrations directory. 
* diff --git a/packages/db-mongodb/src/models/buildSchema.ts b/packages/db-mongodb/src/models/buildSchema.ts index 56e2cf1130..719f474ef7 100644 --- a/packages/db-mongodb/src/models/buildSchema.ts +++ b/packages/db-mongodb/src/models/buildSchema.ts @@ -143,7 +143,12 @@ export const buildSchema = (args: { const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id') if (idField) { fields = { - _id: idField.type === 'number' ? Number : String, + _id: + idField.type === 'number' + ? payload.db.useBigIntForNumberIDs + ? mongoose.Schema.Types.BigInt + : Number + : String, } schemaFields = schemaFields.filter( (field) => !(fieldAffectsData(field) && field.name === 'id'), @@ -900,7 +905,11 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payloa } if (customIDType === 'number') { - return mongoose.Schema.Types.Number + if (payload.db.useBigIntForNumberIDs) { + return mongoose.Schema.Types.BigInt + } else { + return mongoose.Schema.Types.Number + } } return mongoose.Schema.Types.String diff --git a/packages/db-mongodb/src/queries/buildSortParam.ts b/packages/db-mongodb/src/queries/buildSortParam.ts index 0133736932..551d5bfad7 100644 --- a/packages/db-mongodb/src/queries/buildSortParam.ts +++ b/packages/db-mongodb/src/queries/buildSortParam.ts @@ -99,31 +99,57 @@ const relationshipSort = ({ sortFieldPath = foreignFieldPath.localizedPath.replace('', locale) } - if ( - !sortAggregation.some((each) => { - return '$lookup' in each && each.$lookup.as === `__${path}` - }) - ) { + const as = `__${relationshipPath.replace(/\./g, '__')}` + + // If we have not already sorted on this relationship yet, we need to add a lookup stage + if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) { + let localField = versions ? `version.${relationshipPath}` : relationshipPath + + if (adapter.usePipelineInSortLookup) { + const flattenedField = `__${localField.replace(/\./g, '__')}_lookup` + sortAggregation.push({ + $addFields: { + [flattenedField]: `$${localField}`, + }, + }) + localField = flattenedField + } + sortAggregation.push({ $lookup: { - as: `__${path}`, + as, foreignField: '_id', from: foreignCollection.Model.collection.name, - localField: versions ? 
`version.${relationshipPath}` : relationshipPath, - pipeline: [ - { - $project: { - [sortFieldPath]: true, + localField, + ...(!adapter.usePipelineInSortLookup && { + pipeline: [ + { + $project: { + [sortFieldPath]: true, + }, }, - }, - ], + ], + }), }, }) - sort[`__${path}.${sortFieldPath}`] = sortDirection - - return true + if (adapter.usePipelineInSortLookup) { + sortAggregation.push({ + $unset: localField, + }) + } } + + if (!adapter.usePipelineInSortLookup) { + const lookup = sortAggregation.find( + (each) => '$lookup' in each && each.$lookup.as === as, + ) as PipelineStage.Lookup + const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project + pipeline.$project[sortFieldPath] = true + } + + sort[`${as}.${sortFieldPath}`] = sortDirection + return true } } diff --git a/packages/db-mongodb/src/queryDrafts.ts b/packages/db-mongodb/src/queryDrafts.ts index c43e0c52f4..1dd0e84daf 100644 --- a/packages/db-mongodb/src/queryDrafts.ts +++ b/packages/db-mongodb/src/queryDrafts.ts @@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const queryDrafts: QueryDrafts = async function queryDrafts( @@ -158,6 +159,17 @@ export const queryDrafts: QueryDrafts = async function queryDrafts( result = await Model.paginate(versionQuery, paginationOptions) } + if (!this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: result.docs as Record[], + joins, + locale, + versions: true, + }) + } + transform({ adapter: this, data: result.docs, diff --git a/packages/db-mongodb/src/utilities/aggregatePaginate.ts b/packages/db-mongodb/src/utilities/aggregatePaginate.ts index 237d0a00c9..5e0b6d1de3 100644 --- a/packages/db-mongodb/src/utilities/aggregatePaginate.ts +++ b/packages/db-mongodb/src/utilities/aggregatePaginate.ts @@ -76,7 +76,11 @@ export const aggregatePaginate = async ({ countPromise = Model.estimatedDocumentCount(query) } else { const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined - countPromise = Model.countDocuments(query, { collation, hint, session }) + countPromise = Model.countDocuments(query, { + collation, + session, + ...(hint ? { hint } : {}), + }) } } diff --git a/packages/db-mongodb/src/utilities/buildJoinAggregation.ts b/packages/db-mongodb/src/utilities/buildJoinAggregation.ts index 0d8afb3688..da737d62fc 100644 --- a/packages/db-mongodb/src/utilities/buildJoinAggregation.ts +++ b/packages/db-mongodb/src/utilities/buildJoinAggregation.ts @@ -44,6 +44,9 @@ export const buildJoinAggregation = async ({ projection, versions, }: BuildJoinAggregationArgs): Promise => { + if (!adapter.useJoinAggregations) { + return + } if ( (Object.keys(collectionConfig.joins).length === 0 && collectionConfig.polymorphicJoins.length == 0) || diff --git a/packages/db-mongodb/src/utilities/compatabilityOptions.ts b/packages/db-mongodb/src/utilities/compatabilityOptions.ts new file mode 100644 index 0000000000..bf797895b7 --- /dev/null +++ b/packages/db-mongodb/src/utilities/compatabilityOptions.ts @@ -0,0 +1,25 @@ +import type { Args } from '../index.js' + +/** + * Each key is a mongo-compatible database and the value + * is the recommended `mongooseAdapter` settings for compatability. 
+ */ +export const compatabilityOptions = { + cosmosdb: { + transactionOptions: false, + useJoinAggregations: false, + usePipelineInSortLookup: false, + }, + documentdb: { + disableIndexHints: true, + }, + firestore: { + disableIndexHints: true, + ensureIndexes: false, + transactionOptions: false, + useAlternativeDropDatabase: true, + useBigIntForNumberIDs: true, + useJoinAggregations: false, + usePipelineInSortLookup: false, + }, +} satisfies Record> diff --git a/packages/db-mongodb/src/utilities/resolveJoins.ts b/packages/db-mongodb/src/utilities/resolveJoins.ts new file mode 100644 index 0000000000..fa28c63d76 --- /dev/null +++ b/packages/db-mongodb/src/utilities/resolveJoins.ts @@ -0,0 +1,647 @@ +import type { JoinQuery, SanitizedJoins, Where } from 'payload' + +import { + appendVersionToQueryKey, + buildVersionCollectionFields, + combineQueries, + getQueryDraftsSort, +} from 'payload' +import { fieldShouldBeLocalized } from 'payload/shared' + +import type { MongooseAdapter } from '../index.js' + +import { buildQuery } from '../queries/buildQuery.js' +import { buildSortParam } from '../queries/buildSortParam.js' +import { transform } from './transform.js' + +export type ResolveJoinsArgs = { + /** The MongoDB adapter instance */ + adapter: MongooseAdapter + /** The slug of the collection being queried */ + collectionSlug: string + /** Array of documents to resolve joins for */ + docs: Record[] + /** Join query specifications (which joins to resolve and how) */ + joins?: JoinQuery + /** Optional locale for localized queries */ + locale?: string + /** Optional projection for the join query */ + projection?: Record + /** Whether to resolve versions instead of published documents */ + versions?: boolean +} + +/** + * Resolves join relationships for a collection of documents. + * This function fetches related documents based on join configurations and + * attaches them to the original documents with pagination support. 
+ */ +export async function resolveJoins({ + adapter, + collectionSlug, + docs, + joins, + locale, + projection, + versions = false, +}: ResolveJoinsArgs): Promise { + // Early return if no joins are specified or no documents to process + if (!joins || docs.length === 0) { + return + } + + // Get the collection configuration from the adapter + const collectionConfig = adapter.payload.collections[collectionSlug]?.config + if (!collectionConfig) { + return + } + + // Build a map of join paths to their configurations for quick lookup + // This flattens the nested join structure into a single map keyed by join path + const joinMap: Record = {} + + // Add regular joins + for (const [target, joinList] of Object.entries(collectionConfig.joins)) { + for (const join of joinList) { + joinMap[join.joinPath] = { ...join, targetCollection: target } + } + } + + // Add polymorphic joins + for (const join of collectionConfig.polymorphicJoins || []) { + // For polymorphic joins, we use the collections array as the target + joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string } + } + + // Process each requested join concurrently + const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => { + if (!joinQuery) { + return null + } + + // If a projection is provided, and the join path is not in the projection, skip it + if (projection && !projection[joinPath]) { + return null + } + + // Get the join definition from our map + const joinDef = joinMap[joinPath] + if (!joinDef) { + return null + } + + // Normalize collections to always be an array for unified processing + const allCollections = Array.isArray(joinDef.field.collection) + ? joinDef.field.collection + : [joinDef.field.collection] + + // Use the provided locale or fall back to the default locale for localized fields + const localizationConfig = adapter.payload.config.localization + const effectiveLocale = + locale || + (typeof localizationConfig === 'object' && + localizationConfig && + localizationConfig.defaultLocale) + + // Extract relationTo filter from the where clause to determine which collections to query + const relationToFilter = extractRelationToFilter(joinQuery.where || {}) + + // Determine which collections to query based on relationTo filter + const collections = relationToFilter + ? allCollections.filter((col) => relationToFilter.includes(col)) + : allCollections + + // Check if this is a polymorphic collection join (where field.collection is an array) + const isPolymorphicJoin = Array.isArray(joinDef.field.collection) + + // Apply pagination settings + const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10 + const page = joinQuery.page ?? 1 + const skip = (page - 1) * limit + + // Process collections concurrently + const collectionPromises = collections.map(async (joinCollectionSlug) => { + const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config + if (!targetConfig) { + return null + } + + const useDrafts = versions && Boolean(targetConfig.versions?.drafts) + let JoinModel + if (useDrafts) { + JoinModel = adapter.versions[targetConfig.slug] + } else { + JoinModel = adapter.collections[targetConfig.slug] + } + + if (!JoinModel) { + return null + } + + // Extract all parent document IDs to use in the join query + const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id))) + + // Build the base query + let whereQuery: null | Record = null + whereQuery = isPolymorphicJoin + ? 
filterWhereForCollection( + joinQuery.where || {}, + targetConfig.flattenedFields, + true, // exclude relationTo for individual collections + ) + : joinQuery.where || {} + + // Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins + if (whereQuery === null) { + return null + } + whereQuery = useDrafts + ? await JoinModel.buildQuery({ + locale, + payload: adapter.payload, + where: combineQueries(appendVersionToQueryKey(whereQuery as Where), { + latest: { + equals: true, + }, + }), + }) + : await buildQuery({ + adapter, + collectionSlug: joinCollectionSlug, + fields: targetConfig.flattenedFields, + locale, + where: whereQuery as Where, + }) + + // Handle localized paths and version prefixes + let dbFieldName = joinDef.field.on + + if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) { + const pathSegments = joinDef.field.on.split('.') + const transformedSegments: string[] = [] + const fields = useDrafts + ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true) + : targetConfig.flattenedFields + + for (let i = 0; i < pathSegments.length; i++) { + const segment = pathSegments[i]! + transformedSegments.push(segment) + + // Check if this segment corresponds to a localized field + const fieldAtSegment = fields.find((f) => f.name === segment) + if (fieldAtSegment && fieldAtSegment.localized) { + transformedSegments.push(effectiveLocale) + } + } + + dbFieldName = transformedSegments.join('.') + } + + // Add version prefix for draft queries + if (useDrafts) { + dbFieldName = `version.${dbFieldName}` + } + + // Check if the target field is a polymorphic relationship + const isPolymorphic = joinDef.targetField + ? Array.isArray(joinDef.targetField.relationTo) + : false + + if (isPolymorphic) { + // For polymorphic relationships, we need to match both relationTo and value + whereQuery[`${dbFieldName}.relationTo`] = collectionSlug + whereQuery[`${dbFieldName}.value`] = { $in: parentIDs } + } else { + // For regular relationships and polymorphic collection joins + whereQuery[dbFieldName] = { $in: parentIDs } + } + + // Build the sort parameters for the query + const fields = useDrafts + ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true) + : targetConfig.flattenedFields + + const sort = buildSortParam({ + adapter, + config: adapter.payload.config, + fields, + locale, + sort: useDrafts + ? getQueryDraftsSort({ + collectionConfig: targetConfig, + sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort, + }) + : joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort, + timestamps: true, + }) + + const projection = buildJoinProjection(dbFieldName, useDrafts, sort) + + const [results, dbCount] = await Promise.all([ + JoinModel.find(whereQuery, projection, { + sort, + ...(isPolymorphicJoin ? {} : { limit, skip }), + }).lean(), + isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery), + ]) + + const count = isPolymorphicJoin ? results.length : dbCount + + transform({ + adapter, + data: results, + fields: useDrafts + ? 
buildVersionCollectionFields(adapter.payload.config, targetConfig, false) + : targetConfig.fields, + operation: 'read', + }) + + // Return results with collection info for grouping + return { + collectionSlug: joinCollectionSlug, + count, + dbFieldName, + results, + sort, + useDrafts, + } + }) + + const collectionResults = await Promise.all(collectionPromises) + + // Group the results by parent ID + const grouped: Record< + string, + { + docs: Record[] + sort: Record + } + > = {} + + let totalCount = 0 + for (const collectionResult of collectionResults) { + if (!collectionResult) { + continue + } + + const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult + + totalCount += count + + for (const result of results) { + if (useDrafts) { + result.id = result.parent + } + + const parentValues = getByPathWithArrays(result, dbFieldName) as ( + | { relationTo: string; value: number | string } + | number + | string + )[] + + if (parentValues.length === 0) { + continue + } + + for (let parentValue of parentValues) { + if (!parentValue) { + continue + } + + if (typeof parentValue === 'object') { + parentValue = parentValue.value + } + + const joinData = { + relationTo: collectionSlug, + value: result.id, + } + + const parentKey = parentValue as string + if (!grouped[parentKey]) { + grouped[parentKey] = { + docs: [], + sort, + } + } + + // Always store the ObjectID reference in polymorphic format + grouped[parentKey].docs.push({ + ...result, + __joinData: joinData, + }) + } + } + } + + for (const results of Object.values(grouped)) { + results.docs.sort((a, b) => { + for (const [fieldName, sortOrder] of Object.entries(results.sort)) { + const sort = sortOrder === 'asc' ? 1 : -1 + const aValue = a[fieldName] as Date | number | string + const bValue = b[fieldName] as Date | number | string + if (aValue < bValue) { + return -1 * sort + } + if (aValue > bValue) { + return 1 * sort + } + } + return 0 + }) + results.docs = results.docs.map( + (doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record, + ) + } + + // Determine if the join field should be localized + const localeSuffix = + fieldShouldBeLocalized({ + field: joinDef.field, + parentIsLocalized: joinDef.parentIsLocalized, + }) && + adapter.payload.config.localization && + effectiveLocale + ? `.${effectiveLocale}` + : '' + + // Adjust the join path with locale suffix if needed + const localizedJoinPath = `${joinPath}${localeSuffix}` + + return { + grouped, + isPolymorphicJoin, + joinQuery, + limit, + localizedJoinPath, + page, + skip, + totalCount, + } + }) + + // Wait for all join operations to complete + const joinResults = await Promise.all(joinPromises) + + // Process the results and attach them to documents + for (const joinResult of joinResults) { + if (!joinResult) { + continue + } + + const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } = + joinResult + + // Attach the joined data to each parent document + for (const doc of docs) { + const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string + const all = grouped[id]?.docs || [] + + // Calculate the slice for pagination + // When limit is 0, it means unlimited - return all results + const slice = isPolymorphicJoin + ? limit === 0 + ? 
all + : all.slice(skip, skip + limit) + : // For non-polymorphic joins, we assume that page and limit were applied at the database level + all + + // Create the join result object with pagination metadata + const value: Record = { + docs: slice, + hasNextPage: limit === 0 ? false : totalCount > skip + slice.length, + } + + // Include total count if requested + if (joinQuery.count) { + value.totalDocs = totalCount + } + + // Navigate to the correct nested location in the document and set the join data + // This handles nested join paths like "user.posts" by creating intermediate objects + const segments = localizedJoinPath.split('.') + let ref: Record + if (versions) { + if (!doc.version) { + doc.version = {} + } + ref = doc.version as Record + } else { + ref = doc + } + + for (let i = 0; i < segments.length - 1; i++) { + const seg = segments[i]! + if (!ref[seg]) { + ref[seg] = {} + } + ref = ref[seg] as Record + } + // Set the final join data at the target path + ref[segments[segments.length - 1]!] = value + } + } +} + +/** + * Extracts relationTo filter values from a WHERE clause + * @param where - The WHERE clause to search + * @returns Array of collection slugs if relationTo filter found, null otherwise + */ +function extractRelationToFilter(where: Record): null | string[] { + if (!where || typeof where !== 'object') { + return null + } + + // Check for direct relationTo conditions + if (where.relationTo && typeof where.relationTo === 'object') { + const relationTo = where.relationTo as Record + if (relationTo.in && Array.isArray(relationTo.in)) { + return relationTo.in as string[] + } + if (relationTo.equals) { + return [relationTo.equals as string] + } + } + + // Check for relationTo in logical operators + if (where.and && Array.isArray(where.and)) { + for (const condition of where.and) { + const result = extractRelationToFilter(condition) + if (result) { + return result + } + } + } + + if (where.or && Array.isArray(where.or)) { + for (const condition of where.or) { + const result = extractRelationToFilter(condition) + if (result) { + return result + } + } + } + + return null +} + +/** + * Filters a WHERE clause to only include fields that exist in the target collection + * This is needed for polymorphic joins where different collections have different fields + * @param where - The original WHERE clause + * @param availableFields - The fields available in the target collection + * @param excludeRelationTo - Whether to exclude relationTo field (for individual collections) + * @returns A filtered WHERE clause, or null if the query cannot match this collection + */ +function filterWhereForCollection( + where: Record, + availableFields: Array<{ name: string }>, + excludeRelationTo: boolean = false, +): null | Record { + if (!where || typeof where !== 'object') { + return where + } + + const fieldNames = new Set(availableFields.map((f) => f.name)) + // Add special fields that are available in polymorphic relationships + if (!excludeRelationTo) { + fieldNames.add('relationTo') + } + + const filtered: Record = {} + + for (const [key, value] of Object.entries(where)) { + if (key === 'and') { + // Handle AND operator - all conditions must be satisfiable + if (Array.isArray(value)) { + const filteredConditions: Record[] = [] + + for (const condition of value) { + const filteredCondition = filterWhereForCollection( + condition, + availableFields, + excludeRelationTo, + ) + + // If any condition in AND cannot be satisfied, the whole AND fails + if (filteredCondition === null) { + return null + } 
+ + if (Object.keys(filteredCondition).length > 0) { + filteredConditions.push(filteredCondition) + } + } + + if (filteredConditions.length > 0) { + filtered[key] = filteredConditions + } + } + } else if (key === 'or') { + // Handle OR operator - at least one condition must be satisfiable + if (Array.isArray(value)) { + const filteredConditions = value + .map((condition) => + filterWhereForCollection(condition, availableFields, excludeRelationTo), + ) + .filter((condition) => condition !== null && Object.keys(condition).length > 0) + + if (filteredConditions.length > 0) { + filtered[key] = filteredConditions + } + // If no OR conditions can be satisfied, we still continue (OR is more permissive) + } + } else if (key === 'relationTo' && excludeRelationTo) { + // Skip relationTo field for non-polymorphic collections + continue + } else if (fieldNames.has(key)) { + // Include the condition if the field exists in this collection + filtered[key] = value + } else { + // Field doesn't exist in this collection - this makes the query unsatisfiable + return null + } + } + + return filtered +} + +type SanitizedJoin = SanitizedJoins[string][number] + +/** + * Builds projection for join queries + */ +function buildJoinProjection( + baseFieldName: string, + useDrafts: boolean, + sort: Record, +): Record { + const projection: Record = { + _id: 1, + [baseFieldName]: 1, + } + + if (useDrafts) { + projection.parent = 1 + } + + for (const fieldName of Object.keys(sort)) { + projection[fieldName] = 1 + } + + return projection +} + +/** + * Enhanced utility function to safely traverse nested object properties using dot notation + * Handles arrays by searching through array elements for matching values + * @param doc - The document to traverse + * @param path - Dot-separated path (e.g., "array.category") + * @returns Array of values found at the specified path (for arrays) or single value + */ +function getByPathWithArrays(doc: unknown, path: string): unknown[] { + const segments = path.split('.') + let current = doc + + for (let i = 0; i < segments.length; i++) { + const segment = segments[i]! + + if (current === undefined || current === null) { + return [] + } + + // Get the value at the current segment + const value = (current as Record)[segment] + + if (value === undefined || value === null) { + return [] + } + + // If this is the last segment, return the value(s) + if (i === segments.length - 1) { + return Array.isArray(value) ? 
value : [value] + } + + // If the value is an array and we have more segments to traverse + if (Array.isArray(value)) { + const remainingPath = segments.slice(i + 1).join('.') + const results: unknown[] = [] + + // Search through each array element + for (const item of value) { + if (item && typeof item === 'object') { + const subResults = getByPathWithArrays(item, remainingPath) + results.push(...subResults) + } + } + + return results + } + + // Continue traversing + current = value + } + + return [] +} diff --git a/packages/db-mongodb/src/utilities/transform.ts b/packages/db-mongodb/src/utilities/transform.ts index 7318c29cee..24113806ae 100644 --- a/packages/db-mongodb/src/utilities/transform.ts +++ b/packages/db-mongodb/src/utilities/transform.ts @@ -426,6 +426,11 @@ export const transform = ({ data.id = data.id.toHexString() } + // Handle BigInt conversion for custom ID fields of type 'number' + if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') { + data.id = Number(data.id) + } + if (!adapter.allowAdditionalKeys) { stripFields({ config, diff --git a/test/generateDatabaseAdapter.ts b/test/generateDatabaseAdapter.ts index 5d28069b8b..dd079d4f6a 100644 --- a/test/generateDatabaseAdapter.ts +++ b/test/generateDatabaseAdapter.ts @@ -21,6 +21,25 @@ export const allDatabaseAdapters = { strength: 1, }, })`, + firestore: ` + import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb' + + export const databaseAdapter = mongooseAdapter({ + ...compatabilityOptions.firestore, + url: + process.env.DATABASE_URI || + process.env.MONGODB_MEMORY_SERVER_URI || + 'mongodb://127.0.0.1/payloadtests', + collation: { + strength: 1, + }, + // The following options prevent some tests from failing. + // More work needed to get tests succeeding without these options. 
+ ensureIndexes: true, + transactionOptions: {}, + disableIndexHints: false, + useAlternativeDropDatabase: false, + })`, postgres: ` import { postgresAdapter } from '@payloadcms/db-postgres' diff --git a/test/generateDatabaseSchema.ts b/test/generateDatabaseSchema.ts index a7a84621d7..1adff62d77 100644 --- a/test/generateDatabaseSchema.ts +++ b/test/generateDatabaseSchema.ts @@ -13,7 +13,7 @@ const dirname = path.dirname(filename) const writeDBAdapter = process.env.WRITE_DB_ADAPTER !== 'false' process.env.PAYLOAD_DROP_DATABASE = process.env.PAYLOAD_DROP_DATABASE || 'true' -if (process.env.PAYLOAD_DATABASE === 'mongodb') { +if (process.env.PAYLOAD_DATABASE === 'mongodb' || process.env.PAYLOAD_DATABASE === 'firestore') { throw new Error('Not supported') } diff --git a/test/helpers/isMongoose.ts b/test/helpers/isMongoose.ts index 965e83851f..2f1b7e152f 100644 --- a/test/helpers/isMongoose.ts +++ b/test/helpers/isMongoose.ts @@ -1,5 +1,8 @@ import type { Payload } from 'payload' export function isMongoose(_payload?: Payload) { - return _payload?.db?.name === 'mongoose' || ['mongodb'].includes(process.env.PAYLOAD_DATABASE) + return ( + _payload?.db?.name === 'mongoose' || + ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE) + ) } diff --git a/test/helpers/startMemoryDB.ts b/test/helpers/startMemoryDB.ts index a090524021..17d2a89d45 100644 --- a/test/helpers/startMemoryDB.ts +++ b/test/helpers/startMemoryDB.ts @@ -14,13 +14,17 @@ declare global { */ // eslint-disable-next-line no-restricted-exports export default async () => { + if (process.env.DATABASE_URI) { + return + } process.env.NODE_ENV = 'test' process.env.PAYLOAD_DROP_DATABASE = 'true' process.env.NODE_OPTIONS = '--no-deprecation' process.env.DISABLE_PAYLOAD_HMR = 'true' if ( - (!process.env.PAYLOAD_DATABASE || process.env.PAYLOAD_DATABASE === 'mongodb') && + (!process.env.PAYLOAD_DATABASE || + ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE)) && !global._mongoMemoryServer ) { console.log('Starting memory db...') diff --git a/test/relationships/int.spec.ts b/test/relationships/int.spec.ts index da6189c663..e02ecf86f0 100644 --- a/test/relationships/int.spec.ts +++ b/test/relationships/int.spec.ts @@ -38,7 +38,7 @@ const dirname = path.dirname(filename) type EasierChained = { id: string; relation: EasierChained } -const mongoIt = process.env.PAYLOAD_DATABASE === 'mongodb' ? it : it.skip +const mongoIt = ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE || '') ? 
it : it.skip describe('Relationships', () => { beforeAll(async () => { @@ -791,6 +791,47 @@ describe('Relationships', () => { expect(localized_res_2.docs).toStrictEqual([movie_1, movie_2]) }) + it('should sort by multiple properties of a relationship', async () => { + await payload.delete({ collection: 'directors', where: {} }) + await payload.delete({ collection: 'movies', where: {} }) + + const createDirector = { + collection: 'directors', + data: { + name: 'Dan', + }, + } as const + + const director_1 = await payload.create(createDirector) + const director_2 = await payload.create(createDirector) + + const movie_1 = await payload.create({ + collection: 'movies', + depth: 0, + data: { director: director_1.id, name: 'Some Movie 1' }, + }) + + const movie_2 = await payload.create({ + collection: 'movies', + depth: 0, + data: { director: director_2.id, name: 'Some Movie 2' }, + }) + + const res_1 = await payload.find({ + collection: 'movies', + sort: ['director.name', 'director.createdAt'], + depth: 0, + }) + const res_2 = await payload.find({ + collection: 'movies', + sort: ['director.name', '-director.createdAt'], + depth: 0, + }) + + expect(res_1.docs).toStrictEqual([movie_1, movie_2]) + expect(res_2.docs).toStrictEqual([movie_2, movie_1]) + }) + it('should sort by a property of a hasMany relationship', async () => { const movie1 = await payload.create({ collection: 'movies', From cab7ba4a8aae9c806bf64fb2b23bb2ec95534e60 Mon Sep 17 00:00:00 2001 From: Sean Zubrickas Date: Wed, 16 Jul 2025 12:36:32 -0700 Subject: [PATCH 04/91] =?UTF-8?q?fix:=20Enhances=20field-level=20access=20?= =?UTF-8?q?controls=20on=20Users=20collection=20to=20address=20s=E2=80=A6?= =?UTF-8?q?=20(#13197)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enhance field-level access controls on Users collection to address security concerns - Restricted read/update access on `email` field to admins and the user themselves - Locked down `roles` field so only admins can create, read, or update it --- examples/auth/src/collections/Users.ts | 35 ++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/examples/auth/src/collections/Users.ts b/examples/auth/src/collections/Users.ts index 67f00ebdd4..a0f9d334a0 100644 --- a/examples/auth/src/collections/Users.ts +++ b/examples/auth/src/collections/Users.ts @@ -6,6 +6,8 @@ import { anyone } from './access/anyone' import { checkRole } from './access/checkRole' import { loginAfterCreate } from './hooks/loginAfterCreate' import { protectRoles } from './hooks/protectRoles' +import { access } from 'fs' +import { create } from 'domain' export const Users: CollectionConfig = { slug: 'users', @@ -32,6 +34,34 @@ export const Users: CollectionConfig = { afterChange: [loginAfterCreate], }, fields: [ + { + name: 'email', + type: 'email', + required: true, + unique: true, + access: { + read: adminsAndUser, + update: adminsAndUser, + }, + }, + { + name: 'password', + type: 'password', + required: true, + admin: { + description: 'Leave blank to keep the current password.', + }, + }, + { + name: 'resetPasswordToken', + type: 'text', + hidden: true, + }, + { + name: 'resetPasswordExpiration', + type: 'date', + hidden: true, + }, { name: 'firstName', type: 'text', @@ -45,6 +75,11 @@ export const Users: CollectionConfig = { type: 'select', hasMany: true, saveToJWT: true, + access: { + read: admins, + update: admins, + create: admins, + }, hooks: { beforeChange: [protectRoles], }, From a20b43624b108412aef646f6628c6ce07ad64e55 Mon Sep 17 
00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 17 Jul 2025 00:18:14 +0300 Subject: [PATCH 05/91] feat: add `findDistinct` operation (#13102) Adds a new operation findDistinct that can give you distinct values of a field for a given collection Example: Assume you have a collection posts with multiple documents, and some of them share the same title: ```js // Example dataset (some titles appear multiple times) [ { title: 'title-1' }, { title: 'title-2' }, { title: 'title-1' }, { title: 'title-3' }, { title: 'title-2' }, { title: 'title-4' }, { title: 'title-5' }, { title: 'title-6' }, { title: 'title-7' }, { title: 'title-8' }, { title: 'title-9' }, ] ``` You can now retrieve all unique title values using findDistinct: ```js const result = await payload.findDistinct({ collection: 'posts', field: 'title', }) console.log(result.values) // Output: // [ // 'title-1', // 'title-2', // 'title-3', // 'title-4', // 'title-5', // 'title-6', // 'title-7', // 'title-8', // 'title-9' // ] ``` You can also limit the number of distinct results: ```js const limitedResult = await payload.findDistinct({ collection: 'posts', field: 'title', sortOrder: 'desc', limit: 3, }) console.log(limitedResult.values) // Output: // [ // 'title-1', // 'title-2', // 'title-3' // ] ``` You can also pass a `where` query to filter the documents. --- docs/local-api/overview.mdx | 21 ++ packages/db-mongodb/src/findDistinct.ts | 141 +++++++++++++ packages/db-mongodb/src/index.ts | 2 + packages/db-postgres/src/index.ts | 2 + packages/db-sqlite/src/countDistinct.ts | 6 +- packages/db-sqlite/src/index.ts | 2 + packages/db-sqlite/src/types.ts | 2 + packages/db-vercel-postgres/src/index.ts | 2 + packages/drizzle/src/findDistinct.ts | 108 ++++++++++ packages/drizzle/src/index.ts | 1 + .../drizzle/src/postgres/countDistinct.ts | 6 +- packages/drizzle/src/postgres/types.ts | 2 + packages/drizzle/src/queries/parseParams.ts | 18 +- .../drizzle/src/queries/selectDistinct.ts | 4 +- packages/drizzle/src/types.ts | 1 + .../drizzle/src/utilities/rawConstraint.ts | 2 + .../payload/src/collections/config/types.ts | 1 + .../src/collections/endpoints/findDistinct.ts | 46 +++++ .../src/collections/endpoints/index.ts | 7 + .../collections/operations/findDistinct.ts | 189 ++++++++++++++++++ .../operations/local/findDistinct.ts | 138 +++++++++++++ .../src/collections/operations/utils.ts | 7 + packages/payload/src/database/types.ts | 37 +++- packages/payload/src/index.ts | 24 ++- test/database/int.spec.ts | 112 +++++++++++ ...50707_123508.json => 20250714_201659.json} | 2 +- ...{20250707_123508.ts => 20250714_201659.ts} | 0 .../up-down-migration/migrations/index.ts | 10 +- test/helpers/NextRESTClient.ts | 2 +- 29 files changed, 869 insertions(+), 26 deletions(-) create mode 100644 packages/db-mongodb/src/findDistinct.ts create mode 100644 packages/drizzle/src/findDistinct.ts create mode 100644 packages/payload/src/collections/endpoints/findDistinct.ts create mode 100644 packages/payload/src/collections/operations/findDistinct.ts create mode 100644 packages/payload/src/collections/operations/local/findDistinct.ts rename test/database/up-down-migration/migrations/{20250707_123508.json => 20250714_201659.json} (99%) rename test/database/up-down-migration/migrations/{20250707_123508.ts => 20250714_201659.ts} (100%) diff --git a/docs/local-api/overview.mdx b/docs/local-api/overview.mdx index cb2e74f4fa..4d39424b5f 100644 --- a/docs/local-api/overview.mdx +++ b/docs/local-api/overview.mdx @@ -194,6 +194,27 @@ const result = 
await payload.count({ }) ``` +### FindDistinct#collection-find-distinct + +```js +// Result will be an object with: +// { +// values: ['value-1', 'value-2'], // array of distinct values, +// field: 'title', // the field +// totalDocs: 10, // count of the distinct values satisfies query, +// perPage: 10, // count of distinct values per page (based on provided limit) +// } +const result = await payload.findDistinct({ + collection: 'posts', // required + locale: 'en', + where: {}, // pass a `where` query here + user: dummyUser, + overrideAccess: false, + field: 'title', + sort: 'title', +}) +``` + ### Update by ID#collection-update-by-id ```js diff --git a/packages/db-mongodb/src/findDistinct.ts b/packages/db-mongodb/src/findDistinct.ts new file mode 100644 index 0000000000..bc77a8cab4 --- /dev/null +++ b/packages/db-mongodb/src/findDistinct.ts @@ -0,0 +1,141 @@ +import type { PipelineStage } from 'mongoose' + +import { type FindDistinct, getFieldByPath } from 'payload' + +import type { MongooseAdapter } from './index.js' + +import { buildQuery } from './queries/buildQuery.js' +import { buildSortParam } from './queries/buildSortParam.js' +import { getCollection } from './utilities/getEntity.js' +import { getSession } from './utilities/getSession.js' + +export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) { + const { collectionConfig, Model } = getCollection({ + adapter: this, + collectionSlug: args.collection, + }) + + const session = await getSession(this, args.req) + + const { where = {} } = args + + const sortAggregation: PipelineStage[] = [] + + const sort = buildSortParam({ + adapter: this, + config: this.payload.config, + fields: collectionConfig.flattenedFields, + locale: args.locale, + sort: args.sort ?? args.field, + sortAggregation, + timestamps: true, + }) + + const query = await buildQuery({ + adapter: this, + collectionSlug: args.collection, + fields: collectionConfig.flattenedFields, + locale: args.locale, + where, + }) + + const fieldPathResult = getFieldByPath({ + fields: collectionConfig.flattenedFields, + path: args.field, + }) + let fieldPath = args.field + if (fieldPathResult?.pathHasLocalized && args.locale) { + fieldPath = fieldPathResult.localizedPath.replace('', args.locale) + } + + const page = args.page || 1 + + const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key. + const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1 + + const pipeline: PipelineStage[] = [ + { + $match: query, + }, + ...(sortAggregation.length > 0 ? sortAggregation : []), + + { + $group: { + _id: { + _field: `$${fieldPath}`, + ...(sortProperty === fieldPath + ? {} + : { + _sort: `$${sortProperty}`, + }), + }, + }, + }, + { + $sort: { + [sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection, + }, + }, + ] + + const getValues = async () => { + return Model.aggregate(pipeline, { session }).then((res) => + res.map((each) => ({ + [args.field]: JSON.parse(JSON.stringify(each._id._field)), + })), + ) + } + + if (args.limit) { + pipeline.push({ + $skip: (page - 1) * args.limit, + }) + pipeline.push({ $limit: args.limit }) + const totalDocs = await Model.aggregate( + [ + { + $match: query, + }, + { + $group: { + _id: `$${fieldPath}`, + }, + }, + { $count: 'count' }, + ], + { + session, + }, + ).then((res) => res[0]?.count ?? 
0) + const totalPages = Math.ceil(totalDocs / args.limit) + const hasPrevPage = page > 1 + const hasNextPage = totalPages > page + const pagingCounter = (page - 1) * args.limit + 1 + + return { + hasNextPage, + hasPrevPage, + limit: args.limit, + nextPage: hasNextPage ? page + 1 : null, + page, + pagingCounter, + prevPage: hasPrevPage ? page - 1 : null, + totalDocs, + totalPages, + values: await getValues(), + } + } + + const values = await getValues() + + return { + hasNextPage: false, + hasPrevPage: false, + limit: 0, + page: 1, + pagingCounter: 1, + totalDocs: values.length, + totalPages: 1, + values, + } +} diff --git a/packages/db-mongodb/src/index.ts b/packages/db-mongodb/src/index.ts index f2f152533a..08c8e6cb6f 100644 --- a/packages/db-mongodb/src/index.ts +++ b/packages/db-mongodb/src/index.ts @@ -42,6 +42,7 @@ import { deleteOne } from './deleteOne.js' import { deleteVersions } from './deleteVersions.js' import { destroy } from './destroy.js' import { find } from './find.js' +import { findDistinct } from './findDistinct.js' import { findGlobal } from './findGlobal.js' import { findGlobalVersions } from './findGlobalVersions.js' import { findOne } from './findOne.js' @@ -297,6 +298,7 @@ export function mongooseAdapter({ destroy, disableFallbackSort, find, + findDistinct, findGlobal, findGlobalVersions, findOne, diff --git a/packages/db-postgres/src/index.ts b/packages/db-postgres/src/index.ts index a6769cb735..df431424bd 100644 --- a/packages/db-postgres/src/index.ts +++ b/packages/db-postgres/src/index.ts @@ -17,6 +17,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -120,6 +121,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj json: true, }, fieldConstraints: {}, + findDistinct, generateSchema: createSchemaGenerator({ columnToCodeConverter, corePackageSuffix: 'pg-core', diff --git a/packages/db-sqlite/src/countDistinct.ts b/packages/db-sqlite/src/countDistinct.ts index ae729138f0..cbb51cee1d 100644 --- a/packages/db-sqlite/src/countDistinct.ts +++ b/packages/db-sqlite/src/countDistinct.ts @@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js' export const countDistinct: CountDistinct = async function countDistinct( this: SQLiteAdapter, - { db, joins, tableName, where }, + { column, db, joins, tableName, where }, ) { // When we don't have any joins - use a simple COUNT(*) query. if (joins.length === 0) { const countResult = await db .select({ - count: count(), + count: column ? count(sql`DISTINCT ${column}`) : count(), }) .from(this.tables[tableName]) .where(where) @@ -25,7 +25,7 @@ export const countDistinct: CountDistinct = async function countDistinct( }) .from(this.tables[tableName]) .where(where) - .groupBy(this.tables[tableName].id) + .groupBy(column ?? 
this.tables[tableName].id) .limit(1) .$dynamic() diff --git a/packages/db-sqlite/src/index.ts b/packages/db-sqlite/src/index.ts index 015ce9ba92..0cae319680 100644 --- a/packages/db-sqlite/src/index.ts +++ b/packages/db-sqlite/src/index.ts @@ -18,6 +18,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -101,6 +102,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj { json: true, }, fieldConstraints: {}, + findDistinct, generateSchema: createSchemaGenerator({ columnToCodeConverter, corePackageSuffix: 'sqlite-core', diff --git a/packages/db-sqlite/src/types.ts b/packages/db-sqlite/src/types.ts index 568f3a4dc3..5aa84c9935 100644 --- a/packages/db-sqlite/src/types.ts +++ b/packages/db-sqlite/src/types.ts @@ -5,6 +5,7 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm' import type { LibSQLDatabase } from 'drizzle-orm/libsql' import type { AnySQLiteColumn, + SQLiteColumn, SQLiteInsertOnConflictDoUpdateConfig, SQLiteTableWithColumns, SQLiteTransactionConfig, @@ -87,6 +88,7 @@ export type GenericTable = SQLiteTableWithColumns<{ export type GenericRelation = Relations>> export type CountDistinct = (args: { + column?: SQLiteColumn db: LibSQLDatabase joins: BuildQueryJoinAliases tableName: string diff --git a/packages/db-vercel-postgres/src/index.ts b/packages/db-vercel-postgres/src/index.ts index a9fd65f63c..155bdc2a2d 100644 --- a/packages/db-vercel-postgres/src/index.ts +++ b/packages/db-vercel-postgres/src/index.ts @@ -18,6 +18,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -174,6 +175,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj { + query = query.orderBy(() => orderBy.map(({ column, order }) => order(column))) + + if (args.limit) { + if (offset) { + query = query.offset(offset) + } + + query = query.limit(args.limit) + } + + return query + }, + selectFields: { + _selected: selectFields['_selected'], + ...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }), + } as Record, + tableName, + where, + }) + + const values = selectDistinctResult.map((each) => ({ + [args.field]: (each as Record)._selected, + })) + + if (args.limit) { + const totalDocs = await this.countDistinct({ + column: selectFields['_selected'], + db, + joins, + tableName, + where, + }) + + const totalPages = Math.ceil(totalDocs / args.limit) + const hasPrevPage = page > 1 + const hasNextPage = totalPages > page + const pagingCounter = (page - 1) * args.limit + 1 + + return { + hasNextPage, + hasPrevPage, + limit: args.limit, + nextPage: hasNextPage ? page + 1 : null, + page, + pagingCounter, + prevPage: hasPrevPage ? 
page - 1 : null, + totalDocs, + totalPages, + values, + } + } + + return { + hasNextPage: false, + hasPrevPage: false, + limit: 0, + page: 1, + pagingCounter: 1, + totalDocs: values.length, + totalPages: 1, + values, + } +} diff --git a/packages/drizzle/src/index.ts b/packages/drizzle/src/index.ts index 6650b26178..dd1055bdfc 100644 --- a/packages/drizzle/src/index.ts +++ b/packages/drizzle/src/index.ts @@ -12,6 +12,7 @@ export { deleteVersions } from './deleteVersions.js' export { destroy } from './destroy.js' export { find } from './find.js' export { chainMethods } from './find/chainMethods.js' +export { findDistinct } from './findDistinct.js' export { findGlobal } from './findGlobal.js' export { findGlobalVersions } from './findGlobalVersions.js' export { findMigrationDir } from './findMigrationDir.js' diff --git a/packages/drizzle/src/postgres/countDistinct.ts b/packages/drizzle/src/postgres/countDistinct.ts index 04d7559fcf..55f4ea8ad9 100644 --- a/packages/drizzle/src/postgres/countDistinct.ts +++ b/packages/drizzle/src/postgres/countDistinct.ts @@ -6,13 +6,13 @@ import type { BasePostgresAdapter, CountDistinct } from './types.js' export const countDistinct: CountDistinct = async function countDistinct( this: BasePostgresAdapter, - { db, joins, tableName, where }, + { column, db, joins, tableName, where }, ) { // When we don't have any joins - use a simple COUNT(*) query. if (joins.length === 0) { const countResult = await db .select({ - count: count(), + count: column ? count(sql`DISTINCT ${column}`) : count(), }) .from(this.tables[tableName]) .where(where) @@ -26,7 +26,7 @@ export const countDistinct: CountDistinct = async function countDistinct( }) .from(this.tables[tableName]) .where(where) - .groupBy(this.tables[tableName].id) + .groupBy(column || this.tables[tableName].id) .limit(1) .$dynamic() diff --git a/packages/drizzle/src/postgres/types.ts b/packages/drizzle/src/postgres/types.ts index 696d13797d..60ed3a0749 100644 --- a/packages/drizzle/src/postgres/types.ts +++ b/packages/drizzle/src/postgres/types.ts @@ -20,6 +20,7 @@ import type { UniqueConstraintBuilder, } from 'drizzle-orm/pg-core' import type { PgTableFn } from 'drizzle-orm/pg-core/table' +import type { SQLiteColumn } from 'drizzle-orm/sqlite-core' import type { Payload, PayloadRequest } from 'payload' import type { ClientConfig, QueryResult } from 'pg' @@ -64,6 +65,7 @@ export type GenericRelation = Relations> export type PostgresDB = NodePgDatabase> export type CountDistinct = (args: { + column?: PgColumn | SQLiteColumn db: PostgresDB | TransactionPg joins: BuildQueryJoinAliases tableName: string diff --git a/packages/drizzle/src/queries/parseParams.ts b/packages/drizzle/src/queries/parseParams.ts index a5b88d4d74..b43dad70a4 100644 --- a/packages/drizzle/src/queries/parseParams.ts +++ b/packages/drizzle/src/queries/parseParams.ts @@ -10,6 +10,7 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js' import type { BuildQueryJoinAliases } from './buildQuery.js' import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js' +import { DistinctSymbol } from '../utilities/rawConstraint.js' import { buildAndOrConditions } from './buildAndOrConditions.js' import { getTableColumnFromPath } from './getTableColumnFromPath.js' import { sanitizeQueryValue } from './sanitizeQueryValue.js' @@ -108,6 +109,17 @@ export function parseParams({ value: val, }) + const resolvedColumn = + rawColumn || + (aliasTable && tableName === getNameFromDrizzleTable(table) + ? 
aliasTable[columnName] + : table[columnName]) + + if (val === DistinctSymbol) { + selectFields['_selected'] = resolvedColumn + break + } + queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => { if (typeof value === 'string' && value.indexOf('%') > -1) { constraints.push(adapter.operators.like(constraintTable[col], value)) @@ -281,12 +293,6 @@ export function parseParams({ break } - const resolvedColumn = - rawColumn || - (aliasTable && tableName === getNameFromDrizzleTable(table) - ? aliasTable[columnName] - : table[columnName]) - if (queryOperator === 'not_equals' && queryValue !== null) { constraints.push( or( diff --git a/packages/drizzle/src/queries/selectDistinct.ts b/packages/drizzle/src/queries/selectDistinct.ts index 7cb6b5fc0f..25bf75ba4d 100644 --- a/packages/drizzle/src/queries/selectDistinct.ts +++ b/packages/drizzle/src/queries/selectDistinct.ts @@ -14,6 +14,7 @@ import type { BuildQueryJoinAliases } from './buildQuery.js' type Args = { adapter: DrizzleAdapter db: DrizzleAdapter['drizzle'] | DrizzleTransaction + forceRun?: boolean joins: BuildQueryJoinAliases query?: (args: { query: SQLiteSelect }) => SQLiteSelect selectFields: Record @@ -27,13 +28,14 @@ type Args = { export const selectDistinct = ({ adapter, db, + forceRun, joins, query: queryModifier = ({ query }) => query, selectFields, tableName, where, }: Args): QueryPromise<{ id: number | string }[] & Record> => { - if (Object.keys(joins).length > 0) { + if (forceRun || Object.keys(joins).length > 0) { let query: SQLiteSelect const table = adapter.tables[tableName] diff --git a/packages/drizzle/src/types.ts b/packages/drizzle/src/types.ts index 42f01b7ce8..84dd5f1e74 100644 --- a/packages/drizzle/src/types.ts +++ b/packages/drizzle/src/types.ts @@ -89,6 +89,7 @@ export type TransactionPg = PgTransaction< export type DrizzleTransaction = TransactionPg | TransactionSQLite export type CountDistinct = (args: { + column?: PgColumn | SQLiteColumn db: DrizzleTransaction | LibSQLDatabase | PostgresDB joins: BuildQueryJoinAliases tableName: string diff --git a/packages/drizzle/src/utilities/rawConstraint.ts b/packages/drizzle/src/utilities/rawConstraint.ts index f47ceed9c0..2105532e3b 100644 --- a/packages/drizzle/src/utilities/rawConstraint.ts +++ b/packages/drizzle/src/utilities/rawConstraint.ts @@ -1,5 +1,7 @@ const RawConstraintSymbol = Symbol('RawConstraint') +export const DistinctSymbol = Symbol('DistinctSymbol') + /** * You can use this to inject a raw query to where */ diff --git a/packages/payload/src/collections/config/types.ts b/packages/payload/src/collections/config/types.ts index 43f80ae91d..4414715544 100644 --- a/packages/payload/src/collections/config/types.ts +++ b/packages/payload/src/collections/config/types.ts @@ -82,6 +82,7 @@ export type HookOperationType = | 'forgotPassword' | 'login' | 'read' + | 'readDistinct' | 'refresh' | 'resetPassword' | 'update' diff --git a/packages/payload/src/collections/endpoints/findDistinct.ts b/packages/payload/src/collections/endpoints/findDistinct.ts new file mode 100644 index 0000000000..3a7eb4b927 --- /dev/null +++ b/packages/payload/src/collections/endpoints/findDistinct.ts @@ -0,0 +1,46 @@ +import { status as httpStatus } from 'http-status' + +import type { PayloadHandler } from '../../config/types.js' +import type { Where } from '../../types/index.js' + +import { APIError } from '../../errors/APIError.js' +import { getRequestCollection } from '../../utilities/getRequestEntity.js' +import { headersWithCors } from 
'../../utilities/headersWithCors.js' +import { isNumber } from '../../utilities/isNumber.js' +import { findDistinctOperation } from '../operations/findDistinct.js' + +export const findDistinctHandler: PayloadHandler = async (req) => { + const collection = getRequestCollection(req) + const { depth, field, limit, page, sort, where } = req.query as { + depth?: string + field?: string + limit?: string + page?: string + sort?: string + sortOrder?: string + where?: Where + } + + if (!field) { + throw new APIError('field must be specified', httpStatus.BAD_REQUEST) + } + + const result = await findDistinctOperation({ + collection, + depth: isNumber(depth) ? Number(depth) : undefined, + field, + limit: isNumber(limit) ? Number(limit) : undefined, + page: isNumber(page) ? Number(page) : undefined, + req, + sort: typeof sort === 'string' ? sort.split(',') : undefined, + where, + }) + + return Response.json(result, { + headers: headersWithCors({ + headers: new Headers(), + req, + }), + status: httpStatus.OK, + }) +} diff --git a/packages/payload/src/collections/endpoints/index.ts b/packages/payload/src/collections/endpoints/index.ts index bab76e2db5..368cd58eb6 100644 --- a/packages/payload/src/collections/endpoints/index.ts +++ b/packages/payload/src/collections/endpoints/index.ts @@ -9,6 +9,7 @@ import { docAccessHandler } from './docAccess.js' import { duplicateHandler } from './duplicate.js' import { findHandler } from './find.js' import { findByIDHandler } from './findByID.js' +import { findDistinctHandler } from './findDistinct.js' import { findVersionByIDHandler } from './findVersionByID.js' import { findVersionsHandler } from './findVersions.js' import { previewHandler } from './preview.js' @@ -48,6 +49,12 @@ export const defaultCollectionEndpoints: Endpoint[] = [ method: 'get', path: '/versions', }, + // Might be uncommented in the future + // { + // handler: findDistinctHandler, + // method: 'get', + // path: '/distinct', + // }, { handler: duplicateHandler, method: 'post', diff --git a/packages/payload/src/collections/operations/findDistinct.ts b/packages/payload/src/collections/operations/findDistinct.ts new file mode 100644 index 0000000000..2c814f5f4f --- /dev/null +++ b/packages/payload/src/collections/operations/findDistinct.ts @@ -0,0 +1,189 @@ +import httpStatus from 'http-status' + +import type { AccessResult } from '../../config/types.js' +import type { PaginatedDistinctDocs } from '../../database/types.js' +import type { PayloadRequest, PopulateType, Sort, Where } from '../../types/index.js' +import type { Collection } from '../config/types.js' + +import { executeAccess } from '../../auth/executeAccess.js' +import { combineQueries } from '../../database/combineQueries.js' +import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js' +import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js' +import { APIError } from '../../errors/APIError.js' +import { Forbidden } from '../../errors/Forbidden.js' +import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js' +import { getFieldByPath } from '../../utilities/getFieldByPath.js' +import { killTransaction } from '../../utilities/killTransaction.js' +import { buildAfterOperation } from './utils.js' + +export type Arguments = { + collection: Collection + depth?: number + disableErrors?: boolean + field: string + limit?: number + locale?: string + overrideAccess?: boolean + page?: number + populate?: PopulateType + req?: PayloadRequest + 
showHiddenFields?: boolean + sort?: Sort + where?: Where +} +export const findDistinctOperation = async ( + incomingArgs: Arguments, +): Promise>> => { + let args = incomingArgs + + try { + // ///////////////////////////////////// + // beforeOperation - Collection + // ///////////////////////////////////// + + if (args.collection.config.hooks?.beforeOperation?.length) { + for (const hook of args.collection.config.hooks.beforeOperation) { + args = + (await hook({ + args, + collection: args.collection.config, + context: args.req!.context, + operation: 'readDistinct', + req: args.req!, + })) || args + } + } + + const { + collection: { config: collectionConfig }, + disableErrors, + overrideAccess, + populate, + showHiddenFields = false, + where, + } = args + + const req = args.req! + const { locale, payload } = req + + // ///////////////////////////////////// + // Access + // ///////////////////////////////////// + + let accessResult: AccessResult + + if (!overrideAccess) { + accessResult = await executeAccess({ disableErrors, req }, collectionConfig.access.read) + + // If errors are disabled, and access returns false, return empty results + if (accessResult === false) { + return { + hasNextPage: false, + hasPrevPage: false, + limit: args.limit || 0, + nextPage: null, + page: 1, + pagingCounter: 1, + prevPage: null, + totalDocs: 0, + totalPages: 0, + values: [], + } + } + } + + // ///////////////////////////////////// + // Find Distinct + // ///////////////////////////////////// + + const fullWhere = combineQueries(where!, accessResult!) + sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere }) + + await validateQueryPaths({ + collectionConfig, + overrideAccess: overrideAccess!, + req, + where: where ?? {}, + }) + + const fieldResult = getFieldByPath({ + fields: collectionConfig.flattenedFields, + path: args.field, + }) + + if (!fieldResult) { + throw new APIError( + `Field ${args.field} was not found in the collection ${collectionConfig.slug}`, + httpStatus.BAD_REQUEST, + ) + } + + if (fieldResult.field.hidden && !showHiddenFields) { + throw new Forbidden(req.t) + } + + if (fieldResult.field.access?.read) { + const hasAccess = await fieldResult.field.access.read({ req }) + if (!hasAccess) { + throw new Forbidden(req.t) + } + } + + let result = await payload.db.findDistinct({ + collection: collectionConfig.slug, + field: args.field, + limit: args.limit, + locale: locale!, + page: args.page, + req, + sort: args.sort, + where: fullWhere, + }) + + if ( + (fieldResult.field.type === 'relationship' || fieldResult.field.type === 'upload') && + args.depth + ) { + const populationPromises: Promise[] = [] + for (const doc of result.values) { + populationPromises.push( + relationshipPopulationPromise({ + currentDepth: 0, + depth: args.depth, + draft: false, + fallbackLocale: req.fallbackLocale || null, + field: fieldResult.field, + locale: req.locale || null, + overrideAccess: args.overrideAccess ?? 
true, + parentIsLocalized: false, + populate, + req, + showHiddenFields: false, + siblingDoc: doc, + }), + ) + } + await Promise.all(populationPromises) + } + + // ///////////////////////////////////// + // afterOperation - Collection + // ///////////////////////////////////// + + result = await buildAfterOperation({ + args, + collection: collectionConfig, + operation: 'findDistinct', + result, + }) + + // ///////////////////////////////////// + // Return results + // ///////////////////////////////////// + + return result + } catch (error: unknown) { + await killTransaction(args.req!) + throw error + } +} diff --git a/packages/payload/src/collections/operations/local/findDistinct.ts b/packages/payload/src/collections/operations/local/findDistinct.ts new file mode 100644 index 0000000000..2a0ca5cd73 --- /dev/null +++ b/packages/payload/src/collections/operations/local/findDistinct.ts @@ -0,0 +1,138 @@ +import type { + CollectionSlug, + DataFromCollectionSlug, + Document, + PaginatedDistinctDocs, + Payload, + PayloadRequest, + PopulateType, + RequestContext, + Sort, + TypedLocale, + Where, +} from '../../../index.js' +import type { CreateLocalReqOptions } from '../../../utilities/createLocalReq.js' + +import { APIError, createLocalReq } from '../../../index.js' +import { findDistinctOperation } from '../findDistinct.js' + +export type Options< + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug, +> = { + /** + * the Collection slug to operate against. + */ + collection: TSlug + /** + * [Context](https://payloadcms.com/docs/hooks/context), which will then be passed to `context` and `req.context`, + * which can be read by hooks. Useful if you want to pass additional information to the hooks which + * shouldn't be necessarily part of the document, for example a `triggerBeforeChange` option which can be read by the BeforeChange hook + * to determine if it should run or not. + */ + context?: RequestContext + /** + * [Control auto-population](https://payloadcms.com/docs/queries/depth) of nested relationship and upload fields. + */ + depth?: number + /** + * When set to `true`, errors will not be thrown. + */ + disableErrors?: boolean + /** + * The field to get distinct values for + */ + field: TField + /** + * The maximum distinct field values to be returned. + * By default the operation returns all the values. + */ + limit?: number + /** + * Specify [locale](https://payloadcms.com/docs/configuration/localization) for any returned documents. + */ + locale?: 'all' | TypedLocale + /** + * Skip access control. + * Set to `false` if you want to respect Access Control for the operation, for example when fetching data for the fron-end. + * @default true + */ + overrideAccess?: boolean + /** + * Get a specific page number (if limit is specified) + * @default 1 + */ + page?: number + /** + * Specify [populate](https://payloadcms.com/docs/queries/select#populate) to control which fields to include to the result from populated documents. + */ + populate?: PopulateType + /** + * The `PayloadRequest` object. You can pass it to thread the current [transaction](https://payloadcms.com/docs/database/transactions), user and locale to the operation. + * Recommended to pass when using the Local API from hooks, as usually you want to execute the operation within the current transaction. + */ + req?: Partial + /** + * Opt-in to receiving hidden fields. By default, they are hidden from returned documents in accordance to your config. 
+ * @default false + */ + showHiddenFields?: boolean + /** + * Sort the documents, can be a string or an array of strings + * @example '-createdAt' // Sort DESC by createdAt + * @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt + */ + sort?: Sort + /** + * If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks. + */ + user?: Document + /** + * A filter [query](https://payloadcms.com/docs/queries/overview) + */ + where?: Where +} + +export async function findDistinct< + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug & string, +>( + payload: Payload, + options: Options, +): Promise[TField]>>> { + const { + collection: collectionSlug, + depth = 0, + disableErrors, + field, + limit, + overrideAccess = true, + page, + populate, + showHiddenFields, + sort, + where, + } = options + const collection = payload.collections[collectionSlug] + + if (!collection) { + throw new APIError( + `The collection with slug ${String(collectionSlug)} can't be found. Find Operation.`, + ) + } + + return findDistinctOperation({ + collection, + depth, + disableErrors, + field, + limit, + overrideAccess, + page, + populate, + req: await createLocalReq(options as CreateLocalReqOptions, payload), + showHiddenFields, + sort, + where, + }) as Promise[TField]>>> +} diff --git a/packages/payload/src/collections/operations/utils.ts b/packages/payload/src/collections/operations/utils.ts index 6ea8497248..faade92d48 100644 --- a/packages/payload/src/collections/operations/utils.ts +++ b/packages/payload/src/collections/operations/utils.ts @@ -12,6 +12,7 @@ import type { deleteOperation } from './delete.js' import type { deleteByIDOperation } from './deleteByID.js' import type { findOperation } from './find.js' import type { findByIDOperation } from './findByID.js' +import type { findDistinctOperation } from './findDistinct.js' import type { updateOperation } from './update.js' import type { updateByIDOperation } from './updateByID.js' @@ -30,6 +31,7 @@ export type AfterOperationMap = { boolean, SelectFromCollectionSlug > + findDistinct: typeof findDistinctOperation forgotPassword: typeof forgotPasswordOperation login: typeof loginOperation refresh: typeof refreshOperation @@ -81,6 +83,11 @@ export type AfterOperationArg = { operation: 'findByID' result: Awaited['findByID']>> } + | { + args: Parameters['findDistinct']>[0] + operation: 'findDistinct' + result: Awaited['findDistinct']>> + } | { args: Parameters['forgotPassword']>[0] operation: 'forgotPassword' diff --git a/packages/payload/src/database/types.ts b/packages/payload/src/database/types.ts index ca94f76997..c88b8324c8 100644 --- a/packages/payload/src/database/types.ts +++ b/packages/payload/src/database/types.ts @@ -63,6 +63,8 @@ export interface BaseDatabaseAdapter { find: Find + findDistinct: FindDistinct + findGlobal: FindGlobal findGlobalVersions: FindGlobalVersions @@ -82,16 +84,15 @@ export interface BaseDatabaseAdapter { * Run any migration up functions that have not yet been performed and update the status */ migrate: (args?: { migrations?: Migration[] }) => Promise - /** * Run any migration down functions that have been performed */ migrateDown: () => Promise + /** * Drop the current database and run all migrate up functions */ migrateFresh: (args: { forceAcceptWarning?: boolean }) => Promise - /** * Run all migration down functions before running up */ @@ -104,6 +105,7 @@ export interface BaseDatabaseAdapter { * Read the current state of migrations 
and output the result to show which have been run */ migrateStatus: () => Promise + /** * Path to read and write migration files from */ @@ -113,7 +115,6 @@ export interface BaseDatabaseAdapter { * The name of the database adapter */ name: string - /** * Full package name of the database adapter * @@ -124,6 +125,7 @@ export interface BaseDatabaseAdapter { * reference to the instance of payload */ payload: Payload + queryDrafts: QueryDrafts /** @@ -151,7 +153,6 @@ export interface BaseDatabaseAdapter { updateMany: UpdateMany updateOne: UpdateOne - updateVersion: UpdateVersion upsert: Upsert } @@ -481,6 +482,34 @@ export type CreateArgs = { select?: SelectType } +export type FindDistinctArgs = { + collection: CollectionSlug + field: string + limit?: number + locale?: string + page?: number + req?: Partial + sort?: Sort + where?: Where +} + +export type PaginatedDistinctDocs> = { + hasNextPage: boolean + hasPrevPage: boolean + limit: number + nextPage?: null | number | undefined + page: number + pagingCounter: number + prevPage?: null | number | undefined + totalDocs: number + totalPages: number + values: T[] +} + +export type FindDistinct = ( + args: FindDistinctArgs, +) => Promise>> + export type Create = (args: CreateArgs) => Promise export type UpdateOneArgs = { diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 0c2ea26805..5909e20dd6 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -40,7 +40,7 @@ import { } from './auth/operations/local/verifyEmail.js' export type { FieldState } from './admin/forms/Form.js' import type { InitOptions, SanitizedConfig } from './config/types.js' -import type { BaseDatabaseAdapter, PaginatedDocs } from './database/types.js' +import type { BaseDatabaseAdapter, PaginatedDistinctDocs, PaginatedDocs } from './database/types.js' import type { InitializedEmailAdapter } from './email/types.js' import type { DataFromGlobalSlug, Globals, SelectFromGlobalSlug } from './globals/config/types.js' import type { @@ -72,6 +72,10 @@ import { findByIDLocal, type Options as FindByIDOptions, } from './collections/operations/local/findByID.js' +import { + findDistinct as findDistinctLocal, + type Options as FindDistinctOptions, +} from './collections/operations/local/findDistinct.js' import { findVersionByIDLocal, type Options as FindVersionByIDOptions, @@ -464,6 +468,20 @@ export class BasePayload { return findByIDLocal(this, options) } + /** + * @description Find distinct field values + * @param options + * @returns result with distinct field values + */ + findDistinct = async < + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug & string, + >( + options: FindDistinctOptions, + ): Promise[TField]>>> => { + return findDistinctLocal(this, options) + } + findGlobal = async >( options: FindGlobalOptions, ): Promise> => { @@ -1174,7 +1192,6 @@ export { updateOperation } from './collections/operations/update.js' export { updateByIDOperation } from './collections/operations/updateByID.js' export { buildConfig } from './config/build.js' - export { type ClientConfig, createClientConfig, @@ -1183,6 +1200,7 @@ export { type UnsanitizedClientConfig, } from './config/client.js' export { defaults } from './config/defaults.js' + export { type OrderableEndpointBody } from './config/orderable/index.js' export { sanitizeConfig } from './config/sanitize.js' export type * from './config/types.js' @@ -1237,6 +1255,7 @@ export type { Destroy, Find, FindArgs, + FindDistinct, FindGlobal, FindGlobalArgs, 
FindGlobalVersions, @@ -1250,6 +1269,7 @@ export type { Migration, MigrationData, MigrationTemplateArgs, + PaginatedDistinctDocs, PaginatedDocs, QueryDrafts, QueryDraftsArgs, diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 9bd4ae5418..8cc74b84f7 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -385,6 +385,118 @@ describe('database', () => { }) }) + it('should find distinct field values of the collection', async () => { + await payload.delete({ collection: 'posts', where: {} }) + const titles = [ + 'title-1', + 'title-2', + 'title-3', + 'title-4', + 'title-5', + 'title-6', + 'title-7', + 'title-8', + 'title-9', + ].map((title) => ({ title })) + + for (const { title } of titles) { + // eslint-disable-next-line jest/no-conditional-in-test + const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 2 : 1 + for (let i = 0; i < docsCount; i++) { + await payload.create({ collection: 'posts', data: { title } }) + } + } + + const res = await payload.findDistinct({ + collection: 'posts', + field: 'title', + }) + + expect(res.values).toStrictEqual(titles) + + // const resREST = await restClient + // .GET('/posts/distinct', { + // headers: { + // Authorization: `Bearer ${token}`, + // }, + // query: { sortOrder: 'asc', field: 'title' }, + // }) + // .then((res) => res.json()) + + // expect(resREST.values).toEqual(titles) + + const resLimit = await payload.findDistinct({ + collection: 'posts', + field: 'title', + limit: 3, + }) + + expect(resLimit.values).toStrictEqual( + ['title-1', 'title-2', 'title-3'].map((title) => ({ title })), + ) + // count is still 9 + expect(resLimit.totalDocs).toBe(9) + + const resDesc = await payload.findDistinct({ + collection: 'posts', + sort: '-title', + field: 'title', + }) + + expect(resDesc.values).toStrictEqual(titles.toReversed()) + + const resAscDefault = await payload.findDistinct({ + collection: 'posts', + field: 'title', + }) + + expect(resAscDefault.values).toStrictEqual(titles) + }) + + it('should populate distinct relationships when depth>0', async () => { + await payload.delete({ collection: 'posts', where: {} }) + + const categories = ['category-1', 'category-2', 'category-3', 'category-4'].map((title) => ({ + title, + })) + + const categoriesIDS: { category: string }[] = [] + + for (const { title } of categories) { + const doc = await payload.create({ collection: 'categories', data: { title } }) + categoriesIDS.push({ category: doc.id }) + } + + for (const { category } of categoriesIDS) { + // eslint-disable-next-line jest/no-conditional-in-test + const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 
2 : 1 + for (let i = 0; i < docsCount; i++) { + await payload.create({ collection: 'posts', data: { title: randomUUID(), category } }) + } + } + + const resultDepth0 = await payload.findDistinct({ + collection: 'posts', + sort: 'category.title', + field: 'category', + }) + expect(resultDepth0.values).toStrictEqual(categoriesIDS) + const resultDepth1 = await payload.findDistinct({ + depth: 1, + collection: 'posts', + field: 'category', + sort: 'category.title', + }) + + for (let i = 0; i < resultDepth1.values.length; i++) { + const fromRes = resultDepth1.values[i] as any + const id = categoriesIDS[i].category as any + const title = categories[i]?.title + expect(fromRes.category.title).toBe(title) + expect(fromRes.category.id).toBe(id) + } + }) + describe('Compound Indexes', () => { beforeEach(async () => { await payload.delete({ collection: 'compound-indexes', where: {} }) diff --git a/test/database/up-down-migration/migrations/20250707_123508.json b/test/database/up-down-migration/migrations/20250714_201659.json similarity index 99% rename from test/database/up-down-migration/migrations/20250707_123508.json rename to test/database/up-down-migration/migrations/20250714_201659.json index f541345218..18d7fcf69c 100644 --- a/test/database/up-down-migration/migrations/20250707_123508.json +++ b/test/database/up-down-migration/migrations/20250714_201659.json @@ -1,5 +1,5 @@ { - "id": "bf183b76-944c-4e83-bd58-4aa993885106", + "id": "80e7a0d2-ffb3-4f22-8597-0442b3ab8102", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", diff --git a/test/database/up-down-migration/migrations/20250707_123508.ts b/test/database/up-down-migration/migrations/20250714_201659.ts similarity index 100% rename from test/database/up-down-migration/migrations/20250707_123508.ts rename to test/database/up-down-migration/migrations/20250714_201659.ts diff --git a/test/database/up-down-migration/migrations/index.ts b/test/database/up-down-migration/migrations/index.ts index 0c0f710443..fea58e46c2 100644 --- a/test/database/up-down-migration/migrations/index.ts +++ b/test/database/up-down-migration/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250707_123508 from './20250707_123508.js' +import * as migration_20250714_201659 from './20250714_201659.js'; export const migrations = [ { - up: migration_20250707_123508.up, - down: migration_20250707_123508.down, - name: '20250707_123508', + up: migration_20250714_201659.up, + down: migration_20250714_201659.down, + name: '20250714_201659' }, -] +]; diff --git a/test/helpers/NextRESTClient.ts b/test/helpers/NextRESTClient.ts index d49d11f33b..30c65eec53 100644 --- a/test/helpers/NextRESTClient.ts +++ b/test/helpers/NextRESTClient.ts @@ -16,7 +16,7 @@ import { devUser } from '../credentials.js' type ValidPath = `/${string}` type RequestOptions = { auth?: boolean - query?: { + query?: { [key: string]: unknown } & { depth?: number fallbackLocale?: string joins?: JoinQuery From 6ae730b33b7124d7c343f968ce67106004d5c6cc Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Thu, 17 Jul 2025 06:24:37 -0700 Subject: [PATCH 06/91] feat(richtext-lexical): export $createLinkNode and $isLinkNode for server use (#13205) Exports `$createLinkNode`, `$isLinkNode` and the equivalent modules for autolinks. 
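For reference, a minimal server-side sketch of how these exports might be used (assumptions: an editor state built with payload's link/autolink nodes is available, and the nodes expose their link data via `getFields()` returning `LinkFields` — verify against the actual node API):

```ts
import { $getRoot, $isElementNode, type LexicalNode } from 'lexical'
import { $isAutoLinkNode, $isLinkNode } from '@payloadcms/richtext-lexical'

// Walk an editor state and collect the URLs of all link/autolink nodes.
// Intended to be called inside editorState.read(() => collectLinkURLs($getRoot())).
export const collectLinkURLs = (node: LexicalNode, urls: string[] = []): string[] => {
  if ($isLinkNode(node) || $isAutoLinkNode(node)) {
    // assumption: getFields() exposes the node's LinkFields, including an optional `url`
    const url = node.getFields()?.url
    if (url) {
      urls.push(url)
    }
  }
  if ($isElementNode(node)) {
    for (const child of node.getChildren()) {
      collectLinkURLs(child, urls)
    }
  }
  return urls
}
```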
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210710489889573 --- packages/richtext-lexical/src/index.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/richtext-lexical/src/index.ts b/packages/richtext-lexical/src/index.ts index d10a51c166..1256d68880 100644 --- a/packages/richtext-lexical/src/index.ts +++ b/packages/richtext-lexical/src/index.ts @@ -925,11 +925,16 @@ export { HeadingFeature, type HeadingFeatureProps } from './features/heading/ser export { HorizontalRuleFeature } from './features/horizontalRule/server/index.js' export { IndentFeature } from './features/indent/server/index.js' -export { AutoLinkNode } from './features/link/nodes/AutoLinkNode.js' -export { LinkNode } from './features/link/nodes/LinkNode.js' +export { + $createAutoLinkNode, + $isAutoLinkNode, + AutoLinkNode, +} from './features/link/nodes/AutoLinkNode.js' +export { $createLinkNode, $isLinkNode, LinkNode } from './features/link/nodes/LinkNode.js' export type { LinkFields } from './features/link/nodes/types.js' export { LinkFeature, type LinkFeatureServerProps } from './features/link/server/index.js' + export { ChecklistFeature } from './features/lists/checklist/server/index.js' export { OrderedListFeature } from './features/lists/orderedList/server/index.js' From 12539c61d4d47aa6caac217414b3dfad676bb2f3 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Thu, 17 Jul 2025 13:24:22 -0400 Subject: [PATCH 07/91] feat(ui): supports collection scoped folders (#12797) As discussed in [this RFC](https://github.com/payloadcms/payload/discussions/12729), this PR supports collection-scoped folders. You can scope folders to multiple collection types or just one. This unlocks the possibility to have folders on a per-collection basis instead of always being shared across every collection. You can combine this feature with `browseByFolder: false` to completely isolate a collection from other collections.
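For illustration, a minimal config sketch of what this enables (the `posts` and `media` slugs are hypothetical, only the folder-related options are shown, and the exact shape of the per-collection `folders` option should be checked against the released types):

```ts
import type { Config } from 'payload'

// Partial config: two folder-enabled collections plus the root folders options.
const folderConfig: Partial<Config> = {
  collections: [
    {
      slug: 'posts',
      fields: [{ name: 'title', type: 'text' }],
      folders: true, // opt this collection into folders
    },
    {
      slug: 'media',
      fields: [],
      upload: true,
      // keep folders for this collection, but hide it from the shared Browse-by-folder view
      folders: { browseByFolder: false },
    },
  ],
  folders: {
    // collection-scoped folders (the default). Each folder can then be limited to
    // one or more collections via its `folderType` select field.
    collectionSpecific: true,
  },
}
```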
Things left to do: - [x] ~~Create a custom react component for the selecting of collectionSlugs to filter out available options based on the current folders parameters~~ https://github.com/user-attachments/assets/14cb1f09-8d70-4cb9-b1e2-09da89302995 --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210564397815557 --- .github/workflows/main.yml | 2 + .../src/views/BrowseByFolder/buildView.tsx | 88 +++- .../src/views/CollectionFolders/buildView.tsx | 28 +- packages/payload/src/admin/functions/index.ts | 32 +- .../payload/src/admin/views/folderList.ts | 1 + packages/payload/src/config/defaults.ts | 17 +- packages/payload/src/config/sanitize.ts | 43 +- .../src/folders/addFolderCollection.ts | 51 ++ .../src/folders/addFolderCollections.ts | 56 --- .../src/folders/addFolderFieldToCollection.ts | 33 ++ .../payload/src/folders/buildFolderField.ts | 108 +++++ .../src/folders/createFolderCollection.ts | 163 ++++--- .../folders/endpoints/populateFolderData.ts | 135 ------ .../hooks/ensureSafeCollectionsChange.ts | 144 ++++++ packages/payload/src/folders/types.ts | 15 +- .../utils/formatFolderOrDocumentItem.ts | 1 + .../src/folders/utils/getFolderBreadcrumbs.ts | 2 + .../src/folders/utils/getFolderData.ts | 45 +- .../utils/getFoldersAndDocumentsFromJoin.ts | 8 +- .../utilities/combineWhereConstraints.spec.ts | 86 ++++ .../src/utilities/combineWhereConstraints.ts | 25 +- packages/translations/src/clientKeys.ts | 1 + packages/translations/src/languages/ar.ts | 1 + packages/translations/src/languages/az.ts | 1 + packages/translations/src/languages/bg.ts | 2 + packages/translations/src/languages/bnBd.ts | 2 + packages/translations/src/languages/bnIn.ts | 2 + packages/translations/src/languages/ca.ts | 2 + packages/translations/src/languages/cs.ts | 2 + packages/translations/src/languages/da.ts | 2 + packages/translations/src/languages/de.ts | 2 + packages/translations/src/languages/en.ts | 2 + packages/translations/src/languages/es.ts | 2 + packages/translations/src/languages/et.ts | 1 + packages/translations/src/languages/fa.ts | 1 + packages/translations/src/languages/fr.ts | 2 + packages/translations/src/languages/he.ts | 1 + packages/translations/src/languages/hr.ts | 2 + packages/translations/src/languages/hu.ts | 2 + packages/translations/src/languages/hy.ts | 2 + packages/translations/src/languages/it.ts | 2 + packages/translations/src/languages/ja.ts | 2 + packages/translations/src/languages/ko.ts | 1 + packages/translations/src/languages/lt.ts | 2 + packages/translations/src/languages/lv.ts | 2 + packages/translations/src/languages/my.ts | 1 + packages/translations/src/languages/nb.ts | 1 + packages/translations/src/languages/nl.ts | 2 + packages/translations/src/languages/pl.ts | 2 + packages/translations/src/languages/pt.ts | 2 + packages/translations/src/languages/ro.ts | 2 + packages/translations/src/languages/rs.ts | 2 + .../translations/src/languages/rsLatin.ts | 2 + packages/translations/src/languages/ru.ts | 2 + packages/translations/src/languages/sk.ts | 2 + packages/translations/src/languages/sl.ts | 2 + packages/translations/src/languages/sv.ts | 1 + packages/translations/src/languages/th.ts | 1 + packages/translations/src/languages/tr.ts | 2 + packages/translations/src/languages/uk.ts | 2 + packages/translations/src/languages/vi.ts | 1 + packages/translations/src/languages/zh.ts | 1 + packages/translations/src/languages/zhTw.ts | 1 + .../elements/FolderView/Breadcrumbs/index.tsx | 9 +- 
.../FolderView/CurrentFolderActions/index.tsx | 1 + .../FolderView/DragOverlaySelection/index.tsx | 12 +- .../FolderView/DraggableTableRow/index.tsx | 1 - .../FolderView/DraggableWithClick/index.scss | 2 +- .../FolderView/DraggableWithClick/index.tsx | 20 +- .../Drawers/MoveToFolder/index.scss | 4 + .../FolderView/Drawers/MoveToFolder/index.tsx | 16 +- .../index.scss | 0 .../index.tsx | 2 +- .../{Field => FolderField}/index.scss | 0 .../{Field => FolderField}/index.server.tsx | 2 +- .../FolderView/FolderFileCard/index.scss | 48 +- .../FolderView/FolderFileCard/index.tsx | 75 ++- .../FolderView/FolderFileTable/index.tsx | 32 +- .../FolderView/FolderTypeField/index.tsx | 140 ++++++ .../FolderView/MoveDocToFolder/index.tsx | 5 +- .../elements/FolderView/SortByPill/index.tsx | 31 +- .../ListCreateNewDocInFolderButton.tsx | 5 + packages/ui/src/exports/client/index.ts | 3 +- packages/ui/src/exports/rsc/index.ts | 2 +- packages/ui/src/fields/Checkbox/Input.tsx | 6 +- packages/ui/src/fields/Select/index.tsx | 2 +- .../Folders/groupItemIDsByRelation.ts | 15 + packages/ui/src/providers/Folders/index.tsx | 455 +++++++++++++----- .../ui/src/providers/Folders/selection.ts | 52 -- .../getFolderResultsComponentAndData.tsx | 75 ++- .../ui/src/views/BrowseByFolder/index.tsx | 46 +- .../CollectionFolder/ListSelection/index.tsx | 7 +- .../ui/src/views/CollectionFolder/index.tsx | 23 +- test/folders/e2e.spec.ts | 339 +++++++++---- test/folders/int.spec.ts | 169 ++++++- test/folders/payload-types.ts | 2 + test/folders/tsconfig.json | 3 + .../folders/applyBrowseByFolderTypeFilter.ts | 41 ++ test/helpers/folders/clickFolderCard.ts | 22 +- test/helpers/folders/createFolder.ts | 24 +- test/helpers/folders/createFolderDoc.ts | 26 + test/helpers/folders/createFolderFromDoc.ts | 25 +- 102 files changed, 2127 insertions(+), 768 deletions(-) create mode 100644 packages/payload/src/folders/addFolderCollection.ts delete mode 100644 packages/payload/src/folders/addFolderCollections.ts create mode 100644 packages/payload/src/folders/addFolderFieldToCollection.ts create mode 100644 packages/payload/src/folders/buildFolderField.ts delete mode 100644 packages/payload/src/folders/endpoints/populateFolderData.ts create mode 100644 packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts create mode 100644 packages/payload/src/utilities/combineWhereConstraints.spec.ts rename packages/ui/src/elements/FolderView/{CollectionTypePill => FilterFolderTypePill}/index.scss (100%) rename packages/ui/src/elements/FolderView/{CollectionTypePill => FilterFolderTypePill}/index.tsx (97%) rename packages/ui/src/elements/FolderView/{Field => FolderField}/index.scss (100%) rename packages/ui/src/elements/FolderView/{Field => FolderField}/index.server.tsx (87%) create mode 100644 packages/ui/src/elements/FolderView/FolderTypeField/index.tsx create mode 100644 packages/ui/src/providers/Folders/groupItemIDsByRelation.ts delete mode 100644 packages/ui/src/providers/Folders/selection.ts create mode 100644 test/folders/tsconfig.json create mode 100644 test/helpers/folders/applyBrowseByFolderTypeFilter.ts create mode 100644 test/helpers/folders/createFolderDoc.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 60b6ac9655..e10abad457 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -284,6 +284,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - folders - hooks - lexical__collections__Lexical__e2e__main - 
lexical__collections__Lexical__e2e__blocks @@ -418,6 +419,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - folders - hooks - lexical__collections__Lexical__e2e__main - lexical__collections__Lexical__e2e__blocks diff --git a/packages/next/src/views/BrowseByFolder/buildView.tsx b/packages/next/src/views/BrowseByFolder/buildView.tsx index b57da8a60a..59775ad7d3 100644 --- a/packages/next/src/views/BrowseByFolder/buildView.tsx +++ b/packages/next/src/views/BrowseByFolder/buildView.tsx @@ -58,20 +58,45 @@ export const buildBrowseByFolderView = async ( throw new Error('not-found') } - const browseByFolderSlugs = browseByFolderSlugsFromArgs.filter( + const foldersSlug = config.folders.slug + + /** + * All visiible folder enabled collection slugs that the user has read permissions for. + */ + const allowReadCollectionSlugs = browseByFolderSlugsFromArgs.filter( (collectionSlug) => permissions?.collections?.[collectionSlug]?.read && visibleEntities.collections.includes(collectionSlug), ) - const query = queryFromArgs || queryFromReq - const activeCollectionFolderSlugs: string[] = - Array.isArray(query?.relationTo) && query.relationTo.length - ? query.relationTo.filter( - (slug) => - browseByFolderSlugs.includes(slug) || (config.folders && slug === config.folders.slug), - ) - : [...browseByFolderSlugs, config.folders.slug] + const query = + queryFromArgs || + ((queryFromReq + ? { + ...queryFromReq, + relationTo: + typeof queryFromReq?.relationTo === 'string' + ? JSON.parse(queryFromReq.relationTo) + : undefined, + } + : {}) as ListQuery) + + /** + * If a folderID is provided and the relationTo query param exists, + * we filter the collection slugs to only those that are allowed to be read. + * + * If no folderID is provided, only folders should be active and displayed (the root view). + */ + let collectionsToDisplay: string[] = [] + if (folderID && Array.isArray(query?.relationTo)) { + collectionsToDisplay = query.relationTo.filter( + (slug) => allowReadCollectionSlugs.includes(slug) || slug === foldersSlug, + ) + } else if (folderID) { + collectionsToDisplay = [...allowReadCollectionSlugs, foldersSlug] + } else { + collectionsToDisplay = [foldersSlug] + } const { routes: { admin: adminRoute }, @@ -93,14 +118,15 @@ export const buildBrowseByFolderView = async ( }, }) - const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || '_folderOrDocumentTitle' + const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || 'name' const viewPreference = browseByFolderPreferences?.viewPreference || 'grid' - const { breadcrumbs, documents, FolderResultsComponent, subfolders } = + const { breadcrumbs, documents, folderAssignedCollections, FolderResultsComponent, subfolders } = await getFolderResultsComponentAndData({ - activeCollectionSlugs: activeCollectionFolderSlugs, - browseByFolder: false, + browseByFolder: true, + collectionsToDisplay, displayAs: viewPreference, + folderAssignedCollections: collectionsToDisplay.filter((slug) => slug !== foldersSlug) || [], folderID, req: initPageResult.req, sort: sortPreference, @@ -142,10 +168,33 @@ export const buildBrowseByFolderView = async ( // serverProps, // }) - // documents cannot be created without a parent folder in this view - const allowCreateCollectionSlugs = resolvedFolderID - ? 
[config.folders.slug, ...browseByFolderSlugs] - : [config.folders.slug] + // Filter down allCollectionFolderSlugs by the ones the current folder is assingned to + const allAvailableCollectionSlugs = + folderID && Array.isArray(folderAssignedCollections) && folderAssignedCollections.length + ? allowReadCollectionSlugs.filter((slug) => folderAssignedCollections.includes(slug)) + : allowReadCollectionSlugs + + // Filter down activeCollectionFolderSlugs by the ones the current folder is assingned to + const availableActiveCollectionFolderSlugs = collectionsToDisplay.filter((slug) => { + if (slug === foldersSlug) { + return permissions?.collections?.[foldersSlug]?.read + } else { + return !folderAssignedCollections || folderAssignedCollections.includes(slug) + } + }) + + // Documents cannot be created without a parent folder in this view + const allowCreateCollectionSlugs = ( + resolvedFolderID ? [foldersSlug, ...allAvailableCollectionSlugs] : [foldersSlug] + ).filter((collectionSlug) => { + if (collectionSlug === foldersSlug) { + return permissions?.collections?.[foldersSlug]?.create + } + return ( + permissions?.collections?.[collectionSlug]?.create && + visibleEntities.collections.includes(collectionSlug) + ) + }) return { View: ( @@ -154,8 +203,8 @@ export const buildBrowseByFolderView = async ( {RenderServerComponent({ clientProps: { // ...folderViewSlots, - activeCollectionFolderSlugs, - allCollectionFolderSlugs: browseByFolderSlugs, + activeCollectionFolderSlugs: availableActiveCollectionFolderSlugs, + allCollectionFolderSlugs: allAvailableCollectionSlugs, allowCreateCollectionSlugs, baseFolderPath: `/browse-by-folder`, breadcrumbs, @@ -163,6 +212,7 @@ export const buildBrowseByFolderView = async ( disableBulkEdit, documents, enableRowSelections, + folderAssignedCollections, folderFieldName: config.folders.fieldName, folderID: resolvedFolderID || null, FolderResultsComponent, diff --git a/packages/next/src/views/CollectionFolders/buildView.tsx b/packages/next/src/views/CollectionFolders/buildView.tsx index e06bfcc2c0..8a110f20f7 100644 --- a/packages/next/src/views/CollectionFolders/buildView.tsx +++ b/packages/next/src/views/CollectionFolders/buildView.tsx @@ -97,23 +97,28 @@ export const buildCollectionFolderView = async ( }, }) - const sortPreference: FolderSortKeys = - collectionFolderPreferences?.sort || '_folderOrDocumentTitle' + const sortPreference: FolderSortKeys = collectionFolderPreferences?.sort || 'name' const viewPreference = collectionFolderPreferences?.viewPreference || 'grid' const { routes: { admin: adminRoute }, } = config - const { breadcrumbs, documents, FolderResultsComponent, subfolders } = - await getFolderResultsComponentAndData({ - activeCollectionSlugs: [config.folders.slug, collectionSlug], - browseByFolder: false, - displayAs: viewPreference, - folderID, - req: initPageResult.req, - sort: sortPreference, - }) + const { + breadcrumbs, + documents, + folderAssignedCollections, + FolderResultsComponent, + subfolders, + } = await getFolderResultsComponentAndData({ + browseByFolder: false, + collectionsToDisplay: [config.folders.slug, collectionSlug], + displayAs: viewPreference, + folderAssignedCollections: [collectionSlug], + folderID, + req: initPageResult.req, + sort: sortPreference, + }) const resolvedFolderID = breadcrumbs[breadcrumbs.length - 1]?.id @@ -182,6 +187,7 @@ export const buildCollectionFolderView = async ( disableBulkEdit, documents, enableRowSelections, + folderAssignedCollections, folderFieldName: config.folders.fieldName, folderID: 
resolvedFolderID || null, FolderResultsComponent, diff --git a/packages/payload/src/admin/functions/index.ts b/packages/payload/src/admin/functions/index.ts index e3676a10a7..d14fd04d3a 100644 --- a/packages/payload/src/admin/functions/index.ts +++ b/packages/payload/src/admin/functions/index.ts @@ -1,7 +1,7 @@ import type { ImportMap } from '../../bin/generateImportMap/index.js' import type { SanitizedConfig } from '../../config/types.js' import type { PaginatedDocs } from '../../database/types.js' -import type { CollectionSlug, ColumnPreference } from '../../index.js' +import type { CollectionSlug, ColumnPreference, FolderSortKeys } from '../../index.js' import type { PayloadRequest, Sort, Where } from '../../types/index.js' import type { ColumnsFromURL } from '../../utilities/transformColumnPreferences.js' @@ -78,10 +78,36 @@ export type BuildCollectionFolderViewResult = { } export type GetFolderResultsComponentAndDataArgs = { - activeCollectionSlugs: CollectionSlug[] + /** + * If true and no folderID is provided, only folders will be returned. + * If false, the results will include documents from the active collections. + */ browseByFolder: boolean + /** + * Used to filter document types to include in the results/display. + * + * i.e. ['folders', 'posts'] will only include folders and posts in the results. + * + * collectionsToQuery? + */ + collectionsToDisplay: CollectionSlug[] + /** + * Used to determine how the results should be displayed. + */ displayAs: 'grid' | 'list' + /** + * Used to filter folders by the collections they are assigned to. + * + * i.e. ['posts'] will only include folders that are assigned to the posts collections. + */ + folderAssignedCollections: CollectionSlug[] + /** + * The ID of the folder to filter results by. + */ folderID: number | string | undefined req: PayloadRequest - sort: string + /** + * The sort order for the results. + */ + sort: FolderSortKeys } diff --git a/packages/payload/src/admin/views/folderList.ts b/packages/payload/src/admin/views/folderList.ts index b1074fe467..18b9aac736 100644 --- a/packages/payload/src/admin/views/folderList.ts +++ b/packages/payload/src/admin/views/folderList.ts @@ -30,6 +30,7 @@ export type FolderListViewClientProps = { disableBulkEdit?: boolean documents: FolderOrDocument[] enableRowSelections?: boolean + folderAssignedCollections?: SanitizedCollectionConfig['slug'][] folderFieldName: string folderID: null | number | string FolderResultsComponent: React.ReactNode diff --git a/packages/payload/src/config/defaults.ts b/packages/payload/src/config/defaults.ts index b5a4063bb3..77dc94677c 100644 --- a/packages/payload/src/config/defaults.ts +++ b/packages/payload/src/config/defaults.ts @@ -163,14 +163,17 @@ export const addDefaultsToConfig = (config: Config): Config => { ...(config.auth || {}), } - const hasFolderCollections = config.collections.some((collection) => Boolean(collection.folders)) - if (hasFolderCollections) { + if ( + config.folders !== false && + config.collections.some((collection) => Boolean(collection.folders)) + ) { config.folders = { - slug: foldersSlug, - browseByFolder: true, - debug: false, - fieldName: parentFolderFieldName, - ...(config.folders || {}), + slug: config.folders?.slug ?? foldersSlug, + browseByFolder: config.folders?.browseByFolder ?? true, + collectionOverrides: config.folders?.collectionOverrides || undefined, + collectionSpecific: config.folders?.collectionSpecific ?? true, + debug: config.folders?.debug ?? false, + fieldName: config.folders?.fieldName ?? 
parentFolderFieldName, } } else { config.folders = false diff --git a/packages/payload/src/config/sanitize.ts b/packages/payload/src/config/sanitize.ts index d79b2d73fc..c90ee9703b 100644 --- a/packages/payload/src/config/sanitize.ts +++ b/packages/payload/src/config/sanitize.ts @@ -3,6 +3,7 @@ import type { AcceptedLanguages } from '@payloadcms/translations' import { en } from '@payloadcms/translations/languages/en' import { deepMergeSimple } from '@payloadcms/translations/utilities' +import type { CollectionSlug, GlobalSlug, SanitizedCollectionConfig } from '../index.js' import type { SanitizedJobsConfig } from '../queues/config/types/index.js' import type { Config, @@ -18,15 +19,10 @@ import { sanitizeCollection } from '../collections/config/sanitize.js' import { migrationsCollection } from '../database/migrations/migrationsCollection.js' import { DuplicateCollection, InvalidConfiguration } from '../errors/index.js' import { defaultTimezones } from '../fields/baseFields/timezone/defaultTimezones.js' -import { addFolderCollections } from '../folders/addFolderCollections.js' +import { addFolderCollection } from '../folders/addFolderCollection.js' +import { addFolderFieldToCollection } from '../folders/addFolderFieldToCollection.js' import { sanitizeGlobal } from '../globals/config/sanitize.js' -import { - baseBlockFields, - type CollectionSlug, - formatLabels, - type GlobalSlug, - sanitizeFields, -} from '../index.js' +import { baseBlockFields, formatLabels, sanitizeFields } from '../index.js' import { getLockedDocumentsCollection, lockedDocumentsCollectionSlug, @@ -191,8 +187,6 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise() - await addFolderCollections(config as unknown as Config) - const validRelationships = [ ...(config.collections?.map((c) => c.slug) ?? 
[]), jobsCollectionSlug, @@ -200,6 +194,10 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise 0) { @@ -332,6 +345,16 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise + folderEnabledCollections: CollectionConfig[] + richTextSanitizationPromises?: Array<(config: SanitizedConfig) => Promise> + validRelationships?: string[] +}): Promise { + if (config.folders === false) { + return + } + + let folderCollectionConfig = createFolderCollection({ + slug: config.folders!.slug as string, + collectionSpecific, + debug: config.folders!.debug, + folderEnabledCollections, + folderFieldName: config.folders!.fieldName as string, + }) + + const collectionIndex = config.collections!.push(folderCollectionConfig) + + if ( + Array.isArray(config.folders?.collectionOverrides) && + config?.folders.collectionOverrides.length + ) { + for (const override of config.folders.collectionOverrides) { + folderCollectionConfig = await override({ collection: folderCollectionConfig }) + } + } + + const sanitizedCollectionWithOverrides = await sanitizeCollection( + config as unknown as Config, + folderCollectionConfig, + richTextSanitizationPromises, + validRelationships, + ) + + config.collections![collectionIndex - 1] = sanitizedCollectionWithOverrides +} diff --git a/packages/payload/src/folders/addFolderCollections.ts b/packages/payload/src/folders/addFolderCollections.ts deleted file mode 100644 index deb9323197..0000000000 --- a/packages/payload/src/folders/addFolderCollections.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type { Config } from '../config/types.js' -import type { CollectionSlug } from '../index.js' - -import { createFolderCollection } from './createFolderCollection.js' - -export async function addFolderCollections(config: NonNullable): Promise { - if (!config.collections || !config.folders) { - return - } - - const enabledCollectionSlugs: CollectionSlug[] = [] - const debug = Boolean(config?.folders?.debug) - const folderFieldName = config?.folders?.fieldName as unknown as string - const folderSlug = config?.folders?.slug as unknown as CollectionSlug - - for (let i = 0; i < config.collections.length; i++) { - const collection = config.collections[i] - if (collection && collection?.folders) { - collection.fields.push({ - name: folderFieldName, - type: 'relationship', - admin: { - allowCreate: false, - allowEdit: false, - components: { - Cell: '@payloadcms/ui/rsc#FolderTableCell', - Field: '@payloadcms/ui/rsc#FolderEditField', - }, - }, - index: true, - label: 'Folder', - relationTo: folderSlug, - }) - enabledCollectionSlugs.push(collection.slug) - } - } - - if (enabledCollectionSlugs.length) { - let folderCollection = createFolderCollection({ - slug: folderSlug, - collectionSlugs: enabledCollectionSlugs, - debug, - folderFieldName, - }) - - if ( - Array.isArray(config?.folders?.collectionOverrides) && - config?.folders.collectionOverrides.length - ) { - for (const override of config.folders.collectionOverrides) { - folderCollection = await override({ collection: folderCollection }) - } - } - config.collections.push(folderCollection) - } -} diff --git a/packages/payload/src/folders/addFolderFieldToCollection.ts b/packages/payload/src/folders/addFolderFieldToCollection.ts new file mode 100644 index 0000000000..a4aa6c6860 --- /dev/null +++ b/packages/payload/src/folders/addFolderFieldToCollection.ts @@ -0,0 +1,33 @@ +import type { SanitizedCollectionConfig } from '../index.js' + +import { buildFolderField } from './buildFolderField.js' + +export const 
addFolderFieldToCollection = ({ + collection, + collectionSpecific, + folderFieldName, + folderSlug, +}: { + collection: SanitizedCollectionConfig + collectionSpecific: boolean + folderFieldName: string + folderSlug: string +}): void => { + collection.fields.push( + buildFolderField({ + collectionSpecific, + folderFieldName, + folderSlug, + overrides: { + admin: { + allowCreate: false, + allowEdit: false, + components: { + Cell: '@payloadcms/ui/rsc#FolderTableCell', + Field: '@payloadcms/ui/rsc#FolderField', + }, + }, + }, + }), + ) +} diff --git a/packages/payload/src/folders/buildFolderField.ts b/packages/payload/src/folders/buildFolderField.ts new file mode 100644 index 0000000000..c3920a4d58 --- /dev/null +++ b/packages/payload/src/folders/buildFolderField.ts @@ -0,0 +1,108 @@ +import type { SingleRelationshipField } from '../fields/config/types.js' +import type { Document } from '../types/index.js' + +import { extractID } from '../utilities/extractID.js' + +export const buildFolderField = ({ + collectionSpecific, + folderFieldName, + folderSlug, + overrides = {}, +}: { + collectionSpecific: boolean + folderFieldName: string + folderSlug: string + overrides?: Partial +}): SingleRelationshipField => { + const field: SingleRelationshipField = { + name: folderFieldName, + type: 'relationship', + admin: {}, + hasMany: false, + index: true, + label: 'Folder', + relationTo: folderSlug, + validate: async (value, { collectionSlug, data, overrideAccess, previousValue, req }) => { + if (!collectionSpecific) { + // if collection scoping is not enabled, no validation required since folders can contain any type of document + return true + } + + if (!value) { + // no folder, no validation required + return true + } + + const newID = extractID(value) + if (previousValue && extractID(previousValue) === newID) { + // value did not change, no validation required + return true + } else { + // need to validat the folder value allows this collection type + let parentFolder: Document = null + if (typeof value === 'string' || typeof value === 'number') { + // need to populate the value with the document + parentFolder = await req.payload.findByID({ + id: newID, + collection: folderSlug, + depth: 0, // no need to populate nested folders + overrideAccess, + req, + select: { + folderType: true, // only need to check folderType + }, + user: req.user, + }) + } + + if (parentFolder && collectionSlug) { + const parentFolderTypes: string[] = (parentFolder.folderType as string[]) || [] + + // if the parent folder has no folder types, it accepts all collections + if (parentFolderTypes.length === 0) { + return true + } + + // validation for a folder document + if (collectionSlug === folderSlug) { + // ensure the parent accepts ALL folder types + const folderTypes: string[] = 'folderType' in data ? 
(data.folderType as string[]) : [] + const invalidSlugs = folderTypes.filter((validCollectionSlug: string) => { + return !parentFolderTypes.includes(validCollectionSlug) + }) + if (invalidSlugs.length === 0) { + return true + } else { + return `Folder with ID ${newID} does not allow documents of type ${invalidSlugs.join(', ')}` + } + } + + // validation for a non-folder document + if (parentFolderTypes.includes(collectionSlug)) { + return true + } else { + return `Folder with ID ${newID} does not allow documents of type ${collectionSlug}` + } + } else { + return `Folder with ID ${newID} not found in collection ${folderSlug}` + } + } + }, + } + + if (overrides?.admin) { + field.admin = { + ...field.admin, + ...(overrides.admin || {}), + } + + if (overrides.admin.components) { + field.admin.components = { + ...field.admin.components, + ...(overrides.admin.components || {}), + } + } + } + + return field +} diff --git a/packages/payload/src/folders/createFolderCollection.ts b/packages/payload/src/folders/createFolderCollection.ts index 4da3e3bee7..9e1b8e93cd 100644 --- a/packages/payload/src/folders/createFolderCollection.ts +++ b/packages/payload/src/folders/createFolderCollection.ts @@ -1,74 +1,129 @@ import type { CollectionConfig } from '../collections/config/types.js' +import type { Field, Option, SelectField } from '../fields/config/types.js' -import { populateFolderDataEndpoint } from './endpoints/populateFolderData.js' +import { defaultAccess } from '../auth/defaultAccess.js' +import { buildFolderField } from './buildFolderField.js' +import { foldersSlug } from './constants.js' import { deleteSubfoldersBeforeDelete } from './hooks/deleteSubfoldersAfterDelete.js' import { dissasociateAfterDelete } from './hooks/dissasociateAfterDelete.js' +import { ensureSafeCollectionsChange } from './hooks/ensureSafeCollectionsChange.js' import { reparentChildFolder } from './hooks/reparentChildFolder.js' type CreateFolderCollectionArgs = { - collectionSlugs: string[] + collectionSpecific: boolean debug?: boolean + folderEnabledCollections: CollectionConfig[] folderFieldName: string slug: string } export const createFolderCollection = ({ slug, - collectionSlugs, + collectionSpecific, debug, + folderEnabledCollections, folderFieldName, -}: CreateFolderCollectionArgs): CollectionConfig => ({ - slug, - admin: { - hidden: !debug, - useAsTitle: 'name', - }, - endpoints: [populateFolderDataEndpoint], - fields: [ - { - name: 'name', - type: 'text', - index: true, - required: true, +}: CreateFolderCollectionArgs): CollectionConfig => { + const { collectionOptions, collectionSlugs } = folderEnabledCollections.reduce( + (acc, collection: CollectionConfig) => { + acc.collectionSlugs.push(collection.slug) + acc.collectionOptions.push({ + label: collection.labels?.plural || collection.slug, + value: collection.slug, + }) + + return acc }, { - name: folderFieldName, - type: 'relationship', - admin: { - hidden: !debug, + collectionOptions: [] as Option[], + collectionSlugs: [] as string[], + }, + ) + + return { + slug, + access: { + create: defaultAccess, + delete: defaultAccess, + read: defaultAccess, + readVersions: defaultAccess, + update: defaultAccess, + }, + admin: { + hidden: !debug, + useAsTitle: 'name', + }, + fields: [ + { + name: 'name', + type: 'text', + index: true, + required: true, }, - index: true, - relationTo: slug, - }, - { - name: 'documentsAndFolders', - type: 'join', - admin: { - hidden: !debug, + buildFolderField({ + collectionSpecific, + folderFieldName, + folderSlug: slug, + overrides: { + 
admin: { + hidden: !debug, + }, + }, + }), + { + name: 'documentsAndFolders', + type: 'join', + admin: { + hidden: !debug, + }, + collection: [slug, ...collectionSlugs], + hasMany: true, + on: folderFieldName, }, - collection: [slug, ...collectionSlugs], - hasMany: true, - on: folderFieldName, + ...(collectionSpecific + ? [ + { + name: 'folderType', + type: 'select', + admin: { + components: { + Field: { + clientProps: { + options: collectionOptions, + }, + path: '@payloadcms/ui#FolderTypeField', + }, + }, + position: 'sidebar', + }, + hasMany: true, + options: collectionOptions, + } satisfies SelectField, + ] + : ([] as Field[])), + ], + hooks: { + afterChange: [ + reparentChildFolder({ + folderFieldName, + }), + ], + afterDelete: [ + dissasociateAfterDelete({ + collectionSlugs, + folderFieldName, + }), + ], + beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })], + beforeValidate: [ + ...(collectionSpecific ? [ensureSafeCollectionsChange({ foldersSlug })] : []), + ], }, - ], - hooks: { - afterChange: [ - reparentChildFolder({ - folderFieldName, - }), - ], - afterDelete: [ - dissasociateAfterDelete({ - collectionSlugs, - folderFieldName, - }), - ], - beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })], - }, - labels: { - plural: 'Folders', - singular: 'Folder', - }, - typescript: { - interface: 'FolderInterface', - }, -}) + labels: { + plural: 'Folders', + singular: 'Folder', + }, + typescript: { + interface: 'FolderInterface', + }, + } +} diff --git a/packages/payload/src/folders/endpoints/populateFolderData.ts b/packages/payload/src/folders/endpoints/populateFolderData.ts deleted file mode 100644 index 9347602a9e..0000000000 --- a/packages/payload/src/folders/endpoints/populateFolderData.ts +++ /dev/null @@ -1,135 +0,0 @@ -import httpStatus from 'http-status' - -import type { Endpoint, Where } from '../../index.js' - -import { buildFolderWhereConstraints } from '../utils/buildFolderWhereConstraints.js' -import { getFolderData } from '../utils/getFolderData.js' - -export const populateFolderDataEndpoint: Endpoint = { - handler: async (req) => { - if (!req?.user) { - return Response.json( - { - message: 'Unauthorized request.', - }, - { - status: httpStatus.UNAUTHORIZED, - }, - ) - } - - if ( - !( - req.payload.config.folders && - Boolean(req.payload.collections?.[req.payload.config.folders.slug]) - ) - ) { - return Response.json( - { - message: 'Folders are not configured', - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - // if collectionSlug exists, we need to create constraints for that _specific collection_ and the folder collection - // if collectionSlug does not exist, we need to create constraints for _all folder enabled collections_ and the folder collection - let documentWhere: undefined | Where - let folderWhere: undefined | Where - const collectionSlug = req.searchParams?.get('collectionSlug') - - if (collectionSlug) { - const collectionConfig = req.payload.collections?.[collectionSlug]?.config - - if (!collectionConfig) { - return Response.json( - { - message: `Collection with slug "${collectionSlug}" not found`, - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - const collectionConstraints = await buildFolderWhereConstraints({ - collectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? 
req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - sort: req.searchParams?.get('sort') || undefined, - }) - - if (collectionConstraints) { - documentWhere = collectionConstraints - } - } else { - // loop over all folder enabled collections and build constraints for each - for (const collectionSlug of Object.keys(req.payload.collections)) { - const collectionConfig = req.payload.collections[collectionSlug]?.config - - if (collectionConfig?.folders) { - const collectionConstraints = await buildFolderWhereConstraints({ - collectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - }) - - if (collectionConstraints) { - if (!documentWhere) { - documentWhere = { or: [] } - } - if (!Array.isArray(documentWhere.or)) { - documentWhere.or = [documentWhere] - } else if (Array.isArray(documentWhere.or)) { - documentWhere.or.push(collectionConstraints) - } - } - } - } - } - - const folderCollectionConfig = - req.payload.collections?.[req.payload.config.folders.slug]?.config - - if (!folderCollectionConfig) { - return Response.json( - { - message: 'Folder collection not found', - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - const folderConstraints = await buildFolderWhereConstraints({ - collectionConfig: folderCollectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - }) - - if (folderConstraints) { - folderWhere = folderConstraints - } - - const data = await getFolderData({ - collectionSlug: req.searchParams?.get('collectionSlug') || undefined, - documentWhere: documentWhere ? documentWhere : undefined, - folderID: req.searchParams?.get('folderID') || undefined, - folderWhere, - req, - }) - - return Response.json(data) - }, - method: 'get', - path: '/populate-folder-data', -} diff --git a/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts b/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts new file mode 100644 index 0000000000..cd8e87858f --- /dev/null +++ b/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts @@ -0,0 +1,144 @@ +import { APIError, type CollectionBeforeValidateHook, type CollectionSlug } from '../../index.js' +import { extractID } from '../../utilities/extractID.js' +import { getTranslatedLabel } from '../../utilities/getTranslatedLabel.js' + +export const ensureSafeCollectionsChange = + ({ foldersSlug }: { foldersSlug: CollectionSlug }): CollectionBeforeValidateHook => + async ({ data, originalDoc, req }) => { + const currentFolderID = extractID(originalDoc || {}) + const parentFolderID = extractID(data?.folder || originalDoc?.folder || {}) + if (Array.isArray(data?.folderType) && data.folderType.length > 0) { + const folderType = data.folderType as string[] + const currentlyAssignedCollections: string[] | undefined = + Array.isArray(originalDoc?.folderType) && originalDoc.folderType.length > 0 + ? originalDoc.folderType + : undefined + /** + * Check if the assigned collections have changed. + * example: + * - originalAssignedCollections: ['posts', 'pages'] + * - folderType: ['posts'] + * + * The user is narrowing the types of documents that can be associated with this folder. 
+ * If the user is only expanding the types of documents that can be associated with this folder, + * we do not need to do anything. + */ + const newCollections = currentlyAssignedCollections + ? // user is narrowing the current scope of the folder + currentlyAssignedCollections.filter((c) => !folderType.includes(c)) + : // user is adding a scope to the folder + folderType + + if (newCollections && newCollections.length > 0) { + let hasDependentDocuments = false + if (typeof currentFolderID === 'string' || typeof currentFolderID === 'number') { + const childDocumentsResult = await req.payload.findByID({ + id: currentFolderID, + collection: foldersSlug, + joins: { + documentsAndFolders: { + limit: 100_000_000, + where: { + or: [ + { + relationTo: { + in: newCollections, + }, + }, + ], + }, + }, + }, + overrideAccess: true, + req, + }) + + hasDependentDocuments = childDocumentsResult.documentsAndFolders.docs.length > 0 + } + + // matches folders that are directly related to the removed collections + let hasDependentFolders = false + if ( + !hasDependentDocuments && + (typeof currentFolderID === 'string' || typeof currentFolderID === 'number') + ) { + const childFoldersResult = await req.payload.find({ + collection: foldersSlug, + limit: 1, + req, + where: { + and: [ + { + folderType: { + in: newCollections, + }, + }, + { + folder: { + equals: currentFolderID, + }, + }, + ], + }, + }) + hasDependentFolders = childFoldersResult.totalDocs > 0 + } + + if (hasDependentDocuments || hasDependentFolders) { + const translatedLabels = newCollections.map((collectionSlug) => { + if (req.payload.collections[collectionSlug]?.config.labels.singular) { + return getTranslatedLabel( + req.payload.collections[collectionSlug]?.config.labels.plural, + req.i18n, + ) + } + return collectionSlug + }) + + throw new APIError( + `The folder "${data.name || originalDoc.name}" contains ${hasDependentDocuments ? 'documents' : 'folders'} that still belong to the following collections: ${translatedLabels.join(', ')}`, + 400, + ) + } + return data + } + } else if ( + (data?.folderType === null || + (Array.isArray(data?.folderType) && data?.folderType.length === 0)) && + parentFolderID + ) { + // attempting to set the folderType to catch-all, so we need to ensure that the parent allows this + let parentFolder + if (typeof parentFolderID === 'string' || typeof parentFolderID === 'number') { + try { + parentFolder = await req.payload.findByID({ + id: parentFolderID, + collection: foldersSlug, + overrideAccess: true, + req, + select: { + name: true, + folderType: true, + }, + user: req.user, + }) + } catch (_) { + // parent folder does not exist + } + } + + if ( + parentFolder && + parentFolder?.folderType && + Array.isArray(parentFolder.folderType) && + parentFolder.folderType.length > 0 + ) { + throw new APIError( + `The folder "${data?.name || originalDoc.name}" must have folder-type set since its parent folder ${parentFolder?.name ? 
`"${parentFolder?.name}" ` : ''}has a folder-type set.`, + 400, + ) + } + } + + return data + } diff --git a/packages/payload/src/folders/types.ts b/packages/payload/src/folders/types.ts index 3b7b23793e..6ec48abef1 100644 --- a/packages/payload/src/folders/types.ts +++ b/packages/payload/src/folders/types.ts @@ -10,10 +10,12 @@ export type FolderInterface = { }[] } folder?: FolderInterface | (number | string | undefined) + folderType: CollectionSlug[] name: string } & TypeWithID export type FolderBreadcrumb = { + folderType?: CollectionSlug[] id: null | number | string name: string } @@ -58,6 +60,7 @@ export type FolderOrDocument = { _folderOrDocumentTitle: string createdAt?: string folderID?: number | string + folderType: CollectionSlug[] id: number | string updatedAt?: string } & DocumentMediaData @@ -66,6 +69,7 @@ export type FolderOrDocument = { export type GetFolderDataResult = { breadcrumbs: FolderBreadcrumb[] | null documents: FolderOrDocument[] + folderAssignedCollections: CollectionSlug[] | undefined subfolders: FolderOrDocument[] } @@ -85,6 +89,12 @@ export type RootFoldersConfiguration = { }: { collection: CollectionConfig }) => CollectionConfig | Promise)[] + /** + * If true, you can scope folders to specific collections. + * + * @default true + */ + collectionSpecific?: boolean /** * Ability to view hidden fields and collections related to folders * @@ -114,9 +124,6 @@ export type CollectionFoldersConfiguration = { browseByFolder?: boolean } -type BaseFolderSortKeys = keyof Pick< - FolderOrDocument['value'], - '_folderOrDocumentTitle' | 'createdAt' | 'updatedAt' -> +type BaseFolderSortKeys = 'createdAt' | 'name' | 'updatedAt' export type FolderSortKeys = `-${BaseFolderSortKeys}` | BaseFolderSortKeys diff --git a/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts b/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts index 825dbb9545..4f13d17083 100644 --- a/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts +++ b/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts @@ -23,6 +23,7 @@ export function formatFolderOrDocumentItem({ _folderOrDocumentTitle: String((useAsTitle && value?.[useAsTitle]) || value['id']), createdAt: value?.createdAt, folderID: value?.[folderFieldName], + folderType: value?.folderType || [], updatedAt: value?.updatedAt, } diff --git a/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts b/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts index c2cb4c097a..5e9c2a0102 100644 --- a/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts +++ b/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts @@ -27,6 +27,7 @@ export const getFolderBreadcrumbs = async ({ select: { name: true, [folderFieldName]: true, + folderType: true, }, user, where: { @@ -42,6 +43,7 @@ export const getFolderBreadcrumbs = async ({ breadcrumbs.push({ id: folder.id, name: folder.name, + folderType: folder.folderType, }) if (folder[folderFieldName]) { return getFolderBreadcrumbs({ diff --git a/packages/payload/src/folders/utils/getFolderData.ts b/packages/payload/src/folders/utils/getFolderData.ts index d5efa40ef0..6acfcf49bb 100644 --- a/packages/payload/src/folders/utils/getFolderData.ts +++ b/packages/payload/src/folders/utils/getFolderData.ts @@ -1,6 +1,6 @@ import type { CollectionSlug } from '../../index.js' import type { PayloadRequest, Where } from '../../types/index.js' -import type { GetFolderDataResult } from '../types.js' +import type { FolderOrDocument, FolderSortKeys, GetFolderDataResult } from 
'../types.js' import { parseDocumentID } from '../../index.js' import { getFolderBreadcrumbs } from './getFolderBreadcrumbs.js' @@ -29,6 +29,7 @@ type Args = { */ folderWhere?: Where req: PayloadRequest + sort: FolderSortKeys } /** * Query for documents, subfolders and breadcrumbs for a given folder @@ -39,6 +40,7 @@ export const getFolderData = async ({ folderID: _folderID, folderWhere, req, + sort = 'name', }: Args): Promise => { const { payload } = req @@ -65,15 +67,16 @@ export const getFolderData = async ({ parentFolderID, req, }) - const [breadcrumbs, documentsAndSubfolders] = await Promise.all([ + const [breadcrumbs, result] = await Promise.all([ breadcrumbsPromise, documentAndSubfolderPromise, ]) return { breadcrumbs, - documents: documentsAndSubfolders.documents, - subfolders: documentsAndSubfolders.subfolders, + documents: sortDocs({ docs: result.documents, sort }), + folderAssignedCollections: result.folderAssignedCollections, + subfolders: sortDocs({ docs: result.subfolders, sort }), } } else { // subfolders and documents are queried separately @@ -96,10 +99,40 @@ export const getFolderData = async ({ subfoldersPromise, documentsPromise, ]) + return { breadcrumbs, - documents, - subfolders, + documents: sortDocs({ docs: documents, sort }), + folderAssignedCollections: collectionSlug ? [collectionSlug] : undefined, + subfolders: sortDocs({ docs: subfolders, sort }), } } } + +function sortDocs({ + docs, + sort, +}: { + docs: FolderOrDocument[] + sort?: FolderSortKeys +}): FolderOrDocument[] { + if (!sort) { + return docs + } + const isDesc = typeof sort === 'string' && sort.startsWith('-') + const sortKey = (isDesc ? sort.slice(1) : sort) as FolderSortKeys + + return docs.sort((a, b) => { + let result = 0 + if (sortKey === 'name') { + result = a.value._folderOrDocumentTitle.localeCompare(b.value._folderOrDocumentTitle) + } else if (sortKey === 'createdAt') { + result = + new Date(a.value.createdAt || '').getTime() - new Date(b.value.createdAt || '').getTime() + } else if (sortKey === 'updatedAt') { + result = + new Date(a.value.updatedAt || '').getTime() - new Date(b.value.updatedAt || '').getTime() + } + return isDesc ? 
-result : result + }) +} diff --git a/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts b/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts index ea3ef47af9..98b40276c4 100644 --- a/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts +++ b/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts @@ -1,4 +1,5 @@ import type { PaginatedDocs } from '../../database/types.js' +import type { CollectionSlug } from '../../index.js' import type { Document, PayloadRequest, Where } from '../../types/index.js' import type { FolderOrDocument } from '../types.js' @@ -8,6 +9,7 @@ import { formatFolderOrDocumentItem } from './formatFolderOrDocumentItem.js' type QueryDocumentsAndFoldersResults = { documents: FolderOrDocument[] + folderAssignedCollections: CollectionSlug[] subfolders: FolderOrDocument[] } type QueryDocumentsAndFoldersArgs = { @@ -85,5 +87,9 @@ export async function queryDocumentsAndFoldersFromJoin({ }, ) - return results + return { + documents: results.documents, + folderAssignedCollections: subfolderDoc?.docs[0]?.folderType || [], + subfolders: results.subfolders, + } } diff --git a/packages/payload/src/utilities/combineWhereConstraints.spec.ts b/packages/payload/src/utilities/combineWhereConstraints.spec.ts new file mode 100644 index 0000000000..c852a9477b --- /dev/null +++ b/packages/payload/src/utilities/combineWhereConstraints.spec.ts @@ -0,0 +1,86 @@ +import { Where } from '../types/index.js' +import { combineWhereConstraints } from './combineWhereConstraints.js' + +describe('combineWhereConstraints', () => { + it('should merge matching constraint keys', async () => { + const constraint: Where = { + test: { + equals: 'value', + }, + } + + // should merge and queries + const andConstraint: Where = { + and: [constraint], + } + expect(combineWhereConstraints([andConstraint], 'and')).toEqual(andConstraint) + // should merge multiple and queries + expect(combineWhereConstraints([andConstraint, andConstraint], 'and')).toEqual({ + and: [constraint, constraint], + }) + + // should merge or queries + const orConstraint: Where = { + or: [constraint], + } + expect(combineWhereConstraints([orConstraint], 'or')).toEqual(orConstraint) + // should merge multiple or queries + expect(combineWhereConstraints([orConstraint, orConstraint], 'or')).toEqual({ + or: [constraint, constraint], + }) + }) + + it('should push mismatching constraints keys into `as` key', async () => { + const constraint: Where = { + test: { + equals: 'value', + }, + } + + // should push `and` into `or` key + const andConstraint: Where = { + and: [constraint], + } + expect(combineWhereConstraints([andConstraint], 'or')).toEqual({ + or: [andConstraint], + }) + + // should push `or` into `and` key + const orConstraint: Where = { + or: [constraint], + } + expect(combineWhereConstraints([orConstraint], 'and')).toEqual({ + and: [orConstraint], + }) + + // should merge `and` but push `or` into `and` key + expect(combineWhereConstraints([andConstraint, orConstraint], 'and')).toEqual({ + and: [constraint, orConstraint], + }) + }) + + it('should push non and/or constraint key into `as` key', async () => { + const basicConstraint: Where = { + test: { + equals: 'value', + }, + } + + expect(combineWhereConstraints([basicConstraint], 'and')).toEqual({ + and: [basicConstraint], + }) + expect(combineWhereConstraints([basicConstraint], 'or')).toEqual({ + or: [basicConstraint], + }) + }) + + it('should return an empty object when no constraints are provided', async () => { + 
expect(combineWhereConstraints([], 'and')).toEqual({}) + expect(combineWhereConstraints([], 'or')).toEqual({}) + }) + + it('should return an empty object when all constraints are empty', async () => { + expect(combineWhereConstraints([{}, {}, undefined], 'and')).toEqual({}) + expect(combineWhereConstraints([{}, {}, undefined], 'or')).toEqual({}) + }) +}) diff --git a/packages/payload/src/utilities/combineWhereConstraints.ts b/packages/payload/src/utilities/combineWhereConstraints.ts index 4363835aee..2a1b979b04 100644 --- a/packages/payload/src/utilities/combineWhereConstraints.ts +++ b/packages/payload/src/utilities/combineWhereConstraints.ts @@ -8,12 +8,27 @@ export function combineWhereConstraints( return {} } - return { - [as]: constraints.filter((constraint): constraint is Where => { + const reducedConstraints = constraints.reduce>( + (acc: Partial, constraint) => { if (constraint && typeof constraint === 'object' && Object.keys(constraint).length > 0) { - return true + if (as in constraint) { + // merge the objects under the shared key + acc[as] = [...(acc[as] as Where[]), ...(constraint[as] as Where[])] + } else { + // the constraint does not share the key + acc[as]?.push(constraint) + } } - return false - }), + + return acc + }, + { [as]: [] } satisfies Where, + ) + + if (reducedConstraints[as]?.length === 0) { + // If there are no constraints, return an empty object + return {} } + + return reducedConstraints as Where } diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index 0a9c986847..f50aa54f8f 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -134,6 +134,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'folder:browseByFolder', 'folder:deleteFolder', 'folder:folders', + 'folder:folderTypeDescription', 'folder:folderName', 'folder:itemsMovedToFolder', 'folder:itemsMovedToRoot', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index 6a33e7ecbe..ce23c1780c 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -183,6 +183,7 @@ export const arTranslations: DefaultTranslationsObject = { deleteFolder: 'حذف المجلد', folderName: 'اسم المجلد', folders: 'مجلدات', + folderTypeDescription: 'حدد نوع المستندات التي يجب السماح بها في هذا المجلد من المجموعات.', itemHasBeenMoved: 'تم نقل {{title}} إلى {{folderName}}', itemHasBeenMovedToRoot: 'تم نقل {{title}} إلى المجلد الجذر', itemsMovedToFolder: '{{title}} تم نقله إلى {{folderName}}', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index 59726751e4..bc2ecb7ab7 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -186,6 +186,7 @@ export const azTranslations: DefaultTranslationsObject = { deleteFolder: 'Qovluğu Sil', folderName: 'Qovluq Adı', folders: 'Qovluqlar', + folderTypeDescription: 'Bu qovluqda hangi tip kolleksiya sənədlərinə icazə verilməlidir seçin.', itemHasBeenMoved: '{{title}} {{folderName}} qovluğuna köçürüldü.', itemHasBeenMovedToRoot: '{{title}} kök qovluğa köçürüldü.', itemsMovedToFolder: '{{title}} {{folderName}} qovluğuna köçürüldü', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index b507d73d69..308778b051 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -186,6 +186,8 @@ export const 
bgTranslations: DefaultTranslationsObject = { deleteFolder: 'Изтрий папка', folderName: 'Име на папка', folders: 'Папки', + folderTypeDescription: + 'Изберете кой тип документи от колекциите трябва да се допускат в тази папка.', itemHasBeenMoved: '{{title}} е преместен в {{folderName}}', itemHasBeenMovedToRoot: '{{title}} беше преместено в основната папка', itemsMovedToFolder: '{{title}} беше преместен в {{folderName}}', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 50a3462030..9a41c809f9 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -187,6 +187,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { deleteFolder: 'ফোল্ডার মুছুন', folderName: 'ফোল্ডারের নাম', folders: 'ফোল্ডারগুলি', + folderTypeDescription: + 'এই ফোল্ডারে কোন ধরনের সংগ্রহ নথিপত্র অনুমোদিত হওয়া উচিত তা নির্বাচন করুন।', itemHasBeenMoved: '{{title}} কে {{folderName}} এ সরানো হয়েছে', itemHasBeenMovedToRoot: '{{title}} কে মূল ফোল্ডারে সরানো হয়েছে', itemsMovedToFolder: '{{title}} কে {{folderName}} এ সরানো হয়েছে', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 97e1a90f76..8c01eb2f78 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -187,6 +187,8 @@ export const bnInTranslations: DefaultTranslationsObject = { deleteFolder: 'ফোল্ডার মুছুন', folderName: 'ফোল্ডারের নাম', folders: 'ফোল্ডারগুলি', + folderTypeDescription: + 'এই ফোল্ডারে কোন ধরণের কালেকশন ডকুমেন্টস অনুমতি দেওয়া উচিত তা নির্বাচন করুন।', itemHasBeenMoved: '{{title}} কে {{folderName}} এ সরানো হয়েছে', itemHasBeenMovedToRoot: '{{title}} কে মূল ফোল্ডারে সরানো হয়েছে', itemsMovedToFolder: '{{title}} কে {{folderName}} এ সরানো হয়েছে', diff --git a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index 36a9a5823c..c3c2ecead5 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -187,6 +187,8 @@ export const caTranslations: DefaultTranslationsObject = { deleteFolder: 'Esborra la carpeta', folderName: 'Nom de la Carpeta', folders: 'Carpetes', + folderTypeDescription: + 'Seleccioneu quin tipus de documents de la col·lecció haurien de ser permesos en aquesta carpeta.', itemHasBeenMoved: "{{title}} s'ha traslladat a {{folderName}}", itemHasBeenMovedToRoot: "{{title}} s'ha mogut a la carpeta arrel", itemsMovedToFolder: "{{title}} s'ha traslladat a {{folderName}}", diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index 4808651d5e..7f8304f59b 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -186,6 +186,8 @@ export const csTranslations: DefaultTranslationsObject = { deleteFolder: 'Smazat složku', folderName: 'Název složky', folders: 'Složky', + folderTypeDescription: + 'Vyberte, který typ dokumentů ze sbírky by měl být dovolen v této složce.', itemHasBeenMoved: '{{title}} bylo přesunuto do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} byl přesunut do kořenové složky', itemsMovedToFolder: '{{title}} přesunuto do {{folderName}}', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index 0f449e1c2a..ec1ef4b6ef 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -185,6 +185,8 @@ export const daTranslations: DefaultTranslationsObject = 
{ deleteFolder: 'Slet mappe', folderName: 'Mappenavn', folders: 'Mapper', + folderTypeDescription: + 'Vælg hvilken type samling af dokumenter der bør være tilladt i denne mappe.', itemHasBeenMoved: '{{title}} er blevet flyttet til {{folderName}}', itemHasBeenMovedToRoot: '{{title}} er blevet flyttet til rodmappen', itemsMovedToFolder: '{{title}} flyttet til {{folderName}}', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index 9c2fd199d3..ae924bb363 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -191,6 +191,8 @@ export const deTranslations: DefaultTranslationsObject = { deleteFolder: 'Ordner löschen', folderName: 'Ordnername', folders: 'Ordner', + folderTypeDescription: + 'Wählen Sie aus, welche Art von Sammlungsdokumenten in diesem Ordner zugelassen sein sollte.', itemHasBeenMoved: '{{title}} wurde in {{folderName}} verschoben.', itemHasBeenMovedToRoot: '{{title}} wurde in den Hauptordner verschoben', itemsMovedToFolder: '{{title}} wurde in {{folderName}} verschoben.', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index 0b2d0f7694..e1600e45ae 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -186,6 +186,8 @@ export const enTranslations = { deleteFolder: 'Delete Folder', folderName: 'Folder Name', folders: 'Folders', + folderTypeDescription: + 'Select which type of collection documents should be allowed in this folder.', itemHasBeenMoved: '{{title}} has been moved to {{folderName}}', itemHasBeenMovedToRoot: '{{title}} has been moved to the root folder', itemsMovedToFolder: '{{title}} moved to {{folderName}}', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index 311848771a..d91a45c21e 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -190,6 +190,8 @@ export const esTranslations: DefaultTranslationsObject = { deleteFolder: 'Eliminar Carpeta', folderName: 'Nombre de la Carpeta', folders: 'Carpetas', + folderTypeDescription: + 'Seleccione qué tipo de documentos de la colección se deben permitir en esta carpeta.', itemHasBeenMoved: '{{title}} se ha movido a {{folderName}}', itemHasBeenMovedToRoot: '{{title}} se ha movido a la carpeta raíz', itemsMovedToFolder: '{{title}} movido a {{folderName}}', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 1a72d3e422..15c77ebea4 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -185,6 +185,7 @@ export const etTranslations: DefaultTranslationsObject = { deleteFolder: 'Kustuta kaust', folderName: 'Kausta nimi', folders: 'Kaustad', + folderTypeDescription: 'Valige, millist tüüpi kogumiku dokumente peaks selles kaustas lubama.', itemHasBeenMoved: '{{title}} on teisaldatud kausta {{folderName}}', itemHasBeenMovedToRoot: '{{title}} on teisaldatud juurkausta', itemsMovedToFolder: '{{title}} viidi üle kausta {{folderName}}', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index b0c0012eef..1284b1d942 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -184,6 +184,7 @@ export const faTranslations: DefaultTranslationsObject = { deleteFolder: 'حذف پوشه', folderName: 'نام پوشه', folders: 'پوشه‌ها', + folderTypeDescription: 'انتخاب 
کنید که کدام نوع اسناد مجموعه باید در این پوشه مجاز باشند.', itemHasBeenMoved: '{{title}} به {{folderName}} منتقل شده است.', itemHasBeenMovedToRoot: '{{title}} به پوشه اصلی انتقال یافته است.', itemsMovedToFolder: '{{title}} به {{folderName}} منتقل شد.', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index 24ec5fd7b0..c5eab55fda 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -192,6 +192,8 @@ export const frTranslations: DefaultTranslationsObject = { deleteFolder: 'Supprimer le dossier', folderName: 'Nom du dossier', folders: 'Dossiers', + folderTypeDescription: + 'Sélectionnez le type de documents de collection qui devraient être autorisés dans ce dossier.', itemHasBeenMoved: '{{title}} a été déplacé vers {{folderName}}', itemHasBeenMovedToRoot: '{{title}} a été déplacé dans le dossier racine', itemsMovedToFolder: '{{title}} déplacé vers {{folderName}}', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index 32d5ad7200..f7a8d4ff93 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -181,6 +181,7 @@ export const heTranslations: DefaultTranslationsObject = { deleteFolder: 'מחק תיקייה', folderName: 'שם תיקייה', folders: 'תיקיות', + folderTypeDescription: 'בחר איזה סוג של מסמכים מהאוסף יותרו להיות בתיקייה זו.', itemHasBeenMoved: '"{{title}}" הועבר ל- "{{folderName}}"', itemHasBeenMovedToRoot: '"{{title}}" הועבר לתיקיית השורש', itemsMovedToFolder: '{{title}} הועבר אל {{folderName}}', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index 7271e0ac06..320217c8ef 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -187,6 +187,8 @@ export const hrTranslations: DefaultTranslationsObject = { deleteFolder: 'Izbriši mapu', folderName: 'Naziv mape', folders: 'Mape', + folderTypeDescription: + 'Odaberite koja vrsta dokumenata kolekcije treba biti dozvoljena u ovoj mapi.', itemHasBeenMoved: '{{title}} je premješten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premješten u korijensku mapu.', itemsMovedToFolder: '{{title}} premješteno u {{folderName}}', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index eac7af8c04..8aaa81144b 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -188,6 +188,8 @@ export const huTranslations: DefaultTranslationsObject = { deleteFolder: 'Mappa törlése', folderName: 'Mappa neve', folders: 'Mappák', + folderTypeDescription: + 'Válassza ki, hogy milyen típusú dokumentumokat engedélyez ebben a mappában.', itemHasBeenMoved: '{{title}} át lett helyezve a {{folderName}} nevű mappába.', itemHasBeenMovedToRoot: 'A(z) {{title}} át lett helyezve a gyökérmappába.', itemsMovedToFolder: '{{title}} áthelyezve a(z) {{folderName}} mappába', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 925181f244..704b20d8e1 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -186,6 +186,8 @@ export const hyTranslations: DefaultTranslationsObject = { deleteFolder: 'Ջնջել թղթապանակը', folderName: 'Տեսակավորման անվանում', folders: 'Պատուհաններ', + folderTypeDescription: + 'Ընտրեք, թե որն է հավաքածուի փաստաթղթերը, որոնք պետք է թույլատրվեն այս պանակում:', itemHasBeenMoved: 
'{{title}}-ը տեղափոխվել է {{folderName}}-ում', itemHasBeenMovedToRoot: '«{{title}}» տեղափոխվել է արմատային պանակ։', itemsMovedToFolder: '{{title}} տեղափոխվեց {{folderName}}', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index 55bfdbcd8e..3a51ef09b2 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -190,6 +190,8 @@ export const itTranslations: DefaultTranslationsObject = { deleteFolder: 'Elimina cartella', folderName: 'Nome Cartella', folders: 'Cartelle', + folderTypeDescription: + 'Seleziona quale tipo di documenti della collezione dovrebbero essere consentiti in questa cartella.', itemHasBeenMoved: '{{title}} è stato spostato in {{folderName}}', itemHasBeenMovedToRoot: '{{title}} è stato spostato nella cartella principale', itemsMovedToFolder: '{{title}} spostato in {{folderName}}', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 51d9284b96..024cf4e1fe 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -187,6 +187,8 @@ export const jaTranslations: DefaultTranslationsObject = { deleteFolder: 'フォルダを削除する', folderName: 'フォルダ名', folders: 'フォルダー', + folderTypeDescription: + 'このフォルダーに許可されるコレクションドキュメントのタイプを選択してください。', itemHasBeenMoved: '{{title}}は{{folderName}}に移動されました', itemHasBeenMovedToRoot: '{{title}}はルートフォルダに移動されました', itemsMovedToFolder: '{{title}}は{{folderName}}に移動されました', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index e053968388..0d5af0445e 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -186,6 +186,7 @@ export const koTranslations: DefaultTranslationsObject = { deleteFolder: '폴더 삭제', folderName: '폴더 이름', folders: '폴더들', + folderTypeDescription: '이 폴더에서 어떤 유형의 컬렉션 문서가 허용되어야 하는지 선택하세요.', itemHasBeenMoved: '{{title}}는 {{folderName}}로 이동되었습니다.', itemHasBeenMovedToRoot: '{{title}}이(가) 루트 폴더로 이동되었습니다.', itemsMovedToFolder: '{{title}}이(가) {{folderName}}로 이동되었습니다.', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 0a9b605a10..94048058cd 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -188,6 +188,8 @@ export const ltTranslations: DefaultTranslationsObject = { deleteFolder: 'Ištrinti aplanką', folderName: 'Aplanko pavadinimas', folders: 'Aplankai', + folderTypeDescription: + 'Pasirinkite, kokio tipo rinkinio dokumentai turėtų būti leidžiami šiame aplanke.', itemHasBeenMoved: '{{title}} buvo perkeltas į {{folderName}}', itemHasBeenMovedToRoot: '{{title}} buvo perkeltas į pagrindinį katalogą', itemsMovedToFolder: '{{title}} perkeltas į {{folderName}}', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index e7fb84bb10..0dcd973687 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -186,6 +186,8 @@ export const lvTranslations: DefaultTranslationsObject = { deleteFolder: 'Dzēst mapi', folderName: 'Mapes nosaukums', folders: 'Mapes', + folderTypeDescription: + 'Izvēlieties, kāda veida kolekcijas dokumentiem jābūt atļautiem šajā mapē.', itemHasBeenMoved: '{{title}} ir pārvietots uz {{folderName}}', itemHasBeenMovedToRoot: '{{title}} ir pārvietots uz saknes mapi', itemsMovedToFolder: '{{title}} pārvietots uz {{folderName}}', diff --git 
a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index c05bf18710..78c87fa725 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -187,6 +187,7 @@ export const myTranslations: DefaultTranslationsObject = { deleteFolder: 'Padam Folder', folderName: 'ဖိုင်နာမည်', folders: 'Fail', + folderTypeDescription: 'Pilih jenis dokumen koleksi yang harus diizinkan dalam folder ini.', itemHasBeenMoved: '{{title}} telah dipindahkan ke {{folderName}}', itemHasBeenMovedToRoot: '"{{title}}" က ဗဟိုဖိုလ်ဒါသို့ရွှေ့ပြီးပါပြီ။', itemsMovedToFolder: '{{title}} သို့ {{folderName}} သို့ ရွှေ့လိုက်သွားပါပယ်', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index 90c796312c..291b85b6f7 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -186,6 +186,7 @@ export const nbTranslations: DefaultTranslationsObject = { deleteFolder: 'Slett mappe', folderName: 'Mappenavn', folders: 'Mapper', + folderTypeDescription: 'Velg hvilken type samling dokumenter som skal tillates i denne mappen.', itemHasBeenMoved: '{{title}} er flyttet til {{folderName}}', itemHasBeenMovedToRoot: '{{title}} er flyttet til rotmappen', itemsMovedToFolder: '{{title}} flyttet til {{folderName}}', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index 316cc69508..1ba7d51a26 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -188,6 +188,8 @@ export const nlTranslations: DefaultTranslationsObject = { deleteFolder: 'Verwijder map', folderName: 'Mapnaam', folders: 'Mappen', + folderTypeDescription: + 'Selecteer welk type verzameldocumenten toegestaan zou moeten zijn in deze map.', itemHasBeenMoved: '{{title}} is verplaatst naar {{folderName}}', itemHasBeenMovedToRoot: '{{title}} is verplaatst naar de hoofdmap', itemsMovedToFolder: '{{title}} verplaatst naar {{folderName}}', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index 2c12ba691b..1e60b6ac79 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -185,6 +185,8 @@ export const plTranslations: DefaultTranslationsObject = { deleteFolder: 'Usuń folder', folderName: 'Nazwa folderu', folders: 'Foldery', + folderTypeDescription: + 'Wybierz, które typy dokumentów z kolekcji powinny być dozwolone w tym folderze.', itemHasBeenMoved: '{{title}} został przeniesiony do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} został przeniesiony do folderu głównego', itemsMovedToFolder: '{{title}} przeniesiono do {{folderName}}', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index 8b8f95fa9a..ac01e47c00 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -186,6 +186,8 @@ export const ptTranslations: DefaultTranslationsObject = { deleteFolder: 'Apagar Pasta', folderName: 'Nome da Pasta', folders: 'Pastas', + folderTypeDescription: + 'Selecione qual tipo de documentos da coleção devem ser permitidos nesta pasta.', itemHasBeenMoved: '{{title}} foi movido para {{folderName}}', itemHasBeenMovedToRoot: '{{title}} foi movido para a pasta raiz', itemsMovedToFolder: '{{title}} movido para {{folderName}}', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 
38825ec119..34bae916f4 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -190,6 +190,8 @@ export const roTranslations: DefaultTranslationsObject = { deleteFolder: 'Ștergeți dosarul', folderName: 'Nume dosar', folders: 'Dosare', + folderTypeDescription: + 'Selectați ce tip de documente din colecție ar trebui să fie permise în acest dosar.', itemHasBeenMoved: '{{title}} a fost mutat în {{folderName}}', itemHasBeenMovedToRoot: '{{title}} a fost mutat în dosarul rădăcină', itemsMovedToFolder: '{{title}} a fost mutat în {{folderName}}', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index 78803c8e58..1f0701c3f4 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -187,6 +187,8 @@ export const rsTranslations: DefaultTranslationsObject = { deleteFolder: 'Obriši fasciklu', folderName: 'Ime fascikle', folders: 'Fascikle', + folderTypeDescription: + 'Odaberite koja vrsta dokumenata iz kolekcije treba biti dozvoljena u ovom folderu.', itemHasBeenMoved: '{{title}} je premješten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premešten u osnovni direktorijum.', itemsMovedToFolder: '{{title}} premešten u {{folderName}}', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index 31c321059b..2ae83c93db 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -187,6 +187,8 @@ export const rsLatinTranslations: DefaultTranslationsObject = { deleteFolder: 'Obriši mapu', folderName: 'Naziv fascikle', folders: 'Fascikle', + folderTypeDescription: + 'Odaberite koja vrsta dokumenta iz kolekcije bi trebala biti dozvoljena u ovoj fascikli.', itemHasBeenMoved: '{{title}} je premesten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premešten u osnovnu fasciklu', itemsMovedToFolder: '{{title}} premešteno u {{folderName}}', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index e881a1c1d4..b23b2ef9fd 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -188,6 +188,8 @@ export const ruTranslations: DefaultTranslationsObject = { deleteFolder: 'Удалить папку', folderName: 'Название папки', folders: 'Папки', + folderTypeDescription: + 'Выберите, какие типы документов коллекции должны быть разрешены в этой папке.', itemHasBeenMoved: '{{title}} был перемещен в {{folderName}}', itemHasBeenMovedToRoot: '{{title}} был перемещен в корневую папку', itemsMovedToFolder: '{{title}} перемещен в {{folderName}}', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index 44713ae7fe..4c24b250d3 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -189,6 +189,8 @@ export const skTranslations: DefaultTranslationsObject = { deleteFolder: 'Odstrániť priečinok', folderName: 'Názov priečinka', folders: 'Priečinky', + folderTypeDescription: + 'Vyberte, ktorý typ dokumentov z kolekcie by mal byť povolený v tejto zložke.', itemHasBeenMoved: '{{title}} bol presunutý do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} bol presunutý do koreňového priečinka', itemsMovedToFolder: '{{title}} presunuté do {{folderName}}', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index 45954b2d29..02e046a58b 100644 --- 
a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -186,6 +186,8 @@ export const slTranslations: DefaultTranslationsObject = { deleteFolder: 'Izbriši mapo', folderName: 'Ime mape', folders: 'Mape', + folderTypeDescription: + 'Izberite, katere vrste dokumentov zbirke naj bodo dovoljene v tej mapi.', itemHasBeenMoved: '{{title}} je bil premaknjen v {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je bil premaknjen v korensko mapo.', itemsMovedToFolder: '{{title}} premaknjeno v {{folderName}}', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index 3a9b1e12d2..caef27df3b 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -186,6 +186,7 @@ export const svTranslations: DefaultTranslationsObject = { deleteFolder: 'Ta bort mapp', folderName: 'Mappnamn', folders: 'Mappar', + folderTypeDescription: 'Välj vilken typ av samlingsdokument som ska tillåtas i denna mapp.', itemHasBeenMoved: '{{title}} har flyttats till {{folderName}}', itemHasBeenMovedToRoot: '{{title}} har flyttats till rotmappen', itemsMovedToFolder: '{{title}} flyttad till {{folderName}}', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index 8d53deae75..41cb9878b8 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -183,6 +183,7 @@ export const thTranslations: DefaultTranslationsObject = { deleteFolder: 'ลบโฟลเดอร์', folderName: 'ชื่อโฟลเดอร์', folders: 'โฟลเดอร์', + folderTypeDescription: 'เลือกประเภทของเอกสารคอลเลกชันที่ควรอนุญาตในโฟลเดอร์นี้', itemHasBeenMoved: '{{title}} ได้ถูกย้ายไปที่ {{folderName}}', itemHasBeenMovedToRoot: '"{{title}}" ได้ถูกย้ายไปยังโฟลเดอร์ราก', itemsMovedToFolder: '{{title}} ถูกย้ายไปยัง {{folderName}}', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index 1630721cf9..1daaae2925 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -188,6 +188,8 @@ export const trTranslations: DefaultTranslationsObject = { deleteFolder: 'Klasörü Sil', folderName: 'Klasör Adı', folders: 'Klasörler', + folderTypeDescription: + 'Bu klasörde hangi türden koleksiyon belgelerine izin verilmesi gerektiğini seçin.', itemHasBeenMoved: '{{title}} {{folderName}} klasörüne taşındı.', itemHasBeenMovedToRoot: '{{title}} kök klasöre taşındı.', itemsMovedToFolder: "{{title}} {{folderName}}'ye taşındı.", diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index e76e29e6be..eb33c1daac 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -187,6 +187,8 @@ export const ukTranslations: DefaultTranslationsObject = { deleteFolder: 'Видалити папку', folderName: 'Назва папки', folders: 'Папки', + folderTypeDescription: + 'Виберіть, який тип документів колекції повинен бути дозволений у цій папці.', itemHasBeenMoved: '{{title}} було переміщено до {{folderName}}', itemHasBeenMovedToRoot: '{{title}} був переміщений до кореневої папки', itemsMovedToFolder: '{{title}} перенесено до {{folderName}}', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index ae280b4fc0..7f747ef15f 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -186,6 +186,7 @@ export const viTranslations: DefaultTranslationsObject = { 
deleteFolder: 'Xóa Thư mục', folderName: 'Tên thư mục', folders: 'Thư mục', + folderTypeDescription: 'Chọn loại tài liệu bộ sưu tập nào nên được cho phép trong thư mục này.', itemHasBeenMoved: '{{title}} đã được chuyển đến {{folderName}}', itemHasBeenMovedToRoot: '{{title}} đã được chuyển đến thư mục gốc', itemsMovedToFolder: '{{title}} đã được di chuyển vào {{folderName}}', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index 296612d0e1..84a477ba71 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -179,6 +179,7 @@ export const zhTranslations: DefaultTranslationsObject = { deleteFolder: '删除文件夹', folderName: '文件夹名称', folders: '文件夹', + folderTypeDescription: '在此文件夹中选择应允许哪种类型的集合文档。', itemHasBeenMoved: '{{title}}已被移至{{folderName}}', itemHasBeenMovedToRoot: '{{title}}已被移至根文件夹', itemsMovedToFolder: '{{title}}已移至{{folderName}}', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index 6cf18d9773..e659462f6b 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -178,6 +178,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { deleteFolder: '刪除資料夾', folderName: '資料夾名稱', folders: '資料夾', + folderTypeDescription: '在此文件夾中選擇應允許的集合文件類型。', itemHasBeenMoved: '{{title}}已被移至{{folderName}}', itemHasBeenMovedToRoot: '{{title}}已被移至根文件夾', itemsMovedToFolder: '{{title}} 已移至 {{folderName}}', diff --git a/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx b/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx index 9fc54d7eb2..881a85d830 100644 --- a/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx +++ b/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx @@ -48,10 +48,11 @@ export function DroppableBreadcrumb({ children, className, onClick, -}: { children: React.ReactNode; className?: string; onClick: () => void } & Pick< - FolderBreadcrumb, - 'id' ->) { +}: { + children: React.ReactNode + className?: string + onClick: () => void +} & Pick) { const { isOver, setNodeRef } = useDroppable({ id: `folder-${id}`, data: { diff --git a/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx b/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx index df8b79865b..d0d54ecf00 100644 --- a/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx +++ b/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx @@ -107,6 +107,7 @@ export function CurrentFolderActions({ className }: Props) { diff --git a/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx b/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx index f34c70f9bd..4ded3ccd1f 100644 --- a/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx +++ b/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx @@ -69,7 +69,6 @@ export function DraggableTableRow({ ] .filter(Boolean) .join(' ')} - id={itemKey} key={itemKey} onClick={onClick} onKeyDown={onKeyDown} diff --git a/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss b/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss index 1fdff6a1d3..e2d7f25d64 100644 --- a/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss +++ b/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss @@ -1,5 +1,5 @@ @layer payload-default { - .draggable-with-click { + .draggable-with-click:not(.draggable-with-click--disabled) { user-select: none; } } diff --git 
a/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx b/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx index d99d2e70d9..aced1e06eb 100644 --- a/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx +++ b/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx @@ -1,5 +1,5 @@ import { useDraggable } from '@dnd-kit/core' -import React, { useRef } from 'react' +import React, { useId, useRef } from 'react' import './index.scss' @@ -9,7 +9,7 @@ type Props = { readonly as?: React.ElementType readonly children?: React.ReactNode readonly className?: string - readonly id: string + readonly disabled?: boolean readonly onClick: (e: React.MouseEvent) => void readonly onKeyDown?: (e: React.KeyboardEvent) => void readonly ref?: React.RefObject @@ -17,16 +17,17 @@ type Props = { } export const DraggableWithClick = ({ - id, as = 'div', children, className, + disabled = false, onClick, onKeyDown, ref, thresholdPixels = 3, }: Props) => { - const { attributes, listeners, setNodeRef } = useDraggable({ id }) + const id = useId() + const { attributes, listeners, setNodeRef } = useDraggable({ id, disabled }) const initialPos = useRef({ x: 0, y: 0 }) const isDragging = useRef(false) @@ -75,10 +76,15 @@ export const DraggableWithClick = ({ role="button" tabIndex={0} {...attributes} - className={`${baseClass} ${className || ''}`.trim()} - onKeyDown={onKeyDown} - onPointerDown={onClick ? handlePointerDown : undefined} + className={[baseClass, className, disabled ? `${baseClass}--disabled` : ''] + .filter(Boolean) + .join(' ')} + onKeyDown={disabled ? undefined : onKeyDown} + onPointerDown={disabled ? undefined : onClick ? handlePointerDown : undefined} ref={(node) => { + if (disabled) { + return + } setNodeRef(node) if (ref) { ref.current = node diff --git a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss index 216c9b4c92..e4b74b8a78 100644 --- a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss +++ b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss @@ -22,5 +22,9 @@ align-items: center; gap: calc(var(--base) / 2); } + + .item-card-grid__title { + display: none; + } } } diff --git a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx index 614182936a..ba0601174f 100644 --- a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx +++ b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx @@ -41,6 +41,7 @@ type ActionProps = } export type MoveToFolderDrawerProps = { readonly drawerSlug: string + readonly folderAssignedCollections: CollectionSlug[] readonly folderCollectionSlug: string readonly folderFieldName: string readonly fromFolderID?: number | string @@ -86,11 +87,13 @@ function LoadFolderData(props: MoveToFolderDrawerProps) { async (folderIDToPopulate: null | number | string) => { try { const result = await getFolderResultsComponentAndData({ - activeCollectionSlugs: [props.folderCollectionSlug], browseByFolder: false, + collectionsToDisplay: [props.folderCollectionSlug], displayAs: 'grid', + // todo: should be able to pass undefined, empty array or null and get all folders. 
Need to look at API for this in the server function + folderAssignedCollections: props.folderAssignedCollections, folderID: folderIDToPopulate, - sort: '_folderOrDocumentTitle', + sort: 'name', }) setBreadcrumbs(result.breadcrumbs || []) @@ -107,7 +110,7 @@ function LoadFolderData(props: MoveToFolderDrawerProps) { hasLoadedRef.current = true }, - [getFolderResultsComponentAndData, props.folderCollectionSlug], + [getFolderResultsComponentAndData, props.folderAssignedCollections, props.folderCollectionSlug], ) React.useEffect(() => { @@ -167,6 +170,7 @@ function Content({ folderFieldName, folderID, FolderResultsComponent, + folderType, getSelectedItems, subfolders, } = useFolder() @@ -229,7 +233,7 @@ function Content({ }, [drawerSlug, isModalOpen, clearRouteCache, folderAddedToUnderlyingFolder]) return ( - <> +
{ closeModal(drawerSlug) @@ -298,6 +302,7 @@ function Content({ { void onCreateSuccess({ @@ -321,6 +326,7 @@ function Content({ )} - +
) } diff --git a/packages/ui/src/elements/FolderView/CollectionTypePill/index.scss b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.scss similarity index 100% rename from packages/ui/src/elements/FolderView/CollectionTypePill/index.scss rename to packages/ui/src/elements/FolderView/FilterFolderTypePill/index.scss diff --git a/packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx similarity index 97% rename from packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx rename to packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx index 38a05855ba..7c0fbd0462 100644 --- a/packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx +++ b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx @@ -12,7 +12,7 @@ import './index.scss' const baseClass = 'collection-type' -export function CollectionTypePill() { +export function FilterFolderTypePill() { const { activeCollectionFolderSlugs: visibleCollectionSlugs, allCollectionFolderSlugs: folderCollectionSlugs, diff --git a/packages/ui/src/elements/FolderView/Field/index.scss b/packages/ui/src/elements/FolderView/FolderField/index.scss similarity index 100% rename from packages/ui/src/elements/FolderView/Field/index.scss rename to packages/ui/src/elements/FolderView/FolderField/index.scss diff --git a/packages/ui/src/elements/FolderView/Field/index.server.tsx b/packages/ui/src/elements/FolderView/FolderField/index.server.tsx similarity index 87% rename from packages/ui/src/elements/FolderView/Field/index.server.tsx rename to packages/ui/src/elements/FolderView/FolderField/index.server.tsx index 42da4dd077..ce051fe0b0 100644 --- a/packages/ui/src/elements/FolderView/Field/index.server.tsx +++ b/packages/ui/src/elements/FolderView/FolderField/index.server.tsx @@ -6,7 +6,7 @@ import './index.scss' const baseClass = 'folder-edit-field' -export const FolderEditField = (props: RelationshipFieldServerProps) => { +export const FolderField = (props: RelationshipFieldServerProps) => { if (props.payload.config.folders === false) { return null } diff --git a/packages/ui/src/elements/FolderView/FolderFileCard/index.scss b/packages/ui/src/elements/FolderView/FolderFileCard/index.scss index ef6dfe134b..69ec82e70a 100644 --- a/packages/ui/src/elements/FolderView/FolderFileCard/index.scss +++ b/packages/ui/src/elements/FolderView/FolderFileCard/index.scss @@ -10,6 +10,7 @@ --card-titlebar-icon-color: var(--theme-elevation-300); --card-label-color: var(--theme-text); --card-preview-icon-color: var(--theme-elevation-400); + --assigned-collections-color: var(--theme-elevation-900); position: relative; display: grid; @@ -61,6 +62,7 @@ --card-label-color: var(--theme-success-800); --card-preview-icon-color: var(--theme-success-800); --accessibility-outline: 2px solid var(--theme-success-600); + --assigned-collections-color: var(--theme-success-850); .popup:hover:not(.popup--active) { --card-icon-dots-bg-color: var(--theme-success-100); @@ -74,12 +76,25 @@ } .folder-file-card__icon-wrap .icon { - opacity: 50%; + opacity: 0.5; } .folder-file-card__preview-area .icon { opacity: 0.7; } + + .folder-file-card__preview-area .thumbnail { + &:after { + content: ''; + position: absolute; + top: 0; + left: 0; + background: var(--theme-success-150); + width: 100%; + height: 100%; + mix-blend-mode: hard-light; + } + } } &:not(.folder-file-card--selected) { @@ -104,22 +119,6 @@ } } - &__drag-handle { - position: absolute; - top: 0; - width: 100%; - 
height: 100%; - cursor: pointer; - background: none; - border: none; - padding: 0; - outline-offset: var(--accessibility-outline-offset); - - &:focus-visible { - outline: var(--accessibility-outline); - } - } - &__drop-area { position: absolute; top: 0; @@ -195,13 +194,15 @@ &__titlebar-area { position: relative; pointer-events: none; + display: flex; + flex-direction: column; grid-area: details; border-radius: inherit; display: grid; grid-template-columns: auto 1fr auto; gap: 1rem; align-items: center; - padding: 1rem; + padding: calc(var(--base) / 2); background-color: var(--card-bg-color); .popup { @@ -209,6 +210,10 @@ } } + &__titlebar-labels { + display: grid; + } + &__name { overflow: hidden; font-weight: bold; @@ -219,6 +224,13 @@ color: var(--card-label-color); } + &__assigned-collections { + color: var(--assigned-collections-color); + opacity: 0.5; + margin-top: 4px; + line-height: normal; + } + &__icon-wrap .icon { flex-shrink: 0; color: var(--card-titlebar-icon-color); diff --git a/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx b/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx index f9bb5a9cfc..cfabb80825 100644 --- a/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx +++ b/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx @@ -3,11 +3,14 @@ import type { FolderOrDocument } from 'payload/shared' import { useDroppable } from '@dnd-kit/core' +import { getTranslation } from '@payloadcms/translations' import React from 'react' import { DocumentIcon } from '../../../icons/Document/index.js' import { ThreeDotsIcon } from '../../../icons/ThreeDots/index.js' +import { useConfig } from '../../../providers/Config/index.js' import { useFolder } from '../../../providers/Folders/index.js' +import { useTranslation } from '../../../providers/Translation/index.js' import { Popup } from '../../Popup/index.js' import { Thumbnail } from '../../Thumbnail/index.js' import { ColoredFolderIcon } from '../ColoredFolderIcon/index.js' @@ -19,6 +22,7 @@ const baseClass = 'folder-file-card' type Props = { readonly className?: string readonly disabled?: boolean + readonly folderType?: string[] readonly id: number | string readonly isDeleting?: boolean readonly isFocused?: boolean @@ -37,6 +41,7 @@ export function FolderFileCard({ type, className = '', disabled = false, + folderType, isDeleting = false, isFocused = false, isSelected = false, @@ -54,6 +59,7 @@ export function FolderFileCard({ data: { id, type, + folderType, }, disabled: disableDrop, }) @@ -75,7 +81,7 @@ export function FolderFileCard({ }, [isFocused]) return ( -
- {!disabled && (onClick || onKeyDown) && ( - - )} {!disableDrop ?
: null} {type === 'file' ? ( @@ -112,9 +112,14 @@ export function FolderFileCard({
{type === 'file' ? : }
-

- {title} -

+
+

+ {title} +

+ {folderType && folderType.length > 0 ? ( + + ) : null} +
{PopupActions ? ( } @@ -127,7 +132,33 @@ export function FolderFileCard({ ) : null}
-
+ + ) +} + +function AssignedCollections({ folderType }: { folderType: string[] }) { + const { config } = useConfig() + const { i18n } = useTranslation() + + const collectionsDisplayText = React.useMemo(() => { + return folderType.reduce((acc, collection) => { + const collectionConfig = config.collections?.find((c) => c.slug === collection) + if (collectionConfig) { + return [...acc, getTranslation(collectionConfig.labels.plural, i18n)] + } + return acc + }, []) + }, [folderType, config.collections, i18n]) + + return ( +

+ {collectionsDisplayText.map((label, index) => ( + + {label} + {index < folderType.length - 1 ? ', ' : ''} + + ))} +

) } @@ -138,20 +169,16 @@ type ContextCardProps = { readonly type: 'file' | 'folder' } export function ContextFolderFileCard({ type, className, index, item }: ContextCardProps) { - const { - focusedRowIndex, - isDragging, - itemKeysToMove, - onItemClick, - onItemKeyPress, - selectedItemKeys, - } = useFolder() + const { checkIfItemIsDisabled, focusedRowIndex, onItemClick, onItemKeyPress, selectedItemKeys } = + useFolder() const isSelected = selectedItemKeys.has(item.itemKey) + const isDisabled = checkIfItemIsDisabled(item) return ( { - const map: Record = {} + const map: Record = {} config.collections.forEach((collection) => { - map[collection.slug] = getTranslation(collection.labels?.singular, i18n) + map[collection.slug] = { + plural: getTranslation(collection.labels?.plural, i18n), + singular: getTranslation(collection.labels?.singular, i18n), + } }) return map }) @@ -94,7 +97,22 @@ export function FolderFileTable({ showRelationCell = true }: Props) { } if (name === 'type') { - cellValue = relationToMap[relationTo] || relationTo + cellValue = ( + <> + {relationToMap[relationTo]?.singular || relationTo} + {Array.isArray(subfolder.value?.folderType) + ? subfolder.value?.folderType.reduce((acc, slug, index) => { + if (index === 0) { + return ` — ${relationToMap[slug]?.plural || slug}` + } + if (index > 0) { + return `${acc}, ${relationToMap[slug]?.plural || slug}` + } + return acc + }, '') + : ''} + + ) } if (index === 0) { @@ -108,7 +126,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { return cellValue } })} - disabled={isDragging && selectedItemKeys?.has(itemKey)} + disabled={checkIfItemIsDisabled(subfolder)} dragData={{ id: subfolderID, type: 'folder', @@ -160,7 +178,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { } if (name === 'type') { - cellValue = relationToMap[relationTo] || relationTo + cellValue = relationToMap[relationTo]?.singular || relationTo } if (index === 0) { @@ -174,7 +192,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { return cellValue } })} - disabled={isDragging || selectedItemKeys?.has(itemKey)} + disabled={checkIfItemIsDisabled(document)} dragData={{ id: documentID, type: 'document', diff --git a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx new file mode 100644 index 0000000000..2592eff529 --- /dev/null +++ b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx @@ -0,0 +1,140 @@ +import type { Option, OptionObject, SelectFieldClientProps } from 'payload' + +import React from 'react' + +import type { ReactSelectAdapterProps } from '../../ReactSelect/types.js' + +import { mergeFieldStyles } from '../../../fields/mergeFieldStyles.js' +import { formatOptions } from '../../../fields/Select/index.js' +import { SelectInput } from '../../../fields/Select/Input.js' +import { useField } from '../../../forms/useField/index.js' +import { useFolder } from '../../../providers/Folders/index.js' +import { useTranslation } from '../../../providers/Translation/index.js' + +export const FolderTypeField = ({ + options: allSelectOptions, + ...props +}: { options: Option[] } & SelectFieldClientProps) => { + const { + field, + field: { + name, + admin: { + className, + isClearable = true, + isSortable = true, + placeholder, + } = {} as SelectFieldClientProps['field']['admin'], + hasMany = false, + label, + localized, + required, + }, + onChange: onChangeFromProps, + path: pathFromProps, + readOnly, + validate, + } = props 
+ const { t } = useTranslation() + + const { folderType } = useFolder() + + const options = React.useMemo(() => { + if (!folderType || folderType.length === 0) { + return formatOptions(allSelectOptions) + } + return formatOptions( + allSelectOptions.filter((option) => { + if (typeof option === 'object' && option.value) { + return folderType.includes(option.value) + } + return true + }), + ) + }, [allSelectOptions, folderType]) + + const memoizedValidate = React.useCallback( + (value, validationOptions) => { + if (typeof validate === 'function') { + return validate(value, { ...validationOptions, hasMany, options, required }) + } + }, + [validate, required, hasMany, options], + ) + + const { + customComponents: { AfterInput, BeforeInput, Description, Error, Label } = {}, + disabled, + path, + selectFilterOptions, + setValue, + showError, + value, + } = useField({ + potentiallyStalePath: pathFromProps, + validate: memoizedValidate, + }) + + const onChange: ReactSelectAdapterProps['onChange'] = React.useCallback( + (selectedOption: OptionObject | OptionObject[]) => { + if (!readOnly || disabled) { + let newValue: string | string[] = null + if (selectedOption && hasMany) { + if (Array.isArray(selectedOption)) { + newValue = selectedOption.map((option) => option.value) + } else { + newValue = [] + } + } else if (selectedOption && !Array.isArray(selectedOption)) { + newValue = selectedOption.value + } + + if (typeof onChangeFromProps === 'function') { + onChangeFromProps(newValue) + } + + setValue(newValue) + } + }, + [readOnly, disabled, hasMany, setValue, onChangeFromProps], + ) + + const styles = React.useMemo(() => mergeFieldStyles(field), [field]) + + return ( +
+ + selectFilterOptions?.some( + (option) => (typeof option === 'string' ? option : option.value) === value, + ) + : undefined + } + hasMany={hasMany} + isClearable={isClearable} + isSortable={isSortable} + Label={Label} + label={label} + localized={localized} + name={name} + onChange={onChange} + options={options} + path={path} + placeholder={placeholder} + readOnly={readOnly || disabled} + required={required || (Array.isArray(folderType) && folderType.length > 0)} + showError={showError} + style={styles} + value={value as string | string[]} + /> +
+ ) +} diff --git a/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx b/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx index 1b20c13213..2493c04f57 100644 --- a/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx +++ b/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx @@ -1,5 +1,6 @@ 'use client' +import type { CollectionSlug } from 'payload' import type { FolderOrDocument } from 'payload/shared' import { useModal } from '@faceless-ui/modal' @@ -16,8 +17,8 @@ import { useDocumentInfo } from '../../../providers/DocumentInfo/index.js' import { useTranslation } from '../../../providers/Translation/index.js' import { Button } from '../../Button/index.js' import { formatDrawerSlug, useDrawerDepth } from '../../Drawer/index.js' -import { MoveItemsToFolderDrawer } from '../Drawers/MoveToFolder/index.js' import './index.scss' +import { MoveItemsToFolderDrawer } from '../Drawers/MoveToFolder/index.js' const baseClass = 'move-doc-to-folder' @@ -151,6 +152,8 @@ export const MoveDocToFolderButton = ({ React.ReactNode - value: keyof FolderOrDocument['value'] + value: FolderSortKeys }[] = [ - { label: (t) => t('general:name'), value: '_folderOrDocumentTitle' }, + { label: (t) => t('general:name'), value: 'name' }, { label: (t) => t('general:createdAt'), value: 'createdAt' }, { label: (t) => t('general:updatedAt'), value: 'updatedAt' }, ] @@ -48,9 +48,9 @@ export function SortByPill() { const { refineFolderData, sort } = useFolder() const { t } = useTranslation() const sortDirection = sort.startsWith('-') ? 'desc' : 'asc' - const [selectedSortOption] = sortOnOptions.filter( - ({ value }) => value === (sort.startsWith('-') ? sort.slice(1) : sort), - ) + const [selectedSortOption] = + sortOnOptions.filter(({ value }) => value === (sort.startsWith('-') ? sort.slice(1) : sort)) || + sortOnOptions const [selectedOrderOption] = orderOnOptions.filter(({ value }) => value === sortDirection) return ( @@ -62,7 +62,7 @@ export function SortByPill() { ) : ( )} - {selectedSortOption.label(t)} + {selectedSortOption?.label(t)} } className={baseClass} @@ -73,12 +73,13 @@ export function SortByPill() { {sortOnOptions.map(({ label, value }) => ( { refineFolderData({ query: { - sort: value, + page: '1', + sort: sortDirection === 'desc' ? `-${value}` : value, }, updateURL: true, }) @@ -94,19 +95,23 @@ export function SortByPill() { {orderOnOptions.map(({ label, value }) => ( { - if (value === 'asc') { + if (sortDirection !== value) { refineFolderData({ query: { - sort: value === 'asc' ? `-${sort}` : sort, + page: '1', + sort: + value === 'desc' + ? 
`-${selectedSortOption?.value}` + : selectedSortOption?.value, }, updateURL: true, }) + close() } - close() }} > {label(t)} diff --git a/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx b/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx index ff8fe24c9a..626ff0c649 100644 --- a/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx +++ b/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx @@ -18,11 +18,13 @@ const baseClass = 'create-new-doc-in-folder' export function ListCreateNewDocInFolderButton({ buttonLabel, collectionSlugs, + folderAssignedCollections, onCreateSuccess, slugPrefix, }: { buttonLabel: string collectionSlugs: CollectionSlug[] + folderAssignedCollections: CollectionSlug[] onCreateSuccess: (args: { collectionSlug: CollectionSlug doc: Record @@ -133,6 +135,9 @@ export function ListCreateNewDocInFolderButton({ { await onCreateSuccess({ diff --git a/packages/ui/src/exports/client/index.ts b/packages/ui/src/exports/client/index.ts index e7db91cd0d..f33f1981cb 100644 --- a/packages/ui/src/exports/client/index.ts +++ b/packages/ui/src/exports/client/index.ts @@ -123,8 +123,9 @@ export { SaveDraftButton } from '../../elements/SaveDraftButton/index.js' // folder elements export { FolderProvider, useFolder } from '../../providers/Folders/index.js' export { BrowseByFolderButton } from '../../elements/FolderView/BrowseByFolderButton/index.js' -export { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' +export { FolderTypeField } from '../../elements/FolderView/FolderTypeField/index.js' export { FolderFileTable } from '../../elements/FolderView/FolderFileTable/index.js' +export { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' export { type Option as ReactSelectOption, ReactSelect } from '../../elements/ReactSelect/index.js' export { ReactSelect as Select } from '../../elements/ReactSelect/index.js' diff --git a/packages/ui/src/exports/rsc/index.ts b/packages/ui/src/exports/rsc/index.ts index 7e0b00208f..155cd026fe 100644 --- a/packages/ui/src/exports/rsc/index.ts +++ b/packages/ui/src/exports/rsc/index.ts @@ -1,7 +1,7 @@ export { FieldDiffContainer } from '../../elements/FieldDiffContainer/index.js' export { FieldDiffLabel } from '../../elements/FieldDiffLabel/index.js' export { FolderTableCell } from '../../elements/FolderView/Cell/index.server.js' -export { FolderEditField } from '../../elements/FolderView/Field/index.server.js' +export { FolderField } from '../../elements/FolderView/FolderField/index.server.js' export { getHTMLDiffComponents } from '../../elements/HTMLDiff/index.js' export { File } from '../../graphics/File/index.js' export { CheckIcon } from '../../icons/Check/index.js' diff --git a/packages/ui/src/fields/Checkbox/Input.tsx b/packages/ui/src/fields/Checkbox/Input.tsx index 149712a716..43ff3c412b 100644 --- a/packages/ui/src/fields/Checkbox/Input.tsx +++ b/packages/ui/src/fields/Checkbox/Input.tsx @@ -1,7 +1,7 @@ 'use client' import type { StaticLabel } from 'payload' -import React from 'react' +import React, { useId } from 'react' import { RenderCustomComponent } from '../../elements/RenderCustomComponent/index.js' import { FieldLabel } from '../../fields/FieldLabel/index.js' @@ -28,7 +28,7 @@ export type CheckboxInputProps = { export const inputBaseClass = 'checkbox-input' export const CheckboxInput: React.FC = ({ - id, + id: idFromProps, name, AfterInput, BeforeInput, @@ -43,6 +43,8 @@ 
export const CheckboxInput: React.FC = ({ readOnly, required, }) => { + const fallbackID = useId() + const id = idFromProps || fallbackID return (
+export const formatOptions = (options: Option[]): OptionObject[] => options.map((option) => { if (typeof option === 'object' && (option.value || option.value === '')) { return option diff --git a/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts b/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts new file mode 100644 index 0000000000..145a8d0935 --- /dev/null +++ b/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts @@ -0,0 +1,15 @@ +import type { FolderOrDocument } from 'payload/shared' + +export function groupItemIDsByRelation(items: FolderOrDocument[]) { + return items.reduce( + (acc, item) => { + if (!acc[item.relationTo]) { + acc[item.relationTo] = [] + } + acc[item.relationTo].push(item.value.id) + + return acc + }, + {} as Record, + ) +} diff --git a/packages/ui/src/providers/Folders/index.tsx b/packages/ui/src/providers/Folders/index.tsx index 62d47883e4..2da311857e 100644 --- a/packages/ui/src/providers/Folders/index.tsx +++ b/packages/ui/src/providers/Folders/index.tsx @@ -14,7 +14,7 @@ import { parseSearchParams } from '../../utilities/parseSearchParams.js' import { useConfig } from '../Config/index.js' import { useRouteTransition } from '../RouteTransition/index.js' import { useTranslation } from '../Translation/index.js' -import { getMetaSelection, getShiftSelection, groupItemIDsByRelation } from './selection.js' +import { groupItemIDsByRelation } from './groupItemIDsByRelation.js' type FolderQueryParams = { page?: string @@ -43,20 +43,22 @@ export type FolderContextValue = { readonly allCollectionFolderSlugs?: CollectionSlug[] allowCreateCollectionSlugs: CollectionSlug[] breadcrumbs?: FolderBreadcrumb[] + checkIfItemIsDisabled: (item: FolderOrDocument) => boolean clearSelections: () => void currentFolder?: FolderOrDocument | null documents?: FolderOrDocument[] + dragOverlayItem?: FolderOrDocument | undefined focusedRowIndex: number folderCollectionConfig: ClientCollectionConfig folderCollectionSlug: string folderFieldName: string folderID?: number | string FolderResultsComponent: React.ReactNode + folderType: CollectionSlug[] | undefined getFolderRoute: (toFolderID?: number | string) => string getSelectedItems?: () => FolderOrDocument[] isDragging: boolean itemKeysToMove?: Set - lastSelectedIndex: null | number moveToFolder: (args: { itemsToMove: FolderOrDocument[] toFolderID?: number | string @@ -69,6 +71,7 @@ export type FolderContextValue = { }) => void refineFolderData: (args: { query?: FolderQueryParams; updateURL: boolean }) => void search: string + selectedFolderCollections?: CollectionSlug[] readonly selectedItemKeys: Set setBreadcrumbs: React.Dispatch> setFocusedRowIndex: React.Dispatch> @@ -82,30 +85,33 @@ const Context = React.createContext({ allCollectionFolderSlugs: [], allowCreateCollectionSlugs: [], breadcrumbs: [], + checkIfItemIsDisabled: () => false, clearSelections: () => {}, currentFolder: null, documents: [], + dragOverlayItem: undefined, focusedRowIndex: -1, folderCollectionConfig: null, folderCollectionSlug: '', folderFieldName: 'folder', folderID: undefined, FolderResultsComponent: null, + folderType: undefined, getFolderRoute: () => '', getSelectedItems: () => [], isDragging: false, itemKeysToMove: undefined, - lastSelectedIndex: null, moveToFolder: () => Promise.resolve(undefined), onItemClick: () => undefined, onItemKeyPress: () => undefined, refineFolderData: () => undefined, search: '', + selectedFolderCollections: undefined, selectedItemKeys: new Set(), setBreadcrumbs: () => {}, setFocusedRowIndex: () => -1, 
setIsDragging: () => false, - sort: '_folderOrDocumentTitle', + sort: 'name', subfolders: [], }) @@ -191,7 +197,7 @@ export function FolderProvider({ FolderResultsComponent: InitialFolderResultsComponent, onItemClick: onItemClickFromProps, search, - sort = '_folderOrDocumentTitle', + sort = 'name', subfolders, }: FolderProviderProps) { const parentFolderContext = useFolder() @@ -202,6 +208,11 @@ export function FolderProvider({ const router = useRouter() const { startRouteTransition } = useRouteTransition() + const currentlySelectedIndexes = React.useRef(new Set()) + + const [selectedFolderCollections, setSelectedFolderCollections] = React.useState< + CollectionSlug[] + >([]) const [FolderResultsComponent, setFolderResultsComponent] = React.useState( InitialFolderResultsComponent || (() => null), ) @@ -221,7 +232,8 @@ export function FolderProvider({ () => new Set(), ) const [focusedRowIndex, setFocusedRowIndex] = React.useState(-1) - const [lastSelectedIndex, setLastSelectedIndex] = React.useState(null) + // This is used to determine what data to display on the drag overlay + const [dragOverlayItem, setDragOverlayItem] = React.useState() const [breadcrumbs, setBreadcrumbs] = React.useState(_breadcrumbsFromProps) const lastClickTime = React.useRef(null) @@ -230,7 +242,8 @@ export function FolderProvider({ const clearSelections = React.useCallback(() => { setFocusedRowIndex(-1) setSelectedItemKeys(new Set()) - setLastSelectedIndex(undefined) + setDragOverlayItem(undefined) + currentlySelectedIndexes.current = new Set() }, []) const mergeQuery = React.useCallback( @@ -245,6 +258,7 @@ export function FolderProvider({ ...currentQuery, ...newQuery, page, + relationTo: 'relationTo' in newQuery ? newQuery.relationTo : currentQuery?.relationTo, search: 'search' in newQuery ? newQuery.search : currentQuery?.search, sort: 'sort' in newQuery ? newQuery.sort : (currentQuery?.sort ?? 
undefined), } @@ -258,8 +272,11 @@ export function FolderProvider({ ({ query, updateURL }) => { if (updateURL) { const newQuery = mergeQuery(query) + startRouteTransition(() => - router.replace(`${qs.stringify(newQuery, { addQueryPrefix: true })}`), + router.replace( + `${qs.stringify({ ...newQuery, relationTo: JSON.stringify(newQuery.relationTo) }, { addQueryPrefix: true })}`, + ), ) setCurrentQuery(newQuery) @@ -301,10 +318,12 @@ export function FolderProvider({ ({ collectionSlug, docID }: { collectionSlug: string; docID?: number | string }) => { if (drawerDepth === 1) { // not in a drawer (default is 1) - clearSelections() if (collectionSlug === folderCollectionSlug) { // clicked on folder, take the user to the folder view - startRouteTransition(() => router.push(getFolderRoute(docID))) + startRouteTransition(() => { + router.push(getFolderRoute(docID)) + clearSelections() + }) } else if (collectionSlug) { // clicked on document, take the user to the documet view startRouteTransition(() => { @@ -314,8 +333,11 @@ export function FolderProvider({ path: `/collections/${collectionSlug}/${docID}`, }), ) + clearSelections() }) } + } else { + clearSelections() } if (typeof onItemClickFromProps === 'function') { @@ -335,97 +357,205 @@ export function FolderProvider({ ], ) + const handleShiftSelection = React.useCallback( + (targetIndex: number) => { + const allItems = [...subfolders, ...documents] + + // Find existing selection boundaries + const existingIndexes = allItems.reduce((acc, item, idx) => { + if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []) + + if (existingIndexes.length === 0) { + // No existing selection, just select target + return [targetIndex] + } + + const firstSelectedIndex = Math.min(...existingIndexes) + const lastSelectedIndex = Math.max(...existingIndexes) + const isWithinBounds = targetIndex >= firstSelectedIndex && targetIndex <= lastSelectedIndex + + // Choose anchor based on whether we're contracting or extending + let anchorIndex = targetIndex + if (isWithinBounds) { + // Contracting: if target is at a boundary, use target as anchor + // Otherwise, use furthest boundary to maintain opposite edge + if (targetIndex === firstSelectedIndex || targetIndex === lastSelectedIndex) { + anchorIndex = targetIndex + } else { + const distanceToFirst = Math.abs(targetIndex - firstSelectedIndex) + const distanceToLast = Math.abs(targetIndex - lastSelectedIndex) + anchorIndex = distanceToFirst >= distanceToLast ? firstSelectedIndex : lastSelectedIndex + } + } else { + // Extending: use closest boundary + const distanceToFirst = Math.abs(targetIndex - firstSelectedIndex) + const distanceToLast = Math.abs(targetIndex - lastSelectedIndex) + anchorIndex = distanceToFirst <= distanceToLast ? 
firstSelectedIndex : lastSelectedIndex + } + + // Create range from anchor to target + const startIndex = Math.min(anchorIndex, targetIndex) + const endIndex = Math.max(anchorIndex, targetIndex) + const newRangeIndexes = Array.from( + { length: endIndex - startIndex + 1 }, + (_, i) => startIndex + i, + ) + + if (isWithinBounds) { + // Contracting: replace with new range + return newRangeIndexes + } else { + // Extending: union with existing + return [...new Set([...existingIndexes, ...newRangeIndexes])] + } + }, + [subfolders, documents, selectedItemKeys], + ) + + const updateSelections = React.useCallback( + ({ indexes }: { indexes: number[] }) => { + const allItems = [...subfolders, ...documents] + const { newSelectedFolderCollections, newSelectedItemKeys } = allItems.reduce( + (acc, item, index) => { + if (indexes.includes(index)) { + acc.newSelectedItemKeys.add(item.itemKey) + if (item.relationTo === folderCollectionSlug) { + item.value.folderType?.forEach((collectionSlug) => { + if (!acc.newSelectedFolderCollections.includes(collectionSlug)) { + acc.newSelectedFolderCollections.push(collectionSlug) + } + }) + } else { + if (!acc.newSelectedFolderCollections.includes(item.relationTo)) { + acc.newSelectedFolderCollections.push(item.relationTo) + } + } + } + return acc + }, + { + newSelectedFolderCollections: [] satisfies CollectionSlug[], + newSelectedItemKeys: new Set(), + }, + ) + + setSelectedFolderCollections(newSelectedFolderCollections) + setSelectedItemKeys(newSelectedItemKeys) + }, + [documents, folderCollectionSlug, subfolders], + ) + const onItemKeyPress: FolderContextValue['onItemKeyPress'] = React.useCallback( - ({ event, index, item }) => { + ({ event, item: currentItem }) => { const { code, ctrlKey, metaKey, shiftKey } = event const isShiftPressed = shiftKey const isCtrlPressed = ctrlKey || metaKey - let newSelectedIndexes: Set | undefined = undefined + const isCurrentlySelected = selectedItemKeys.has(currentItem.itemKey) + const allItems = [...subfolders, ...documents] + const currentItemIndex = allItems.findIndex((item) => item.itemKey === currentItem.itemKey) switch (code) { - case 'ArrowDown': { - event.preventDefault() - const nextIndex = Math.min(index + 1, totalCount - 1) - setFocusedRowIndex(nextIndex) - - if (isCtrlPressed) { - break - } - - if (allowMultiSelection && isShiftPressed) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: Math.min(lastSelectedIndex, totalCount), - selectToIndex: Math.min(nextIndex, totalCount), - }) - } else { - setLastSelectedIndex(nextIndex) - newSelectedIndexes = new Set([nextIndex]) - } - break - } + case 'ArrowDown': + case 'ArrowLeft': + case 'ArrowRight': case 'ArrowUp': { event.preventDefault() - const prevIndex = Math.max(index - 1, 0) - setFocusedRowIndex(prevIndex) + + if (currentItemIndex === -1) { + break + } + + const isBackward = code === 'ArrowLeft' || code === 'ArrowUp' + const newItemIndex = isBackward ? 
currentItemIndex - 1 : currentItemIndex + 1 + + if (newItemIndex < 0 || newItemIndex > totalCount - 1) { + // out of bounds, keep current selection + return + } + + setFocusedRowIndex(newItemIndex) if (isCtrlPressed) { break } - if (allowMultiSelection && isShiftPressed) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: lastSelectedIndex, - selectToIndex: prevIndex, - }) - } else { - setLastSelectedIndex(prevIndex) - newSelectedIndexes = new Set([prevIndex]) + if (isShiftPressed && allowMultiSelection) { + const selectedIndexes = handleShiftSelection(newItemIndex) + updateSelections({ indexes: selectedIndexes }) + return } + + // Single selection without shift + if (!isShiftPressed) { + const newItem = allItems[newItemIndex] + setSelectedItemKeys(new Set([newItem.itemKey])) + } + break } case 'Enter': { if (selectedItemKeys.size === 1) { - newSelectedIndexes = new Set([]) setFocusedRowIndex(undefined) + navigateAfterSelection({ + collectionSlug: currentItem.relationTo, + docID: extractID(currentItem.value), + }) + return } break } case 'Escape': { - setFocusedRowIndex(undefined) - newSelectedIndexes = new Set([]) + clearSelections() break } case 'KeyA': { if (allowMultiSelection && isCtrlPressed) { event.preventDefault() setFocusedRowIndex(totalCount - 1) - newSelectedIndexes = new Set(Array.from({ length: totalCount }, (_, i) => i)) + updateSelections({ + indexes: Array.from({ length: totalCount }, (_, i) => i), + }) } break } case 'Space': { if (allowMultiSelection && isShiftPressed) { event.preventDefault() - newSelectedIndexes = getMetaSelection({ - currentSelection: newSelectedIndexes, - toggleIndex: index, + const allItems = [...subfolders, ...documents] + updateSelections({ + indexes: allItems.reduce((acc, item, idx) => { + if (item.itemKey === currentItem.itemKey) { + if (isCurrentlySelected) { + return acc + } else { + acc.push(idx) + } + } else if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []), }) - setLastSelectedIndex(index) } else { event.preventDefault() - newSelectedIndexes = new Set([index]) - setLastSelectedIndex(index) + updateSelections({ + indexes: isCurrentlySelected ? 
[] : [currentItemIndex], + }) } break } case 'Tab': { if (allowMultiSelection && isShiftPressed) { - const prevIndex = index - 1 - if (prevIndex < 0 && newSelectedIndexes?.size > 0) { + const prevIndex = currentItemIndex - 1 + if (prevIndex < 0 && selectedItemKeys?.size > 0) { setFocusedRowIndex(prevIndex) } } else { - const nextIndex = index + 1 + const nextIndex = currentItemIndex + 1 if (nextIndex === totalCount && selectedItemKeys.size > 0) { setFocusedRowIndex(totalCount - 1) } @@ -433,101 +563,100 @@ export function FolderProvider({ break } } - - if (!newSelectedIndexes) { - return - } - - setSelectedItemKeys( - [...subfolders, ...documents].reduce((acc, item, index) => { - if (newSelectedIndexes?.size && newSelectedIndexes.has(index)) { - acc.add(item.itemKey) - } - return acc - }, new Set()), - ) - - if (selectedItemKeys.size === 1 && code === 'Enter') { - navigateAfterSelection({ - collectionSlug: item.relationTo, - docID: extractID(item.value), - }) - } }, [ - allowMultiSelection, - documents, - lastSelectedIndex, - navigateAfterSelection, - subfolders, - totalCount, selectedItemKeys, + subfolders, + documents, + allowMultiSelection, + handleShiftSelection, + updateSelections, + navigateAfterSelection, + clearSelections, + totalCount, ], ) const onItemClick: FolderContextValue['onItemClick'] = React.useCallback( - ({ event, index, item }) => { + ({ event, item: clickedItem }) => { let doubleClicked: boolean = false const isCtrlPressed = event.ctrlKey || event.metaKey const isShiftPressed = event.shiftKey - let newSelectedIndexes: Set | undefined = undefined + const isCurrentlySelected = selectedItemKeys.has(clickedItem.itemKey) + const allItems = [...subfolders, ...documents] + const currentItemIndex = allItems.findIndex((item) => item.itemKey === clickedItem.itemKey) if (allowMultiSelection && isCtrlPressed) { - newSelectedIndexes = getMetaSelection({ - currentSelection: newSelectedIndexes, - toggleIndex: index, - }) - } else if (allowMultiSelection && isShiftPressed && lastSelectedIndex !== undefined) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: lastSelectedIndex, - selectToIndex: index, - }) + event.preventDefault() + let overlayItemKey: FolderDocumentItemKey | undefined + const indexes = allItems.reduce((acc, item, idx) => { + if (item.itemKey === clickedItem.itemKey) { + if (isCurrentlySelected && event.type !== 'pointermove') { + return acc + } else { + acc.push(idx) + overlayItemKey = item.itemKey + } + } else if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []) + + updateSelections({ indexes }) + + if (overlayItemKey) { + setDragOverlayItem(getItem(overlayItemKey)) + } + } else if (allowMultiSelection && isShiftPressed) { + if (currentItemIndex !== -1) { + const selectedIndexes = handleShiftSelection(currentItemIndex) + updateSelections({ indexes: selectedIndexes }) + } } else if (allowMultiSelection && event.type === 'pointermove') { // on drag start of an unselected item - if (!selectedItemKeys.has(item.itemKey)) { - newSelectedIndexes = new Set([index]) + if (!isCurrentlySelected) { + updateSelections({ + indexes: allItems.reduce((acc, item, idx) => { + if (item.itemKey === clickedItem.itemKey) { + acc.push(idx) + } + return acc + }, []), + }) } - setLastSelectedIndex(index) + setDragOverlayItem(getItem(clickedItem.itemKey)) } else { // Normal click - select single item - newSelectedIndexes = new Set([index]) const now = Date.now() - doubleClicked = now - lastClickTime.current < 400 && lastSelectedIndex === index + 
doubleClicked = + now - lastClickTime.current < 400 && dragOverlayItem?.itemKey === clickedItem.itemKey lastClickTime.current = now - setLastSelectedIndex(index) - } - - if (!newSelectedIndexes) { - setFocusedRowIndex(undefined) - } else { - setFocusedRowIndex(index) - } - - if (newSelectedIndexes) { - setSelectedItemKeys( - [...subfolders, ...documents].reduce((acc, item, index) => { - if (newSelectedIndexes.size && newSelectedIndexes.has(index)) { - acc.add(item.itemKey) - } - return acc - }, new Set()), - ) + if (!doubleClicked) { + updateSelections({ + indexes: isCurrentlySelected && selectedItemKeys.size === 1 ? [] : [currentItemIndex], + }) + } + setDragOverlayItem(getItem(clickedItem.itemKey)) } if (doubleClicked) { navigateAfterSelection({ - collectionSlug: item.relationTo, - docID: extractID(item.value), + collectionSlug: clickedItem.relationTo, + docID: extractID(clickedItem.value), }) } }, [ selectedItemKeys, - allowMultiSelection, - lastSelectedIndex, subfolders, documents, + allowMultiSelection, + dragOverlayItem, + getItem, + updateSelections, navigateAfterSelection, + handleShiftSelection, ], ) @@ -602,6 +731,70 @@ export function FolderProvider({ [folderID, clearSelections, folderCollectionSlug, folderFieldName, routes.api, serverURL, t], ) + const checkIfItemIsDisabled: FolderContextValue['checkIfItemIsDisabled'] = React.useCallback( + (item) => { + function folderAcceptsItem({ + item, + selectedFolderCollections, + }: { + item: FolderOrDocument + selectedFolderCollections: string[] + }): boolean { + if ( + !item.value.folderType || + (Array.isArray(item.value.folderType) && item.value.folderType.length === 0) + ) { + // Enable folder that accept all collections + return false + } + + if (selectedFolderCollections.length === 0) { + // If no collections are selected, enable folders that accept all collections + return Boolean(item.value.folderType || item.value.folderType.length > 0) + } + + // Disable folders that do not accept all of the selected collections + return selectedFolderCollections.some((slug) => { + return !item.value.folderType.includes(slug) + }) + } + + if (isDragging) { + const isSelected = selectedItemKeys.has(item.itemKey) + if (isSelected) { + return true + } else if (item.relationTo === folderCollectionSlug) { + return folderAcceptsItem({ item, selectedFolderCollections }) + } else { + // Non folder items are disabled on drag + return true + } + } else if (parentFolderContext?.selectedItemKeys?.size) { + // Disable selected items from being navigated to in move to drawer + if (parentFolderContext.selectedItemKeys.has(item.itemKey)) { + return true + } + // Moving items to folder + if (item.relationTo === folderCollectionSlug) { + return folderAcceptsItem({ + item, + selectedFolderCollections: parentFolderContext.selectedFolderCollections, + }) + } + // If the item is not a folder, it is disabled on move + return true + } + }, + [ + selectedFolderCollections, + isDragging, + selectedItemKeys, + folderCollectionSlug, + parentFolderContext?.selectedFolderCollections, + parentFolderContext?.selectedItemKeys, + ], + ) + // If a new component is provided, update the state so children can re-render with the new component React.useEffect(() => { if (InitialFolderResultsComponent) { @@ -616,33 +809,37 @@ export function FolderProvider({ allCollectionFolderSlugs, allowCreateCollectionSlugs, breadcrumbs, + checkIfItemIsDisabled, clearSelections, - currentFolder: breadcrumbs?.[0]?.id - ? 
formatFolderOrDocumentItem({ - folderFieldName, - isUpload: false, - relationTo: folderCollectionSlug, - useAsTitle: folderCollectionConfig.admin.useAsTitle, - value: breadcrumbs[breadcrumbs.length - 1], - }) - : null, + currentFolder: + breadcrumbs?.[breadcrumbs.length - 1]?.id !== undefined + ? formatFolderOrDocumentItem({ + folderFieldName, + isUpload: false, + relationTo: folderCollectionSlug, + useAsTitle: folderCollectionConfig.admin.useAsTitle, + value: breadcrumbs[breadcrumbs.length - 1], + }) + : null, documents, + dragOverlayItem, focusedRowIndex, folderCollectionConfig, folderCollectionSlug, folderFieldName, folderID, FolderResultsComponent, + folderType: breadcrumbs?.[breadcrumbs.length - 1]?.folderType, getFolderRoute, getSelectedItems, isDragging, itemKeysToMove: parentFolderContext.selectedItemKeys, - lastSelectedIndex, moveToFolder, onItemClick, onItemKeyPress, refineFolderData, search, + selectedFolderCollections, selectedItemKeys, setBreadcrumbs, setFocusedRowIndex, diff --git a/packages/ui/src/providers/Folders/selection.ts b/packages/ui/src/providers/Folders/selection.ts deleted file mode 100644 index b3b1f932ca..0000000000 --- a/packages/ui/src/providers/Folders/selection.ts +++ /dev/null @@ -1,52 +0,0 @@ -import type { FolderOrDocument } from 'payload/shared' - -export function getShiftSelection({ - selectFromIndex, - selectToIndex, -}: { - selectFromIndex: number - selectToIndex: number -}): Set { - if (selectFromIndex === null || selectFromIndex === undefined) { - return new Set([selectToIndex]) - } - - const start = Math.min(selectToIndex, selectFromIndex) - const end = Math.max(selectToIndex, selectFromIndex) - const rangeSelection = new Set( - Array.from({ length: Math.max(start, end) + 1 }, (_, i) => i).filter((index) => { - return index >= start && index <= end - }), - ) - return rangeSelection -} - -export function getMetaSelection({ - currentSelection, - toggleIndex, -}: { - currentSelection: Set - toggleIndex: number -}): Set { - const newSelection = new Set(currentSelection) - if (newSelection.has(toggleIndex)) { - newSelection.delete(toggleIndex) - } else { - newSelection.add(toggleIndex) - } - return newSelection -} - -export function groupItemIDsByRelation(items: FolderOrDocument[]) { - return items.reduce( - (acc, item) => { - if (!acc[item.relationTo]) { - acc[item.relationTo] = [] - } - acc[item.relationTo].push(item.value.id) - - return acc - }, - {} as Record, - ) -} diff --git a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx index 30c6830de0..69378293b6 100644 --- a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx +++ b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx @@ -8,7 +8,7 @@ import type { import type { FolderBreadcrumb, FolderOrDocument } from 'payload/shared' import { APIError, formatErrors, getFolderData } from 'payload' -import { buildFolderWhereConstraints } from 'payload/shared' +import { buildFolderWhereConstraints, combineWhereConstraints } from 'payload/shared' import { FolderFileTable, @@ -19,6 +19,7 @@ import { type GetFolderResultsComponentAndDataResult = { breadcrumbs?: FolderBreadcrumb[] documents?: FolderOrDocument[] + folderAssignedCollections?: CollectionSlug[] FolderResultsComponent: React.ReactNode subfolders?: FolderOrDocument[] } @@ -45,17 +46,10 @@ export const getFolderResultsComponentAndDataHandler: ServerFunction< const res = await getFolderResultsComponentAndData(args) return res } catch (err) { - 
req.payload.logger.error({ err, msg: `There was an error building form state` }) - - if (err.message === 'Could not find field schema for given path') { - return { - message: err.message, - } - } - - if (err.message === 'Unauthorized') { - return null - } + req.payload.logger.error({ + err, + msg: `There was an error getting the folder results component and data`, + }) return formatErrors(err) } @@ -64,16 +58,12 @@ export const getFolderResultsComponentAndDataHandler: ServerFunction< /** * This function is responsible for fetching folder data, building the results component * and returns the data and component together. - * - * - * Open ended questions: - * - If we rerender the results section, does the provider update?? I dont think so, if the provider is on the server. - * Maybe we should move the provider to the client. */ export const getFolderResultsComponentAndData = async ({ - activeCollectionSlugs, - browseByFolder, + browseByFolder = false, + collectionsToDisplay: activeCollectionSlugs, displayAs, + folderAssignedCollections, folderID = undefined, req, sort, @@ -84,9 +74,17 @@ export const getFolderResultsComponentAndData = async ({ throw new APIError('Folders are not enabled in the configuration.') } + const emptyQuery = { + id: { + exists: false, + }, + } + let collectionSlug: CollectionSlug | undefined = undefined - let documentWhere: undefined | Where = undefined - let folderWhere: undefined | Where = undefined + let documentWhere: undefined | Where = + Array.isArray(activeCollectionSlugs) && !activeCollectionSlugs.length ? emptyQuery : undefined + let folderWhere: undefined | Where = + Array.isArray(activeCollectionSlugs) && !activeCollectionSlugs.length ? emptyQuery : undefined // todo(perf): - collect promises and resolve them in parallel for (const activeCollectionSlug of activeCollectionSlugs) { @@ -103,6 +101,39 @@ export const getFolderResultsComponentAndData = async ({ if (folderCollectionConstraints) { folderWhere = folderCollectionConstraints } + + folderWhere = combineWhereConstraints([ + folderWhere, + Array.isArray(folderAssignedCollections) && + folderAssignedCollections.length && + payload.config.folders.collectionSpecific + ? 
{ + or: [ + { + folderType: { + in: folderAssignedCollections, + }, + }, + // if the folderType is not set, it means it accepts all collections and should appear in the results + { + folderType: { + exists: false, + }, + }, + { + folderType: { + equals: [], + }, + }, + { + folderType: { + equals: null, + }, + }, + ], + } + : undefined, + ]) } else if ((browseByFolder && folderID) || !browseByFolder) { if (!browseByFolder) { collectionSlug = activeCollectionSlug @@ -135,6 +166,7 @@ export const getFolderResultsComponentAndData = async ({ folderID, folderWhere, req, + sort, }) let FolderResultsComponent = null @@ -167,6 +199,7 @@ export const getFolderResultsComponentAndData = async ({ return { breadcrumbs: folderData.breadcrumbs, documents: folderData.documents, + folderAssignedCollections: folderData.folderAssignedCollections, FolderResultsComponent, subfolders: folderData.subfolders, } diff --git a/packages/ui/src/views/BrowseByFolder/index.tsx b/packages/ui/src/views/BrowseByFolder/index.tsx index 3e9771e6bb..f2688189e7 100644 --- a/packages/ui/src/views/BrowseByFolder/index.tsx +++ b/packages/ui/src/views/BrowseByFolder/index.tsx @@ -9,10 +9,10 @@ import { useRouter } from 'next/navigation.js' import React, { Fragment } from 'react' import { DroppableBreadcrumb } from '../../elements/FolderView/Breadcrumbs/index.js' -import { CollectionTypePill } from '../../elements/FolderView/CollectionTypePill/index.js' import { ColoredFolderIcon } from '../../elements/FolderView/ColoredFolderIcon/index.js' import { CurrentFolderActions } from '../../elements/FolderView/CurrentFolderActions/index.js' import { DragOverlaySelection } from '../../elements/FolderView/DragOverlaySelection/index.js' +import { FilterFolderTypePill } from '../../elements/FolderView/FilterFolderTypePill/index.js' import { FolderFileTable } from '../../elements/FolderView/FolderFileTable/index.js' import { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' import { SortByPill } from '../../elements/FolderView/SortByPill/index.js' @@ -92,6 +92,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { Description, disableBulkDelete, disableBulkEdit, + folderAssignedCollections, viewPreference, } = props @@ -111,11 +112,12 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { allowCreateCollectionSlugs, breadcrumbs, documents, + dragOverlayItem, folderCollectionConfig, folderID, + folderType, getFolderRoute, getSelectedItems, - lastSelectedIndex, moveToFolder, refineFolderData, search, @@ -236,6 +238,10 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { } }, [breadcrumbs, drawerDepth, getFolderRoute, router, setStepNav, startRouteTransition, t]) + const nonFolderCollectionSlugs = allowCreateCollectionSlugs.filter( + (slug) => slug !== folderCollectionConfig.slug, + ) + return ( @@ -248,6 +254,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { ), @@ -259,6 +266,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { , - folderID && , + folderID && , ), - folderID && - allowCreateCollectionSlugs.filter((slug) => slug !== folderCollectionConfig.slug) - .length > 0 && ( - slug !== folderCollectionConfig.slug, - )} - key="create-document" - onCreateSuccess={clearRouteCache} - slugPrefix="create-document--no-results" - /> - ), + folderID && nonFolderCollectionSlugs.length > 0 && ( + + ), ].filter(Boolean)} Message={

@@ -347,11 +353,9 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { {AfterFolderList}

- + {selectedItemKeys.size > 0 && dragOverlayItem && ( + + )} ) } diff --git a/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx b/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx index cef1fffdf0..3ab40ab3b5 100644 --- a/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx +++ b/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx @@ -1,5 +1,7 @@ 'use client' +import type { CollectionSlug } from 'payload' + import { useModal } from '@faceless-ui/modal' import { extractID } from 'payload/shared' import React, { Fragment } from 'react' @@ -30,16 +32,17 @@ type GroupedSelections = { export type ListSelectionProps = { disableBulkDelete?: boolean disableBulkEdit?: boolean + folderAssignedCollections: CollectionSlug[] } export const ListSelection: React.FC = ({ disableBulkDelete, disableBulkEdit, + folderAssignedCollections, }) => { const { clearSelections, currentFolder, - folderCollectionConfig, folderCollectionSlug, folderFieldName, folderID, @@ -135,6 +138,7 @@ export const ListSelection: React.FC = ({ = ({ ) } + clearRouteCache() closeModal(moveToFolderDrawerSlug) }} /> diff --git a/packages/ui/src/views/CollectionFolder/index.tsx b/packages/ui/src/views/CollectionFolder/index.tsx index 98c58b60b7..12cc5eefcb 100644 --- a/packages/ui/src/views/CollectionFolder/index.tsx +++ b/packages/ui/src/views/CollectionFolder/index.tsx @@ -107,11 +107,12 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps allowCreateCollectionSlugs, breadcrumbs, documents, + dragOverlayItem, folderCollectionConfig, folderCollectionSlug, FolderResultsComponent, + folderType, getSelectedItems, - lastSelectedIndex, moveToFolder, refineFolderData, selectedItemKeys, @@ -265,6 +266,9 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps ), @@ -284,6 +288,9 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps {AfterFolderList} - + {selectedItemKeys.size > 0 && dragOverlayItem && ( + + )} ) } diff --git a/test/folders/e2e.spec.ts b/test/folders/e2e.spec.ts index aaa1a61aee..2e433de469 100644 --- a/test/folders/e2e.spec.ts +++ b/test/folders/e2e.spec.ts @@ -1,19 +1,26 @@ import type { Page } from '@playwright/test' import { expect, test } from '@playwright/test' -import { reInitializeDB } from 'helpers/reInitializeDB.js' import * as path from 'path' import { fileURLToPath } from 'url' import { ensureCompilationIsDone, initPageConsoleErrorCatch, saveDocAndAssert } from '../helpers.js' import { AdminUrlUtil } from '../helpers/adminUrlUtil.js' +import { + getSelectInputOptions, + getSelectInputValue, + openSelectMenu, +} from '../helpers/e2e/selectInput.js' +import { applyBrowseByFolderTypeFilter } from '../helpers/folders/applyBrowseByFolderTypeFilter.js' import { clickFolderCard } from '../helpers/folders/clickFolderCard.js' import { createFolder } from '../helpers/folders/createFolder.js' +import { createFolderDoc } from '../helpers/folders/createFolderDoc.js' import { createFolderFromDoc } from '../helpers/folders/createFolderFromDoc.js' import { expectNoResultsAndCreateFolderButton } from '../helpers/folders/expectNoResultsAndCreateFolderButton.js' import { selectFolderAndConfirmMove } from '../helpers/folders/selectFolderAndConfirmMove.js' import { selectFolderAndConfirmMoveFromList } from '../helpers/folders/selectFolderAndConfirmMoveFromList.js' import { initPayloadE2ENoConfig } from '../helpers/initPayloadE2ENoConfig.js' +import { reInitializeDB } from '../helpers/reInitializeDB.js' 
import { TEST_TIMEOUT_LONG } from '../playwright.config.js' import { omittedFromBrowseBySlug, postSlug } from './shared.js' @@ -93,16 +100,15 @@ test.describe('Folders', () => { await page.goto(`${serverURL}/admin/browse-by-folder`) await createFolder({ folderName: 'Test Folder', page }) await clickFolderCard({ folderName: 'Test Folder', page }) - const renameButton = page.locator('.list-selection__actions button', { - hasText: 'Rename', + const editFolderDocButton = page.locator('.list-selection__actions button', { + hasText: 'Edit', + }) + await editFolderDocButton.click() + await createFolderDoc({ + page, + folderName: 'Renamed Folder', + folderType: ['Posts'], }) - await renameButton.click() - const folderNameInput = page.locator('input[id="field-name"]') - await folderNameInput.fill('Renamed Folder') - const applyChangesButton = page.locator( - 'dialog#rename-folder--list button[aria-label="Apply Changes"]', - ) - await applyChangesButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') const renamedFolderCard = page .locator('.folder-file-card__name', { @@ -165,16 +171,12 @@ test.describe('Folders', () => { hasText: 'Move', }) await moveButton.click() - const destinationFolder = page - .locator('dialog#move-to-folder--list .folder-file-card') - .filter({ - has: page.locator('.folder-file-card__name', { hasText: 'Move Into This Folder' }), - }) - .first() - const destinationFolderButton = destinationFolder.locator( - 'div[role="button"].folder-file-card__drag-handle', - ) - await destinationFolderButton.click() + await clickFolderCard({ + folderName: 'Move Into This Folder', + page, + doubleClick: true, + rootLocator: page.locator('dialog#move-to-folder--list'), + }) const selectButton = page.locator( 'dialog#move-to-folder--list button[aria-label="Apply Changes"]', ) @@ -193,7 +195,11 @@ test.describe('Folders', () => { // this test currently fails in postgres test('should create new document from folder', async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) - await createFolder({ folderName: 'Create New Here', page }) + await createFolder({ + folderName: 'Create New Here', + page, + folderType: ['Posts', 'Drafts'], + }) await clickFolderCard({ folderName: 'Create New Here', page, doubleClick: true }) const createDocButton = page.locator('.create-new-doc-in-folder__popup-button', { hasText: 'Create document', @@ -231,22 +237,12 @@ test.describe('Folders', () => { await expect(createFolderButton).toBeVisible() await createFolderButton.click() - const drawerHeader = page.locator( - 'dialog#create-folder--no-results-new-folder-drawer h1.drawerHeader__title', - ) - await expect(drawerHeader).toHaveText('New Folder') + await createFolderDoc({ + page, + folderName: 'Nested Folder', + folderType: ['Posts'], + }) - const titleField = page.locator( - 'dialog#create-folder--no-results-new-folder-drawer input[id="field-name"]', - ) - await titleField.fill('Nested Folder') - const createButton = page - .locator( - 'dialog#create-folder--no-results-new-folder-drawer button[aria-label="Apply Changes"]', - ) - .filter({ hasText: 'Create' }) - .first() - await createButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') await expect(page.locator('dialog#create-folder--no-results-new-folder-drawer')).toBeHidden() }) @@ -296,12 +292,11 @@ test.describe('Folders', () => { await createNewDropdown.click() const createFolderButton = page.locator('.popup-button-list__button').first() await 
createFolderButton.click() - const folderNameInput = page.locator('input[id="field-name"]') - await folderNameInput.fill('Nested Folder') - const createButton = page - .locator('.drawerHeader button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() + await createFolderDoc({ + page, + folderName: 'Nested Folder', + folderType: ['Posts'], + }) await expect(page.locator('.folder-file-card__name')).toHaveText('Nested Folder') await createNewDropdown.click() @@ -314,18 +309,28 @@ test.describe('Folders', () => { await saveButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') - const typeButton = page.locator('.popup-button', { hasText: 'Type' }) - await typeButton.click() - const folderCheckbox = page.locator('.checkbox-popup__options .checkbox-input__input').first() - await folderCheckbox.click() + // should filter out folders and only show posts + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Folders', value: 'payload-folders' }, + on: false, + }) const folderGroup = page.locator('.item-card-grid__title', { hasText: 'Folders' }) const postGroup = page.locator('.item-card-grid__title', { hasText: 'Documents' }) await expect(folderGroup).toBeHidden() await expect(postGroup).toBeVisible() - await folderCheckbox.click() - const postCheckbox = page.locator('.checkbox-popup__options .checkbox-input__input').nth(1) - await postCheckbox.click() + // should filter out posts and only show folders + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Folders', value: 'payload-folders' }, + on: true, + }) + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Posts', value: 'posts' }, + on: false, + }) await expect(folderGroup).toBeVisible() await expect(postGroup).toBeHidden() @@ -389,7 +394,6 @@ test.describe('Folders', () => { test('should resolve folder pills and not get stuck as Loading...', async () => { await selectFolderAndConfirmMoveFromList({ folderName: 'Move Into This Folder', page }) const folderPill = page.locator('tbody .row-1 .move-doc-to-folder') - await page.reload() await expect(folderPill).not.toHaveText('Loading...') }) test('should show updated folder pill after folder change', async () => { @@ -402,10 +406,16 @@ test.describe('Folders', () => { const folderPill = page.locator('tbody .row-1 .move-doc-to-folder') await selectFolderAndConfirmMoveFromList({ folderName: 'Move Into This Folder', page }) await expect(folderPill).toHaveText('Move Into This Folder') - await page.reload() await folderPill.click() - const folderBreadcrumb = page.locator('.folderBreadcrumbs__crumb-item', { hasText: 'Folder' }) - await folderBreadcrumb.click() + const drawerLocator = page.locator('dialog .move-folder-drawer') + await drawerLocator + .locator('.droppable-button.folderBreadcrumbs__crumb-item', { + hasText: 'Folder', + }) + .click() + await expect( + drawerLocator.locator('.folder-file-card__name', { hasText: 'Move Into This Folder' }), + ).toBeVisible() await selectFolderAndConfirmMove({ page }) await expect(folderPill).toHaveText('No Folder') }) @@ -418,14 +428,11 @@ test.describe('Folders', () => { await createDropdown.click() const createFolderButton = page.locator('.popup-button-list__button', { hasText: 'Folder' }) await createFolderButton.click() - const drawerHeader = page.locator('.drawerHeader__title', { hasText: 'New Folder' }) - await expect(drawerHeader).toBeVisible() - const folderNameInput = page.locator('input[id="field-name"]') - await 
folderNameInput.fill('New Folder From Collection') - const createButton = page - .locator('.drawerHeader button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() + await createFolderDoc({ + page, + folderName: 'New Folder From Collection', + folderType: ['Posts'], + }) await expect(page.locator('.payload-toast-container')).toContainText('successfully') }) }) @@ -470,6 +477,58 @@ test.describe('Folders', () => { }) }) + test.describe('Collection with browse by folders disabled', () => { + test('should not show omitted collection documents in browse by folder view', async () => { + await page.goto(OmittedFromBrowseBy.byFolder) + const folderName = 'Folder without omitted Docs' + await page.goto(OmittedFromBrowseBy.byFolder) + await createFolder({ + folderName, + page, + fromDropdown: true, + folderType: ['Omitted From Browse By', 'Posts'], + }) + + // create document + await page.goto(OmittedFromBrowseBy.create) + const titleInput = page.locator('input[name="title"]') + await titleInput.fill('Omitted Doc') + await saveDocAndAssert(page) + + // assign to folder + const folderPill = page.locator('.doc-controls .move-doc-to-folder', { hasText: 'No Folder' }) + await folderPill.click() + await clickFolderCard({ folderName, page }) + const selectButton = page + .locator('button[aria-label="Apply Changes"]') + .filter({ hasText: 'Select' }) + await selectButton.click() + await saveDocAndAssert(page) + + // go to browse by folder view + await page.goto(`${serverURL}/admin/browse-by-folder`) + await clickFolderCard({ folderName, page, doubleClick: true }) + + // folder should be empty + await expectNoResultsAndCreateFolderButton({ page }) + }) + + test('should not show collection type in browse by folder view', async () => { + const folderName = 'omitted collection pill test folder' + await page.goto(`${serverURL}/admin/browse-by-folder`) + await createFolder({ folderName, page }) + await clickFolderCard({ folderName, page, doubleClick: true }) + + await page.locator('button:has(.collection-type__count)').click() + + await expect( + page.locator('.checkbox-input .field-label', { + hasText: 'Omitted From Browse By', + }), + ).toBeHidden() + }) + }) + test.describe('Multiple select options', () => { test.beforeEach(async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) @@ -545,48 +604,140 @@ test.describe('Folders', () => { }) }) - test.describe('Collection with browse by folders disabled', () => { - const folderName = 'Folder without omitted Docs' - test('should not show omitted collection documents in browse by folder view', async () => { - await page.goto(OmittedFromBrowseBy.byFolder) - await createFolder({ folderName, page, fromDropdown: true }) - - // create document - await page.goto(OmittedFromBrowseBy.create) - const titleInput = page.locator('input[name="title"]') - await titleInput.fill('Omitted Doc') - await saveDocAndAssert(page) - - // assign to folder - const folderPill = page.locator('.doc-controls .move-doc-to-folder', { hasText: 'No Folder' }) - await folderPill.click() - await clickFolderCard({ folderName, page }) - const selectButton = page - .locator('button[aria-label="Apply Changes"]') - .filter({ hasText: 'Select' }) - await selectButton.click() - - // go to browse by folder view + test.describe('should inherit folderType select values from parent folder', () => { + test('should scope folderType select options for: scoped > child folder', async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) - await 
clickFolderCard({ folderName, page, doubleClick: true }) + await createFolder({ folderName: 'Posts and Media', page, folderType: ['Posts', 'Media'] }) + await clickFolderCard({ folderName: 'Posts and Media', page, doubleClick: true }) - // folder should be empty - await expectNoResultsAndCreateFolderButton({ page }) + const createNewDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + hasText: 'Create New', + }) + await createNewDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { hasText: 'Folder' }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should only allow Posts and Media') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should prefill with Posts and Media + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options.sort() + }) + .toEqual(['Posts', 'Media'].sort()) + + // should have no more select options available + await openSelectMenu({ selectLocator }) + await expect( + selectLocator.locator('.rs__menu-notice', { hasText: 'No options' }), + ).toBeVisible() }) - test('should not show collection type in browse by folder view', async () => { - const folderName = 'omitted collection pill test folder' + test('should scope folderType select options for: unscoped > scoped > child folder', async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) - await createFolder({ folderName, page }) - await clickFolderCard({ folderName, page, doubleClick: true }) - await page.locator('button:has(.collection-type__count)').click() + // create an unscoped parent folder + await createFolder({ folderName: 'All collections', page, folderType: [] }) + await clickFolderCard({ folderName: 'All collections', page, doubleClick: true }) + + // create a scoped child folder + await createFolder({ + folderName: 'Posts and Media', + page, + folderType: ['Posts', 'Media'], + fromDropdown: true, + }) + await clickFolderCard({ folderName: 'Posts and Media', page, doubleClick: true }) await expect( - page.locator('.checkbox-input .field-label', { - hasText: 'Omitted From Browse By', + page.locator('.step-nav', { + hasText: 'Posts and Media', }), - ).toBeHidden() + ).toBeVisible() + + const titleActionsLocator = page.locator('.list-header__title-actions') + await expect(titleActionsLocator).toBeVisible() + const folderDropdown = page.locator( + '.list-header__title-actions .create-new-doc-in-folder__action-popup', + { + hasText: 'Create', + }, + ) + await expect(folderDropdown).toBeVisible() + await folderDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { + hasText: 'Folder', + }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should only allow posts and media') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should not prefill with any options + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options.sort() + }) + .toEqual(['Posts', 'Media'].sort()) + + // should have no more select 
options available + await openSelectMenu({ selectLocator }) + await expect( + selectLocator.locator('.rs__menu-notice', { hasText: 'No options' }), + ).toBeVisible() + }) + + test('should not scope child folder of an unscoped parent folder', async () => { + await page.goto(`${serverURL}/admin/browse-by-folder`) + await createFolder({ folderName: 'All collections', page, folderType: [] }) + await clickFolderCard({ folderName: 'All collections', page, doubleClick: true }) + + const createNewDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + hasText: 'Create New', + }) + await createNewDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { hasText: 'Folder' }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should allow all collections') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should not prefill with any options + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options + }) + .toEqual([]) + + // should have many options + await expect + .poll(async () => { + const options = await getSelectInputOptions({ selectLocator }) + return options.length + }) + .toBeGreaterThan(4) }) }) diff --git a/test/folders/int.spec.ts b/test/folders/int.spec.ts index 17afb242a8..6f10a6e733 100644 --- a/test/folders/int.spec.ts +++ b/test/folders/int.spec.ts @@ -3,18 +3,15 @@ import type { Payload } from 'payload' import path from 'path' import { fileURLToPath } from 'url' -import type { NextRESTClient } from '../helpers/NextRESTClient.js' - import { initPayloadInt } from '../helpers/initPayloadInt.js' let payload: Payload -let restClient: NextRESTClient const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) describe('folders', () => { beforeAll(async () => { - ;({ payload, restClient } = await initPayloadInt(dirname)) + ;({ payload } = await initPayloadInt(dirname)) }) afterAll(async () => { @@ -23,7 +20,7 @@ describe('folders', () => { beforeEach(async () => { await payload.delete({ - collection: 'posts', + collection: 'payload-folders', depth: 0, where: { id: { @@ -48,6 +45,7 @@ describe('folders', () => { collection: 'payload-folders', data: { name: 'Parent Folder', + folderType: ['posts'], }, }) const folderIDFromParams = parentFolder.id @@ -57,6 +55,7 @@ describe('folders', () => { data: { name: 'Nested 1', folder: folderIDFromParams, + folderType: ['posts'], }, }) @@ -65,6 +64,7 @@ describe('folders', () => { data: { name: 'Nested 2', folder: folderIDFromParams, + folderType: ['posts'], }, }) @@ -73,7 +73,7 @@ describe('folders', () => { id: folderIDFromParams, }) - expect(parentFolderQuery.documentsAndFolders.docs).toHaveLength(2) + expect(parentFolderQuery.documentsAndFolders?.docs).toHaveLength(2) }) }) @@ -82,6 +82,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -108,7 +109,7 @@ describe('folders', () => { id: folderIDFromParams, }) - expect(parentFolderQuery.documentsAndFolders.docs).toHaveLength(2) + expect(parentFolderQuery.documentsAndFolders?.docs).toHaveLength(2) }) }) @@ -117,6 +118,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 
'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -124,6 +126,7 @@ describe('folders', () => { const childFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Child Folder', folder: parentFolder, }, @@ -153,6 +156,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -168,6 +172,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -176,6 +181,7 @@ describe('folders', () => { data: { name: 'Child Folder', folder: parentFolder, + folderType: ['posts'], }, }) @@ -189,5 +195,154 @@ describe('folders', () => { }), ).resolves.toBeNull() }) + + describe('ensureSafeCollectionsChange', () => { + it('should prevent narrowing scope of a folder if it contains documents of a removed type', async () => { + const sharedFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Posts and Drafts Folder', + folderType: ['posts', 'drafts'], + }, + }) + + await payload.create({ + collection: 'posts', + data: { + title: 'Post 1', + folder: sharedFolder.id, + }, + }) + + await payload.create({ + collection: 'drafts', + data: { + title: 'Post 1', + folder: sharedFolder.id, + }, + }) + + try { + const updatedFolder = await payload.update({ + collection: 'payload-folders', + id: sharedFolder.id, + data: { + folderType: ['posts'], + }, + }) + + expect(updatedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Posts and Drafts Folder" contains documents that still belong to the following collections: Drafts', + ) + } + }) + + it('should prevent adding scope to a folder if it contains documents outside of the new scope', async () => { + const folderAcceptsAnything = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Anything Goes', + folderType: [], + }, + }) + + await payload.create({ + collection: 'posts', + data: { + title: 'Post 1', + folder: folderAcceptsAnything.id, + }, + }) + + try { + const scopedFolder = await payload.update({ + collection: 'payload-folders', + id: folderAcceptsAnything.id, + data: { + folderType: ['posts'], + }, + }) + + expect(scopedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Anything Goes" contains documents that still belong to the following collections: Posts', + ) + } + }) + + it('should prevent narrowing scope of a folder if subfolders are assigned to any of the removed types', async () => { + const parentFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Parent Folder', + folderType: ['posts', 'drafts'], + }, + }) + + await payload.create({ + collection: 'payload-folders', + data: { + name: 'Parent Folder', + folderType: ['posts', 'drafts'], + folder: parentFolder.id, + }, + }) + + try { + const updatedParent = await payload.update({ + collection: 'payload-folders', + id: parentFolder.id, + data: { + folderType: ['posts'], + }, + }) + + expect(updatedParent).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Parent Folder" contains folders that still belong to the following collections: Drafts', + ) + } + }) + + it('should prevent widening scope on a scoped subfolder', async () => { + const unscopedFolder = await payload.create({ + collection: 'payload-folders', + data: { + 
name: 'Parent Folder', + folderType: [], + }, + }) + + const level1Folder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Level 1 Folder', + folderType: ['posts', 'drafts'], + folder: unscopedFolder.id, + }, + }) + + try { + const level2UnscopedFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Level 2 Folder', + folder: level1Folder.id, + folderType: [], + }, + }) + + expect(level2UnscopedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Level 2 Folder" must have folder-type set since its parent folder "Level 1 Folder" has a folder-type set.', + ) + } + }) + }) }) }) diff --git a/test/folders/payload-types.ts b/test/folders/payload-types.ts index 276727a036..8f1e60b6b5 100644 --- a/test/folders/payload-types.ts +++ b/test/folders/payload-types.ts @@ -201,6 +201,7 @@ export interface FolderInterface { hasNextPage?: boolean; totalDocs?: number; }; + folderType?: ('posts' | 'media' | 'drafts' | 'autosave' | 'omitted-from-browse-by')[] | null; folderSlug?: string | null; updatedAt: string; createdAt: string; @@ -419,6 +420,7 @@ export interface PayloadFoldersSelect { name?: T; folder?: T; documentsAndFolders?: T; + folderType?: T; folderSlug?: T; updatedAt?: T; createdAt?: T; diff --git a/test/folders/tsconfig.json b/test/folders/tsconfig.json new file mode 100644 index 0000000000..3c43903cfd --- /dev/null +++ b/test/folders/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../tsconfig.json" +} diff --git a/test/helpers/folders/applyBrowseByFolderTypeFilter.ts b/test/helpers/folders/applyBrowseByFolderTypeFilter.ts new file mode 100644 index 0000000000..1cdbefe63c --- /dev/null +++ b/test/helpers/folders/applyBrowseByFolderTypeFilter.ts @@ -0,0 +1,41 @@ +import type { Page } from '@playwright/test' + +export const applyBrowseByFolderTypeFilter = async ({ + page, + type, + on, +}: { + on: boolean + page: Page + type: { + label: string + value: string + } +}) => { + // Check if the popup is already active + let typePill = page.locator('.search-bar__actions .checkbox-popup.popup--active', { + hasText: 'Type', + }) + const isActive = (await typePill.count()) > 0 + + if (!isActive) { + typePill = page.locator('.search-bar__actions .checkbox-popup', { hasText: 'Type' }) + await typePill.locator('.popup-button', { hasText: 'Type' }).click() + } + + await typePill.locator('.field-label', { hasText: type.label }).click() + + await page.waitForURL((urlStr) => { + try { + const url = new URL(urlStr) + const relationTo = url.searchParams.get('relationTo') + if (on) { + return Boolean(relationTo?.includes(`"${type.value}"`)) + } else { + return Boolean(!relationTo?.includes(`"${type.value}"`)) + } + } catch { + return false + } + }) +} diff --git a/test/helpers/folders/clickFolderCard.ts b/test/helpers/folders/clickFolderCard.ts index b563122771..f145828420 100644 --- a/test/helpers/folders/clickFolderCard.ts +++ b/test/helpers/folders/clickFolderCard.ts @@ -1,27 +1,37 @@ -import type { Page } from '@playwright/test' +import type { Locator, Page } from '@playwright/test' + +import { expect } from '@playwright/test' type Args = { doubleClick?: boolean folderName: string page: Page + rootLocator?: Locator } export async function clickFolderCard({ page, folderName, doubleClick = false, + rootLocator, }: Args): Promise { - const folderCard = page - .locator('.folder-file-card') + const folderCard = (rootLocator || page) + .locator('div[role="button"].draggable-with-click') .filter({ has: 
page.locator('.folder-file-card__name', { hasText: folderName }), }) .first() - const dragHandleButton = folderCard.locator('div[role="button"].folder-file-card__drag-handle') + await folderCard.waitFor({ state: 'visible' }) if (doubleClick) { - await dragHandleButton.dblclick() + // Release any modifier keys that might be held down from previous tests + await page.keyboard.up('Shift') + await page.keyboard.up('Control') + await page.keyboard.up('Alt') + await page.keyboard.up('Meta') + await folderCard.dblclick() + await expect(folderCard).toBeHidden() } else { - await dragHandleButton.click() + await folderCard.click() } } diff --git a/test/helpers/folders/createFolder.ts b/test/helpers/folders/createFolder.ts index 5ac1d06e16..f3c4785a12 100644 --- a/test/helpers/folders/createFolder.ts +++ b/test/helpers/folders/createFolder.ts @@ -1,7 +1,10 @@ import { expect, type Page } from '@playwright/test' +import { createFolderDoc } from './createFolderDoc.js' + type Args = { folderName: string + folderType?: string[] fromDropdown?: boolean page: Page } @@ -9,13 +12,15 @@ export async function createFolder({ folderName, fromDropdown = false, page, + folderType = ['Posts'], }: Args): Promise { if (fromDropdown) { - const folderDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + const titleActionsLocator = page.locator('.list-header__title-actions') + const folderDropdown = titleActionsLocator.locator('.create-new-doc-in-folder__action-popup', { hasText: 'Create', }) await folderDropdown.click() - const createFolderButton = page.locator('.popup-button-list__button', { + const createFolderButton = titleActionsLocator.locator('.popup-button-list__button', { hasText: 'Folder', }) await createFolderButton.click() @@ -26,16 +31,11 @@ export async function createFolder({ await createFolderButton.click() } - const folderNameInput = page.locator( - 'dialog#create-document--header-pill-new-folder-drawer div.drawer-content-container input#field-name', - ) - - await folderNameInput.fill(folderName) - - const createButton = page.getByRole('button', { name: 'Apply Changes' }) - await createButton.click() - - await expect(page.locator('.payload-toast-container')).toContainText('successfully') + await createFolderDoc({ + page, + folderName, + folderType, + }) const folderCard = page.locator('.folder-file-card__name', { hasText: folderName }).first() await expect(folderCard).toBeVisible() diff --git a/test/helpers/folders/createFolderDoc.ts b/test/helpers/folders/createFolderDoc.ts new file mode 100644 index 0000000000..4266755e7d --- /dev/null +++ b/test/helpers/folders/createFolderDoc.ts @@ -0,0 +1,26 @@ +import { expect, type Page } from '@playwright/test' + +import { selectInput } from '../../helpers/e2e/selectInput.js' +export const createFolderDoc = async ({ + folderName, + page, + folderType, +}: { + folderName: string + folderType: string[] + page: Page +}) => { + const drawer = page.locator('dialog .collection-edit--payload-folders') + await drawer.locator('input#field-name').fill(folderName) + + await selectInput({ + multiSelect: true, + options: folderType, + selectLocator: drawer.locator('#field-folderType'), + }) + + const createButton = drawer.getByRole('button', { name: 'Save' }) + await createButton.click() + + await expect(page.locator('.payload-toast-container')).toContainText('successfully') +} diff --git a/test/helpers/folders/createFolderFromDoc.ts b/test/helpers/folders/createFolderFromDoc.ts index fe8fdaabd4..b9ff977ba6 100644 --- 
a/test/helpers/folders/createFolderFromDoc.ts +++ b/test/helpers/folders/createFolderFromDoc.ts @@ -1,26 +1,29 @@ import { expect, type Page } from '@playwright/test' +import { createFolder } from './createFolder.js' +import { createFolderDoc } from './createFolderDoc.js' + type Args = { folderName: string + folderType?: string[] page: Page } -export async function createFolderFromDoc({ folderName, page }: Args): Promise { +export async function createFolderFromDoc({ + folderName, + page, + folderType = ['Posts'], +}: Args): Promise { const addFolderButton = page.locator('.create-new-doc-in-folder__button', { hasText: 'Create folder', }) await addFolderButton.click() - const folderNameInput = page.locator('div.drawer-content-container input#field-name') - - await folderNameInput.fill(folderName) - - const createButton = page - .locator('button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() - - await expect(page.locator('.payload-toast-container')).toContainText('successfully') + await createFolderDoc({ + page, + folderName, + folderType, + }) const folderCard = page.locator('.folder-file-card__name', { hasText: folderName }).first() await expect(folderCard).toBeVisible() From 95e373e60b28a1c6fceedeef60ffb378e90e0773 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 17 Jul 2025 14:12:58 -0400 Subject: [PATCH 08/91] fix(plugin-import-export): disabled flag to cascade to nested fields from parent containers (#13199) ### What? Fixes the `custom.plugin-import-export.disabled` flag to correctly disable fields in all nested structures including: - Groups - Arrays - Tabs - Blocks Previously, only top-level fields or direct children were respected. This update ensures nested paths (e.g. `group.array.field1`, `blocks.hero.title`, etc.) are matched and filtered from exports. ### Why? To allow users to disable entire field groups or deeply nested fields in structured layouts. ### How? - Updated regex logic in both `createExport` and Preview components to recursively support: - Indexed array fields (e.g. `array_0_field1`) - Block fields with slugs (e.g. `blocks_0_hero_title`) - Nested field accessors with correct part-by-part expansion
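For illustration, a minimal sketch of the matching behavior the new `buildDisabledFieldRegex` helper is intended to provide. The helper itself is added in this PR; the disabled path and the flattened CSV keys below are made up for the example:

```ts
// Sketch mirroring the buildDisabledFieldRegex helper added in this PR.
// Each dot-notated path segment may be followed by an array index (e.g. _0)
// and/or a block slug (e.g. _hero) in the flattened CSV column names.
const buildDisabledFieldRegex = (path: string): RegExp => {
  const parts = path.split('.')
  const patternParts = parts.map((part) => `${part}(?:_\\d+)?(?:_[^_]+)?`)
  return new RegExp(`^${patternParts.join('_')}(?:_.*)?$`)
}

// Example (hypothetical keys): disabling `blocks.hero.title`
const regex = buildDisabledFieldRegex('blocks.hero.title')
console.log(regex.test('blocks_0_hero_title')) // true – nested block field is filtered out
console.log(regex.test('blocks_1_content_title')) // false – fields of other blocks remain exported
```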
--- .../FieldsToExport/reduceFields.tsx | 6 +- .../src/components/Preview/index.tsx | 25 +++--- .../src/export/createExport.ts | 18 ++-- .../src/export/flattenObject.ts | 14 +++- packages/plugin-import-export/src/index.ts | 14 +--- .../src/utilities/buildDisabledFieldRegex.ts | 13 +++ .../utilities/collectDisabledFieldPaths.ts | 82 +++++++++++++++++++ .../src/utilities/getFlattenedFieldKeys.ts | 9 +- .../migrations/20250714_201659.ts | 2 +- .../up-down-migration/migrations/index.ts | 6 +- test/plugin-import-export/int.spec.ts | 8 +- 11 files changed, 156 insertions(+), 41 deletions(-) create mode 100644 packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts create mode 100644 packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts diff --git a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx index 37c2a47f48..a20568b436 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx @@ -114,7 +114,11 @@ export const reduceFields = ({ const val = createNestedClientFieldPath(path, field) // If the field is disabled, skip it - if (disabledFields.includes(val)) { + if ( + disabledFields.some( + (disabledField) => val === disabledField || val.startsWith(`${disabledField}.`), + ) + ) { return fieldsToUse } diff --git a/packages/plugin-import-export/src/components/Preview/index.tsx b/packages/plugin-import-export/src/components/Preview/index.tsx index 046b04c4a7..4cafe1f4fe 100644 --- a/packages/plugin-import-export/src/components/Preview/index.tsx +++ b/packages/plugin-import-export/src/components/Preview/index.tsx @@ -18,8 +18,9 @@ import type { PluginImportExportTranslations, } from '../../translations/index.js' -import { useImportExport } from '../ImportExportProvider/index.js' +import { buildDisabledFieldRegex } from '../../utilities/buildDisabledFieldRegex.js' import './index.scss' +import { useImportExport } from '../ImportExportProvider/index.js' const baseClass = 'preview' @@ -46,12 +47,11 @@ export const Preview = () => { (collection) => collection.slug === collectionSlug, ) - const disabledFieldsUnderscored = React.useMemo(() => { - return ( - collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields?.map((f: string) => - f.replace(/\./g, '_'), - ) ?? [] - ) + const disabledFieldRegexes: RegExp[] = React.useMemo(() => { + const disabledFieldPaths = + collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] + + return disabledFieldPaths.map(buildDisabledFieldRegex) }, [collectionConfig]) const isCSV = format === 'csv' @@ -101,11 +101,16 @@ export const Preview = () => { Array.isArray(fields) && fields.length > 0 ? 
fields.flatMap((field) => { const regex = fieldToRegex(field) - return allKeys.filter((key) => regex.test(key)) + return allKeys.filter( + (key) => + regex.test(key) && + !disabledFieldRegexes.some((disabledRegex) => disabledRegex.test(key)), + ) }) : allKeys.filter( (key) => - !defaultMetaFields.includes(key) && !disabledFieldsUnderscored.includes(key), + !defaultMetaFields.includes(key) && + !disabledFieldRegexes.some((regex) => regex.test(key)), ) const fieldKeys = @@ -150,7 +155,7 @@ export const Preview = () => { }, [ collectionConfig, collectionSlug, - disabledFieldsUnderscored, + disabledFieldRegexes, draft, fields, i18n, diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 9868e0a965..2b4b05bff2 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -5,6 +5,7 @@ import { stringify } from 'csv-stringify/sync' import { APIError } from 'payload' import { Readable } from 'stream' +import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js' import { flattenObject } from './flattenObject.js' import { getCustomFieldFunctions } from './getCustomFieldFunctions.js' import { getFilename } from './getFilename.js' @@ -108,15 +109,22 @@ export const createExport = async (args: CreateExportArgs) => { fields: collectionConfig.flattenedFields, }) - const disabledFieldsDot = + const disabledFields = collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] - const disabledFields = disabledFieldsDot.map((f: string) => f.replace(/\./g, '_')) + + const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex) const filterDisabled = (row: Record): Record => { - for (const key of disabledFields) { - delete row[key] + const filtered: Record = {} + + for (const [key, value] of Object.entries(row)) { + const isDisabled = disabledRegexes.some((regex) => regex.test(key)) + if (!isDisabled) { + filtered[key] = value + } } - return row + + return filtered } if (download) { diff --git a/packages/plugin-import-export/src/export/flattenObject.ts b/packages/plugin-import-export/src/export/flattenObject.ts index 022238aacf..0801a2e5ef 100644 --- a/packages/plugin-import-export/src/export/flattenObject.ts +++ b/packages/plugin-import-export/src/export/flattenObject.ts @@ -24,6 +24,10 @@ export const flattenObject = ({ if (Array.isArray(value)) { value.forEach((item, index) => { if (typeof item === 'object' && item !== null) { + const blockType = typeof item.blockType === 'string' ? item.blockType : undefined + + const itemPrefix = blockType ? 
`${newKey}_${index}_${blockType}` : `${newKey}_${index}` + // Case: hasMany polymorphic relationships if ( 'relationTo' in item && @@ -31,12 +35,12 @@ export const flattenObject = ({ typeof item.value === 'object' && item.value !== null ) { - row[`${`${newKey}_${index}`}_relationTo`] = item.relationTo - row[`${`${newKey}_${index}`}_id`] = item.value.id + row[`${itemPrefix}_relationTo`] = item.relationTo + row[`${itemPrefix}_id`] = item.value.id return } - flatten(item, `${newKey}_${index}`) + flatten(item, itemPrefix) } else { if (toCSVFunctions?.[newKey]) { const columnName = `${newKey}_${index}` @@ -54,7 +58,9 @@ export const flattenObject = ({ } } catch (error) { throw new Error( - `Error in toCSVFunction for array item "${columnName}": ${JSON.stringify(item)}\n${(error as Error).message}`, + `Error in toCSVFunction for array item "${columnName}": ${JSON.stringify(item)}\n${ + (error as Error).message + }`, ) } } else { diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index 27dd52bd83..a64e80bf15 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -1,6 +1,6 @@ import type { Config, FlattenedField } from 'payload' -import { addDataAndFileToRequest, deepMergeSimple, flattenTopLevelFields } from 'payload' +import { addDataAndFileToRequest, deepMergeSimple } from 'payload' import type { PluginDefaultTranslationsObject } from './translations/types.js' import type { ImportExportPluginConfig, ToCSVFunction } from './types.js' @@ -11,6 +11,7 @@ import { getCustomFieldFunctions } from './export/getCustomFieldFunctions.js' import { getSelect } from './export/getSelect.js' import { getExportCollection } from './getExportCollection.js' import { translations } from './translations/index.js' +import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js' import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js' export const importExportPlugin = @@ -59,15 +60,8 @@ export const importExportPlugin = path: '@payloadcms/plugin-import-export/rsc#ExportListMenuItem', }) - // Flatten top-level fields to expose nested fields for export config - const flattenedFields = flattenTopLevelFields(collection.fields, { - moveSubFieldsToTop: true, - }) - - // Find fields explicitly marked as disabled for import/export - const disabledFieldAccessors = flattenedFields - .filter((field) => field.custom?.['plugin-import-export']?.disabled) - .map((field) => field.accessor || field.name) + // // Find fields explicitly marked as disabled for import/export + const disabledFieldAccessors = collectDisabledFieldPaths(collection.fields) // Store disabled field accessors in the admin config for use in the UI collection.admin.custom = { diff --git a/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts b/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts new file mode 100644 index 0000000000..41e44ad72e --- /dev/null +++ b/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts @@ -0,0 +1,13 @@ +/** + * Builds a RegExp that matches flattened field keys from a given dot-notated path. 
+ */ +export const buildDisabledFieldRegex = (path: string): RegExp => { + const parts = path.split('.') + + const patternParts = parts.map((part) => { + return `${part}(?:_\\d+)?(?:_[^_]+)?` + }) + + const pattern = `^${patternParts.join('_')}(?:_.*)?$` + return new RegExp(pattern) +} diff --git a/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts b/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts new file mode 100644 index 0000000000..dafeae456c --- /dev/null +++ b/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts @@ -0,0 +1,82 @@ +import type { Field } from 'payload' + +import { traverseFields } from 'payload' +import { fieldAffectsData } from 'payload/shared' + +/** + * Recursively traverses a Payload field schema to collect all field paths + * that are explicitly disabled for the import/export plugin via: + * field.custom['plugin-import-export'].disabled + * + * Handles nested fields including named tabs, groups, arrays, blocks, etc. + * Tracks each field’s path by storing it in `ref.path` and manually propagating + * it through named tab layers via a temporary `__manualRef` marker. + * + * @param fields - The top-level array of Payload field definitions + * @returns An array of dot-notated field paths that are marked as disabled + */ +export const collectDisabledFieldPaths = (fields: Field[]): string[] => { + const disabledPaths: string[] = [] + + traverseFields({ + callback: ({ field, next, parentRef, ref }) => { + // Handle named tabs + if (field.type === 'tabs' && Array.isArray(field.tabs)) { + for (const tab of field.tabs) { + if ('name' in tab && typeof tab.name === 'string') { + // Build the path prefix for this tab + const parentPath = + parentRef && typeof (parentRef as { path?: unknown }).path === 'string' + ? (parentRef as { path: string }).path + : '' + const tabPath = parentPath ? `${parentPath}.${tab.name}` : tab.name + + // Prepare a ref for this named tab's children to inherit the path + const refObj = ref as Record + const tabRef = refObj[tab.name] ?? {} + tabRef.path = tabPath + tabRef.__manualRef = true // flag this as a manually constructed parentRef + refObj[tab.name] = tabRef + } + } + + // Skip further processing of the tab container itself + return + } + + // Skip unnamed fields (e.g. rows/collapsibles) + if (!('name' in field) || typeof field.name !== 'string') { + return + } + + // Determine the path to the current field + let parentPath: string | undefined + + if ( + parentRef && + typeof parentRef === 'object' && + 'path' in parentRef && + typeof (parentRef as { path?: unknown }).path === 'string' + ) { + parentPath = (parentRef as { path: string }).path + } else if ((ref as any)?.__manualRef && typeof (ref as any)?.path === 'string') { + // Fallback: if current ref is a manual tabRef, use its path + parentPath = (ref as any).path + } + + const fullPath = parentPath ? 
`${parentPath}.${field.name}` : field.name + + // Store current path for any nested children to use + ;(ref as any).path = fullPath + + // If field is a data-affecting field and disabled via plugin config, collect its path + if (fieldAffectsData(field) && field.custom?.['plugin-import-export']?.disabled) { + disabledPaths.push(fullPath) + return next?.() + } + }, + fields, + }) + + return disabledPaths +} diff --git a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts index 5ba649c13e..f124208dc9 100644 --- a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts +++ b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts @@ -34,12 +34,15 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix keys.push(...subKeys) break } - case 'blocks': + case 'blocks': { field.blocks.forEach((block) => { - const blockKeys = getFlattenedFieldKeys(block.fields as FlattenedField[], `${name}_0`) - keys.push(...blockKeys) + const blockPrefix = `${name}_0_${block.slug}` + keys.push(`${blockPrefix}_blockType`) + keys.push(`${blockPrefix}_id`) + keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix)) }) break + } case 'collapsible': case 'group': case 'row': diff --git a/test/database/up-down-migration/migrations/20250714_201659.ts b/test/database/up-down-migration/migrations/20250714_201659.ts index 098ecd2a0d..b473da2504 100644 --- a/test/database/up-down-migration/migrations/20250714_201659.ts +++ b/test/database/up-down-migration/migrations/20250714_201659.ts @@ -1,4 +1,4 @@ -import type { MigrateDownArgs, MigrateUpArgs} from '@payloadcms/db-postgres'; +import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-postgres' import { sql } from '@payloadcms/db-postgres' diff --git a/test/database/up-down-migration/migrations/index.ts b/test/database/up-down-migration/migrations/index.ts index fea58e46c2..8fbc100ef2 100644 --- a/test/database/up-down-migration/migrations/index.ts +++ b/test/database/up-down-migration/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250714_201659 from './20250714_201659.js'; +import * as migration_20250714_201659 from './20250714_201659.js' export const migrations = [ { up: migration_20250714_201659.up, down: migration_20250714_201659.down, - name: '20250714_201659' + name: '20250714_201659', }, -]; +] diff --git a/test/plugin-import-export/int.spec.ts b/test/plugin-import-export/int.spec.ts index afc6ecb854..64d2516de0 100644 --- a/test/plugin-import-export/int.spec.ts +++ b/test/plugin-import-export/int.spec.ts @@ -364,8 +364,8 @@ describe('@payloadcms/plugin-import-export', () => { const expectedPath = path.join(dirname, './uploads', doc.filename as string) const data = await readCSV(expectedPath) - expect(data[0].blocks_0_blockType).toStrictEqual('hero') - expect(data[0].blocks_1_blockType).toStrictEqual('content') + expect(data[0].blocks_0_hero_blockType).toStrictEqual('hero') + expect(data[0].blocks_1_content_blockType).toStrictEqual('content') }) it('should create a csv of all fields when fields is empty', async () => { @@ -629,8 +629,8 @@ describe('@payloadcms/plugin-import-export', () => { const expectedPath = path.join(dirname, './uploads', doc.filename as string) const data = await readCSV(expectedPath) - expect(data[0].blocks_0_blockType).toStrictEqual('hero') - expect(data[0].blocks_1_blockType).toStrictEqual('content') + 
expect(data[0].blocks_0_hero_blockType).toStrictEqual('hero') + expect(data[0].blocks_1_content_blockType).toStrictEqual('content') }) }) }) From a3361356b2f85ef7052dd4e94026bc343ae3464c Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Thu, 17 Jul 2025 14:45:59 -0400 Subject: [PATCH 09/91] chore(release): v3.48.0 [skip ci] --- package.json | 2 +- packages/admin-bar/package.json | 2 +- packages/create-payload-app/package.json | 2 +- packages/db-mongodb/package.json | 2 +- packages/db-postgres/package.json | 2 +- packages/db-sqlite/package.json | 2 +- packages/db-vercel-postgres/package.json | 2 +- packages/drizzle/package.json | 2 +- packages/email-nodemailer/package.json | 2 +- packages/email-resend/package.json | 2 +- packages/graphql/package.json | 2 +- packages/live-preview-react/package.json | 2 +- packages/live-preview-vue/package.json | 2 +- packages/live-preview/package.json | 2 +- packages/next/package.json | 2 +- packages/payload-cloud/package.json | 2 +- packages/payload/package.json | 2 +- packages/plugin-cloud-storage/package.json | 2 +- packages/plugin-form-builder/package.json | 2 +- packages/plugin-import-export/package.json | 2 +- packages/plugin-multi-tenant/package.json | 2 +- packages/plugin-nested-docs/package.json | 2 +- packages/plugin-redirects/package.json | 2 +- packages/plugin-search/package.json | 2 +- packages/plugin-sentry/package.json | 2 +- packages/plugin-seo/package.json | 2 +- packages/plugin-stripe/package.json | 2 +- packages/richtext-lexical/package.json | 2 +- packages/richtext-slate/package.json | 2 +- packages/storage-azure/package.json | 2 +- packages/storage-gcs/package.json | 2 +- packages/storage-s3/package.json | 2 +- packages/storage-uploadthing/package.json | 2 +- packages/storage-vercel-blob/package.json | 2 +- packages/translations/package.json | 2 +- packages/ui/package.json | 2 +- 36 files changed, 36 insertions(+), 36 deletions(-) diff --git a/package.json b/package.json index 7bd509f311..eefed55b4f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "payload-monorepo", - "version": "3.47.0", + "version": "3.48.0", "private": true, "type": "module", "workspaces": [ diff --git a/packages/admin-bar/package.json b/packages/admin-bar/package.json index 57b1c7a870..0a6f6a0105 100644 --- a/packages/admin-bar/package.json +++ b/packages/admin-bar/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/admin-bar", - "version": "3.47.0", + "version": "3.48.0", "description": "An admin bar for React apps using Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/create-payload-app/package.json b/packages/create-payload-app/package.json index 5b091b3e62..341570e772 100644 --- a/packages/create-payload-app/package.json +++ b/packages/create-payload-app/package.json @@ -1,6 +1,6 @@ { "name": "create-payload-app", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/db-mongodb/package.json b/packages/db-mongodb/package.json index 4fd777f07a..ce2fde62fe 100644 --- a/packages/db-mongodb/package.json +++ b/packages/db-mongodb/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-mongodb", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported MongoDB database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-postgres/package.json b/packages/db-postgres/package.json index 6030f85c7f..c37cad5380 100644 --- a/packages/db-postgres/package.json 
+++ b/packages/db-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-postgres", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Postgres database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-sqlite/package.json b/packages/db-sqlite/package.json index 05c2e49058..c76214ab56 100644 --- a/packages/db-sqlite/package.json +++ b/packages/db-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-sqlite", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported SQLite database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-vercel-postgres/package.json b/packages/db-vercel-postgres/package.json index 71b13d98c4..bad20b8c60 100644 --- a/packages/db-vercel-postgres/package.json +++ b/packages/db-vercel-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-vercel-postgres", - "version": "3.47.0", + "version": "3.48.0", "description": "Vercel Postgres adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/drizzle/package.json b/packages/drizzle/package.json index 56fb038fd8..b2777a10f8 100644 --- a/packages/drizzle/package.json +++ b/packages/drizzle/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/drizzle", - "version": "3.47.0", + "version": "3.48.0", "description": "A library of shared functions used by different payload database adapters", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-nodemailer/package.json b/packages/email-nodemailer/package.json index fed7571620..c1a4307614 100644 --- a/packages/email-nodemailer/package.json +++ b/packages/email-nodemailer/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-nodemailer", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload Nodemailer Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-resend/package.json b/packages/email-resend/package.json index bc9d5f61ed..270bb6fda4 100644 --- a/packages/email-resend/package.json +++ b/packages/email-resend/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-resend", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload Resend Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/graphql/package.json b/packages/graphql/package.json index 5e50af1df0..0f3b87400a 100644 --- a/packages/graphql/package.json +++ b/packages/graphql/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/graphql", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/live-preview-react/package.json b/packages/live-preview-react/package.json index d8a5915db2..bbc1f6d2eb 100644 --- a/packages/live-preview-react/package.json +++ b/packages/live-preview-react/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-react", - "version": "3.47.0", + "version": "3.48.0", "description": "The official React SDK for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview-vue/package.json b/packages/live-preview-vue/package.json index 5f826d54f0..73674817ba 100644 --- a/packages/live-preview-vue/package.json +++ b/packages/live-preview-vue/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-vue", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Vue SDK 
for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview/package.json b/packages/live-preview/package.json index d7b9fe1e82..4e695dd2c1 100644 --- a/packages/live-preview/package.json +++ b/packages/live-preview/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview", - "version": "3.47.0", + "version": "3.48.0", "description": "The official live preview JavaScript SDK for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/next/package.json b/packages/next/package.json index f833f30232..2751f2d441 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/next", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/payload-cloud/package.json b/packages/payload-cloud/package.json index 29087ac235..9679a69c08 100644 --- a/packages/payload-cloud/package.json +++ b/packages/payload-cloud/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/payload-cloud", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Payload Cloud plugin", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/payload/package.json b/packages/payload/package.json index 9b4edd5b5d..3437d8d148 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -1,6 +1,6 @@ { "name": "payload", - "version": "3.47.0", + "version": "3.48.0", "description": "Node, React, Headless CMS and Application Framework built on Next.js", "keywords": [ "admin panel", diff --git a/packages/plugin-cloud-storage/package.json b/packages/plugin-cloud-storage/package.json index 98b0ea98bd..c992d92ac7 100644 --- a/packages/plugin-cloud-storage/package.json +++ b/packages/plugin-cloud-storage/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-cloud-storage", - "version": "3.47.0", + "version": "3.48.0", "description": "The official cloud storage plugin for Payload CMS", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-form-builder/package.json b/packages/plugin-form-builder/package.json index fdaa894696..3847b3980e 100644 --- a/packages/plugin-form-builder/package.json +++ b/packages/plugin-form-builder/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-form-builder", - "version": "3.47.0", + "version": "3.48.0", "description": "Form builder plugin for Payload CMS", "keywords": [ "payload", diff --git a/packages/plugin-import-export/package.json b/packages/plugin-import-export/package.json index bf893f2ec4..6ec831fcf2 100644 --- a/packages/plugin-import-export/package.json +++ b/packages/plugin-import-export/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-import-export", - "version": "3.47.0", + "version": "3.48.0", "description": "Import-Export plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-multi-tenant/package.json b/packages/plugin-multi-tenant/package.json index 6c8ecac5f1..03398f95b7 100644 --- a/packages/plugin-multi-tenant/package.json +++ b/packages/plugin-multi-tenant/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-multi-tenant", - "version": "3.47.0", + "version": "3.48.0", "description": "Multi Tenant plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-nested-docs/package.json b/packages/plugin-nested-docs/package.json index 6e96917d68..416c2335cd 100644 --- a/packages/plugin-nested-docs/package.json +++ 
b/packages/plugin-nested-docs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-nested-docs", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Nested Docs plugin for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-redirects/package.json b/packages/plugin-redirects/package.json index 6ef9ceb11e..2ad67ff908 100644 --- a/packages/plugin-redirects/package.json +++ b/packages/plugin-redirects/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-redirects", - "version": "3.47.0", + "version": "3.48.0", "description": "Redirects plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-search/package.json b/packages/plugin-search/package.json index e4a5086360..e0c482656b 100644 --- a/packages/plugin-search/package.json +++ b/packages/plugin-search/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-search", - "version": "3.47.0", + "version": "3.48.0", "description": "Search plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-sentry/package.json b/packages/plugin-sentry/package.json index 9cfae24d58..3ce1a34195 100644 --- a/packages/plugin-sentry/package.json +++ b/packages/plugin-sentry/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-sentry", - "version": "3.47.0", + "version": "3.48.0", "description": "Sentry plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-seo/package.json b/packages/plugin-seo/package.json index e970f1518a..24ba866c55 100644 --- a/packages/plugin-seo/package.json +++ b/packages/plugin-seo/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-seo", - "version": "3.47.0", + "version": "3.48.0", "description": "SEO plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-stripe/package.json b/packages/plugin-stripe/package.json index d792db9a12..0adb38a9aa 100644 --- a/packages/plugin-stripe/package.json +++ b/packages/plugin-stripe/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-stripe", - "version": "3.47.0", + "version": "3.48.0", "description": "Stripe plugin for Payload", "keywords": [ "payload", diff --git a/packages/richtext-lexical/package.json b/packages/richtext-lexical/package.json index 46f318440b..f1afb8d806 100644 --- a/packages/richtext-lexical/package.json +++ b/packages/richtext-lexical/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-lexical", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Lexical richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/richtext-slate/package.json b/packages/richtext-slate/package.json index 4bd5c023bf..76d636e0b7 100644 --- a/packages/richtext-slate/package.json +++ b/packages/richtext-slate/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-slate", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Slate richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-azure/package.json b/packages/storage-azure/package.json index dcd0d6d3ff..46ba50ce2d 100644 --- a/packages/storage-azure/package.json +++ b/packages/storage-azure/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-azure", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Azure Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-gcs/package.json 
b/packages/storage-gcs/package.json index 7535814e6b..c99f9baa16 100644 --- a/packages/storage-gcs/package.json +++ b/packages/storage-gcs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-gcs", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Google Cloud Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-s3/package.json b/packages/storage-s3/package.json index e667db97e2..00b61727bc 100644 --- a/packages/storage-s3/package.json +++ b/packages/storage-s3/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-s3", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Amazon S3", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-uploadthing/package.json b/packages/storage-uploadthing/package.json index aeaeecd7fc..6b5e65f06f 100644 --- a/packages/storage-uploadthing/package.json +++ b/packages/storage-uploadthing/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-uploadthing", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for uploadthing", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-vercel-blob/package.json b/packages/storage-vercel-blob/package.json index 652d56acc3..4948f19bd0 100644 --- a/packages/storage-vercel-blob/package.json +++ b/packages/storage-vercel-blob/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-vercel-blob", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Vercel Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/translations/package.json b/packages/translations/package.json index 8ebecc5dfe..42c7da0a58 100644 --- a/packages/translations/package.json +++ b/packages/translations/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/translations", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/ui/package.json b/packages/ui/package.json index 72537a0bb2..66a34c2f31 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/ui", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", From 4ae503d70050a2746f12d974ca3958e68d9fa812 Mon Sep 17 00:00:00 2001 From: Jake Fell Date: Thu, 17 Jul 2025 20:33:49 +0100 Subject: [PATCH 10/91] fix: exit payload jobs:run process after completion (#13211) ### What? Exit the process after running jobs. ### Why? When running the `payload jobs:run` bin script with a postgres database the process hangs forever. ### How? Execute `process.exit(0)` after running all jobs. 
--- packages/payload/src/bin/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/payload/src/bin/index.ts b/packages/payload/src/bin/index.ts index b1b973d0ef..6bd604a373 100755 --- a/packages/payload/src/bin/index.ts +++ b/packages/payload/src/bin/index.ts @@ -133,7 +133,7 @@ export const bin = async () => { await payload.destroy() // close database connections after running jobs so process can exit cleanly - return + process.exit(0) } } From c08b2aea8971863369f23440a7069e2cea05e646 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Fri, 18 Jul 2025 03:48:27 -0700 Subject: [PATCH 11/91] feat: scheduling jobs (#12863) Adds a new `schedule` property to workflow and task configs that can be used to have Payload automatically _queue_ jobs following a certain _schedule_. Docs: https://payloadcms.com/docs/dynamic/jobs-queue/schedules?branch=feat/schedule-jobs ## API Example ```ts export default buildConfig({ // ... jobs: { // ... scheduler: 'manual', // Or `cron` if you're not using serverless. If `manual` is used, then user needs to set up running /api/payload-jobs/handleSchedules or payload.jobs.handleSchedules in regular intervals tasks: [ { schedule: [ { cron: '* * * * * *', queue: 'autorunSecond', // Hooks are optional hooks: { // Not an array, as providing and calling `defaultBeforeSchedule` would be more error-prone if this was an array beforeSchedule: async (args) => { // Handles verifying that there are no jobs already scheduled or processing. // You can override this behavior by not calling defaultBeforeSchedule, e.g. if you wanted // to allow a maximum of 3 scheduled jobs in the queue instead of 1, or add any additional conditions const result = await args.defaultBeforeSchedule(args) return { ...result, input: { message: 'This task runs every second', }, } }, afterSchedule: async (args) => { await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global args.req.payload.logger.info( 'EverySecond task scheduled: ' + (args.status === 'success' ? 
args.job.id : 'skipped or failed to schedule'), ) }, }, }, ], slug: 'EverySecond', inputSchema: [ { name: 'message', type: 'text', required: true, }, ], handler: ({ input, req }) => { req.payload.logger.info(input.message) return { output: {}, } }, } ] } }) ``` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210495300843759 --- docs/jobs-queue/schedules.mdx | 156 +++++++ .../db-mongodb/src/utilities/transform.ts | 4 + packages/payload/package.json | 2 +- packages/payload/src/config/sanitize.ts | 28 +- .../payload/src/database/defaultUpdateJobs.ts | 2 +- packages/payload/src/database/types.ts | 3 + packages/payload/src/index.ts | 48 +- .../queues/config/{index.ts => collection.ts} | 33 +- packages/payload/src/queues/config/global.ts | 45 ++ .../payload/src/queues/config/types/index.ts | 139 +++++- .../src/queues/config/types/taskTypes.ts | 8 + .../src/queues/config/types/workflowTypes.ts | 15 + .../src/queues/endpoints/handleSchedules.ts | 66 +++ packages/payload/src/queues/endpoints/run.ts | 118 +++++ .../errors/calculateBackoffWaitUntil.ts | 12 +- .../src/queues/errors/handleTaskError.ts | 42 +- .../src/queues/errors/handleWorkflowError.ts | 24 +- packages/payload/src/queues/localAPI.ts | 56 ++- .../countRunnableOrActiveJobsForQueue.ts | 74 ++++ .../handleSchedules/defaultAfterSchedule.ts | 64 +++ .../handleSchedules/defaultBeforeSchedule.ts | 20 + .../handleSchedules/getQueuesWithSchedules.ts | 50 +++ .../operations/handleSchedules/index.ts | 223 ++++++++++ .../src/queues/operations/runJobs/index.ts | 44 +- .../operations/runJobs/runJSONJob/index.ts | 15 +- .../runJobs/runJob/getRunTaskFunction.ts | 5 +- .../queues/operations/runJobs/runJob/index.ts | 16 +- .../payload/src/queues/restEndpointRun.ts | 91 ---- .../src/queues/utilities/getCurrentDate.ts | 21 + .../payload/src/queues/utilities/updateJob.ts | 2 +- .../versions/deleteScheduledPublishJobs.ts | 2 +- pnpm-lock.yaml | 10 +- test/helpers/initPayloadInt.ts | 7 +- test/initDevAndTest.ts | 3 +- test/queues/config.schedules-autocron.ts | 22 + test/queues/config.schedules.ts | 22 + test/queues/config.ts | 416 +----------------- test/queues/getConfig.ts | 176 ++++++++ test/queues/int.spec.ts | 148 ++++--- test/queues/payload-types.ts | 72 ++- test/queues/schedules-autocron.int.spec.ts | 105 +++++ test/queues/schedules.int.spec.ts | 341 ++++++++++++++ test/queues/tasks/CreateSimpleRetries0Task.ts | 41 ++ .../tasks/CreateSimpleRetriesUndefinedTask.ts | 40 ++ test/queues/tasks/CreateSimpleTask.ts | 41 ++ .../CreateSimpleWithDuplicateMessageTask.ts | 42 ++ test/queues/tasks/EverySecondMax2Task.ts | 67 +++ test/queues/tasks/EverySecondTask.ts | 54 +++ test/queues/tasks/ExternalTask.ts | 26 ++ test/queues/tasks/ReturnCustomErrorTask.ts | 20 + test/queues/tasks/ReturnErrorTask.ts | 13 + test/queues/tasks/ThrowErrorTask.ts | 11 + test/queues/tasks/UpdatePostStep2Task.ts | 23 + test/queues/tasks/UpdatePostTask.ts | 31 ++ test/queues/utilities.ts | 62 +++ test/runInit.ts | 3 +- 56 files changed, 2579 insertions(+), 645 deletions(-) create mode 100644 docs/jobs-queue/schedules.mdx rename packages/payload/src/queues/config/{index.ts => collection.ts} (84%) create mode 100644 packages/payload/src/queues/config/global.ts create mode 100644 packages/payload/src/queues/endpoints/handleSchedules.ts create mode 100644 packages/payload/src/queues/endpoints/run.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts create mode 
100644 packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/index.ts delete mode 100644 packages/payload/src/queues/restEndpointRun.ts create mode 100644 packages/payload/src/queues/utilities/getCurrentDate.ts create mode 100644 test/queues/config.schedules-autocron.ts create mode 100644 test/queues/config.schedules.ts create mode 100644 test/queues/getConfig.ts create mode 100644 test/queues/schedules-autocron.int.spec.ts create mode 100644 test/queues/schedules.int.spec.ts create mode 100644 test/queues/tasks/CreateSimpleRetries0Task.ts create mode 100644 test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts create mode 100644 test/queues/tasks/CreateSimpleTask.ts create mode 100644 test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts create mode 100644 test/queues/tasks/EverySecondMax2Task.ts create mode 100644 test/queues/tasks/EverySecondTask.ts create mode 100644 test/queues/tasks/ExternalTask.ts create mode 100644 test/queues/tasks/ReturnCustomErrorTask.ts create mode 100644 test/queues/tasks/ReturnErrorTask.ts create mode 100644 test/queues/tasks/ThrowErrorTask.ts create mode 100644 test/queues/tasks/UpdatePostStep2Task.ts create mode 100644 test/queues/tasks/UpdatePostTask.ts create mode 100644 test/queues/utilities.ts diff --git a/docs/jobs-queue/schedules.mdx b/docs/jobs-queue/schedules.mdx new file mode 100644 index 0000000000..a2b7aa3311 --- /dev/null +++ b/docs/jobs-queue/schedules.mdx @@ -0,0 +1,156 @@ +--- +title: Job Schedules +label: Schedules +order: 60 +desc: Payload allows you to schedule jobs to run periodically +keywords: jobs queue, application framework, typescript, node, react, nextjs, scheduling, cron, schedule +--- + +Payload's `schedule` property lets you enqueue Jobs regularly according to a cron schedule - daily, weekly, hourly, or any custom interval. This is ideal for tasks or workflows that must repeat automatically and without manual intervention. + +Scheduling Jobs differs significantly from running them: + +- **Queueing**: Scheduling only creates (enqueues) the Job according to your cron expression. It does not immediately execute any business logic. +- **Running**: Execution happens separately through your Jobs runner - such as autorun, or manual invocation using `payload.jobs.run()` or the `payload-jobs/run` endpoint. + +Use the `schedule` property specifically when you have recurring tasks or workflows. To enqueue a single Job to run once in the future, use the `waitUntil` property instead. + +## Example use cases + +**Regular emails or notifications** + +Send nightly digests, weekly newsletters, or hourly updates. + +**Batch processing during off-hours** + +Process analytics data or rebuild static sites during low-traffic times. + +**Periodic data synchronization** + +Regularly push or pull updates to or from external APIs. + +## Handling schedules + +Something needs to actually trigger the scheduling of jobs (execute the scheduling lifecycle seen below). By default, the `jobs.autorun` configuration, as well as the `/api/payload-jobs/run` will also handle scheduling for the queue specified in the `autorun` configuration. 
+ +You can disable this behavior by setting `disableScheduling: true` in your `autorun` configuration, or by passing `disableScheduling=true` to the `/api/payload-jobs/run` endpoint. This is useful if you want to handle scheduling manually, for example, by using a cron job or a serverless function that calls the `/api/payload-jobs/handle-schedules` endpoint or the `payload.jobs.handleSchedules()` local API method. + +## Defining schedules on Tasks or Workflows + +Schedules are defined using the `schedule` property: + +```ts +export type ScheduleConfig = { + cron: string // required, supports seconds precision + queue: string // required, the queue to push Jobs onto + hooks?: { + // Optional hooks to customize scheduling behavior + beforeSchedule?: BeforeScheduleFn + afterSchedule?: AfterScheduleFn + } +} +``` + +### Example schedule + +The following example demonstrates scheduling a Job to enqueue every day at midnight: + +```ts +import type { TaskConfig } from 'payload' + +export const SendDigestEmail: TaskConfig<'SendDigestEmail'> = { + slug: 'SendDigestEmail', + schedule: [ + { + cron: '0 0 * * *', // Every day at midnight + queue: 'nightly', + }, + ], + handler: async () => { + await sendDigestToAllUsers() + }, +} +``` + +This configuration only queues the Job - it does not execute it immediately. To actually run the queued Job, you configure autorun in your Payload config (note that autorun should **not** be used on serverless platforms): + +```ts +export default buildConfig({ + jobs: { + scheduler: 'cron', + autoRun: [ + { + cron: '* * * * *', // Runs every minute + queue: 'nightly', + }, + ], + tasks: [SendDigestEmail], + }, +}) +``` + +That way, Payload's scheduler will automatically enqueue the job into the `nightly` queue every day at midnight. The autorun configuration will check the `nightly` queue every minute and execute any Jobs that are due to run. + +## Scheduling lifecycle + +Here's how the scheduling process operates in detail: + +1. **Cron evaluation**: Payload (or your external trigger in `manual` mode) identifies which schedules are due to run. To do that, it will + read the `payload-jobs-stats` global which contains information about the last time each scheduled task or workflow was run. +2. **BeforeSchedule hook**: + - The default beforeSchedule hook checks how many active or runnable jobs of the same type that have been queued by the scheduling system currently exist. + If such a job exists, it will skip scheduling a new one. + - You can provide your own `beforeSchedule` hook to customize this behavior. For example, you might want to allow multiple overlapping Jobs or dynamically set the Job input data. +3. **Enqueue Job**: Payload queues up a new job. This job will have `waitUntil` set to the next scheduled time based on the cron expression. +4. **AfterSchedule hook**: + - The default afterSchedule hook updates the `payload-jobs-stats` global metadata with the last scheduled time for the Job. + - You can provide your own afterSchedule hook to it for custom logging, metrics, or other post-scheduling actions. + +## Customizing concurrency and input (Advanced) + +You may want more control over concurrency or dynamically set Job inputs at scheduling time. 
For instance, allowing multiple overlapping Jobs to be scheduled, even if a previously scheduled job has not completed yet, or preparing dynamic data to pass to your Job handler: + +```ts +import { countRunnableOrActiveJobsForQueue } from 'payload' + +schedule: [ + { + cron: '* * * * *', // every minute + queue: 'reports', + hooks: { + beforeSchedule: async ({ queueable, req }) => { + const runnableOrActiveJobsForQueue = + await countRunnableOrActiveJobsForQueue({ + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug, + workflowSlug: queueable.workflowConfig?.slug, + onlyScheduled: true, + }) + + // Allow up to 3 simultaneous scheduled jobs and set dynamic input + return { + shouldSchedule: runnableOrActiveJobsForQueue < 3, + input: { text: 'Hi there' }, + } + }, + }, + }, +] +``` + +This allows fine-grained control over how many Jobs can run simultaneously and provides dynamically computed input values each time a Job is scheduled. + +## Scheduling in serverless environments + +On serverless platforms, scheduling must be triggered externally since Payload does not automatically run cron schedules in ephemeral environments. You have two main ways to trigger scheduling manually: + +- **Invoke via Payload's API:** `payload.jobs.handleSchedules()` +- **Use the REST API endpoint:** `/api/payload-jobs/handle-schedules` +- **Use the run endpoint, which also handles scheduling by default:** `GET /api/payload-jobs/run` + +For example, on Vercel, you can set up a Vercel Cron to regularly trigger scheduling: + +- **Vercel Cron Job:** Configure Vercel Cron to periodically call `GET /api/payload-jobs/handle-schedules`. If you would like to auto-run your scheduled jobs as well, you can use the `GET /api/payload-jobs/run` endpoint. + +Once Jobs are queued, their execution depends entirely on your configured runner setup (e.g., autorun, or manual invocation). 
diff --git a/packages/db-mongodb/src/utilities/transform.ts b/packages/db-mongodb/src/utilities/transform.ts index 24113806ae..35a271877c 100644 --- a/packages/db-mongodb/src/utilities/transform.ts +++ b/packages/db-mongodb/src/utilities/transform.ts @@ -406,6 +406,10 @@ export const transform = ({ parentIsLocalized = false, validateRelationships = true, }: Args) => { + if (!data) { + return null + } + if (Array.isArray(data)) { for (const item of data) { transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships }) diff --git a/packages/payload/package.json b/packages/payload/package.json index 3437d8d148..ed757a0d27 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -92,7 +92,7 @@ "busboy": "^1.6.0", "ci-info": "^4.1.0", "console-table-printer": "2.12.1", - "croner": "9.0.0", + "croner": "9.1.0", "dataloader": "2.2.3", "deepmerge": "4.3.1", "file-type": "19.3.0", diff --git a/packages/payload/src/config/sanitize.ts b/packages/payload/src/config/sanitize.ts index c90ee9703b..043bb34d3d 100644 --- a/packages/payload/src/config/sanitize.ts +++ b/packages/payload/src/config/sanitize.ts @@ -29,7 +29,8 @@ import { } from '../locked-documents/config.js' import { getPreferencesCollection, preferencesCollectionSlug } from '../preferences/config.js' import { getQueryPresetsConfig, queryPresetsCollectionSlug } from '../query-presets/config.js' -import { getDefaultJobsCollection, jobsCollectionSlug } from '../queues/config/index.js' +import { getDefaultJobsCollection, jobsCollectionSlug } from '../queues/config/collection.js' +import { getJobStatsGlobal } from '../queues/config/global.js' import { flattenBlock } from '../utilities/flattenAllFields.js' import { getSchedulePublishTask } from '../versions/schedule/job.js' import { addDefaultsToConfig } from './defaults.js' @@ -313,7 +314,28 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise task.schedule)) || + (config?.jobs?.workflows?.length && + config.jobs.workflows.some((workflow) => workflow.schedule)) + + if (hasScheduleProperty) { + config.jobs.scheduling = true + // Add payload-jobs-stats global for tracking when a job of a specific slug was last run + ;(config.globals ??= []).push( + await sanitizeGlobal( + config as unknown as Config, + getJobStatsGlobal(config as unknown as Config), + richTextSanitizationPromises, + validRelationships, + ), + ) + + config.jobs.stats = true + } + + let defaultJobsCollection = getDefaultJobsCollection(config.jobs) if (typeof config.jobs.jobsCollectionOverrides === 'function') { defaultJobsCollection = config.jobs.jobsCollectionOverrides({ @@ -342,7 +364,7 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise { - const job = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => { + const jobAutorunCron = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => { + if ( + _internal_jobSystemGlobals.shouldAutoSchedule && + !cronConfig.disableScheduling && + this.config.jobs.scheduling + ) { + await this.jobs.handleSchedules({ + queue: cronConfig.queue, + }) + } + + if (!_internal_jobSystemGlobals.shouldAutoRun) { + return + } + if (typeof this.config.jobs.shouldAutoRun === 'function') { const shouldAutoRun = await this.config.jobs.shouldAutoRun(this) if (!shouldAutoRun) { - job.stop() - - return false + jobAutorunCron.stop() + return } } await this.jobs.run({ limit: cronConfig.limit ?? 
DEFAULT_LIMIT, queue: cronConfig.queue, + silent: cronConfig.silent, }) }) - this.crons.push(job) + this.crons.push(jobAutorunCron) }), ) } @@ -931,8 +946,10 @@ export const reload = async ( payload: Payload, skipImportMapGeneration?: boolean, ): Promise => { - await payload.destroy() - + if (typeof payload.db.destroy === 'function') { + // Only destroy db, as we then later only call payload.db.init and not payload.init + await payload.db.destroy() + } payload.config = config payload.collections = config.collections.reduce( @@ -1176,6 +1193,7 @@ export type { export type { CompoundIndex } from './collections/config/types.js' export type { SanitizedCompoundIndex } from './collections/config/types.js' + export { createDataloaderCacheKey, getDataLoader } from './collections/dataloader.js' export { countOperation } from './collections/operations/count.js' export { createOperation } from './collections/operations/create.js' @@ -1321,6 +1339,7 @@ export { export type { ValidationFieldError } from './errors/index.js' export { baseBlockFields } from './fields/baseFields/baseBlockFields.js' + export { baseIDField } from './fields/baseFields/baseIDField.js' export { @@ -1444,6 +1463,7 @@ export type { export { getDefaultValue } from './fields/getDefaultValue.js' export { traverseFields as afterChangeTraverseFields } from './fields/hooks/afterChange/traverseFields.js' + export { promise as afterReadPromise } from './fields/hooks/afterRead/promise.js' export { traverseFields as afterReadTraverseFields } from './fields/hooks/afterRead/traverseFields.js' export { traverseFields as beforeChangeTraverseFields } from './fields/hooks/beforeChange/traverseFields.js' @@ -1451,6 +1471,7 @@ export { traverseFields as beforeValidateTraverseFields } from './fields/hooks/b export { sortableFieldTypes } from './fields/sortableFieldTypes.js' export { validations } from './fields/validations.js' + export type { ArrayFieldValidation, BlocksFieldValidation, @@ -1505,6 +1526,7 @@ export type { export { docAccessOperation as docAccessOperationGlobal } from './globals/operations/docAccess.js' export { findOneOperation } from './globals/operations/findOne.js' + export { findVersionByIDOperation as findVersionByIDOperationGlobal } from './globals/operations/findVersionByID.js' export { findVersionsOperation as findVersionsOperationGlobal } from './globals/operations/findVersions.js' export { restoreVersionOperation as restoreVersionOperationGlobal } from './globals/operations/restoreVersion.js' @@ -1525,8 +1547,7 @@ export type { TabsPreferences, } from './preferences/types.js' export type { QueryPreset } from './query-presets/types.js' -export { jobAfterRead } from './queues/config/index.js' - +export { jobAfterRead } from './queues/config/collection.js' export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js' export type { RunInlineTaskFunction, @@ -1541,6 +1562,7 @@ export type { TaskOutput, TaskType, } from './queues/config/types/taskTypes.js' + export type { BaseJob, JobLog, @@ -1551,8 +1573,14 @@ export type { WorkflowHandler, WorkflowTypes, } from './queues/config/types/workflowTypes.js' - +export { countRunnableOrActiveJobsForQueue } from './queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.js' export { importHandlerPath } from './queues/operations/runJobs/runJob/importHandlerPath.js' + +export { + _internal_jobSystemGlobals, + _internal_resetJobSystemGlobals, + getCurrentDate, +} from './queues/utilities/getCurrentDate.js' export { getLocalI18n } from 
'./translations/getLocalI18n.js' export * from './types/index.js' export { getFileByPath } from './uploads/getFileByPath.js' diff --git a/packages/payload/src/queues/config/index.ts b/packages/payload/src/queues/config/collection.ts similarity index 84% rename from packages/payload/src/queues/config/index.ts rename to packages/payload/src/queues/config/collection.ts index 9628a29bff..e9e925d804 100644 --- a/packages/payload/src/queues/config/index.ts +++ b/packages/payload/src/queues/config/collection.ts @@ -1,25 +1,28 @@ import type { CollectionConfig } from '../../collections/config/types.js' -import type { Config, SanitizedConfig } from '../../config/types.js' +import type { SanitizedConfig } from '../../config/types.js' import type { Field } from '../../fields/config/types.js' import type { Job } from '../../index.js' -import { runJobsEndpoint } from '../restEndpointRun.js' +import { handleSchedulesJobsEndpoint } from '../endpoints/handleSchedules.js' +import { runJobsEndpoint } from '../endpoints/run.js' import { getJobTaskStatus } from '../utilities/getJobTaskStatus.js' export const jobsCollectionSlug = 'payload-jobs' -export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (config) => { +export const getDefaultJobsCollection: (jobsConfig: SanitizedConfig['jobs']) => CollectionConfig = ( + jobsConfig, +) => { const workflowSlugs: Set = new Set() const taskSlugs: Set = new Set(['inline']) - if (config.jobs?.workflows?.length) { - config.jobs?.workflows.forEach((workflow) => { + if (jobsConfig.workflows?.length) { + jobsConfig.workflows.forEach((workflow) => { workflowSlugs.add(workflow.slug) }) } - if (config.jobs?.tasks?.length) { - config.jobs.tasks.forEach((task) => { + if (jobsConfig.tasks?.length) { + jobsConfig.tasks.forEach((task) => { if (workflowSlugs.has(task.slug)) { throw new Error( `Task slug "${task.slug}" is already used by a workflow. No tasks are allowed to have the same slug as a workflow.`, @@ -78,7 +81,7 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c }, ] - if (config?.jobs?.addParentToTaskLog) { + if (jobsConfig.addParentToTaskLog) { logFields.push({ name: 'parent', type: 'group', @@ -102,7 +105,7 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c group: 'System', hidden: true, }, - endpoints: [runJobsEndpoint], + endpoints: [runJobsEndpoint, handleSchedulesJobsEndpoint], fields: [ { name: 'input', @@ -198,6 +201,9 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c { name: 'waitUntil', type: 'date', + admin: { + date: { pickerAppearance: 'dayAndTime' }, + }, index: true, }, { @@ -237,6 +243,15 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c lockDocuments: false, } + if (jobsConfig.stats) { + // TODO: In 4.0, this should be added by default. + // The meta field can be used to store arbitrary data about the job. The scheduling system uses this to store + // `scheduled: true` to indicate that the job was queued by the scheduling system. 
+ jobsCollection.fields.push({ + name: 'meta', + type: 'json', + }) + } return jobsCollection } diff --git a/packages/payload/src/queues/config/global.ts b/packages/payload/src/queues/config/global.ts new file mode 100644 index 0000000000..55c87d247f --- /dev/null +++ b/packages/payload/src/queues/config/global.ts @@ -0,0 +1,45 @@ +import type { Config } from '../../config/types.js' +import type { GlobalConfig } from '../../globals/config/types.js' +import type { TaskType } from './types/taskTypes.js' +import type { WorkflowTypes } from './types/workflowTypes.js' + +export const jobStatsGlobalSlug = 'payload-jobs-stats' + +/** + * Type for data stored in the payload-jobs-stats global. + */ +export type JobStats = { + stats?: { + scheduledRuns?: { + queues?: { + [queueSlug: string]: { + tasks?: { + [taskSlug: TaskType]: { + lastScheduledRun: string + } + } + workflows?: { + [workflowSlug: WorkflowTypes]: { + lastScheduledRun: string + } + } + } + } + } + } +} + +/** + * Global config for job statistics. + */ +export const getJobStatsGlobal: (config: Config) => GlobalConfig = (config) => { + return { + slug: jobStatsGlobalSlug, + fields: [ + { + name: 'stats', + type: 'json', + }, + ], + } +} diff --git a/packages/payload/src/queues/config/types/index.ts b/packages/payload/src/queues/config/types/index.ts index 6bf730f44f..9ea4ff2233 100644 --- a/packages/payload/src/queues/config/types/index.ts +++ b/packages/payload/src/queues/config/types/index.ts @@ -1,10 +1,12 @@ -import type { CollectionConfig } from '../../../index.js' +import type { CollectionConfig, Job } from '../../../index.js' import type { Payload, PayloadRequest, Sort } from '../../../types/index.js' +import type { RunJobsSilent } from '../../localAPI.js' import type { RunJobsArgs } from '../../operations/runJobs/index.js' +import type { JobStats } from '../global.js' import type { TaskConfig } from './taskTypes.js' import type { WorkflowConfig } from './workflowTypes.js' -export type CronConfig = { +export type AutorunCronConfig = { /** * The cron schedule for the job. * @default '* * * * *' (every minute). @@ -26,6 +28,15 @@ export type CronConfig = { * - '* * * * * *' every second */ cron?: string + /** + * By default, the autorun will attempt to schedule jobs for tasks and workflows that have a `schedule` property, given + * the queue name is the same. + * + * Set this to `true` to disable the scheduling of jobs automatically. + * + * @default false + */ + disableScheduling?: boolean /** * The limit for the job. This can be overridden by the user. Defaults to 10. */ @@ -34,6 +45,15 @@ export type CronConfig = { * The queue name for the job. */ queue?: string + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent } export type RunJobAccessArgs = { @@ -48,6 +68,16 @@ export type SanitizedJobsConfig = { * This property is automatically set during sanitization. */ enabled?: boolean + /** + * If set to `true`, at least one task or workflow has scheduling enabled. + * This property is automatically set during sanitization. + */ + scheduling?: boolean + /** + * If set to `true`, a payload-job-stats global exists. + * This property is automatically set during sanitization. 
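The `JobStats` shape defined above can be read back through the Local API. A minimal sketch, assuming an initialized `payload` instance; the queue and task slugs are illustrative, and the global slug may need a cast in a fully typed project.

```ts
// Hedged sketch: inspect when a schedule last fired for a given queue/task.
const jobStats = await payload.findGlobal({ slug: 'payload-jobs-stats' })

const lastScheduledRun =
  jobStats?.stats?.scheduledRuns?.queues?.['hourly']?.tasks?.['CreateSimple']?.lastScheduledRun
```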
+ */ + stats?: boolean } & JobsConfig export type JobsConfig = { /** @@ -73,7 +103,9 @@ export type JobsConfig = { * * @remark this property should not be used on serverless platforms like Vercel */ - autoRun?: ((payload: Payload) => CronConfig[] | Promise) | CronConfig[] + autoRun?: + | ((payload: Payload) => AutorunCronConfig[] | Promise) + | AutorunCronConfig[] /** * Determine whether or not to delete a job after it has successfully completed. */ @@ -135,3 +167,104 @@ export type JobsConfig = { */ workflows?: WorkflowConfig[] } + +export type Queueable = { + scheduleConfig: ScheduleConfig + taskConfig?: TaskConfig + // If not set, queue it immediately + waitUntil?: Date + workflowConfig?: WorkflowConfig +} + +type OptionalPromise = Promise | T + +export type BeforeScheduleFn = (args: { + defaultBeforeSchedule: BeforeScheduleFn + /** + * payload-job-stats global data + */ + jobStats: JobStats + queueable: Queueable + req: PayloadRequest +}) => OptionalPromise<{ + input?: object + shouldSchedule: boolean + waitUntil?: Date +}> + +export type AfterScheduleFn = ( + args: { + defaultAfterSchedule: AfterScheduleFn + /** + * payload-job-stats global data. If the global does not exist, it will be null. + */ + jobStats: JobStats | null + queueable: Queueable + req: PayloadRequest + } & ( + | { + error: Error + job?: never + status: 'error' + } + | { + error?: never + job: Job + status: 'success' + } + | { + error?: never + job?: never + /** + * If the beforeSchedule hook returned `shouldSchedule: false`, this will be called with status `skipped`. + */ + status: 'skipped' + } + ), +) => OptionalPromise + +export type ScheduleConfig = { + /** + * The cron for scheduling the job. + * + * @example + * ┌───────────── (optional) second (0 - 59) + * │ ┌───────────── minute (0 - 59) + * │ │ ┌───────────── hour (0 - 23) + * │ │ │ ┌───────────── day of the month (1 - 31) + * │ │ │ │ ┌───────────── month (1 - 12) + * │ │ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday) + * │ │ │ │ │ │ + * │ │ │ │ │ │ + * - '* 0 * * * *' every hour at minute 0 + * - '* 0 0 * * *' daily at midnight + * - '* 0 0 * * 0' weekly at midnight on Sundays + * - '* 0 0 1 * *' monthly at midnight on the 1st day of the month + * - '* 0/5 * * * *' every 5 minutes + * - '* * * * * *' every second + */ + cron: string + hooks?: { + /** + * Functions that will be executed after the job has been successfully scheduled. + * + * @default By default, global update?? Unless global update should happen before + */ + afterSchedule?: AfterScheduleFn + /** + * Functions that will be executed before the job is scheduled. + * You can use this to control whether or not the job should be scheduled, or what input + * data should be passed to the job. + * + * @default By default, this has one function that returns { shouldSchedule: true } if the following conditions are met: + * - There currently is no job of the same type in the specified queue that is currently running + * - There currently is no job of the same type in the specified queue that is scheduled to run in the future + * - There currently is no job of the same type in the specified queue that failed previously but can be retried + */ + beforeSchedule?: BeforeScheduleFn + } + /** + * Queue to which the scheduled job will be added. 
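Since `ScheduleConfig` and its `beforeSchedule`/`afterSchedule` hooks are the core of the new scheduling types, a minimal hedged sketch of one such config follows. The queue name, cron expression, and extra input are illustrative, and the root import path for the type is an assumption; an object like this would be placed in a task's or workflow's `schedule` array.

```ts
import type { ScheduleConfig } from 'payload' // assumed export path

// A minimal sketch of a schedule with a custom beforeSchedule hook that wraps
// the default constraint check (no runnable/active scheduled job in the queue).
const hourlySchedule: ScheduleConfig = {
  cron: '0 0 * * * *', // six-field cron (seconds first): minute 0 of every hour
  queue: 'hourly',
  hooks: {
    beforeSchedule: async (args) => {
      const defaultResult = await args.defaultBeforeSchedule(args)
      // Attach illustrative input to the job that will be queued.
      return { ...defaultResult, input: { triggeredBy: 'scheduler' } }
    },
  },
}
```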
+ */ + queue: string +} diff --git a/packages/payload/src/queues/config/types/taskTypes.ts b/packages/payload/src/queues/config/types/taskTypes.ts index fe9107b8f8..48edff6881 100644 --- a/packages/payload/src/queues/config/types/taskTypes.ts +++ b/packages/payload/src/queues/config/types/taskTypes.ts @@ -1,4 +1,5 @@ import type { Field, Job, PayloadRequest, StringKeyOf, TypedJobs } from '../../../index.js' +import type { ScheduleConfig } from './index.js' import type { SingleTaskStatus } from './workflowTypes.js' export type TaskInputOutput = { @@ -54,6 +55,9 @@ export type TaskHandler< args: TaskHandlerArgs, ) => Promise> | TaskHandlerResult +/** + * @todo rename to TaskSlug in 4.0, similar to CollectionSlug + */ export type TaskType = StringKeyOf // Extracts the type of `input` corresponding to each task @@ -233,6 +237,10 @@ export type TaskConfig< * @default By default, tasks are not retried and `retries` is `undefined`. */ retries?: number | RetryConfig | undefined + /** + * Allows automatically scheduling this task to run regularly at a specified interval. + */ + schedule?: ScheduleConfig[] /** * Define a slug-based name for this job. This slug needs to be unique among both tasks and workflows. */ diff --git a/packages/payload/src/queues/config/types/workflowTypes.ts b/packages/payload/src/queues/config/types/workflowTypes.ts index 6a4adc011b..8c16825102 100644 --- a/packages/payload/src/queues/config/types/workflowTypes.ts +++ b/packages/payload/src/queues/config/types/workflowTypes.ts @@ -7,6 +7,7 @@ import type { TypedJobs, } from '../../../index.js' import type { TaskParent } from '../../operations/runJobs/runJob/getRunTaskFunction.js' +import type { ScheduleConfig } from './index.js' import type { RetryConfig, RunInlineTaskFunction, @@ -53,6 +54,13 @@ export type BaseJob< ? TypedJobs['workflows'][TWorkflowSlugOrInput]['input'] : TWorkflowSlugOrInput log?: JobLog[] + meta?: { + [key: string]: unknown + /** + * If true, this job was queued by the scheduling system. + */ + scheduled?: boolean + } processing?: boolean queue?: string taskSlug?: null | TaskType @@ -63,6 +71,9 @@ export type BaseJob< workflowSlug?: null | WorkflowTypes } +/** + * @todo rename to WorkflowSlug in 4.0, similar to CollectionSlug + */ export type WorkflowTypes = StringKeyOf /** @@ -155,6 +166,10 @@ export type WorkflowConfig< * @default undefined. By default, workflows retries are defined by their tasks */ retries?: number | RetryConfig | undefined + /** + * Allows automatically scheduling this workflow to run regularly at a specified interval. + */ + schedule?: ScheduleConfig[] /** * Define a slug-based name for this job. */ diff --git a/packages/payload/src/queues/endpoints/handleSchedules.ts b/packages/payload/src/queues/endpoints/handleSchedules.ts new file mode 100644 index 0000000000..385cb496e9 --- /dev/null +++ b/packages/payload/src/queues/endpoints/handleSchedules.ts @@ -0,0 +1,66 @@ +import type { Endpoint } from '../../config/types.js' + +import { handleSchedules } from '../operations/handleSchedules/index.js' +import { configHasJobs } from './run.js' + +/** + * GET /api/payload-jobs/handle-schedules endpoint + * + * This endpoint is GET instead of POST to allow it to be used in a Vercel Cron. 
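As a hedged usage sketch of the endpoint defined below: an external scheduler (for example a Vercel Cron entry pointing at this path) can simply issue a GET request. The host and queue name are illustrative; access is still governed by `jobs.access.run`.

```ts
// Hedged sketch: trigger schedule handling over REST. The path and `queue`
// query parameter come from this patch; the host is illustrative.
const res = await fetch('https://example.com/api/payload-jobs/handle-schedules?queue=hourly')

const { message, queued, skipped, errored } = await res.json()
```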
+ */ +export const handleSchedulesJobsEndpoint: Endpoint = { + handler: async (req) => { + const jobsConfig = req.payload.config.jobs + + if (!configHasJobs(jobsConfig)) { + return Response.json( + { + message: 'No jobs to schedule.', + }, + { status: 200 }, + ) + } + + const accessFn = jobsConfig.access?.run ?? (() => true) + + const hasAccess = await accessFn({ req }) + + if (!hasAccess) { + return Response.json( + { + message: req.i18n.t('error:unauthorized'), + }, + { status: 401 }, + ) + } + + if (!jobsConfig.scheduling) { + // There is no reason to call the handleSchedules endpoint if the stats global is not enabled (= no schedules defined) + return Response.json( + { + message: + 'Cannot handle schedules because no tasks or workflows with schedules are defined.', + }, + { status: 500 }, + ) + } + + const { queue } = req.query as { + queue?: string + } + + const { errored, queued, skipped } = await handleSchedules({ queue, req }) + + return Response.json( + { + errored, + message: req.i18n.t('general:success'), + queued, + skipped, + }, + { status: 200 }, + ) + }, + method: 'get', + path: '/handle-schedules', +} diff --git a/packages/payload/src/queues/endpoints/run.ts b/packages/payload/src/queues/endpoints/run.ts new file mode 100644 index 0000000000..a362a7d2cc --- /dev/null +++ b/packages/payload/src/queues/endpoints/run.ts @@ -0,0 +1,118 @@ +import type { Endpoint } from '../../config/types.js' +import type { SanitizedJobsConfig } from '../config/types/index.js' + +import { runJobs, type RunJobsArgs } from '../operations/runJobs/index.js' + +/** + * /api/payload-jobs/run endpoint + * + * This endpoint is GET instead of POST to allow it to be used in a Vercel Cron. + */ +export const runJobsEndpoint: Endpoint = { + handler: async (req) => { + const jobsConfig = req.payload.config.jobs + + if (!configHasJobs(jobsConfig)) { + return Response.json( + { + message: 'No jobs to run.', + }, + { status: 200 }, + ) + } + + const accessFn = jobsConfig.access?.run ?? (() => true) + + const hasAccess = await accessFn({ req }) + + if (!hasAccess) { + return Response.json( + { + message: req.i18n.t('error:unauthorized'), + }, + { status: 401 }, + ) + } + + const { + allQueues, + disableScheduling: disableSchedulingParam, + limit, + queue, + silent: silentParam, + } = req.query as { + allQueues?: 'false' | 'true' + disableScheduling?: 'false' | 'true' + limit?: number + queue?: string + silent?: string + } + + const silent = silentParam === 'true' + + const shouldHandleSchedules = disableSchedulingParam !== 'true' + + const runAllQueues = allQueues && !(typeof allQueues === 'string' && allQueues === 'false') + + if (shouldHandleSchedules && jobsConfig.scheduling) { + // If should handle schedules and schedules are defined + await req.payload.jobs.handleSchedules({ queue: runAllQueues ? 
undefined : queue, req }) + } + + const runJobsArgs: RunJobsArgs = { + queue, + req, + // Access is validated above, so it's safe to override here + allQueues: runAllQueues, + overrideAccess: true, + silent, + } + + if (typeof queue === 'string') { + runJobsArgs.queue = queue + } + + const parsedLimit = Number(limit) + if (!isNaN(parsedLimit)) { + runJobsArgs.limit = parsedLimit + } + + let noJobsRemaining = false + let remainingJobsFromQueried = 0 + try { + const result = await runJobs(runJobsArgs) + noJobsRemaining = !!result.noJobsRemaining + remainingJobsFromQueried = result.remainingJobsFromQueried + } catch (err) { + req.payload.logger.error({ + err, + msg: 'There was an error running jobs:', + queue: runJobsArgs.queue, + }) + + return Response.json( + { + message: req.i18n.t('error:unknown'), + noJobsRemaining: true, + remainingJobsFromQueried, + }, + { status: 500 }, + ) + } + + return Response.json( + { + message: req.i18n.t('general:success'), + noJobsRemaining, + remainingJobsFromQueried, + }, + { status: 200 }, + ) + }, + method: 'get', + path: '/run', +} + +export const configHasJobs = (jobsConfig: SanitizedJobsConfig): boolean => { + return Boolean(jobsConfig.tasks?.length || jobsConfig.workflows?.length) +} diff --git a/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts b/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts index e8ff239e8e..0214ecd141 100644 --- a/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts +++ b/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts @@ -1,5 +1,7 @@ import type { RetryConfig } from '../config/types/taskTypes.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' + export function calculateBackoffWaitUntil({ retriesConfig, totalTried, @@ -7,23 +9,23 @@ export function calculateBackoffWaitUntil({ retriesConfig: number | RetryConfig totalTried: number }): Date { - let waitUntil: Date = new Date() + let waitUntil: Date = getCurrentDate() if (typeof retriesConfig === 'object') { if (retriesConfig.backoff) { if (retriesConfig.backoff.type === 'fixed') { waitUntil = retriesConfig.backoff.delay - ? new Date(new Date().getTime() + retriesConfig.backoff.delay) - : new Date() + ? new Date(getCurrentDate().getTime() + retriesConfig.backoff.delay) + : getCurrentDate() } else if (retriesConfig.backoff.type === 'exponential') { // 2 ^ (attempts - 1) * delay (current attempt is not included in totalTried, thus no need for -1) const delay = retriesConfig.backoff.delay ? 
retriesConfig.backoff.delay : 0 - waitUntil = new Date(new Date().getTime() + Math.pow(2, totalTried) * delay) + waitUntil = new Date(getCurrentDate().getTime() + Math.pow(2, totalTried) * delay) } } } /* - const differenceInMSBetweenNowAndWaitUntil = waitUntil.getTime() - new Date().getTime() + const differenceInMSBetweenNowAndWaitUntil = waitUntil.getTime() - getCurrentDate().getTime() const differenceInSBetweenNowAndWaitUntil = differenceInMSBetweenNowAndWaitUntil / 1000 console.log('Calculated backoff', { diff --git a/packages/payload/src/queues/errors/handleTaskError.ts b/packages/payload/src/queues/errors/handleTaskError.ts index 3d6b491c95..3366b0a15b 100644 --- a/packages/payload/src/queues/errors/handleTaskError.ts +++ b/packages/payload/src/queues/errors/handleTaskError.ts @@ -1,9 +1,11 @@ import ObjectIdImport from 'bson-objectid' import type { PayloadRequest } from '../../index.js' +import type { RunJobsSilent } from '../localAPI.js' import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js' import type { TaskError } from './index.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' import { calculateBackoffWaitUntil } from './calculateBackoffWaitUntil.js' import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' @@ -13,10 +15,20 @@ const ObjectId = (ObjectIdImport.default || export async function handleTaskError({ error, req, + silent = false, updateJob, }: { error: TaskError req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction }): Promise<{ hasFinalError: boolean @@ -46,7 +58,7 @@ export async function handleTaskError({ stack: error.stack, } - const currentDate = new Date() + const currentDate = getCurrentDate() ;(job.log ??= []).push({ id: new ObjectId().toHexString(), @@ -102,12 +114,14 @@ export async function handleTaskError({ waitUntil: job.waitUntil, }) - req.payload.logger.error({ - err: error, - job, - msg: `Error running task ${taskID}. Attempt ${job.totalTried} - max retries reached`, - taskSlug, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + job, + msg: `Error running task ${taskID}. Attempt ${job.totalTried} - max retries reached`, + taskSlug, + }) + } return { hasFinalError: true, } @@ -135,12 +149,14 @@ export async function handleTaskError({ retriesConfig: workflowConfig.retries, }) - req.payload.logger.error({ - err: error, - job, - msg: `Error running task ${taskID}. Attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, - taskSlug, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + job, + msg: `Error running task ${taskID}. Attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? 
'/' + (maxWorkflowRetries + 1) : ''}`, + taskSlug, + }) + } // Update job's waitUntil only if this waitUntil is later than the current one if (waitUntil && (!job.waitUntil || waitUntil > new Date(job.waitUntil))) { diff --git a/packages/payload/src/queues/errors/handleWorkflowError.ts b/packages/payload/src/queues/errors/handleWorkflowError.ts index 6c5fed8da2..2716aebdec 100644 --- a/packages/payload/src/queues/errors/handleWorkflowError.ts +++ b/packages/payload/src/queues/errors/handleWorkflowError.ts @@ -1,7 +1,9 @@ import type { PayloadRequest } from '../../index.js' +import type { RunJobsSilent } from '../localAPI.js' import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js' import type { WorkflowError } from './index.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' /** @@ -15,10 +17,20 @@ import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' export async function handleWorkflowError({ error, req, + silent = false, updateJob, }: { error: WorkflowError req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction }): Promise<{ hasFinalError: boolean @@ -41,7 +53,7 @@ export async function handleWorkflowError({ if (job.waitUntil) { // Check if waitUntil is in the past const waitUntil = new Date(job.waitUntil) - if (waitUntil < new Date()) { + if (waitUntil < getCurrentDate()) { // Outdated waitUntil, remove it delete job.waitUntil } @@ -55,10 +67,12 @@ export async function handleWorkflowError({ const jobLabel = job.workflowSlug || `Task: ${job.taskSlug}` - req.payload.logger.error({ - err: error, - msg: `Error running job ${jobLabel} id: ${job.id} attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + msg: `Error running job ${jobLabel} id: ${job.id} attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, + }) + } // Tasks update the job if they error - but in case there is an unhandled error (e.g. 
in the workflow itself, not in a task) // we need to ensure the job is updated to reflect the error diff --git a/packages/payload/src/queues/localAPI.ts b/packages/payload/src/queues/localAPI.ts index f1449c5664..c38a64868f 100644 --- a/packages/payload/src/queues/localAPI.ts +++ b/packages/payload/src/queues/localAPI.ts @@ -1,4 +1,4 @@ -import type { RunningJobFromTask } from './config/types/workflowTypes.js' +import type { BaseJob, RunningJobFromTask } from './config/types/workflowTypes.js' import { createLocalReq, @@ -9,11 +9,37 @@ import { type TypedJobs, type Where, } from '../index.js' -import { jobAfterRead, jobsCollectionSlug } from './config/index.js' +import { jobAfterRead, jobsCollectionSlug } from './config/collection.js' +import { handleSchedules, type HandleSchedulesResult } from './operations/handleSchedules/index.js' import { runJobs } from './operations/runJobs/index.js' import { updateJob, updateJobs } from './utilities/updateJob.js' +export type RunJobsSilent = + | { + error?: boolean + info?: boolean + } + | boolean export const getJobsLocalAPI = (payload: Payload) => ({ + handleSchedules: async (args?: { + // By default, schedule all queues - only scheduling jobs scheduled to be added to the `default` queue would not make sense + // here, as you'd usually specify a different queue than `default` here, especially if this is used in combination with autorun. + // The `queue` property for setting up schedules is required, and not optional. + /** + * If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name. + * + * @default all jobs for all queues will be scheduled. + */ + queue?: string + req?: PayloadRequest + }): Promise => { + const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload)) + + return await handleSchedules({ + queue: args?.queue, + req: newReq, + }) + }, queue: async < // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents TTaskOrWorkflowSlug extends keyof TypedJobs['tasks'] | keyof TypedJobs['workflows'], @@ -21,6 +47,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ args: | { input: TypedJobs['tasks'][TTaskOrWorkflowSlug]['input'] + meta?: BaseJob['meta'] queue?: string req?: PayloadRequest // TTaskOrWorkflowlug with keyof TypedJobs['workflows'] removed: @@ -30,6 +57,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ } | { input: TypedJobs['workflows'][TTaskOrWorkflowSlug]['input'] + meta?: BaseJob['meta'] queue?: string req?: PayloadRequest task?: never @@ -74,6 +102,10 @@ export const getJobsLocalAPI = (payload: Payload) => ({ data.taskSlug = args.task as string } + if (args.meta) { + data.meta = args.meta + } + type ReturnType = TTaskOrWorkflowSlug extends keyof TypedJobs['workflows'] ? Job : RunningJobFromTask // Type assertion is still needed here @@ -130,6 +162,15 @@ export const getJobsLocalAPI = (payload: Payload) => ({ * If you want to run them in sequence, set this to true. */ sequential?: boolean + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent where?: Where }): Promise> => { const newReq: PayloadRequest = args?.req ?? 
(await createLocalReq({}, payload)) @@ -142,6 +183,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ queue: args?.queue, req: newReq, sequential: args?.sequential, + silent: args?.silent, where: args?.where, }) }, @@ -150,6 +192,15 @@ export const getJobsLocalAPI = (payload: Payload) => ({ id: number | string overrideAccess?: boolean req?: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent }): Promise> => { const newReq: PayloadRequest = args.req ?? (await createLocalReq({}, payload)) @@ -157,6 +208,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ id: args.id, overrideAccess: args.overrideAccess !== false, req: newReq, + silent: args.silent, }) }, diff --git a/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts b/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts new file mode 100644 index 0000000000..713cfa2048 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts @@ -0,0 +1,74 @@ +import type { PayloadRequest, Where } from '../../../types/index.js' +import type { TaskType } from '../../config/types/taskTypes.js' +import type { WorkflowTypes } from '../../config/types/workflowTypes.js' + +/** + * Gets all queued jobs that can be run. This means they either: + * - failed but do not have a definitive error => can be retried + * - are currently processing + * - have not been started yet + */ +export async function countRunnableOrActiveJobsForQueue({ + onlyScheduled = false, + queue, + req, + taskSlug, + workflowSlug, +}: { + /** + * If true, this counts only jobs that have been created through the scheduling system. 
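To make the constraint encoded by `countRunnableOrActiveJobsForQueue` concrete, here is a hedged equivalent expressed through the Local API rather than the internal `db.count` call; the queue name is illustrative.

```ts
// Hedged sketch: count jobs in a queue that are neither completed nor failed
// with a definitive error, limited to jobs the scheduling system created.
const { totalDocs } = await payload.count({
  collection: 'payload-jobs',
  where: {
    and: [
      { queue: { equals: 'hourly' } },
      { completedAt: { exists: false } },
      { error: { exists: false } },
      { 'meta.scheduled': { equals: true } },
    ],
  },
})
```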
+ * + * @default false + */ + onlyScheduled?: boolean + queue: string + req: PayloadRequest + taskSlug?: TaskType + workflowSlug?: WorkflowTypes +}): Promise { + const and: Where[] = [ + { + queue: { + equals: queue, + }, + }, + + { + completedAt: { exists: false }, + }, + { + error: { exists: false }, + }, + ] + + if (taskSlug) { + and.push({ + taskSlug: { + equals: taskSlug, + }, + }) + } else if (workflowSlug) { + and.push({ + workflowSlug: { + equals: workflowSlug, + }, + }) + } + if (onlyScheduled) { + and.push({ + 'meta.scheduled': { + equals: true, + }, + }) + } + + const runnableOrActiveJobsForQueue = await req.payload.db.count({ + collection: 'payload-jobs', + req, + where: { + and, + }, + }) + + return runnableOrActiveJobsForQueue.totalDocs +} diff --git a/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts b/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts new file mode 100644 index 0000000000..4627c407c6 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts @@ -0,0 +1,64 @@ +import type { AfterScheduleFn } from '../../config/types/index.js' + +import { type JobStats, jobStatsGlobalSlug } from '../../config/global.js' +import { getCurrentDate } from '../../utilities/getCurrentDate.js' + +type JobStatsScheduledRuns = NonNullable< + NonNullable['scheduledRuns']>['queues'] +>[string] + +export const defaultAfterSchedule: AfterScheduleFn = async ({ jobStats, queueable, req }) => { + const existingQueuesConfig = + jobStats?.stats?.scheduledRuns?.queues?.[queueable.scheduleConfig.queue] || {} + + const queueConfig: JobStatsScheduledRuns = { + ...existingQueuesConfig, + } + if (queueable.taskConfig) { + ;(queueConfig.tasks ??= {})[queueable.taskConfig.slug] = { + lastScheduledRun: getCurrentDate().toISOString(), + } + } else if (queueable.workflowConfig) { + ;(queueConfig.workflows ??= {})[queueable.workflowConfig.slug] = { + lastScheduledRun: getCurrentDate().toISOString(), + } + } + + // Add to payload-jobs-stats global regardless of the status + if (jobStats) { + await req.payload.db.updateGlobal({ + slug: jobStatsGlobalSlug, + data: { + ...(jobStats || {}), + stats: { + ...(jobStats?.stats || {}), + scheduledRuns: { + ...(jobStats?.stats?.scheduledRuns || {}), + queues: { + ...(jobStats?.stats?.scheduledRuns?.queues || {}), + [queueable.scheduleConfig.queue]: queueConfig, + }, + }, + }, + } as JobStats, + req, + returning: false, + }) + } else { + await req.payload.db.createGlobal({ + slug: jobStatsGlobalSlug, + data: { + createdAt: getCurrentDate().toISOString(), + stats: { + scheduledRuns: { + queues: { + [queueable.scheduleConfig.queue]: queueConfig, + }, + }, + }, + } as JobStats, + req, + returning: false, + }) + } +} diff --git a/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts b/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts new file mode 100644 index 0000000000..96b8092258 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts @@ -0,0 +1,20 @@ +import type { BeforeScheduleFn } from '../../config/types/index.js' + +import { countRunnableOrActiveJobsForQueue } from './countRunnableOrActiveJobsForQueue.js' + +export const defaultBeforeSchedule: BeforeScheduleFn = async ({ queueable, req }) => { + // All tasks in that queue that are either currently processing or can be run + const runnableOrActiveJobsForQueue = await countRunnableOrActiveJobsForQueue({ + onlyScheduled: 
true, + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug, + workflowSlug: queueable.workflowConfig?.slug, + }) + + return { + input: {}, + shouldSchedule: runnableOrActiveJobsForQueue === 0, + waitUntil: queueable.waitUntil, + } +} diff --git a/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts b/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts new file mode 100644 index 0000000000..817de244f6 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts @@ -0,0 +1,50 @@ +import type { SanitizedJobsConfig, ScheduleConfig } from '../../config/types/index.js' +import type { TaskConfig } from '../../config/types/taskTypes.js' +import type { WorkflowConfig } from '../../config/types/workflowTypes.js' + +type QueuesWithSchedules = { + [queue: string]: { + schedules: { + scheduleConfig: ScheduleConfig + taskConfig?: TaskConfig + workflowConfig?: WorkflowConfig + }[] + } +} + +export const getQueuesWithSchedules = ({ + jobsConfig, +}: { + jobsConfig: SanitizedJobsConfig +}): QueuesWithSchedules => { + const tasksWithSchedules = + jobsConfig.tasks?.filter((task) => { + return task.schedule?.length + }) ?? [] + + const workflowsWithSchedules = + jobsConfig.workflows?.filter((workflow) => { + return workflow.schedule?.length + }) ?? [] + + const queuesWithSchedules: QueuesWithSchedules = {} + + for (const task of tasksWithSchedules) { + for (const schedule of task.schedule ?? []) { + ;(queuesWithSchedules[schedule.queue] ??= { schedules: [] }).schedules.push({ + scheduleConfig: schedule, + taskConfig: task, + }) + } + } + for (const workflow of workflowsWithSchedules) { + for (const schedule of workflow.schedule ?? []) { + ;(queuesWithSchedules[schedule.queue] ??= { schedules: [] }).schedules.push({ + scheduleConfig: schedule, + workflowConfig: workflow, + }) + } + } + + return queuesWithSchedules +} diff --git a/packages/payload/src/queues/operations/handleSchedules/index.ts b/packages/payload/src/queues/operations/handleSchedules/index.ts new file mode 100644 index 0000000000..b5daefccb9 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/index.ts @@ -0,0 +1,223 @@ +import { Cron } from 'croner' + +import type { Job, TaskConfig, WorkflowConfig } from '../../../index.js' +import type { PayloadRequest } from '../../../types/index.js' +import type { BeforeScheduleFn, Queueable, ScheduleConfig } from '../../config/types/index.js' + +import { type JobStats, jobStatsGlobalSlug } from '../../config/global.js' +import { defaultAfterSchedule } from './defaultAfterSchedule.js' +import { defaultBeforeSchedule } from './defaultBeforeSchedule.js' +import { getQueuesWithSchedules } from './getQueuesWithSchedules.js' + +export type HandleSchedulesResult = { + errored: Queueable[] + queued: Queueable[] + skipped: Queueable[] +} + +/** + * On vercel, we cannot auto-schedule jobs using a Cron - instead, we'll use this same endpoint that can + * also be called from Vercel Cron for auto-running jobs. + * + * The benefit of doing it like this instead of a separate endpoint is that we can run jobs immediately + * after they are scheduled + */ +export async function handleSchedules({ + queue, + req, +}: { + /** + * If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name. + * + * @default all jobs for all queues will be scheduled. 
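A hedged sketch of how the `handleSchedules` operation below is typically invoked through the Local API wrapper added in this PR; the queue name is illustrative.

```ts
// Queue any due scheduled jobs for one queue and inspect the result.
const { queued, skipped, errored } = await payload.jobs.handleSchedules({ queue: 'hourly' })

payload.logger.info(
  `schedules handled: ${queued.length} queued, ${skipped.length} skipped, ${errored.length} errored`,
)
```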
+ */ + queue?: string + req: PayloadRequest +}): Promise { + const jobsConfig = req.payload.config.jobs + const queuesWithSchedules = getQueuesWithSchedules({ + jobsConfig, + }) + + const stats: JobStats = await req.payload.db.findGlobal({ + slug: jobStatsGlobalSlug, + req, + }) + + /** + * Almost last step! Tasks and Workflows added here just need to be constraint-checked (e.g max. 1 running task etc.), + * before we can queue them + */ + const queueables: Queueable[] = [] + + // Need to know when that particular job was last scheduled in that particular queue + + for (const [queueName, { schedules }] of Object.entries(queuesWithSchedules)) { + if (queue && queueName !== queue) { + // If a queue is specified, only schedule jobs for that queue + continue + } + for (const schedulable of schedules) { + const queuable = checkQueueableTimeConstraints({ + queue: queueName, + scheduleConfig: schedulable.scheduleConfig, + stats, + taskConfig: schedulable.taskConfig, + workflowConfig: schedulable.workflowConfig, + }) + if (queuable) { + queueables.push(queuable) + } + } + } + + const queued: Queueable[] = [] + const skipped: Queueable[] = [] + const errored: Queueable[] = [] + + /** + * Now queue, but check for constraints (= beforeSchedule) first. + * Default constraint (= defaultBeforeSchedule): max. 1 running / scheduled task or workflow per queue + */ + for (const queueable of queueables) { + const { status } = await scheduleQueueable({ + queueable, + req, + stats, + }) + switch (status) { + case 'error': + errored.push(queueable) + break + case 'skipped': + skipped.push(queueable) + break + case 'success': + queued.push(queueable) + break + } + } + return { + errored, + queued, + skipped, + } +} + +export function checkQueueableTimeConstraints({ + queue, + scheduleConfig, + stats, + taskConfig, + workflowConfig, +}: { + queue: string + scheduleConfig: ScheduleConfig + stats: JobStats + taskConfig?: TaskConfig + workflowConfig?: WorkflowConfig +}): false | Queueable { + const queueScheduleStats = stats?.stats?.scheduledRuns?.queues?.[queue] + + const lastScheduledRun = taskConfig + ? queueScheduleStats?.tasks?.[taskConfig.slug]?.lastScheduledRun + : queueScheduleStats?.workflows?.[workflowConfig?.slug ?? '']?.lastScheduledRun + + const nextRun = new Cron(scheduleConfig.cron).nextRun(lastScheduledRun ?? undefined) + + if (!nextRun) { + return false + } + return { + scheduleConfig, + taskConfig, + waitUntil: nextRun, + workflowConfig, + } +} + +export async function scheduleQueueable({ + queueable, + req, + stats, +}: { + queueable: Queueable + req: PayloadRequest + stats: JobStats +}): Promise<{ + job?: Job + status: 'error' | 'skipped' | 'success' +}> { + if (!queueable.taskConfig && !queueable.workflowConfig) { + return { + status: 'error', + } + } + + const beforeScheduleFn = queueable.scheduleConfig.hooks?.beforeSchedule + const afterScheduleFN = queueable.scheduleConfig.hooks?.afterSchedule + + try { + const beforeScheduleResult: Awaited> = await ( + beforeScheduleFn ?? defaultBeforeSchedule + )({ + // @ts-expect-error we know defaultBeforeSchedule will never call itself => pass null + defaultBeforeSchedule: beforeScheduleFn ? defaultBeforeSchedule : null, + jobStats: stats, + queueable, + req, + }) + + if (!beforeScheduleResult.shouldSchedule) { + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? 
defaultAfterSchedule : null, + jobStats: stats, + queueable, + req, + status: 'skipped', + }) + return { + status: 'skipped', + } + } + + const job = (await req.payload.jobs.queue({ + input: beforeScheduleResult.input ?? {}, + meta: { + scheduled: true, + }, + queue: queueable.scheduleConfig.queue, + req, + task: queueable?.taskConfig?.slug, + waitUntil: beforeScheduleResult.waitUntil, + workflow: queueable.workflowConfig?.slug, + } as Parameters[0])) as unknown as Job + + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? defaultAfterSchedule : null, + job, + jobStats: stats, + queueable, + req, + status: 'success', + }) + return { + status: 'success', + } + } catch (error) { + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? defaultAfterSchedule : null, + error: error as Error, + jobStats: stats, + queueable, + req, + status: 'error', + }) + return { + status: 'error', + } + } +} diff --git a/packages/payload/src/queues/operations/runJobs/index.ts b/packages/payload/src/queues/operations/runJobs/index.ts index c626103467..9530594788 100644 --- a/packages/payload/src/queues/operations/runJobs/index.ts +++ b/packages/payload/src/queues/operations/runJobs/index.ts @@ -2,12 +2,14 @@ import type { Job } from '../../../index.js' import type { PayloadRequest, Sort, Where } from '../../../types/index.js' import type { WorkflowJSON } from '../../config/types/workflowJSONTypes.js' import type { WorkflowConfig, WorkflowHandler } from '../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../localAPI.js' import type { RunJobResult } from './runJob/index.js' import { Forbidden } from '../../../errors/Forbidden.js' import { isolateObjectProperty } from '../../../utilities/isolateObjectProperty.js' -import { jobsCollectionSlug } from '../../config/index.js' +import { jobsCollectionSlug } from '../../config/collection.js' import { JobCancelledError } from '../../errors/index.js' +import { getCurrentDate } from '../../utilities/getCurrentDate.js' import { updateJob, updateJobs } from '../../utilities/updateJob.js' import { getUpdateJobFunction } from './runJob/getUpdateJobFunction.js' import { importHandlerPath } from './runJob/importHandlerPath.js' @@ -53,6 +55,15 @@ export type RunJobsArgs = { * If you want to run them in sequence, set this to true. */ sequential?: boolean + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). 
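Because `RunJobsSilent` accepts either a boolean or a per-level object, a short hedged example of how callers might use it (the queue name is illustrative):

```ts
// Silence the "Running N jobs" info log but keep error logs:
await payload.jobs.run({ queue: 'hourly', silent: { info: true } })

// Silence everything the job runner itself would log:
await payload.jobs.run({ queue: 'hourly', silent: true })
```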
+ * + * @default false + */ + silent?: RunJobsSilent where?: Where } @@ -84,6 +95,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }, }, sequential, + silent = false, where: whereFromProps, } = args @@ -119,7 +131,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }, { waitUntil: { - less_than: new Date().toISOString(), + less_than: getCurrentDate().toISOString(), }, }, ], @@ -219,11 +231,13 @@ export const runJobs = async (args: RunJobsArgs): Promise => { } } - payload.logger.info({ - msg: `Running ${jobs.length} jobs.`, - new: newJobs?.length, - retrying: existingJobs?.length, - }) + if (!silent || (typeof silent === 'object' && !silent.info)) { + payload.logger.info({ + msg: `Running ${jobs.length} jobs.`, + new: newJobs?.length, + retrying: existingJobs?.length, + }) + } const successfullyCompletedJobs: (number | string)[] = [] @@ -277,7 +291,9 @@ export const runJobs = async (args: RunJobsArgs): Promise => { if (!workflowHandler) { const jobLabel = job.workflowSlug || `Task: ${job.taskSlug}` const errorMessage = `Can't find runner while importing with the path ${workflowConfig.handler} in job type ${jobLabel}.` - payload.logger.error(errorMessage) + if (!silent || (typeof silent === 'object' && !silent.error)) { + payload.logger.error(errorMessage) + } await updateJob({ error: { @@ -300,6 +316,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { const result = await runJob({ job, req: jobReq, + silent, updateJob, workflowConfig, workflowHandler, @@ -314,6 +331,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { const result = await runJSONJob({ job, req: jobReq, + silent, updateJob, workflowConfig, workflowHandler, @@ -370,10 +388,12 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }) } } catch (err) { - payload.logger.error({ - err, - msg: `Failed to delete jobs ${successfullyCompletedJobs.join(', ')} on complete`, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + payload.logger.error({ + err, + msg: `Failed to delete jobs ${successfullyCompletedJobs.join(', ')} on complete`, + }) + } } } diff --git a/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts index 66b0156724..87f5995904 100644 --- a/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts +++ b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts @@ -2,16 +2,27 @@ import type { Job } from '../../../../index.js' import type { PayloadRequest } from '../../../../types/index.js' import type { WorkflowJSON, WorkflowStep } from '../../../config/types/workflowJSONTypes.js' import type { WorkflowConfig } from '../../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../../localAPI.js' import type { UpdateJobFunction } from '../runJob/getUpdateJobFunction.js' import type { JobRunStatus } from '../runJob/index.js' import { handleWorkflowError } from '../../../errors/handleWorkflowError.js' import { WorkflowError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getRunTaskFunction } from '../runJob/getRunTaskFunction.js' type Args = { job: Job req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. 
if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction workflowConfig: WorkflowConfig workflowHandler: WorkflowJSON @@ -24,6 +35,7 @@ export type RunJSONJobResult = { export const runJSONJob = async ({ job, req, + silent = false, updateJob, workflowConfig, workflowHandler, @@ -79,6 +91,7 @@ export const runJSONJob = async ({ : 'An unhandled error occurred', workflowConfig, }), + silent, req, updateJob, @@ -111,7 +124,7 @@ export const runJSONJob = async ({ if (workflowCompleted) { await updateJob({ - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), processing: false, totalTried: (job.totalTried ?? 0) + 1, }) diff --git a/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts index 868ac5602d..aa9f171567 100644 --- a/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts +++ b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts @@ -20,6 +20,7 @@ import type { import type { UpdateJobFunction } from './getUpdateJobFunction.js' import { TaskError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getTaskHandlerFromConfig } from './importHandlerPath.js' const ObjectId = (ObjectIdImport.default || @@ -54,7 +55,7 @@ export const getRunTaskFunction = ( task, }: Parameters[1] & Parameters>[1], ) => { - const executedAt = new Date() + const executedAt = getCurrentDate() let taskConfig: TaskConfig | undefined if (!isInline) { @@ -186,7 +187,7 @@ export const getRunTaskFunction = ( ;(job.log ??= []).push({ id: new ObjectId().toHexString(), - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), executedAt: executedAt.toISOString(), input, output, diff --git a/packages/payload/src/queues/operations/runJobs/runJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJob/index.ts index fe8f6256ef..c92599e298 100644 --- a/packages/payload/src/queues/operations/runJobs/runJob/index.ts +++ b/packages/payload/src/queues/operations/runJobs/runJob/index.ts @@ -1,16 +1,27 @@ import type { Job } from '../../../../index.js' import type { PayloadRequest } from '../../../../types/index.js' import type { WorkflowConfig, WorkflowHandler } from '../../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../../localAPI.js' import type { UpdateJobFunction } from './getUpdateJobFunction.js' import { handleTaskError } from '../../../errors/handleTaskError.js' import { handleWorkflowError } from '../../../errors/handleWorkflowError.js' import { JobCancelledError, TaskError, WorkflowError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getRunTaskFunction } from './getRunTaskFunction.js' type Args = { job: Job req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). 
+ * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction workflowConfig: WorkflowConfig workflowHandler: WorkflowHandler @@ -25,6 +36,7 @@ export type RunJobResult = { export const runJob = async ({ job, req, + silent, updateJob, workflowConfig, workflowHandler, @@ -45,6 +57,7 @@ export const runJob = async ({ const { hasFinalError } = await handleTaskError({ error, req, + silent, updateJob, }) @@ -66,6 +79,7 @@ export const runJob = async ({ workflowConfig, }), req, + silent, updateJob, }) @@ -76,7 +90,7 @@ export const runJob = async ({ // Workflow has completed successfully await updateJob({ - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), log: job.log, processing: false, totalTried: (job.totalTried ?? 0) + 1, diff --git a/packages/payload/src/queues/restEndpointRun.ts b/packages/payload/src/queues/restEndpointRun.ts deleted file mode 100644 index 14c425a940..0000000000 --- a/packages/payload/src/queues/restEndpointRun.ts +++ /dev/null @@ -1,91 +0,0 @@ -import type { Endpoint, SanitizedConfig } from '../config/types.js' - -import { runJobs, type RunJobsArgs } from './operations/runJobs/index.js' - -const configHasJobs = (config: SanitizedConfig): boolean => { - return Boolean(config.jobs?.tasks?.length || config.jobs?.workflows?.length) -} - -/** - * /api/payload-jobs/run endpoint - */ -export const runJobsEndpoint: Endpoint = { - handler: async (req) => { - if (!configHasJobs(req.payload.config)) { - return Response.json( - { - message: 'No jobs to run.', - }, - { status: 200 }, - ) - } - - const accessFn = req.payload.config.jobs?.access?.run ?? (() => true) - - const hasAccess = await accessFn({ req }) - - if (!hasAccess) { - return Response.json( - { - message: req.i18n.t('error:unauthorized'), - }, - { status: 401 }, - ) - } - - const { allQueues, limit, queue } = req.query as { - allQueues?: boolean - limit?: number - queue?: string - } - - const runJobsArgs: RunJobsArgs = { - queue, - req, - // We are checking access above, so we can override it here - overrideAccess: true, - } - - if (typeof limit !== 'undefined') { - runJobsArgs.limit = Number(limit) - } - - if (allQueues && !(typeof allQueues === 'string' && allQueues === 'false')) { - runJobsArgs.allQueues = true - } - - let noJobsRemaining = false - let remainingJobsFromQueried = 0 - try { - const result = await runJobs(runJobsArgs) - noJobsRemaining = !!result.noJobsRemaining - remainingJobsFromQueried = result.remainingJobsFromQueried - } catch (err) { - req.payload.logger.error({ - err, - msg: 'There was an error running jobs:', - queue: runJobsArgs.queue, - }) - - return Response.json( - { - message: req.i18n.t('error:unknown'), - noJobsRemaining: true, - remainingJobsFromQueried, - }, - { status: 500 }, - ) - } - - return Response.json( - { - message: req.i18n.t('general:success'), - noJobsRemaining, - remainingJobsFromQueried, - }, - { status: 200 }, - ) - }, - method: 'get', - path: '/run', -} diff --git a/packages/payload/src/queues/utilities/getCurrentDate.ts b/packages/payload/src/queues/utilities/getCurrentDate.ts new file mode 100644 index 0000000000..6e0d67af3b --- /dev/null +++ b/packages/payload/src/queues/utilities/getCurrentDate.ts @@ -0,0 +1,21 @@ +/** + * Globals that are used by our integration tests to modify the behavior of the job system during runtime. + * This is useful to avoid having to wait for the cron jobs to run, or to pause auto-running jobs. 
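A hedged sketch of how an integration test might use these internals (exported from the package root in this PR) to freeze time and pause auto-running while making assertions:

```ts
import { _internal_jobSystemGlobals, _internal_resetJobSystemGlobals } from 'payload'

// Freeze "now" so waitUntil / backoff calculations are deterministic, and pause
// auto-running and auto-scheduling while the test sets up its data.
_internal_jobSystemGlobals.getCurrentDate = () => new Date('2030-01-01T00:00:00.000Z')
_internal_jobSystemGlobals.shouldAutoRun = false
_internal_jobSystemGlobals.shouldAutoSchedule = false

try {
  // ...queue jobs and make assertions here...
} finally {
  _internal_resetJobSystemGlobals()
}
```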
+ */ +export const _internal_jobSystemGlobals = { + getCurrentDate: () => { + return new Date() + }, + shouldAutoRun: true, + shouldAutoSchedule: true, +} + +export function _internal_resetJobSystemGlobals() { + _internal_jobSystemGlobals.getCurrentDate = () => new Date() + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true +} + +export const getCurrentDate: () => Date = () => { + return _internal_jobSystemGlobals.getCurrentDate() +} diff --git a/packages/payload/src/queues/utilities/updateJob.ts b/packages/payload/src/queues/utilities/updateJob.ts index 6ce4479eaa..a8a4ff69ee 100644 --- a/packages/payload/src/queues/utilities/updateJob.ts +++ b/packages/payload/src/queues/utilities/updateJob.ts @@ -3,7 +3,7 @@ import type { UpdateJobsArgs } from '../../database/types.js' import type { Job } from '../../index.js' import type { PayloadRequest, Sort, Where } from '../../types/index.js' -import { jobAfterRead, jobsCollectionSlug } from '../config/index.js' +import { jobAfterRead, jobsCollectionSlug } from '../config/collection.js' type BaseArgs = { data: Partial diff --git a/packages/payload/src/versions/deleteScheduledPublishJobs.ts b/packages/payload/src/versions/deleteScheduledPublishJobs.ts index 4020ad4fd6..6ce4199f8c 100644 --- a/packages/payload/src/versions/deleteScheduledPublishJobs.ts +++ b/packages/payload/src/versions/deleteScheduledPublishJobs.ts @@ -1,7 +1,7 @@ import type { PayloadRequest } from '../types/index.js' import { type Payload } from '../index.js' -import { jobsCollectionSlug } from '../queues/config/index.js' +import { jobsCollectionSlug } from '../queues/config/collection.js' type Args = { id?: number | string diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9f7e558f35..8012d5e446 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -823,8 +823,8 @@ importers: specifier: 2.12.1 version: 2.12.1 croner: - specifier: 9.0.0 - version: 9.0.0 + specifier: 9.1.0 + version: 9.1.0 dataloader: specifier: 2.2.3 version: 2.2.3 @@ -7401,8 +7401,8 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true - croner@9.0.0: - resolution: {integrity: sha512-onMB0OkDjkXunhdW9htFjEhqrD54+M94i6ackoUkjHKbRnXdyEyKRelp4nJ1kAz32+s27jP1FsebpJCVl0BsvA==} + croner@9.1.0: + resolution: {integrity: sha512-p9nwwR4qyT5W996vBZhdvBCnMhicY5ytZkR4D1Xj0wuTDEiMnjwR57Q3RXYY/s0EpX6Ay3vgIcfaR+ewGHsi+g==} engines: {node: '>=18.0'} cross-env@7.0.3: @@ -18799,7 +18799,7 @@ snapshots: - supports-color - ts-node - croner@9.0.0: {} + croner@9.1.0: {} cross-env@7.0.3: dependencies: diff --git a/test/helpers/initPayloadInt.ts b/test/helpers/initPayloadInt.ts index 2801b5985c..ffd2584ef3 100644 --- a/test/helpers/initPayloadInt.ts +++ b/test/helpers/initPayloadInt.ts @@ -13,15 +13,16 @@ export async function initPayloadInt { const testSuiteName = testSuiteNameOverride ?? path.basename(dirname) - await runInit(testSuiteName, false, true) - console.log('importing config', path.resolve(dirname, 'config.ts')) - const { default: config } = await import(path.resolve(dirname, 'config.ts')) + await runInit(testSuiteName, false, true, configFile) + console.log('importing config', path.resolve(dirname, configFile ?? 'config.ts')) + const { default: config } = await import(path.resolve(dirname, configFile ?? 
'config.ts')) if (initializePayload === false) { return { config: await config } as any diff --git a/test/initDevAndTest.ts b/test/initDevAndTest.ts index 791fea9ef8..2d10397287 100644 --- a/test/initDevAndTest.ts +++ b/test/initDevAndTest.ts @@ -17,6 +17,7 @@ export async function initDevAndTest( testSuiteArg: string, writeDBAdapter: string, skipGenImportMap: string, + configFile?: string, ): Promise { const importMapPath: string = path.resolve( getNextRootDir(testSuiteArg).rootDir, @@ -44,7 +45,7 @@ export async function initDevAndTest( const testDir = path.resolve(dirname, testSuiteArg) console.log('Generating import map for config:', testDir) - const configUrl = pathToFileURL(path.resolve(testDir, 'config.ts')).href + const configUrl = pathToFileURL(path.resolve(testDir, configFile ?? 'config.ts')).href const config: SanitizedConfig = await (await import(configUrl)).default process.env.ROOT_DIR = getNextRootDir(testSuiteArg).rootDir diff --git a/test/queues/config.schedules-autocron.ts b/test/queues/config.schedules-autocron.ts new file mode 100644 index 0000000000..7c5a063264 --- /dev/null +++ b/test/queues/config.schedules-autocron.ts @@ -0,0 +1,22 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' +import { EverySecondMax2Task } from './tasks/EverySecondMax2Task.js' +import { EverySecondTask } from './tasks/EverySecondTask.js' + +const config = getConfig() + +export default buildConfigWithDefaults({ + ...config, + jobs: { + ...config.jobs, + tasks: [...(config?.jobs?.tasks || []), EverySecondTask, EverySecondMax2Task], + autoRun: [ + { + // @ts-expect-error not undefined + ...config.jobs.autoRun[0], + disableScheduling: false, + }, + ], + }, +}) diff --git a/test/queues/config.schedules.ts b/test/queues/config.schedules.ts new file mode 100644 index 0000000000..a5435e9957 --- /dev/null +++ b/test/queues/config.schedules.ts @@ -0,0 +1,22 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' +import { EverySecondMax2Task } from './tasks/EverySecondMax2Task.js' +import { EverySecondTask } from './tasks/EverySecondTask.js' + +const config = getConfig() + +export default buildConfigWithDefaults({ + ...config, + jobs: { + ...config.jobs, + tasks: [...(config?.jobs?.tasks || []), EverySecondTask, EverySecondMax2Task], + autoRun: [ + { + // @ts-expect-error not undefined + ...config.jobs.autoRun[0], + disableScheduling: true, + }, + ], + }, +}) diff --git a/test/queues/config.ts b/test/queues/config.ts index 331d89fd1e..6c16a4bd2b 100644 --- a/test/queues/config.ts +++ b/test/queues/config.ts @@ -1,416 +1,4 @@ -import type { TaskConfig } from 'payload' - -import { lexicalEditor } from '@payloadcms/richtext-lexical' -import { fileURLToPath } from 'node:url' -import path from 'path' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { devUser } from '../credentials.js' -import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js' -import { seed } from './seed.js' -import { externalWorkflow } from './workflows/externalWorkflow.js' -import { failsImmediatelyWorkflow } from './workflows/failsImmediately.js' -import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js' -import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js' -import { longRunningWorkflow } from './workflows/longRunning.js' 
-import { noRetriesSetWorkflow } from './workflows/noRetriesSet.js' -import { parallelTaskWorkflow } from './workflows/parallelTaskWorkflow.js' -import { retries0Workflow } from './workflows/retries0.js' -import { retriesBackoffTestWorkflow } from './workflows/retriesBackoffTest.js' -import { retriesRollbackTestWorkflow } from './workflows/retriesRollbackTest.js' -import { retriesTestWorkflow } from './workflows/retriesTest.js' -import { retriesWorkflowLevelTestWorkflow } from './workflows/retriesWorkflowLevelTest.js' -import { subTaskWorkflow } from './workflows/subTask.js' -import { subTaskFailsWorkflow } from './workflows/subTaskFails.js' -import { updatePostWorkflow } from './workflows/updatePost.js' -import { updatePostJSONWorkflow } from './workflows/updatePostJSON.js' -import { workflowAndTasksRetriesUndefinedWorkflow } from './workflows/workflowAndTasksRetriesUndefined.js' -import { workflowRetries2TasksRetries0Workflow } from './workflows/workflowRetries2TasksRetries0.js' -import { workflowRetries2TasksRetriesUndefinedWorkflow } from './workflows/workflowRetries2TasksRetriesUndefined.js' +import { getConfig } from './getConfig.js' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) - -// eslint-disable-next-line no-restricted-exports -export default buildConfigWithDefaults({ - collections: [ - { - slug: 'posts', - admin: { - useAsTitle: 'title', - }, - hooks: { - afterChange: [ - async ({ req, doc, context }) => { - await req.payload.jobs.queue({ - workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost', - input: { - post: doc.id, - message: 'hello', - }, - req, - }) - }, - ], - }, - fields: [ - { - name: 'title', - type: 'text', - required: true, - }, - { - name: 'content', - type: 'richText', - }, - { - name: 'jobStep1Ran', - type: 'text', - }, - { - name: 'jobStep2Ran', - type: 'text', - }, - ], - }, - { - slug: 'simple', - admin: { - useAsTitle: 'title', - }, - fields: [ - { - name: 'title', - type: 'text', - required: true, - }, - ], - }, - ], - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - autoLogin: { - prefillOnly: true, - email: devUser.email, - password: devUser.password, - }, - }, - jobs: { - autoRun: [ - { - // Every second - cron: '* * * * * *', - limit: 100, - queue: 'autorunSecond', // name of the queue - }, - // add as many cron jobs as you want - ], - shouldAutoRun: () => true, - jobsCollectionOverrides: ({ defaultJobsCollection }) => { - return { - ...defaultJobsCollection, - admin: { - ...(defaultJobsCollection?.admin || {}), - hidden: false, - }, - } - }, - processingOrder: { - queues: { - lifo: '-createdAt', - }, - }, - tasks: [ - { - retries: 2, - slug: 'UpdatePost', - interfaceName: 'MyUpdatePostType', - inputSchema: [ - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - maxDepth: 0, - required: true, - }, - { - name: 'message', - type: 'text', - required: true, - }, - ], - outputSchema: [ - { - name: 'messageTwice', - type: 'text', - required: true, - }, - ], - handler: updatePostStep1, - } as TaskConfig<'UpdatePost'>, - { - retries: 2, - slug: 'UpdatePostStep2', - inputSchema: [ - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - maxDepth: 0, - required: true, - }, - { - name: 'messageTwice', - type: 'text', - required: true, - }, - ], - handler: updatePostStep2, - } as TaskConfig<'UpdatePostStep2'>, - { - retries: 3, - slug: 'CreateSimple', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - 
type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimple'>, - { - slug: 'CreateSimpleRetriesUndefined', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleRetriesUndefined'>, - { - slug: 'CreateSimpleRetries0', - retries: 0, - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleRetries0'>, - { - retries: 2, - slug: 'CreateSimpleWithDuplicateMessage', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message + input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleWithDuplicateMessage'>, - { - retries: 2, - slug: 'ExternalTask', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: path.resolve(dirname, 'runners/externalTask.ts') + '#externalTaskHandler', - } as TaskConfig<'ExternalTask'>, - { - retries: 0, - slug: 'ThrowError', - inputSchema: [], - outputSchema: [], - handler: () => { - throw new Error('failed') - }, - } as TaskConfig<'ThrowError'>, - { - retries: 0, - slug: 'ReturnError', - inputSchema: [], - outputSchema: [], - handler: () => { - return { - state: 'failed', - } - }, - } as TaskConfig<'ReturnError'>, - { - retries: 0, - slug: 'ReturnCustomError', - inputSchema: [ - { - name: 'errorMessage', - type: 'text', - required: true, - }, - ], - outputSchema: [], - handler: ({ input }) => { - return { - state: 'failed', - errorMessage: input.errorMessage, - } - }, - } as TaskConfig<'ReturnCustomError'>, - ], - workflows: [ - updatePostWorkflow, - updatePostJSONWorkflow, - retriesTestWorkflow, - retriesRollbackTestWorkflow, - retriesWorkflowLevelTestWorkflow, - noRetriesSetWorkflow, - retries0Workflow, - workflowAndTasksRetriesUndefinedWorkflow, - 
workflowRetries2TasksRetriesUndefinedWorkflow, - workflowRetries2TasksRetries0Workflow, - inlineTaskTestWorkflow, - failsImmediatelyWorkflow, - inlineTaskTestDelayedWorkflow, - externalWorkflow, - retriesBackoffTestWorkflow, - subTaskWorkflow, - subTaskFailsWorkflow, - longRunningWorkflow, - parallelTaskWorkflow, - ], - }, - editor: lexicalEditor(), - onInit: async (payload) => { - if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { - await seed(payload) - } - }, - typescript: { - outputFile: path.resolve(dirname, 'payload-types.ts'), - }, -}) +export default buildConfigWithDefaults(getConfig()) diff --git a/test/queues/getConfig.ts b/test/queues/getConfig.ts new file mode 100644 index 0000000000..f1c8126e36 --- /dev/null +++ b/test/queues/getConfig.ts @@ -0,0 +1,176 @@ +import type { Config } from 'payload' + +import { lexicalEditor } from '@payloadcms/richtext-lexical' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { devUser } from '../credentials.js' +import { seed } from './seed.js' +import { CreateSimpleRetries0Task } from './tasks/CreateSimpleRetries0Task.js' +import { CreateSimpleRetriesUndefinedTask } from './tasks/CreateSimpleRetriesUndefinedTask.js' +import { CreateSimpleTask } from './tasks/CreateSimpleTask.js' +import { CreateSimpleWithDuplicateMessageTask } from './tasks/CreateSimpleWithDuplicateMessageTask.js' +import { ExternalTask } from './tasks/ExternalTask.js' +import { ReturnCustomErrorTask } from './tasks/ReturnCustomErrorTask.js' +import { ReturnErrorTask } from './tasks/ReturnErrorTask.js' +import { ThrowErrorTask } from './tasks/ThrowErrorTask.js' +import { UpdatePostStep2Task } from './tasks/UpdatePostStep2Task.js' +import { UpdatePostTask } from './tasks/UpdatePostTask.js' +import { externalWorkflow } from './workflows/externalWorkflow.js' +import { failsImmediatelyWorkflow } from './workflows/failsImmediately.js' +import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js' +import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js' +import { longRunningWorkflow } from './workflows/longRunning.js' +import { noRetriesSetWorkflow } from './workflows/noRetriesSet.js' +import { parallelTaskWorkflow } from './workflows/parallelTaskWorkflow.js' +import { retries0Workflow } from './workflows/retries0.js' +import { retriesBackoffTestWorkflow } from './workflows/retriesBackoffTest.js' +import { retriesRollbackTestWorkflow } from './workflows/retriesRollbackTest.js' +import { retriesTestWorkflow } from './workflows/retriesTest.js' +import { retriesWorkflowLevelTestWorkflow } from './workflows/retriesWorkflowLevelTest.js' +import { subTaskWorkflow } from './workflows/subTask.js' +import { subTaskFailsWorkflow } from './workflows/subTaskFails.js' +import { updatePostWorkflow } from './workflows/updatePost.js' +import { updatePostJSONWorkflow } from './workflows/updatePostJSON.js' +import { workflowAndTasksRetriesUndefinedWorkflow } from './workflows/workflowAndTasksRetriesUndefined.js' +import { workflowRetries2TasksRetries0Workflow } from './workflows/workflowRetries2TasksRetries0.js' +import { workflowRetries2TasksRetriesUndefinedWorkflow } from './workflows/workflowRetries2TasksRetriesUndefined.js' + +const dirname = path.dirname(fileURLToPath(import.meta.url)) + +// Needs to be a function to prevent object reference issues due to duplicative configs +export const getConfig: () => Partial = () => ({ + collections: [ + { + slug: 'posts', + admin: { + useAsTitle: 'title', + }, + hooks: { + afterChange: [ + 
async ({ req, doc, context }) => { + await req.payload.jobs.queue({ + workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost', + input: { + post: doc.id, + message: 'hello', + }, + req, + }) + }, + ], + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + { + name: 'content', + type: 'richText', + }, + { + name: 'jobStep1Ran', + type: 'text', + }, + { + name: 'jobStep2Ran', + type: 'text', + }, + ], + }, + { + slug: 'simple', + admin: { + useAsTitle: 'title', + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + }, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + autoLogin: { + prefillOnly: true, + email: devUser.email, + password: devUser.password, + }, + }, + jobs: { + autoRun: [ + { + silent: true, + // Every second + cron: '* * * * * *', + limit: 100, + queue: 'autorunSecond', + }, + // add as many cron jobs as you want + ], + shouldAutoRun: () => true, + jobsCollectionOverrides: ({ defaultJobsCollection }) => { + return { + ...defaultJobsCollection, + admin: { + ...(defaultJobsCollection?.admin || {}), + hidden: false, + }, + } + }, + processingOrder: { + queues: { + lifo: '-createdAt', + }, + }, + tasks: [ + UpdatePostTask, + UpdatePostStep2Task, + CreateSimpleTask, + CreateSimpleRetriesUndefinedTask, + CreateSimpleRetries0Task, + CreateSimpleWithDuplicateMessageTask, + ExternalTask, + ThrowErrorTask, + ReturnErrorTask, + ReturnCustomErrorTask, + ], + workflows: [ + updatePostWorkflow, + updatePostJSONWorkflow, + retriesTestWorkflow, + retriesRollbackTestWorkflow, + retriesWorkflowLevelTestWorkflow, + noRetriesSetWorkflow, + retries0Workflow, + workflowAndTasksRetriesUndefinedWorkflow, + workflowRetries2TasksRetriesUndefinedWorkflow, + workflowRetries2TasksRetries0Workflow, + inlineTaskTestWorkflow, + failsImmediatelyWorkflow, + inlineTaskTestDelayedWorkflow, + externalWorkflow, + retriesBackoffTestWorkflow, + subTaskWorkflow, + subTaskFailsWorkflow, + longRunningWorkflow, + parallelTaskWorkflow, + ], + }, + editor: lexicalEditor(), + onInit: async (payload) => { + if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { + await seed(payload) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/queues/int.spec.ts b/test/queues/int.spec.ts index 0bb23d4748..e5b55963b3 100644 --- a/test/queues/int.spec.ts +++ b/test/queues/int.spec.ts @@ -1,7 +1,13 @@ -import type { JobTaskStatus, Payload, SanitizedConfig } from 'payload' - import path from 'path' +import { + _internal_jobSystemGlobals, + _internal_resetJobSystemGlobals, + type JobTaskStatus, + type Payload, + type SanitizedConfig, +} from 'payload' import { migrateCLI } from 'payload' +import { wait } from 'payload/shared' import { fileURLToPath } from 'url' import type { NextRESTClient } from '../helpers/NextRESTClient.js' @@ -9,6 +15,7 @@ import type { NextRESTClient } from '../helpers/NextRESTClient.js' import { devUser } from '../credentials.js' import { initPayloadInt } from '../helpers/initPayloadInt.js' import { clearAndSeedEverything } from './seed.js' +import { waitUntilAutorunIsDone } from './utilities.js' let payload: Payload let restClient: NextRESTClient @@ -25,10 +32,25 @@ describe('Queues', () => { }) afterAll(async () => { + // Ensure no new crons are scheduled + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + // Wait 3 seconds to ensure all currently-running crons are done. 
If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() }) beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false await clearAndSeedEverything(payload) const data = await restClient .POST('/users/login', { @@ -43,10 +65,12 @@ describe('Queues', () => { token = data.token } payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true }) it('will run access control on jobs runner', async () => { - const response = await restClient.GET('/payload-jobs/run', { + const response = await restClient.GET('/payload-jobs/run?silent=true', { headers: { // Authorization: `JWT ${token}`, }, @@ -55,7 +79,7 @@ describe('Queues', () => { }) it('will return 200 from jobs runner', async () => { - const response = await restClient.GET('/payload-jobs/run', { + const response = await restClient.GET('/payload-jobs/run?silent=true', { headers: { Authorization: `JWT ${token}`, }, @@ -109,7 +133,7 @@ describe('Queues', () => { expect(retrievedPost.jobStep1Ran).toBeFalsy() expect(retrievedPost.jobStep2Ran).toBeFalsy() - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const postAfterJobs = await payload.findByID({ collection: 'posts', @@ -139,7 +163,7 @@ describe('Queues', () => { expect(retrievedPost.jobStep1Ran).toBeFalsy() expect(retrievedPost.jobStep2Ran).toBeFalsy() - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const postAfterJobs = await payload.findByID({ collection: 'posts', @@ -163,7 +187,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -198,7 +222,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -221,7 +245,7 @@ describe('Queues', () => { expect(jobAfterRun.input.amountRetried).toBe(2) }) - it('ensure workflows dont limit retries if no retries property is sett', async () => { + it('ensure workflows dont limit retries if no retries property is set', async () => { payload.config.jobs.deleteJobOnComplete = false const job = await payload.jobs.queue({ workflow: 'workflowNoRetriesSet', @@ -233,7 +257,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -268,7 +292,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -303,7 +327,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() 
+ const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -338,7 +362,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -373,7 +397,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -409,7 +433,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({silent: true}) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -453,7 +477,7 @@ describe('Queues', () => { !firstGotNoJobs || new Date().getTime() - firstGotNoJobs.getTime() < 3000 ) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { if (hasJobsRemaining) { @@ -537,6 +561,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, }) const allSimples = await payload.find({ @@ -569,6 +594,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, processingOrder: '-createdAt', }) @@ -604,6 +630,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, queue: 'lifo', }) @@ -626,7 +653,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -637,29 +664,6 @@ describe('Queues', () => { expect(allSimples.docs[0]?.title).toBe('hello!') }) - it('can create and autorun jobs', async () => { - await payload.jobs.queue({ - workflow: 'inlineTaskTest', - queue: 'autorunSecond', - input: { - message: 'hello!', - }, - }) - - // Do not call payload.jobs.run() - - // Autorun runs every second - so should definitely be done if we wait 2 seconds - await new Promise((resolve) => setTimeout(resolve, 2000)) - - const allSimples = await payload.find({ - collection: 'simple', - limit: 100, - }) - - expect(allSimples.totalDocs).toBe(1) - expect(allSimples?.docs?.[0]?.title).toBe('hello!') - }) - it('should respect deleteJobOnComplete true default configuration', async () => { const { id } = await payload.jobs.queue({ workflow: 'inlineTaskTest', @@ -671,7 +675,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after).toBeNull() @@ -686,7 +690,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after?.id).toBe(id) @@ -704,7 +708,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const 
after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after?.id).toBe(id) @@ -718,7 +722,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -739,7 +743,7 @@ describe('Queues', () => { }, }) - await restClient.GET('/payload-jobs/run', { + await restClient.GET('/payload-jobs/run?silent=true', { headers: { Authorization: `JWT ${token}`, }, @@ -877,7 +881,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -903,6 +907,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, limit: numberOfTasks, }) @@ -926,7 +931,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -950,6 +955,7 @@ describe('Queues', () => { await payload.jobs.run({ limit: 42, + silent: true, }) const allSimples = await payload.find({ @@ -985,7 +991,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1017,7 +1023,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1036,7 +1042,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1066,6 +1072,7 @@ describe('Queues', () => { await payload.jobs.runByID({ id: lastJobID, + silent: true, }) const allSimples = await payload.find({ @@ -1108,6 +1115,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, where: { id: { equals: lastJobID, @@ -1150,6 +1158,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, where: { 'input.message': { equals: 'from single task 2', @@ -1188,7 +1197,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1229,7 +1238,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -1262,7 +1271,7 @@ describe('Queues', () => { workflow: 'longRunning', input: {}, }) - void payload.jobs.run().catch((_ignored) => {}) + void payload.jobs.run({ silent: true }).catch((_ignored) => {}) await new Promise((resolve) => setTimeout(resolve, 1000)) // Should be in processing - cancel job @@ -1296,7 +1305,7 @@ describe('Queues', () => { workflow: 'longRunning', input: {}, }) - void payload.jobs.run().catch((_ignored) => {}) + void payload.jobs.run({ silent: true }).catch((_ignored) => {}) await new Promise((resolve) => setTimeout(resolve, 1000)) // Cancel all jobs @@ -1335,7 +1344,7 @@ describe('Queues', () => { input: {}, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1356,7 +1365,7 @@ describe('Queues', () => { input: {}, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 
'payload-jobs', @@ -1379,7 +1388,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1408,7 +1417,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1434,6 +1443,29 @@ describe('Queues', () => { expect((logEntry?.output as any)?.simpleID).toBe(simpleDoc?.id) } }) + + it('can create and autorun jobs', async () => { + await payload.jobs.queue({ + workflow: 'inlineTaskTest', + queue: 'autorunSecond', + input: { + message: 'hello!', + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('hello!') + }) }) describe('Queues - CLI', () => { diff --git a/test/queues/payload-types.ts b/test/queues/payload-types.ts index 54945be89c..19e3d8782a 100644 --- a/test/queues/payload-types.ts +++ b/test/queues/payload-types.ts @@ -88,14 +88,20 @@ export interface Config { db: { defaultIDType: string; }; - globals: {}; - globalsSelect: {}; + globals: { + 'payload-jobs-stats': PayloadJobsStat; + }; + globalsSelect: { + 'payload-jobs-stats': PayloadJobsStatsSelect | PayloadJobsStatsSelect; + }; locale: null; user: User & { collection: 'users'; }; jobs: { tasks: { + EverySecond: TaskEverySecond; + EverySecondMax2: TaskEverySecondMax2; UpdatePost: MyUpdatePostType; UpdatePostStep2: TaskUpdatePostStep2; CreateSimple: TaskCreateSimple; @@ -260,6 +266,8 @@ export interface PayloadJob { completedAt: string; taskSlug: | 'inline' + | 'EverySecond' + | 'EverySecondMax2' | 'UpdatePost' | 'UpdatePostStep2' | 'CreateSimple' @@ -328,6 +336,8 @@ export interface PayloadJob { taskSlug?: | ( | 'inline' + | 'EverySecond' + | 'EverySecondMax2' | 'UpdatePost' | 'UpdatePostStep2' | 'CreateSimple' @@ -343,6 +353,15 @@ export interface PayloadJob { queue?: string | null; waitUntil?: string | null; processing?: boolean | null; + meta?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; updatedAt: string; createdAt: string; } @@ -476,6 +495,7 @@ export interface PayloadJobsSelect { queue?: T; waitUntil?: T; processing?: T; + meta?: T; updatedAt?: T; createdAt?: T; } @@ -511,6 +531,54 @@ export interface PayloadMigrationsSelect { updatedAt?: T; createdAt?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-jobs-stats". + */ +export interface PayloadJobsStat { + id: string; + stats?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + updatedAt?: string | null; + createdAt?: string | null; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-jobs-stats_select". + */ +export interface PayloadJobsStatsSelect { + stats?: T; + updatedAt?: T; + createdAt?: T; + globalType?: T; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskEverySecond". + */ +export interface TaskEverySecond { + input: { + message: string; + }; + output?: unknown; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskEverySecondMax2". 
+ */ +export interface TaskEverySecondMax2 { + input: { + message: string; + }; + output?: unknown; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "MyUpdatePostType". diff --git a/test/queues/schedules-autocron.int.spec.ts b/test/queues/schedules-autocron.int.spec.ts new file mode 100644 index 0000000000..1e9773fe11 --- /dev/null +++ b/test/queues/schedules-autocron.int.spec.ts @@ -0,0 +1,105 @@ +import path from 'path' +import { _internal_jobSystemGlobals, _internal_resetJobSystemGlobals, type Payload } from 'payload' +import { wait } from 'payload/shared' +import { fileURLToPath } from 'url' + +import type { NextRESTClient } from '../helpers/NextRESTClient.js' + +import { devUser } from '../credentials.js' +import { initPayloadInt } from '../helpers/initPayloadInt.js' +import { clearAndSeedEverything } from './seed.js' + +let payload: Payload +let restClient: NextRESTClient +let token: string + +const { email, password } = devUser +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +describe('Queues - scheduling, with automatic scheduling handling', () => { + beforeAll(async () => { + process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit + ;({ payload, restClient } = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.schedules-autocron.ts', + )) + }) + + afterAll(async () => { + // Ensure no new crons are scheduled + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + // Wait 3 seconds to ensure all currently-running crons are done. If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance + await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() + }) + + beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + + await clearAndSeedEverything(payload) + const data = await restClient + .POST('/users/login', { + body: JSON.stringify({ + email, + password, + }), + }) + .then((res) => res.json()) + + if (data.token) { + token = data.token + } + payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true + }) + + it('can auto-schedule through automatic crons and autorun jobs', async () => { + // Do not call payload.jobs.run() or payload.jobs.handleSchedules() - payload should automatically schedule crons for auto-scheduling + + // Autorun and Autoschedule runs every second - so should have autorun at least twice after 3.5 seconds. 
Case with the lowest number of jobs completed, + // if autoschedule runs after the first autorun: + // Second 1: Autorun runs => no jobs + // Second 1: Autoschedule runs => schedules 1 job + // Second 2: Autorun runs => runs 1 job => 1 + // Second 2: Autoschedule runs => schedules 1 job + // Second 3: Autorun runs => runs 1 job => 2 + // Second 3: Autoschedule runs => schedules 1 job + // Status after 3.5 seconds: 2 jobs running, 1 job scheduled + + // Best case - most jobs completed: + // Second 1: Autoschedule runs => schedules 1 job + // Second 1: Autorun runs => runs 1 job => 1 + // Second 2: Autoschedule runs => schedules 1 job + // Second 2: Autorun runs => runs 1 job => 2 + // Second 3: Autoschedule runs => schedules 1 job + // Second 3: Autorun runs => runs 1 job => 3 + // Status after 3.5 seconds: 3 jobs running, no jobs scheduled + const minJobsCompleted = 2 + const maxJobsCompleted = 3 + + await new Promise((resolve) => setTimeout(resolve, 3500)) // 3 seconds + 0.5 seconds to ensure the last job has been completed + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBeGreaterThanOrEqual(minJobsCompleted) + expect(allSimples.totalDocs).toBeLessThanOrEqual(maxJobsCompleted) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) +}) diff --git a/test/queues/schedules.int.spec.ts b/test/queues/schedules.int.spec.ts new file mode 100644 index 0000000000..f4fa8a3ed8 --- /dev/null +++ b/test/queues/schedules.int.spec.ts @@ -0,0 +1,341 @@ +import path from 'path' +import { _internal_jobSystemGlobals, _internal_resetJobSystemGlobals, type Payload } from 'payload' +import { wait } from 'payload/shared' +import { fileURLToPath } from 'url' + +import type { NextRESTClient } from '../helpers/NextRESTClient.js' + +import { devUser } from '../credentials.js' +import { initPayloadInt } from '../helpers/initPayloadInt.js' +import { clearAndSeedEverything } from './seed.js' +import { timeFreeze, timeTravel, waitUntilAutorunIsDone, withoutAutoRun } from './utilities.js' + +let payload: Payload +let restClient: NextRESTClient +let token: string + +const { email, password } = devUser +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +describe('Queues - scheduling, without automatic scheduling handling', () => { + beforeAll(async () => { + process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit + ;({ payload, restClient } = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.schedules.ts', + )) + }) + + afterAll(async () => { + // Ensure no new crons are scheduled + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + // Wait 3 seconds to ensure all currently-running crons are done.
If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance + await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() + }) + + beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + await clearAndSeedEverything(payload) + const data = await restClient + .POST('/users/login', { + body: JSON.stringify({ + email, + password, + }), + }) + .then((res) => res.json()) + + if (data.token) { + token = data.token + } + payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true + }) + + it('can auto-schedule through local API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await payload.jobs.handleSchedules() + + // Do not call payload.jobs.run{silent: true}) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('can auto-schedule through handleSchedules REST API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/handle-schedules', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + // Do not call payload.jobs.run({silent: true}) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('can auto-schedule through run REST API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/run?silent=true', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('do not auto-schedule through run REST API when passing disableScheduling=true', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/run?silent=true&disableScheduling=true', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(0) + }) + + it('ensure scheduler does not schedule more jobs than needed if executed sequentially', async () => { + await withoutAutoRun(async () => { + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules() + } + }) + 
+ await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure scheduler max-one-job condition, by default, ignores jobs not scheduled by scheduler', async () => { + await withoutAutoRun(async () => { + for (let i = 0; i < 2; i++) { + await payload.jobs.queue({ + task: 'EverySecond', + queue: 'autorunSecond', + input: { + message: 'This task runs every second', + }, + }) + } + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules() + } + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(3) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure scheduler max-one-job condition, respects jobs not scheduled by scheduler due to task setting onlyScheduled: false', async () => { + timeFreeze() + await withoutAutoRun(async () => { + for (let i = 0; i < 2; i++) { + await payload.jobs.queue({ + task: 'EverySecondMax2', + input: { + message: 'This task runs every second - max 2 per second', + }, + }) + } + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules({ queue: 'default' }) + } + }) + + timeTravel(20) // Advance time to satisfy the waitUntil of newly scheduled jobs + + await payload.jobs.run({ + limit: 100, + silent: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(2) // Would be 4 by default, if only scheduled jobs were respected in handleSchedules condition + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('ensure scheduler does not schedule more jobs than needed if executed sequentially - max. 2 jobs configured', async () => { + timeFreeze() + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules({ queue: 'default' }) + } + + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + // default queue is not scheduled to autorun + await payload.jobs.run({ + silent: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(2) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('ensure job is scheduled every second', async () => { + timeFreeze() + for (let i = 0; i < 3; i++) { + await withoutAutoRun(async () => { + // Call it twice to test that it only schedules one + await payload.jobs.handleSchedules() + await payload.jobs.handleSchedules() + }) + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(3) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure job is scheduled every second - max. 
2 jobs configured', async () => { + timeFreeze() + + for (let i = 0; i < 3; i++) { + await withoutAutoRun(async () => { + // Call it 3x to test that it only schedules two + await payload.jobs.handleSchedules({ queue: 'default' }) + await payload.jobs.handleSchedules({ queue: 'default' }) + await payload.jobs.handleSchedules({ queue: 'default' }) + }) + + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + // default queue is not scheduled to autorun => run manually + await payload.jobs.run({ + silent: true, + }) + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + where: { + title: { + equals: 'This task runs every second - max 2 per second', + }, + }, + }) + + expect(allSimples.totalDocs).toBe(6) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('should not auto-schedule through automatic crons if scheduler set to manual', async () => { + // Autorun runs every second - so should definitely be done if we wait 2 seconds + await new Promise((resolve) => setTimeout(resolve, 2000)) // Should not flake, as we are expecting nothing to happen + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(0) + }) +}) diff --git a/test/queues/tasks/CreateSimpleRetries0Task.ts b/test/queues/tasks/CreateSimpleRetries0Task.ts new file mode 100644 index 0000000000..cc85b26cf6 --- /dev/null +++ b/test/queues/tasks/CreateSimpleRetries0Task.ts @@ -0,0 +1,41 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleRetries0Task: TaskConfig<'CreateSimpleRetries0'> = { + slug: 'CreateSimpleRetries0', + retries: 0, + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts b/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts new file mode 100644 index 0000000000..267150005c --- /dev/null +++ b/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts @@ -0,0 +1,40 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleRetriesUndefinedTask: TaskConfig<'CreateSimpleRetriesUndefined'> = { + slug: 'CreateSimpleRetriesUndefined', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleTask.ts b/test/queues/tasks/CreateSimpleTask.ts new file mode 100644 index 0000000000..8279aa1fdd --- /dev/null +++ b/test/queues/tasks/CreateSimpleTask.ts @@ -0,0 +1,41 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleTask: TaskConfig<'CreateSimple'> = { + retries: 3, + slug: 
'CreateSimple', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts b/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts new file mode 100644 index 0000000000..887a9060fe --- /dev/null +++ b/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts @@ -0,0 +1,42 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleWithDuplicateMessageTask: TaskConfig<'CreateSimpleWithDuplicateMessage'> = + { + retries: 2, + slug: 'CreateSimpleWithDuplicateMessage', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message + input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + } diff --git a/test/queues/tasks/EverySecondMax2Task.ts b/test/queues/tasks/EverySecondMax2Task.ts new file mode 100644 index 0000000000..b307f5dfd7 --- /dev/null +++ b/test/queues/tasks/EverySecondMax2Task.ts @@ -0,0 +1,67 @@ +import { + countRunnableOrActiveJobsForQueue, + type TaskConfig, + type TaskType, + type WorkflowTypes, +} from 'payload' + +export const EverySecondMax2Task: TaskConfig<'EverySecondMax2'> = { + schedule: [ + { + cron: '* * * * * *', + queue: 'default', + hooks: { + beforeSchedule: async ({ queueable, req }) => { + const runnableOrActiveJobsForQueue = await countRunnableOrActiveJobsForQueue({ + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug as TaskType, + workflowSlug: queueable.workflowConfig?.slug as WorkflowTypes, + onlyScheduled: false, // Set to false, used to test it + }) + + return { + input: { + message: 'This task runs every second - max 2 per second', + }, + shouldSchedule: runnableOrActiveJobsForQueue <= 1, + waitUntil: queueable.waitUntil, + } + }, + afterSchedule: async (args) => { + await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global + args.req.payload.logger.info( + 'EverySecondMax2 task scheduled: ' + + (args.status === 'success' + ? String(args.job.id) + : args.status === 'skipped' + ? 
'skipped' + : 'error'), + ) + }, + }, + }, + ], + slug: 'EverySecondMax2', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + req.payload.logger.info(input.message) + + await req.payload.create({ + collection: 'simple', + data: { + title: input.message, + }, + req, + }) + return { + output: {}, + } + }, +} diff --git a/test/queues/tasks/EverySecondTask.ts b/test/queues/tasks/EverySecondTask.ts new file mode 100644 index 0000000000..d79a3ee963 --- /dev/null +++ b/test/queues/tasks/EverySecondTask.ts @@ -0,0 +1,54 @@ +import type { TaskConfig } from 'payload' + +export const EverySecondTask: TaskConfig<'EverySecond'> = { + schedule: [ + { + cron: '* * * * * *', + queue: 'autorunSecond', + hooks: { + beforeSchedule: async (args) => { + const result = await args.defaultBeforeSchedule(args) // Handles verifying that there are no jobs already scheduled or processing + return { + ...result, + input: { + message: 'This task runs every second', + }, + } + }, + afterSchedule: async (args) => { + await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global + args.req.payload.logger.info( + 'EverySecond task scheduled: ' + + (args.status === 'success' + ? String(args.job.id) + : args.status === 'skipped' + ? 'skipped' + : 'error'), + ) + }, + }, + }, + ], + slug: 'EverySecond', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + req.payload.logger.info(input.message) + + await req.payload.create({ + collection: 'simple', + data: { + title: input.message, + }, + req, + }) + return { + output: {}, + } + }, +} diff --git a/test/queues/tasks/ExternalTask.ts b/test/queues/tasks/ExternalTask.ts new file mode 100644 index 0000000000..9eda7dc0dd --- /dev/null +++ b/test/queues/tasks/ExternalTask.ts @@ -0,0 +1,26 @@ +import type { TaskConfig } from 'payload' + +import path from 'path' +import { fileURLToPath } from 'url' + +const dirname = path.dirname(fileURLToPath(import.meta.url)) + +export const ExternalTask: TaskConfig<'ExternalTask'> = { + retries: 2, + slug: 'ExternalTask', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: path.resolve(dirname, '../runners/externalTask.ts') + '#externalTaskHandler', +} diff --git a/test/queues/tasks/ReturnCustomErrorTask.ts b/test/queues/tasks/ReturnCustomErrorTask.ts new file mode 100644 index 0000000000..0c7253d26f --- /dev/null +++ b/test/queues/tasks/ReturnCustomErrorTask.ts @@ -0,0 +1,20 @@ +import type { TaskConfig } from 'payload' + +export const ReturnCustomErrorTask: TaskConfig<'ReturnCustomError'> = { + retries: 0, + slug: 'ReturnCustomError', + inputSchema: [ + { + name: 'errorMessage', + type: 'text', + required: true, + }, + ], + outputSchema: [], + handler: ({ input }) => { + return { + state: 'failed', + errorMessage: input.errorMessage, + } + }, +} diff --git a/test/queues/tasks/ReturnErrorTask.ts b/test/queues/tasks/ReturnErrorTask.ts new file mode 100644 index 0000000000..661551ddd4 --- /dev/null +++ b/test/queues/tasks/ReturnErrorTask.ts @@ -0,0 +1,13 @@ +import type { TaskConfig } from 'payload' + +export const ReturnErrorTask: TaskConfig<'ReturnError'> = { + retries: 0, + slug: 'ReturnError', + inputSchema: [], + outputSchema: [], + handler: () => { + return { + state: 'failed', + } + }, +} diff --git a/test/queues/tasks/ThrowErrorTask.ts 
b/test/queues/tasks/ThrowErrorTask.ts new file mode 100644 index 0000000000..fa6f9ea303 --- /dev/null +++ b/test/queues/tasks/ThrowErrorTask.ts @@ -0,0 +1,11 @@ +import type { TaskConfig } from 'payload' + +export const ThrowErrorTask: TaskConfig<'ThrowError'> = { + retries: 0, + slug: 'ThrowError', + inputSchema: [], + outputSchema: [], + handler: () => { + throw new Error('failed') + }, +} diff --git a/test/queues/tasks/UpdatePostStep2Task.ts b/test/queues/tasks/UpdatePostStep2Task.ts new file mode 100644 index 0000000000..de7b310459 --- /dev/null +++ b/test/queues/tasks/UpdatePostStep2Task.ts @@ -0,0 +1,23 @@ +import type { TaskConfig } from 'payload' + +import { updatePostStep2 } from '../runners/updatePost.js' + +export const UpdatePostStep2Task: TaskConfig<'UpdatePostStep2'> = { + retries: 2, + slug: 'UpdatePostStep2', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep2, +} diff --git a/test/queues/tasks/UpdatePostTask.ts b/test/queues/tasks/UpdatePostTask.ts new file mode 100644 index 0000000000..b8cdfd52a4 --- /dev/null +++ b/test/queues/tasks/UpdatePostTask.ts @@ -0,0 +1,31 @@ +import type { TaskConfig } from 'payload' + +import { updatePostStep1 } from '../runners/updatePost.js' + +export const UpdatePostTask: TaskConfig<'UpdatePost'> = { + retries: 2, + slug: 'UpdatePost', + interfaceName: 'MyUpdatePostType', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep1, +} diff --git a/test/queues/utilities.ts b/test/queues/utilities.ts new file mode 100644 index 0000000000..bda1e0ac83 --- /dev/null +++ b/test/queues/utilities.ts @@ -0,0 +1,62 @@ +import { + _internal_jobSystemGlobals, + countRunnableOrActiveJobsForQueue, + createLocalReq, + type Payload, +} from 'payload' + +export async function waitUntilAutorunIsDone({ + payload, + queue, + onlyScheduled = false, +}: { + onlyScheduled?: boolean + payload: Payload + queue: string +}): Promise { + const req = await createLocalReq({}, payload) + + return new Promise((resolve) => { + const interval = setInterval(async () => { + const count = await countRunnableOrActiveJobsForQueue({ + queue, + req, + onlyScheduled, + }) + if (count === 0) { + clearInterval(interval) + resolve() + } + }, 200) + }) +} + +export function timeFreeze() { + const curDate = new Date() + _internal_jobSystemGlobals.getCurrentDate = () => curDate +} + +export function timeTravel(seconds: number) { + const curDate = _internal_jobSystemGlobals.getCurrentDate() + _internal_jobSystemGlobals.getCurrentDate = () => new Date(curDate.getTime() + seconds * 1000) +} + +export async function withoutAutoRun(fn: () => Promise): Promise { + const originalValue = _internal_jobSystemGlobals.shouldAutoRun + _internal_jobSystemGlobals.shouldAutoRun = false + try { + return await fn() + } finally { + _internal_jobSystemGlobals.shouldAutoRun = originalValue + } +} + +export async function withoutAutoSchedule(fn: () => Promise): Promise { + const originalValue = _internal_jobSystemGlobals.shouldAutoSchedule + _internal_jobSystemGlobals.shouldAutoSchedule = false + try { + return await fn() + } finally { + _internal_jobSystemGlobals.shouldAutoSchedule = originalValue + } 
+} diff --git a/test/runInit.ts b/test/runInit.ts index 46b38b2872..3ff6c7b738 100644 --- a/test/runInit.ts +++ b/test/runInit.ts @@ -4,6 +4,7 @@ export async function runInit( testSuiteArg: string, writeDBAdapter: boolean, skipGenImportMap: boolean = false, + configFile?: string, ): Promise { - await initDevAndTest(testSuiteArg, String(writeDBAdapter), String(skipGenImportMap)) + await initDevAndTest(testSuiteArg, String(writeDBAdapter), String(skipGenImportMap), configFile) } From 46d8a26b0d57d5ecf17c69a1ee8d5a4491d6da7e Mon Sep 17 00:00:00 2001 From: iamacup Date: Fri, 18 Jul 2025 13:34:54 +0100 Subject: [PATCH 12/91] fix: handle undefined values in afterChange hooks when read:false and create:true on the field level access for parents and siblings (#12664) ### What? Fixes a bug where `afterChange` hooks would attempt to access values for fields that are `read: false` but `create: true`, resulting in `undefined` values and unexpected behavior. ### Why? In scenarios where access control allows field creation (`create: true`) but disallows reading it (`read: false`), hooks like `afterChange` would still attempt to operate on `undefined` values from `siblingDoc` or `previousDoc`, potentially causing errors or skipped logic. ### How? Adds safe optional chaining and fallback object initialization in `promise.ts` for: - `previousDoc[field.name]` - `siblingDoc[field.name]` - Group, Array, and Block field traversals This ensures that these values are treated as empty objects or arrays where appropriate to prevent runtime errors during traversal or hook execution. Fixes https://github.com/payloadcms/payload/issues/12660 --------- Co-authored-by: Niall Bambury --- .../src/fields/hooks/afterChange/promise.ts | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/payload/src/fields/hooks/afterChange/promise.ts b/packages/payload/src/fields/hooks/afterChange/promise.ts index 23010c6116..ecbc930870 100644 --- a/packages/payload/src/fields/hooks/afterChange/promise.ts +++ b/packages/payload/src/fields/hooks/afterChange/promise.ts @@ -88,12 +88,12 @@ export const promise = async ({ path: pathSegments, previousDoc, previousSiblingDoc, - previousValue: previousDoc[field.name!], + previousValue: previousDoc?.[field.name!], req, schemaPath: schemaPathSegments, siblingData, siblingFields: siblingFields!, - value: siblingDoc[field.name!], + value: siblingDoc?.[field.name!], }) if (hookedValue !== undefined) { @@ -226,10 +226,10 @@ export const promise = async ({ parentPath: path, parentSchemaPath: schemaPath, previousDoc, - previousSiblingDoc: previousDoc[field.name] as JsonObject, + previousSiblingDoc: (previousDoc?.[field.name] as JsonObject) || {}, req, siblingData: (siblingData?.[field.name] as JsonObject) || {}, - siblingDoc: siblingDoc[field.name] as JsonObject, + siblingDoc: (siblingDoc?.[field.name] as JsonObject) || {}, }) } else { await traverseFields({ @@ -282,11 +282,11 @@ export const promise = async ({ path: pathSegments, previousDoc, previousSiblingDoc, - previousValue: previousDoc[field.name], + previousValue: previousDoc?.[field.name], req, schemaPath: schemaPathSegments, siblingData, - value: siblingDoc[field.name], + value: siblingDoc?.[field.name], }) if (hookedValue !== undefined) { @@ -305,9 +305,9 @@ export const promise = async ({ const isNamedTab = tabHasName(field) if (isNamedTab) { - tabSiblingData = (siblingData[field.name] as JsonObject) ?? {} - tabSiblingDoc = (siblingDoc[field.name] as JsonObject) ?? 
{} - tabPreviousSiblingDoc = (previousDoc[field.name] as JsonObject) ?? {} + tabSiblingData = (siblingData?.[field.name] ?? {}) as JsonObject + tabSiblingDoc = (siblingDoc?.[field.name] ?? {}) as JsonObject + tabPreviousSiblingDoc = (previousDoc?.[field.name] ?? {}) as JsonObject } await traverseFields({ From d7a3faa4e9d74a953d12316c835cd295e46028d1 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Fri, 18 Jul 2025 09:29:26 -0400 Subject: [PATCH 13/91] fix(ui): properly sync search params to user preferences (#13200) Some search params within the list view do not properly sync to user preferences, and visa versa. For example, when selecting a query preset, the `?preset=123` param is injected into the URL and saved to preferences, but when reloading the page without the param, that preset is not reactivated as expected. ### Problem The reason this wasn't working before is that omitting this param would also reset prefs. It was designed this way in order to support client-side resets, e.g. clicking the query presets "x" button. This pattern would never work, however, because this means that every time the user navigates to the list view directly, their preference is cleared, as no param would exist in the query. Note: this is not an issue with _all_ params, as not all are handled in the same way. ### Solution The fix is to use empty values instead, e.g. `?preset=`. When the server receives this, it knows to clear the pref. If it doesn't exist at all, it knows to load from prefs. And if it has a value, it saves to prefs. On the client, we sanitize those empty values back out so they don't appear in the URL in the end. This PR also refactors much of the list query context and its respective provider to be significantly more predictable and easier to work with, namely: - The `ListQuery` type now fully aligns with what Payload APIs expect, e.g. 
`page` is a number, not a string - The provider now receives a single `query` prop which matches the underlying context 1:1 - Propagating the query from the server to the URL is significantly more predictable - Any new props that may be supported in the future will automatically work - No more reconciling `columns` and `listPreferences.columns`, its just `query.columns` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210827129744922 --- packages/next/src/views/List/index.tsx | 81 ++++++------- packages/next/src/views/Versions/index.tsx | 6 +- packages/payload/src/admin/functions/index.ts | 4 +- .../utilities/transformColumnPreferences.ts | 4 +- .../src/utilities/validateWhereQuery.ts | 7 +- .../elements/ListControls/useQueryPresets.tsx | 8 +- .../src/elements/RelationshipTable/index.tsx | 17 ++- packages/ui/src/providers/ListQuery/index.tsx | 107 ++++++---------- .../ui/src/providers/ListQuery/mergeQuery.ts | 36 ++++++ .../src/providers/ListQuery/sanitizeQuery.ts | 38 ++++++ packages/ui/src/providers/ListQuery/types.ts | 6 +- .../TableColumns/buildColumnState/index.tsx | 12 +- .../buildColumnState/isColumnActive.ts | 10 +- packages/ui/src/utilities/buildTableState.ts | 5 +- packages/ui/src/utilities/renderTable.tsx | 4 - .../ui/src/utilities/upsertPreferences.ts | 39 +++--- test/eslint.config.js | 1 + test/fields/payload-types.ts | 19 +++ test/query-presets/e2e.spec.ts | 43 +++++-- test/query-presets/helpers/assertURLParams.ts | 10 +- test/query-presets/payload-types.ts | 14 +++ tsconfig.base.json | 114 +++++++++++++----- 22 files changed, 370 insertions(+), 215 deletions(-) create mode 100644 packages/ui/src/providers/ListQuery/mergeQuery.ts create mode 100644 packages/ui/src/providers/ListQuery/sanitizeQuery.ts diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index a2af698ea1..1a019e48a4 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -2,13 +2,11 @@ import type { AdminViewServerProps, CollectionPreferences, ColumnPreference, - DefaultDocumentIDType, ListQuery, ListViewClientProps, ListViewServerPropsOnly, QueryPreset, SanitizedCollectionPermission, - Where, } from 'payload' import { DefaultListView, HydrateAuthProvider, ListQueryProvider } from '@payloadcms/ui' @@ -20,6 +18,7 @@ import { isNumber, mergeListSearchAndWhere, transformColumnsToPreferences, + transformColumnsToSearchParams, } from 'payload/shared' import React, { Fragment } from 'react' @@ -87,28 +86,33 @@ export const renderListView = async ( throw new Error('not-found') } - const query = queryFromArgs || queryFromReq + const query: ListQuery = queryFromArgs || queryFromReq - const columns: ColumnPreference[] = transformColumnsToPreferences( - query?.columns as ColumnPreference[] | string, - ) + const columnsFromQuery: ColumnPreference[] = transformColumnsToPreferences(query?.columns) - /** - * @todo: find a pattern to avoid setting preferences on hard navigation, i.e. direct links, page refresh, etc. - * This will ensure that prefs are only updated when explicitly set by the user - * This could potentially be done by injecting a `sessionID` into the params and comparing it against a session cookie - */ const collectionPreferences = await upsertPreferences({ key: `collection-${collectionSlug}`, req, value: { - columns, + columns: columnsFromQuery, limit: isNumber(query?.limit) ? 
Number(query.limit) : undefined, - preset: (query?.preset as DefaultDocumentIDType) || null, + preset: query?.preset, sort: query?.sort as string, }, }) + query.preset = collectionPreferences?.preset + + query.page = isNumber(query?.page) ? Number(query.page) : 0 + + query.limit = collectionPreferences?.limit || collectionConfig.admin.pagination.defaultLimit + + query.sort = + collectionPreferences?.sort || + (typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : undefined) + + query.columns = transformColumnsToSearchParams(collectionPreferences?.columns || []) + const { routes: { admin: adminRoute }, } = config @@ -118,35 +122,27 @@ export const renderListView = async ( throw new Error('not-found') } - const page = isNumber(query?.page) ? Number(query.page) : 0 - - const limit = collectionPreferences?.limit || collectionConfig.admin.pagination.defaultLimit - - const sort = - collectionPreferences?.sort || - (typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : undefined) - - let where = mergeListSearchAndWhere({ - collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: (query?.where as Where) || undefined, - }) - if (typeof collectionConfig.admin?.baseListFilter === 'function') { const baseListFilter = await collectionConfig.admin.baseListFilter({ - limit, - page, + limit: query.limit, + page: query.page, req, - sort, + sort: query.sort, }) if (baseListFilter) { - where = { - and: [where, baseListFilter].filter(Boolean), + query.where = { + and: [query.where, baseListFilter].filter(Boolean), } } } + const whereWithMergedSearch = mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: query?.where, + }) + let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined @@ -179,14 +175,14 @@ export const renderListView = async ( draft: true, fallbackLocale: false, includeLockStatus: true, - limit, + limit: query.limit, locale, overrideAccess: false, - page, + page: query.page, req, - sort, + sort: query.sort, user, - where: where || {}, + where: whereWithMergedSearch, }) const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) @@ -194,8 +190,7 @@ export const renderListView = async ( const { columnState, Table } = renderTable({ clientCollectionConfig, collectionConfig, - columnPreferences: collectionPreferences?.columns, - columns, + columns: collectionPreferences?.columns, customCellProps, docs: data.docs, drawerSlug, @@ -232,7 +227,7 @@ export const renderListView = async ( collectionConfig, data, i18n, - limit, + limit: query.limit, listPreferences: collectionPreferences, listSearchableFields: collectionConfig.admin.listSearchableFields, locale: fullLocale, @@ -258,19 +253,19 @@ export const renderListView = async ( const isInDrawer = Boolean(drawerSlug) + // Needed to prevent: Only plain objects can be passed to Client Components from Server Components. Objects with toJSON methods are not supported. Convert it manually to a simple value before passing it to props. + query.where = query?.where ? 
JSON.parse(JSON.stringify(query?.where || {})) : undefined + return { List: ( {RenderServerComponent({ clientProps: { diff --git a/packages/next/src/views/Versions/index.tsx b/packages/next/src/views/Versions/index.tsx index c4a564cf44..65eb60913c 100644 --- a/packages/next/src/views/Versions/index.tsx +++ b/packages/next/src/views/Versions/index.tsx @@ -148,10 +148,12 @@ export async function VersionsView(props: DocumentViewServerProps) { { let columnsToTransform = columns @@ -44,5 +44,5 @@ export const transformColumnsToPreferences = ( export const transformColumnsToSearchParams = ( columns: Column[] | ColumnPreference[], ): ColumnsFromURL => { - return columns.map((col) => (col.active ? col.accessor : `-${col.accessor}`)) + return columns?.map((col) => (col.active ? col.accessor : `-${col.accessor}`)) } diff --git a/packages/payload/src/utilities/validateWhereQuery.ts b/packages/payload/src/utilities/validateWhereQuery.ts index cb920db1a9..720aa66a11 100644 --- a/packages/payload/src/utilities/validateWhereQuery.ts +++ b/packages/payload/src/utilities/validateWhereQuery.ts @@ -13,9 +13,10 @@ import { validOperatorSet } from '../types/constants.js' export const validateWhereQuery = (whereQuery: Where): whereQuery is Where => { if ( whereQuery?.or && - whereQuery?.or?.length > 0 && - whereQuery?.or?.[0]?.and && - whereQuery?.or?.[0]?.and?.length > 0 + (whereQuery?.or?.length === 0 || + (whereQuery?.or?.length > 0 && + whereQuery?.or?.[0]?.and && + whereQuery?.or?.[0]?.and?.length > 0)) ) { // At this point we know that the whereQuery has 'or' and 'and' fields, // now let's check the structure and content of these fields. diff --git a/packages/ui/src/elements/ListControls/useQueryPresets.tsx b/packages/ui/src/elements/ListControls/useQueryPresets.tsx index 694d6f68d9..587eac6784 100644 --- a/packages/ui/src/elements/ListControls/useQueryPresets.tsx +++ b/packages/ui/src/elements/ListControls/useQueryPresets.tsx @@ -3,7 +3,7 @@ import type { CollectionSlug, QueryPreset, SanitizedCollectionPermission } from import { useModal } from '@faceless-ui/modal' import { getTranslation } from '@payloadcms/translations' import { transformColumnsToPreferences, transformColumnsToSearchParams } from 'payload/shared' -import React, { Fragment, useCallback, useMemo } from 'react' +import React, { useCallback, useMemo } from 'react' import { toast } from 'sonner' import { useConfig } from '../../providers/Config/index.js' @@ -103,9 +103,9 @@ export const useQueryPresets = ({ const resetQueryPreset = useCallback(async () => { await refineListData( { - columns: undefined, - preset: undefined, - where: undefined, + columns: [], + preset: '', + where: {}, }, false, ) diff --git a/packages/ui/src/elements/RelationshipTable/index.tsx b/packages/ui/src/elements/RelationshipTable/index.tsx index 80fdad7d93..a8080fb72a 100644 --- a/packages/ui/src/elements/RelationshipTable/index.tsx +++ b/packages/ui/src/elements/RelationshipTable/index.tsx @@ -114,7 +114,7 @@ export const RelationshipTable: React.FC = (pro const renderTable = useCallback( async (docs?: PaginatedDocs['docs']) => { const newQuery: ListQuery = { - limit: String(field?.defaultLimit || collectionConfig?.admin?.pagination?.defaultLimit), + limit: field?.defaultLimit || collectionConfig?.admin?.pagination?.defaultLimit, sort: field.defaultSort || collectionConfig?.defaultSort, ...(query || {}), where: { ...(query?.where || {}) }, @@ -240,6 +240,15 @@ export const RelationshipTable: React.FC = (pro // eslint-disable-next-line react-hooks/exhaustive-deps }, 
[isDrawerOpen]) + const memoizedQuery = React.useMemo( + () => ({ + columns: transformColumnsToPreferences(columnState)?.map(({ accessor }) => accessor), + limit: field.defaultLimit ?? collectionConfig?.admin?.pagination?.defaultLimit, + sort: field.defaultSort ?? collectionConfig?.defaultSort, + }), + [field, columnState, collectionConfig], + ) + return (
@@ -306,12 +315,7 @@ export const RelationshipTable: React.FC = (pro {data?.docs && data.docs.length > 0 && ( = (pro ? undefined : `_${field.collection}_${fieldPath.replaceAll('.', '_')}_order` } + query={memoizedQuery} > = ({ children, collectionSlug, - columns, data, - defaultLimit, - defaultSort, - listPreferences, modifySearchParams, onQueryChange: onQueryChangeFromProps, orderableFieldName, + query: queryFromProps, }) => { // TODO: Investigate if this is still needed - // eslint-disable-next-line react-compiler/react-compiler + 'use no memo' const router = useRouter() const rawSearchParams = useSearchParams() @@ -36,7 +34,7 @@ export const ListQueryProvider: React.FC = ({ const [modified, setModified] = useState(false) const searchParams = useMemo( - () => parseSearchParams(rawSearchParams), + () => sanitizeQuery(parseSearchParams(rawSearchParams)), [rawSearchParams], ) @@ -51,37 +49,12 @@ export const ListQueryProvider: React.FC = ({ return searchParams } else { return { - limit: String(defaultLimit), - sort: defaultSort, + limit: queryFromProps.limit, + sort: queryFromProps.sort, } } }) - const mergeQuery = useCallback( - (newQuery: ListQuery = {}): ListQuery => { - let page = 'page' in newQuery ? newQuery.page : currentQuery?.page - - if ('where' in newQuery || 'search' in newQuery) { - page = '1' - } - - const mergedQuery: ListQuery = { - ...currentQuery, - ...newQuery, - columns: 'columns' in newQuery ? newQuery.columns : currentQuery.columns, - limit: 'limit' in newQuery ? newQuery.limit : (currentQuery?.limit ?? String(defaultLimit)), - page, - preset: 'preset' in newQuery ? newQuery.preset : currentQuery?.preset, - search: 'search' in newQuery ? newQuery.search : currentQuery?.search, - sort: 'sort' in newQuery ? newQuery.sort : ((currentQuery?.sort as string) ?? defaultSort), - where: 'where' in newQuery ? 
newQuery.where : currentQuery?.where, - } - - return mergedQuery - }, - [currentQuery, defaultLimit, defaultSort], - ) - const refineListData = useCallback( // eslint-disable-next-line @typescript-eslint/require-await async (incomingQuery: ListQuery, modified?: boolean) => { @@ -91,12 +64,23 @@ export const ListQueryProvider: React.FC = ({ setModified(true) } - const newQuery = mergeQuery(incomingQuery) + const newQuery = mergeQuery(currentQuery, incomingQuery, { + defaults: { + limit: queryFromProps.limit, + sort: queryFromProps.sort, + }, + }) if (modifySearchParams) { startRouteTransition(() => router.replace( - `${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) }, { addQueryPrefix: true })}`, + `${qs.stringify( + { + ...newQuery, + columns: JSON.stringify(newQuery.columns), + }, + { addQueryPrefix: true }, + )}`, ), ) } else if ( @@ -110,7 +94,9 @@ export const ListQueryProvider: React.FC = ({ setCurrentQuery(newQuery) }, [ - mergeQuery, + currentQuery, + queryFromProps.limit, + queryFromProps.sort, modifySearchParams, onQueryChange, onQueryChangeFromProps, @@ -121,14 +107,14 @@ export const ListQueryProvider: React.FC = ({ const handlePageChange = useCallback( async (arg: number) => { - await refineListData({ page: String(arg) }) + await refineListData({ page: arg }) }, [refineListData], ) const handlePerPageChange = React.useCallback( async (arg: number) => { - await refineListData({ limit: String(arg), page: '1' }) + await refineListData({ limit: arg, page: 1 }) }, [refineListData], ) @@ -155,47 +141,26 @@ export const ListQueryProvider: React.FC = ({ [refineListData], ) - const syncQuery = useEffectEvent(() => { - let shouldUpdateQueryString = false - const newQuery = { ...(currentQuery || {}) } + const mergeQueryFromPropsAndSyncToURL = useEffectEvent(() => { + const newQuery = sanitizeQuery({ ...(currentQuery || {}), ...(queryFromProps || {}) }) - // Allow the URL to override the default limit - if (isNumber(defaultLimit) && !('limit' in currentQuery)) { - newQuery.limit = String(defaultLimit) - shouldUpdateQueryString = true - } + const search = `?${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) })}` - // Allow the URL to override the default sort - if (defaultSort && !('sort' in currentQuery)) { - newQuery.sort = defaultSort - shouldUpdateQueryString = true - } - - // Only modify columns if they originated from preferences - // We can assume they did if `listPreferences.columns` is defined - if (columns && listPreferences?.columns && !('columns' in currentQuery)) { - newQuery.columns = transformColumnsToSearchParams(columns) - shouldUpdateQueryString = true - } - - if (shouldUpdateQueryString) { + if (window.location.search !== search) { setCurrentQuery(newQuery) - // Do not use router.replace here to avoid re-rendering on initial load - window.history.replaceState( - null, - '', - `?${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) })}`, - ) + + // Important: do not use router.replace here to avoid re-rendering on initial load + window.history.replaceState(null, '', search) } }) - // If `defaultLimit` or `defaultSort` are updated externally, update the query - // I.e. when HMR runs, these properties may be different + // If `query` is updated externally, update the local state + // E.g. 
when HMR runs, these properties may be different useEffect(() => { if (modifySearchParams) { - syncQuery() + mergeQueryFromPropsAndSyncToURL() } - }, [defaultSort, defaultLimit, modifySearchParams, columns]) + }, [modifySearchParams, queryFromProps]) return ( { + let page = 'page' in newQuery ? newQuery.page : currentQuery?.page + + if ('where' in newQuery || 'search' in newQuery) { + page = 1 + } + + const mergedQuery: ListQuery = { + ...currentQuery, + ...newQuery, + columns: 'columns' in newQuery ? newQuery.columns : currentQuery.columns, + groupBy: + 'groupBy' in newQuery + ? newQuery.groupBy + : (currentQuery?.groupBy ?? options?.defaults?.groupBy), + limit: 'limit' in newQuery ? newQuery.limit : (currentQuery?.limit ?? options?.defaults?.limit), + page, + preset: 'preset' in newQuery ? newQuery.preset : currentQuery?.preset, + search: 'search' in newQuery ? newQuery.search : currentQuery?.search, + sort: + 'sort' in newQuery + ? newQuery.sort + : ((currentQuery?.sort as string) ?? options?.defaults?.sort), + where: 'where' in newQuery ? newQuery.where : currentQuery?.where, + } + + return mergedQuery +} diff --git a/packages/ui/src/providers/ListQuery/sanitizeQuery.ts b/packages/ui/src/providers/ListQuery/sanitizeQuery.ts new file mode 100644 index 0000000000..551ddf459e --- /dev/null +++ b/packages/ui/src/providers/ListQuery/sanitizeQuery.ts @@ -0,0 +1,38 @@ +import type { ListQuery, Where } from 'payload' + +/** + * Sanitize empty strings from the query, e.g. `?preset=` + * This is how we determine whether to clear user preferences for certain params + * Once cleared, they are no longer needed in the URL + */ +export const sanitizeQuery = (toSanitize: ListQuery): ListQuery => { + const sanitized = { ...toSanitize } + + Object.entries(sanitized).forEach(([key, value]) => { + if ( + key === 'columns' && + (value === '[]' || (Array.isArray(sanitized[key]) && sanitized[key].length === 0)) + ) { + delete sanitized[key] + } + + if (key === 'where' && typeof value === 'object' && !Object.keys(value as Where).length) { + delete sanitized[key] + } + + if ((key === 'limit' || key === 'page') && typeof value === 'string') { + const parsed = parseInt(value, 10) + sanitized[key] = Number.isNaN(parsed) ? 
undefined : parsed + } + + if (key === 'page' && value === 0) { + delete sanitized[key] + } + + if (value === '') { + delete sanitized[key] + } + }) + + return sanitized +} diff --git a/packages/ui/src/providers/ListQuery/types.ts b/packages/ui/src/providers/ListQuery/types.ts index ec91923027..ea009b2a53 100644 --- a/packages/ui/src/providers/ListQuery/types.ts +++ b/packages/ui/src/providers/ListQuery/types.ts @@ -1,6 +1,5 @@ import type { ClientCollectionConfig, - CollectionPreferences, ColumnPreference, ListQuery, PaginatedDocs, @@ -21,11 +20,7 @@ export type OnListQueryChange = (query: ListQuery) => void export type ListQueryProps = { readonly children: React.ReactNode readonly collectionSlug?: ClientCollectionConfig['slug'] - readonly columns?: ColumnPreference[] readonly data: PaginatedDocs - readonly defaultLimit?: number - readonly defaultSort?: Sort - readonly listPreferences?: CollectionPreferences readonly modifySearchParams?: boolean readonly onQueryChange?: OnListQueryChange readonly orderableFieldName?: string @@ -33,6 +28,7 @@ export type ListQueryProps = { * @deprecated */ readonly preferenceKey?: string + query?: ListQuery } export type IListQueryContext = { diff --git a/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx b/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx index 48c39b0e98..bf23d0c5d0 100644 --- a/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx +++ b/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx @@ -38,7 +38,6 @@ import { sortFieldMap } from './sortFieldMap.js' export type BuildColumnStateArgs = { beforeRows?: Column[] clientFields: ClientField[] - columnPreferences: CollectionPreferences['columns'] columns?: CollectionPreferences['columns'] customCellProps: DefaultCellComponentProps['customCellProps'] enableLinkedCell?: boolean @@ -70,7 +69,6 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { beforeRows, clientFields, collectionSlug, - columnPreferences, columns, customCellProps, dataType, @@ -99,7 +97,7 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { // place the `ID` field first, if it exists // do the same for the `useAsTitle` field with precedence over the `ID` field - // then sort the rest of the fields based on the `defaultColumns` or `columnPreferences` + // then sort the rest of the fields based on the `defaultColumns` or `columns` const idFieldIndex = sortedFieldMap?.findIndex((field) => fieldIsID(field)) if (idFieldIndex > -1) { @@ -116,10 +114,10 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { sortedFieldMap.unshift(useAsTitleField) } - const sortTo = columnPreferences || columns + const sortTo = columns if (sortTo) { - // sort the fields to the order of `defaultColumns` or `columnPreferences` + // sort the fields to the order of `defaultColumns` or `columns` sortedFieldMap = sortFieldMap(sortedFieldMap, sortTo) _sortedFieldMap = sortFieldMap(_sortedFieldMap, sortTo) // TODO: think of a way to avoid this additional sort } @@ -150,14 +148,14 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { return acc // skip any group without a custom cell } - const columnPreference = columnPreferences?.find( + const columnPref = columns?.find( (preference) => clientField && 'name' in clientField && preference.accessor === accessor, ) const isActive = isColumnActive({ accessor, activeColumnsIndices, - columnPreference, + column: columnPref, columns, }) diff --git 
a/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts b/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts index 517fbb5c2f..52561546da 100644 --- a/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts +++ b/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts @@ -3,18 +3,18 @@ import type { ColumnPreference } from 'payload' export function isColumnActive({ accessor, activeColumnsIndices, - columnPreference, + column, columns, }: { accessor: string activeColumnsIndices: number[] - columnPreference: ColumnPreference + column: ColumnPreference columns: ColumnPreference[] }) { - if (columnPreference) { - return columnPreference.active + if (column) { + return column.active } else if (columns && Array.isArray(columns) && columns.length > 0) { - return Boolean(columns.find((column) => column.accessor === accessor)?.active) + return Boolean(columns.find((col) => col.accessor === accessor)?.active) } else if (activeColumnsIndices.length < 4) { return true } diff --git a/packages/ui/src/utilities/buildTableState.ts b/packages/ui/src/utilities/buildTableState.ts index e2b436f9c6..c76fb6e326 100644 --- a/packages/ui/src/utilities/buildTableState.ts +++ b/packages/ui/src/utilities/buildTableState.ts @@ -215,10 +215,10 @@ const buildTableState = async ( collection: collectionSlug, depth: 0, draft: true, - limit: query?.limit ? parseInt(query.limit, 10) : undefined, + limit: query?.limit, locale: req.locale, overrideAccess: false, - page: query?.page ? parseInt(query.page, 10) : undefined, + page: query?.page, sort: query?.sort, user: req.user, where: query?.where, @@ -232,7 +232,6 @@ const buildTableState = async ( clientConfig, collectionConfig, collections: Array.isArray(collectionSlug) ? collectionSlug : undefined, - columnPreferences: Array.isArray(collectionSlug) ? 
collectionPreferences?.columns : undefined, // TODO, might not be neededcolumns, columns, docs, enableRowSelections, diff --git a/packages/ui/src/utilities/renderTable.tsx b/packages/ui/src/utilities/renderTable.tsx index d0b0216119..9a3fe5716f 100644 --- a/packages/ui/src/utilities/renderTable.tsx +++ b/packages/ui/src/utilities/renderTable.tsx @@ -64,7 +64,6 @@ export const renderTable = ({ clientConfig, collectionConfig, collections, - columnPreferences, columns: columnsFromArgs, customCellProps, docs, @@ -80,7 +79,6 @@ export const renderTable = ({ clientConfig?: ClientConfig collectionConfig?: SanitizedCollectionConfig collections?: string[] - columnPreferences: CollectionPreferences['columns'] columns?: CollectionPreferences['columns'] customCellProps?: Record docs: PaginatedDocs['docs'] @@ -154,7 +152,6 @@ export const renderTable = ({ const sharedArgs: Pick< BuildColumnStateArgs, | 'clientFields' - | 'columnPreferences' | 'columns' | 'customCellProps' | 'enableRowSelections' @@ -164,7 +161,6 @@ export const renderTable = ({ | 'useAsTitle' > = { clientFields, - columnPreferences, columns, enableRowSelections, i18n, diff --git a/packages/ui/src/utilities/upsertPreferences.ts b/packages/ui/src/utilities/upsertPreferences.ts index 26e51ac814..477d98a041 100644 --- a/packages/ui/src/utilities/upsertPreferences.ts +++ b/packages/ui/src/utilities/upsertPreferences.ts @@ -5,13 +5,26 @@ import { cache } from 'react' import { removeUndefined } from './removeUndefined.js' +type PreferenceDoc = { + id: DefaultDocumentIDType | undefined + value?: T | undefined +} + +type DefaultMerge = (existingValue: T, incomingValue: T | undefined) => T + +const defaultMerge: DefaultMerge = (existingValue: T, incomingValue: T | undefined) => + ({ + ...(typeof existingValue === 'object' ? existingValue : {}), // Shallow merge existing prefs to acquire any missing keys from incoming value + ...removeUndefined(incomingValue || {}), + }) as T + export const getPreferences = cache( async ( key: string, payload: Payload, userID: DefaultDocumentIDType, userSlug: string, - ): Promise<{ id: DefaultDocumentIDType; value: T }> => { + ): Promise> => { const result = (await payload .find({ collection: 'payload-preferences', @@ -58,21 +71,14 @@ export const upsertPreferences = async | stri req, value: incomingValue, }: { + customMerge?: (existingValue: T, incomingValue: T, defaultMerge: DefaultMerge) => T key: string req: PayloadRequest -} & ( - | { - customMerge: (existingValue: T) => T - value?: never - } - | { - customMerge?: never - value: T - } -)): Promise => { - const existingPrefs: { id?: DefaultDocumentIDType; value?: T } = req.user + value: T +}): Promise => { + const existingPrefs: PreferenceDoc = req.user ? await getPreferences(key, req.payload, req.user.id, req.user.collection) - : {} + : ({} as PreferenceDoc) let newPrefs = existingPrefs?.value @@ -95,15 +101,12 @@ export const upsertPreferences = async | stri let mergedPrefs: T if (typeof customMerge === 'function') { - mergedPrefs = customMerge(existingPrefs.value) + mergedPrefs = customMerge(existingPrefs.value, incomingValue, defaultMerge) } else { // Strings are valid JSON, i.e. `locale` saved as a string to the locale preferences mergedPrefs = typeof incomingValue === 'object' - ? ({ - ...(typeof existingPrefs.value === 'object' ? existingPrefs?.value : {}), // Shallow merge existing prefs to acquire any missing keys from incoming value - ...removeUndefined(incomingValue || {}), - } as T) + ? 
defaultMerge(existingPrefs.value, incomingValue) : incomingValue } diff --git a/test/eslint.config.js b/test/eslint.config.js index 2621eb0a66..8cdafaa8f4 100644 --- a/test/eslint.config.js +++ b/test/eslint.config.js @@ -74,6 +74,7 @@ export const testEslintConfig = [ 'expectNoResultsAndCreateFolderButton', 'createFolder', 'createFolderFromDoc', + 'assertURLParams', ], }, ], diff --git a/test/fields/payload-types.ts b/test/fields/payload-types.ts index 352cb40db4..8ee74af84f 100644 --- a/test/fields/payload-types.ts +++ b/test/fields/payload-types.ts @@ -549,6 +549,14 @@ export interface BlockField { } )[] | null; + readOnly?: + | { + title?: string | null; + id?: string | null; + blockName?: string | null; + blockType: 'readOnlyBlock'; + }[] + | null; updatedAt: string; createdAt: string; } @@ -2222,6 +2230,17 @@ export interface BlockFieldsSelect { blockName?: T; }; }; + readOnly?: + | T + | { + readOnlyBlock?: + | T + | { + title?: T; + id?: T; + blockName?: T; + }; + }; updatedAt?: T; createdAt?: T; } diff --git a/test/query-presets/e2e.spec.ts b/test/query-presets/e2e.spec.ts index 14ded55fca..317c96a2d6 100644 --- a/test/query-presets/e2e.spec.ts +++ b/test/query-presets/e2e.spec.ts @@ -4,6 +4,7 @@ import { expect, test } from '@playwright/test' import { devUser } from 'credentials.js' import { openListColumns } from 'helpers/e2e/openListColumns.js' import { toggleColumn } from 'helpers/e2e/toggleColumn.js' +import { openNav } from 'helpers/e2e/toggleNav.js' import * as path from 'path' import { fileURLToPath } from 'url' @@ -152,23 +153,38 @@ describe('Query Presets', () => { test('should select preset and apply filters', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) await assertURLParams({ page, columns: seededData.everyone.columns, - where: seededData.everyone.where, - presetID: everyoneID, + preset: everyoneID, }) - - expect(true).toBe(true) }) test('should clear selected preset and reset filters', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) + await clearSelectedPreset({ page }) - expect(true).toBe(true) + + // ensure that the preset was cleared from preferences by navigating without the `?preset=` param + // e.g. 
do not do `page.reload()` + await page.goto(pagesUrl.list) + + // poll url to ensure that `?preset=` param is not present + // this is first set to an empty string to clear from the user's preferences + // it is then removed entirely after it is processed on the server + const regex = /preset=/ + await page.waitForURL((url) => !regex.test(url.search), { timeout: TEST_TIMEOUT_LONG }) + + await expect( + page.locator('button#select-preset', { + hasText: exactText('Select Preset'), + }), + ).toBeVisible() }) test('should delete a preset, clear selection, and reset changes', async () => { @@ -205,18 +221,29 @@ describe('Query Presets', () => { test('should save last used preset to preferences and load on initial render', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) - await page.reload() + await page.goto(pagesUrl.list) await assertURLParams({ page, columns: seededData.everyone.columns, where: seededData.everyone.where, - // presetID: everyoneID, + preset: everyoneID, }) - expect(true).toBe(true) + // for good measure, also soft navigate away and back + await page.goto(pagesUrl.admin) + await openNav(page) + await page.click(`a[href="/admin/collections/${pagesSlug}"]`) + + await assertURLParams({ + page, + columns: seededData.everyone.columns, + where: seededData.everyone.where, + preset: everyoneID, + }) }) test('should only show "edit" and "delete" controls when there is an active preset', async () => { diff --git a/test/query-presets/helpers/assertURLParams.ts b/test/query-presets/helpers/assertURLParams.ts index 2e9bf1b166..36e6653d9a 100644 --- a/test/query-presets/helpers/assertURLParams.ts +++ b/test/query-presets/helpers/assertURLParams.ts @@ -10,12 +10,12 @@ export async function assertURLParams({ page, columns, where, - presetID, + preset, }: { columns?: ColumnPreference[] page: Page - presetID?: string | undefined - where: Where + preset?: string | undefined + where?: Where }) { if (where) { // TODO: can't get columns to encode correctly @@ -32,8 +32,8 @@ export async function assertURLParams({ await page.waitForURL(columnsRegex) } - if (presetID) { - const presetRegex = new RegExp(`preset=${presetID}`) + if (preset) { + const presetRegex = new RegExp(`preset=${preset}`) await page.waitForURL(presetRegex) } } diff --git a/test/query-presets/payload-types.ts b/test/query-presets/payload-types.ts index b1c23df8e6..b81b0e04ec 100644 --- a/test/query-presets/payload-types.ts +++ b/test/query-presets/payload-types.ts @@ -154,6 +154,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -301,6 +308,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema diff --git a/tsconfig.base.json b/tsconfig.base.json index 0898ad390f..153abb8a5f 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,8 +21,15 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": ["DOM", "DOM.Iterable", "ES2022"], - "types": ["node", "jest"], + "lib": [ + "DOM", + "DOM.Iterable", + "ES2022" + ], + "types": [ + "node", + "jest" + ], "incremental": true, "isolatedModules": true, "plugins": [ @@ -31,36 +38,72 @@ } ], "paths": { - "@payload-config": 
["./test/_community/config.ts"], - "@payloadcms/admin-bar": ["./packages/admin-bar/src"], - "@payloadcms/live-preview": ["./packages/live-preview/src"], - "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], - "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], - "@payloadcms/ui": ["./packages/ui/src/exports/client/index.ts"], - "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], - "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], - "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], - "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], - "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], + "@payload-config": [ + "./test/fields/config.ts" + ], + "@payloadcms/admin-bar": [ + "./packages/admin-bar/src" + ], + "@payloadcms/live-preview": [ + "./packages/live-preview/src" + ], + "@payloadcms/live-preview-react": [ + "./packages/live-preview-react/src/index.ts" + ], + "@payloadcms/live-preview-vue": [ + "./packages/live-preview-vue/src/index.ts" + ], + "@payloadcms/ui": [ + "./packages/ui/src/exports/client/index.ts" + ], + "@payloadcms/ui/shared": [ + "./packages/ui/src/exports/shared/index.ts" + ], + "@payloadcms/ui/rsc": [ + "./packages/ui/src/exports/rsc/index.ts" + ], + "@payloadcms/ui/scss": [ + "./packages/ui/src/scss.scss" + ], + "@payloadcms/ui/scss/app.scss": [ + "./packages/ui/src/scss/app.scss" + ], + "@payloadcms/next/*": [ + "./packages/next/src/exports/*.ts" + ], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], - "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], + "@payloadcms/richtext-lexical/rsc": [ + "./packages/richtext-lexical/src/exports/server/rsc.ts" + ], + "@payloadcms/richtext-slate/rsc": [ + "./packages/richtext-slate/src/exports/server/rsc.ts" + ], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], - "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], - "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], - "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], + "@payloadcms/plugin-seo/client": [ + "./packages/plugin-seo/src/exports/client.ts" + ], + "@payloadcms/plugin-sentry/client": [ + "./packages/plugin-sentry/src/exports/client.ts" + ], + "@payloadcms/plugin-stripe/client": [ + "./packages/plugin-stripe/src/exports/client.ts" + ], + "@payloadcms/plugin-search/client": [ + "./packages/plugin-search/src/exports/client.ts" + ], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], + "@payloadcms/plugin-multi-tenant/rsc": [ + "./packages/plugin-multi-tenant/src/exports/rsc.ts" + ], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -70,25 +113,42 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": 
["./packages/plugin-multi-tenant/src/index.ts"], + "@payloadcms/plugin-multi-tenant": [ + "./packages/plugin-multi-tenant/src/index.ts" + ], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": ["./packages/next/src/exports/*"], - "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], - "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], + "@payloadcms/next": [ + "./packages/next/src/exports/*" + ], + "@payloadcms/storage-azure/client": [ + "./packages/storage-azure/src/exports/client.ts" + ], + "@payloadcms/storage-s3/client": [ + "./packages/storage-s3/src/exports/client.ts" + ], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], + "@payloadcms/storage-gcs/client": [ + "./packages/storage-gcs/src/exports/client.ts" + ], "@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": ["${configDir}/src"], - "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] + "include": [ + "${configDir}/src" + ], + "exclude": [ + "${configDir}/dist", + "${configDir}/build", + "${configDir}/temp", + "**/*.spec.ts" + ] } From d6e21adaf0fdc2eaaf3f8212a226c7436afc9cf9 Mon Sep 17 00:00:00 2001 From: German Jablonski <43938777+GermanJablo@users.noreply.github.com> Date: Fri, 18 Jul 2025 15:28:44 +0100 Subject: [PATCH 14/91] docs: shorten line length in code snippet comments to avoid horizontal scrolling (#13217) prettier doesn't seem to cover that, and horizontal scrolling in the browser is even more annoying than in the IDE. 
Regex used in the search engine: `^[ \t]*\* ` --- docs/plugins/multi-tenant.mdx | 57 ++++++++++++++++++---------- docs/rich-text/custom-features.mdx | 57 +++++++++++++++++++--------- docs/rich-text/official-features.mdx | 21 +++++++--- docs/upload/storage-adapters.mdx | 3 +- 4 files changed, 95 insertions(+), 43 deletions(-) diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index 3b507aae8b..f9c16f3e3a 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -54,7 +54,8 @@ The plugin accepts an object with the following properties: ```ts type MultiTenantPluginConfig = { /** - * After a tenant is deleted, the plugin will attempt to clean up related documents + * After a tenant is deleted, the plugin will attempt + * to clean up related documents * - removing documents with the tenant ID * - removing the tenant from users * @@ -67,19 +68,22 @@ type MultiTenantPluginConfig = { collections: { [key in CollectionSlug]?: { /** - * Set to `true` if you want the collection to behave as a global + * Set to `true` if you want the collection to + * behave as a global * * @default false */ isGlobal?: boolean /** - * Set to `false` if you want to manually apply the baseListFilter + * Set to `false` if you want to manually apply + * the baseListFilter * * @default true */ useBaseListFilter?: boolean /** - * Set to `false` if you want to handle collection access manually without the multi-tenant constraints applied + * Set to `false` if you want to handle collection access + * manually without the multi-tenant constraints applied * * @default true */ @@ -88,7 +92,8 @@ type MultiTenantPluginConfig = { } /** * Enables debug mode - * - Makes the tenant field visible in the admin UI within applicable collections + * - Makes the tenant field visible in the + * admin UI within applicable collections * * @default false */ @@ -100,22 +105,27 @@ type MultiTenantPluginConfig = { */ enabled?: boolean /** - * Field configuration for the field added to all tenant enabled collections + * Field configuration for the field added + * to all tenant enabled collections */ tenantField?: { access?: RelationshipField['access'] /** - * The name of the field added to all tenant enabled collections + * The name of the field added to all tenant + * enabled collections * * @default 'tenant' */ name?: string } /** - * Field configuration for the field added to the users collection + * Field configuration for the field added + * to the users collection * - * If `includeDefaultField` is `false`, you must include the field on your users collection manually - * This is useful if you want to customize the field or place the field in a specific location + * If `includeDefaultField` is `false`, you must + * include the field on your users collection manually + * This is useful if you want to customize the field + * or place the field in a specific location */ tenantsArrayField?: | { @@ -136,7 +146,8 @@ type MultiTenantPluginConfig = { */ arrayTenantFieldName?: string /** - * When `includeDefaultField` is `true`, the field will be added to the users collection automatically + * When `includeDefaultField` is `true`, the field will + * be added to the users collection automatically */ includeDefaultField?: true /** @@ -153,7 +164,8 @@ type MultiTenantPluginConfig = { arrayFieldName?: string arrayTenantFieldName?: string /** - * When `includeDefaultField` is `false`, you must include the field on your users collection manually + * When `includeDefaultField` is `false`, you must + * include the 
field on your users collection manually */ includeDefaultField?: false rowFields?: never @@ -162,7 +174,8 @@ type MultiTenantPluginConfig = { /** * Customize tenant selector label * - * Either a string or an object where the keys are i18n codes and the values are the string labels + * Either a string or an object where the keys are i18n + * codes and the values are the string labels */ tenantSelectorLabel?: | Partial<{ @@ -176,7 +189,8 @@ type MultiTenantPluginConfig = { */ tenantsSlug?: string /** - * Function that determines if a user has access to _all_ tenants + * Function that determines if a user has access + * to _all_ tenants * * Useful for super-admin type users */ @@ -184,15 +198,18 @@ type MultiTenantPluginConfig = { user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User, ) => boolean /** - * Opt out of adding access constraints to the tenants collection + * Opt out of adding access constraints to + * the tenants collection */ useTenantsCollectionAccess?: boolean /** - * Opt out including the baseListFilter to filter tenants by selected tenant + * Opt out including the baseListFilter to filter + * tenants by selected tenant */ useTenantsListFilter?: boolean /** - * Opt out including the baseListFilter to filter users by selected tenant + * Opt out including the baseListFilter to filter + * users by selected tenant */ useUsersTenantFilter?: boolean } @@ -327,14 +344,16 @@ type ContextType = { /** * Prevents a refresh when the tenant is changed * - * If not switching tenants while viewing a "global", set to true + * If not switching tenants while viewing a "global", + * set to true */ setPreventRefreshOnChange: React.Dispatch> /** * Sets the selected tenant ID * * @param args.id - The ID of the tenant to select - * @param args.refresh - Whether to refresh the page after changing the tenant + * @param args.refresh - Whether to refresh the page + * after changing the tenant */ setTenant: (args: { id: number | string | undefined diff --git a/docs/rich-text/custom-features.mdx b/docs/rich-text/custom-features.mdx index c29935dd83..217819ec03 100644 --- a/docs/rich-text/custom-features.mdx +++ b/docs/rich-text/custom-features.mdx @@ -474,11 +474,15 @@ const MyNodeComponent = React.lazy(() => ) /** - * This node is a DecoratorNode. DecoratorNodes allow you to render React components in the editor. + * This node is a DecoratorNode. DecoratorNodes allow + * you to render React components in the editor. * - * They need both createDom and decorate functions. createDom => outside of the html. decorate => React Component inside of the html. + * They need both createDom and decorate functions. + * createDom => outside of the html. + * decorate => React Component inside of the html. 
* - * If we used DecoratorBlockNode instead, we would only need a decorate method + * If we used DecoratorBlockNode instead, + * we would only need a decorate method */ export class MyNode extends DecoratorNode { static clone(node: MyNode): MyNode { @@ -490,9 +494,11 @@ export class MyNode extends DecoratorNode { } /** - * Defines what happens if you copy a div element from another page and paste it into the lexical editor + * Defines what happens if you copy a div element + * from another page and paste it into the lexical editor * - * This also determines the behavior of lexical's internal HTML -> Lexical converter + * This also determines the behavior of lexical's + * internal HTML -> Lexical converter */ static importDOM(): DOMConversionMap | null { return { @@ -504,14 +510,18 @@ export class MyNode extends DecoratorNode { } /** - * The data for this node is stored serialized as JSON. This is the "load function" of that node: it takes the saved data and converts it into a node. + * The data for this node is stored serialized as JSON. + * This is the "load function" of that node: it takes + * the saved data and converts it into a node. */ static importJSON(serializedNode: SerializedMyNode): MyNode { return $createMyNode() } /** - * Determines how the hr element is rendered in the lexical editor. This is only the "initial" / "outer" HTML element. + * Determines how the hr element is rendered in the + * lexical editor. This is only the "initial" / "outer" + * HTML element. */ createDOM(config: EditorConfig): HTMLElement { const element = document.createElement('div') @@ -519,22 +529,28 @@ export class MyNode extends DecoratorNode { } /** - * Allows you to render a React component within whatever createDOM returns. + * Allows you to render a React component within + * whatever createDOM returns. */ decorate(): React.ReactElement { return } /** - * Opposite of importDOM, this function defines what happens when you copy a div element from the lexical editor and paste it into another page. + * Opposite of importDOM, this function defines what + * happens when you copy a div element from the lexical + * editor and paste it into another page. * - * This also determines the behavior of lexical's internal Lexical -> HTML converter + * This also determines the behavior of lexical's + * internal Lexical -> HTML converter */ exportDOM(): DOMExportOutput { return { element: document.createElement('div') } } /** - * Opposite of importJSON. This determines what data is saved in the database / in the lexical editor state. + * Opposite of importJSON. This determines what + * data is saved in the database / in the lexical + * editor state. */ exportJSON(): SerializedLexicalNode { return { @@ -556,18 +572,23 @@ export class MyNode extends DecoratorNode { } } -// This is used in the importDOM method. Totally optional if you do not want your node to be created automatically when copy & pasting certain dom elements -// into your editor. +// This is used in the importDOM method. Totally optional +// if you do not want your node to be created automatically +// when copy & pasting certain dom elements into your editor. function $yourConversionMethod(): DOMConversionOutput { return { node: $createMyNode() } } -// This is a utility method to create a new MyNode. Utility methods prefixed with $ make it explicit that this should only be used within lexical +// This is a utility method to create a new MyNode. 
+// Utility methods prefixed with $ make it explicit +// that this should only be used within lexical export function $createMyNode(): MyNode { return $applyNodeReplacement(new MyNode()) } -// This is just a utility method you can use to check if a node is a MyNode. This also ensures correct typing. +// This is just a utility method you can use +// to check if a node is a MyNode. This also +// ensures correct typing. export function $isMyNode( node: LexicalNode | null | undefined, ): node is MyNode { @@ -626,10 +647,12 @@ export const INSERT_MYNODE_COMMAND: LexicalCommand = createCommand( ) /** - * Plugin which registers a lexical command to insert a new MyNode into the editor + * Plugin which registers a lexical command to + * insert a new MyNode into the editor */ export const MyNodePlugin: PluginComponent = () => { - // The useLexicalComposerContext hook can be used to access the lexical editor instance + // The useLexicalComposerContext hook can be used + // to access the lexical editor instance const [editor] = useLexicalComposerContext() useEffect(() => { diff --git a/docs/rich-text/official-features.mdx b/docs/rich-text/official-features.mdx index cdf1efcb18..bffa4c0b2e 100644 --- a/docs/rich-text/official-features.mdx +++ b/docs/rich-text/official-features.mdx @@ -124,12 +124,15 @@ HeadingFeature({ ```ts type IndentFeatureProps = { /** - * The nodes that should not be indented. "type" property of the nodes you don't want to be indented. - * These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist. + * The nodes that should not be indented. "type" + * property of the nodes you don't want to be indented. + * These can be: "paragraph", "heading", "listitem", + * "quote" or other indentable nodes if they exist. */ disabledNodes?: string[] /** - * If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode. + * If true, pressing Tab in the middle of a block such + * as a paragraph or heading will not insert a tabNode. * Instead, Tab will only be used for block-level indentation. * @default false */ @@ -180,7 +183,8 @@ type LinkFeatureServerProps = { */ disableAutoLinks?: 'creationOnly' | true /** - * A function or array defining additional fields for the link feature. + * A function or array defining additional + * fields for the link feature. * These will be displayed in the link editor drawer. */ fields?: @@ -235,7 +239,9 @@ LinkFeature({ ```ts type RelationshipFeatureProps = { /** - * Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached. + * Sets a maximum population depth for this relationship, + * regardless of the remaining depth when the respective + * field is reached. */ maxDepth?: number } & ExclusiveRelationshipFeatureProps @@ -274,7 +280,10 @@ type UploadFeatureProps = { } } /** - * Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached. + * Sets a maximum population depth for this upload + * (not the fields for this upload), regardless of + * the remaining depth when the respective field is + * reached. 
*/ maxDepth?: number } diff --git a/docs/upload/storage-adapters.mdx b/docs/upload/storage-adapters.mdx index fa25571939..de6420d07b 100644 --- a/docs/upload/storage-adapters.mdx +++ b/docs/upload/storage-adapters.mdx @@ -292,7 +292,8 @@ Reference any of the existing storage adapters for guidance on how this should b ```ts export interface GeneratedAdapter { /** - * Additional fields to be injected into the base collection and image sizes + * Additional fields to be injected into the base + * collection and image sizes */ fields?: Field[] /** From 7f9de6d10146739895b12c40d4046aab7c547c5b Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Mon, 21 Jul 2025 08:39:18 -0400 Subject: [PATCH 15/91] fix: empty folderType arrays break relational dbs (#13219) Relational databases were broken with folders because it was querying on: ```ts { folderType: { equals: [] } } ``` Which does not work since the select hasMany stores values in a separate table. --- .../ui/src/elements/FolderView/FolderTypeField/index.tsx | 4 ++-- .../ui/src/utilities/getFolderResultsComponentAndData.tsx | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx index 2592eff529..3ac8125945 100644 --- a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx +++ b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx @@ -80,10 +80,10 @@ export const FolderTypeField = ({ if (!readOnly || disabled) { let newValue: string | string[] = null if (selectedOption && hasMany) { - if (Array.isArray(selectedOption)) { + if (Array.isArray(selectedOption) && selectedOption.length > 0) { newValue = selectedOption.map((option) => option.value) } else { - newValue = [] + newValue = null } } else if (selectedOption && !Array.isArray(selectedOption)) { newValue = selectedOption.value diff --git a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx index 69378293b6..e2a58f3507 100644 --- a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx +++ b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx @@ -120,11 +120,6 @@ export const getFolderResultsComponentAndData = async ({ exists: false, }, }, - { - folderType: { - equals: [], - }, - }, { folderType: { equals: null, From dce898d7ca8e59a23eb4091e4c74d3414b2ea49a Mon Sep 17 00:00:00 2001 From: Jessica Rynkar <67977755+jessrynkar@users.noreply.github.com> Date: Mon, 21 Jul 2025 14:19:51 +0100 Subject: [PATCH 16/91] fix(ui): ensure publishSpecificLocale works during create operation (#13129) ### What? This PR ensures that when a document is created using the `Publish in __` button, it is saved to the correct locale. ### Why? During document creation, the buttons `Publish` or `Publish in [locale]` have the same effect. As a result, we overlooked the case where a user may specifically click `Publish in [locale]` for the first save. In this scenario, the create operation does not respect the `publishSpecificLocale` value, so the document was always saved in the default locale regardless of the intended one. ### How? Passes the `publishSpecificLocale` value to the create operation, ensuring the document and version is saved to the correct locale. 
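As a rough illustration of the approach — simplified types and a hypothetical `resolveCreateLocale` helper, not the actual `createOperation` code (the real change is in the diff below):

```ts
// Sketch only: simplified argument shape and a hypothetical helper name.
type CreateArgs = {
  publishSpecificLocale?: string
  req: { locale?: string }
}

// When the user clicks "Publish in [locale]" on a brand-new document,
// the specific locale should win over the request locale so that both
// the document and its first version are stored under that locale.
export function resolveCreateLocale(args: CreateArgs): string | undefined {
  if (args.publishSpecificLocale) {
    args.req.locale = args.publishSpecificLocale
  }

  return args.req.locale
}
```
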
**Fixes:** #13117 --- .../Version/VersionPillLabel/VersionPillLabel.tsx | 2 +- .../payload/src/collections/endpoints/create.ts | 2 ++ .../payload/src/collections/operations/create.ts | 7 +++++++ test/helpers.ts | 7 ++++++- test/localization/e2e.spec.ts | 15 +++++++++++++++ 5 files changed, 31 insertions(+), 2 deletions(-) diff --git a/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx b/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx index 1ca29a7f43..45a5cde220 100644 --- a/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx +++ b/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx @@ -116,7 +116,7 @@ export const VersionPillLabel: React.FC<{ )} )} - {localeLabel && {localeLabel}} + {localeLabel && {localeLabel}}
) } diff --git a/packages/payload/src/collections/endpoints/create.ts b/packages/payload/src/collections/endpoints/create.ts index 9398eedd72..24641fab8b 100644 --- a/packages/payload/src/collections/endpoints/create.ts +++ b/packages/payload/src/collections/endpoints/create.ts @@ -16,6 +16,7 @@ export const createHandler: PayloadHandler = async (req) => { const autosave = searchParams.get('autosave') === 'true' const draft = searchParams.get('draft') === 'true' const depth = searchParams.get('depth') + const publishSpecificLocale = req.query.publishSpecificLocale as string | undefined const doc = await createOperation({ autosave, @@ -24,6 +25,7 @@ export const createHandler: PayloadHandler = async (req) => { depth: isNumber(depth) ? depth : undefined, draft, populate: sanitizePopulateParam(req.query.populate), + publishSpecificLocale, req, select: sanitizeSelectParam(req.query.select), }) diff --git a/packages/payload/src/collections/operations/create.ts b/packages/payload/src/collections/operations/create.ts index cc09c1ad6a..9c0bfc071a 100644 --- a/packages/payload/src/collections/operations/create.ts +++ b/packages/payload/src/collections/operations/create.ts @@ -47,6 +47,7 @@ export type Arguments = { overrideAccess?: boolean overwriteExistingFiles?: boolean populate?: PopulateType + publishSpecificLocale?: string req: PayloadRequest select?: SelectType showHiddenFields?: boolean @@ -88,6 +89,10 @@ export const createOperation = async < } } + if (args.publishSpecificLocale) { + args.req.locale = args.publishSpecificLocale + } + const { autosave = false, collection: { config: collectionConfig }, @@ -99,6 +104,7 @@ export const createOperation = async < overrideAccess, overwriteExistingFiles = false, populate, + publishSpecificLocale, req: { fallbackLocale, locale, @@ -286,6 +292,7 @@ export const createOperation = async < collection: collectionConfig, docWithLocales: result, payload, + publishSpecificLocale, req, }) } diff --git a/test/helpers.ts b/test/helpers.ts index 83339774bd..ed3e73f487 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -279,7 +279,12 @@ export async function saveDocHotkeyAndAssert(page: Page): Promise { export async function saveDocAndAssert( page: Page, - selector: '#action-publish' | '#action-save' | '#action-save-draft' | string = '#action-save', + selector: + | '#action-publish' + | '#action-save' + | '#action-save-draft' + | '#publish-locale' + | string = '#action-save', expectation: 'error' | 'success' = 'success', ): Promise { await wait(500) // TODO: Fix this diff --git a/test/localization/e2e.spec.ts b/test/localization/e2e.spec.ts index ddb0dbf175..4866ef8d4d 100644 --- a/test/localization/e2e.spec.ts +++ b/test/localization/e2e.spec.ts @@ -618,6 +618,21 @@ describe('Localization', () => { await expect(searchInput).toBeVisible() await expect(searchInput).toHaveAttribute('placeholder', 'Search by Full title') }) + + describe('publish specific locale', () => { + test('should create post in correct locale with publishSpecificLocale', async () => { + await page.goto(urlPostsWithDrafts.create) + await changeLocale(page, 'es') + await fillValues({ title: 'Created In Spanish' }) + const chevronButton = page.locator('.form-submit .popup__trigger-wrap > .popup-button') + await chevronButton.click() + await saveDocAndAssert(page, '#publish-locale') + + await expect(page.locator('#field-title')).toHaveValue('Created In Spanish') + await changeLocale(page, defaultLocale) + await expect(page.locator('#field-title')).toBeEmpty() + }) + }) }) async function 
fillValues(data: Partial) { From af2ddff203185d51dcf0411814a931ebaf9f2c86 Mon Sep 17 00:00:00 2001 From: Chandler Gonzales Date: Mon, 21 Jul 2025 06:23:44 -0700 Subject: [PATCH 17/91] fix: text field validation for minLength: 1, required: false (#13124) Fixes #13113 ### How? Does not rely on JS falseyness, instead explicitly checking for null & undefined I'm not actually certain this is the approach we want to take. Some people might interpret "required" as not null, not-undefined and min length > 1 in the case of strings. If they do, this change to the behavior in the not-required case will break their expectations --- packages/payload/src/fields/validations.spec.ts | 5 +++++ packages/payload/src/fields/validations.ts | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/payload/src/fields/validations.spec.ts b/packages/payload/src/fields/validations.spec.ts index b3b822771d..4d3d3dd116 100644 --- a/packages/payload/src/fields/validations.spec.ts +++ b/packages/payload/src/fields/validations.spec.ts @@ -61,6 +61,11 @@ describe('Field Validations', () => { const result = text(val, { ...options, minLength: 10 }) expect(result).toBe(true) }) + it('should validate minLength with empty string', () => { + const val = '' + const result = text(val, { ...options, required: false, minLength: 1 }) + expect(result).toBe('validation:longerThanMin') + }) it('should validate an array of texts', async () => { const val = ['test'] const result = text(val, { ...options, hasMany: true }) diff --git a/packages/payload/src/fields/validations.ts b/packages/payload/src/fields/validations.ts index bd89a162ba..7dc86df952 100644 --- a/packages/payload/src/fields/validations.ts +++ b/packages/payload/src/fields/validations.ts @@ -61,7 +61,7 @@ export const text: TextFieldValidation = ( let maxLength!: number if (!required) { - if (!value) { + if (value === undefined || value === null) { return true } } From 0eb8f759461769532c149daee49e83e637337669 Mon Sep 17 00:00:00 2001 From: fgrsource <107464125+fgrsource@users.noreply.github.com> Date: Mon, 21 Jul 2025 17:18:40 +0200 Subject: [PATCH 18/91] docs: fix typo, example was not valid JSON (#13224) ### What? A comma is missing in the example code. This results in not valid JSON. ### Why? I stumbled upon it, while setting up a Tenant-based Payload for the first time. ### How? Adding a comma results in valid JSON. Fixes # Added a comma. ;) --- docs/plugins/multi-tenant.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index f9c16f3e3a..39eab63b8b 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -230,7 +230,7 @@ const config = buildConfig({ slug: 'tenants', admin: { useAsTitle: 'name' - } + }, fields: [ // remember, you own these fields // these are merely suggestions/examples From c1cfceb7dcb92319393fb4f2152f2cb3d2b4a8c7 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Tue, 22 Jul 2025 19:53:25 +0300 Subject: [PATCH 19/91] fix(db-mongodb): handle duplicate unique index error for DocumentDB (#13239) Currently, with DocumentDB instead of a friendly error like "Value must be unique" we see a generic "Something went wrong" message. This PR fixes that by adding a fallback to parse the message instead of using `error.keyValue` which doesn't exist for responses from DocumentDB. 
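To make the failure mode concrete: a DocumentDB duplicate-key response only carries a message string, so the field name has to be recovered from the index name embedded in it. A minimal sketch of that fallback, using an illustrative (not verbatim) message:

```ts
// Illustrative message shape — DocumentDB omits `keyValue`, so only the
// index name ("email_1") tells us which field collided.
const sampleMessage =
  'E11000 duplicate key error collection: payload.users index: email_1 dup key: { email: "demo@example.com" }'

// Same idea as the fallback below: take "email" from "index: email_1".
const match = sampleMessage.match(/index:\s*(.*?)_/)
const path = match?.[1] ?? ''
console.log(path) // "email"
```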
--- .../db-mongodb/src/utilities/handleError.ts | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/packages/db-mongodb/src/utilities/handleError.ts b/packages/db-mongodb/src/utilities/handleError.ts index d7a44656ef..172548ff6d 100644 --- a/packages/db-mongodb/src/utilities/handleError.ts +++ b/packages/db-mongodb/src/utilities/handleError.ts @@ -2,6 +2,15 @@ import type { PayloadRequest } from 'payload' import { ValidationError } from 'payload' +function extractFieldFromMessage(message: string) { + // eslint-disable-next-line regexp/no-super-linear-backtracking + const match = message.match(/index:\s*(.*?)_/) + if (match && match[1]) { + return match[1] // e.g., returns "email" from "index: email_1" + } + return null +} + export const handleError = ({ collection, error, @@ -18,20 +27,22 @@ export const handleError = ({ } // Handle uniqueness error from MongoDB - if ( - 'code' in error && - error.code === 11000 && - 'keyValue' in error && - error.keyValue && - typeof error.keyValue === 'object' - ) { + if ('code' in error && error.code === 11000) { + let path: null | string = null + + if ('keyValue' in error && error.keyValue && typeof error.keyValue === 'object') { + path = Object.keys(error.keyValue)[0] ?? '' + } else if ('message' in error && typeof error.message === 'string') { + path = extractFieldFromMessage(error.message) + } + throw new ValidationError( { collection, errors: [ { message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: Object.keys(error.keyValue)[0] ?? '', + path: path ?? '', }, ], global, From 77f279e7680d860adb845f3fc3ac90ad48d20897 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 13:12:20 -0400 Subject: [PATCH 20/91] docs: remove payload cloud (#13240) Remove Payload Cloud from docs --- docs/cloud/configuration.mdx | 62 -------------- docs/cloud/creating-a-project.mdx | 53 ------------ docs/cloud/projects.mdx | 137 ------------------------------ docs/cloud/teams.mdx | 35 -------- docs/production/deployment.mdx | 10 --- 5 files changed, 297 deletions(-) delete mode 100644 docs/cloud/configuration.mdx delete mode 100644 docs/cloud/creating-a-project.mdx delete mode 100644 docs/cloud/projects.mdx delete mode 100644 docs/cloud/teams.mdx diff --git a/docs/cloud/configuration.mdx b/docs/cloud/configuration.mdx deleted file mode 100644 index 6bb352ef96..0000000000 --- a/docs/cloud/configuration.mdx +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Project Configuration -label: Configuration -order: 20 -desc: Quickly configure and deploy your Payload Cloud project in a few simple steps. -keywords: configuration, config, settings, project, cloud, payload cloud, deploy, deployment ---- - -## Select your plan - -Once you have created a project, you will need to select your plan. This will determine the resources that are allocated to your project and the features that are available to you. - - - Note: All Payload Cloud teams that deploy a project require a card on file. - This helps us prevent fraud and abuse on our platform. If you select a plan - with a free trial, you will not be charged until your trial period is over. - We’ll remind you 7 days before your trial ends and you can cancel anytime. 
- - -## Project Details - -| Option | Description | -| ---------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Region** | Select the region closest to your audience. This will ensure the fastest communication between your data and your client. | -| **Project Name** | A name for your project. You can change this at any time. | -| **Project Slug** | Choose a unique slug to identify your project. This needs to be unique for your team and you can change it any time. | -| **Team** | Select the team you want to create the project under. If this is your first project, a personal team will be created for you automatically. You can modify your team settings and invite new members at any time from the Team Settings page. | - -## Build Settings - -If you are deploying a new project from a template, the following settings will be automatically configured for you. If you are using your own repository, you need to make sure your build settings are accurate for your project to deploy correctly. - -| Option | Description | -| -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Root Directory** | The folder where your `package.json` file lives. | -| **Install Command** | The command used to install your modules, for example: `yarn install` or `npm install` | -| **Build Command** | The command used to build your application, for example: `yarn build` or `npm run build` | -| **Serve Command** | The command used to serve your application, for example: `yarn serve` or `npm run serve` | -| **Branch to Deploy** | Select the branch of your repository that you want to deploy from. This is the branch that will be used to build your project when you commit new changes. | -| **Default Domain** | Set a default domain for your project. This must be unique and you will not able to change it. You can always add a custom domain later in your project settings. | - -## Environment Variables - -Any of the features in Payload Cloud that require environment variables will automatically be provided to your application. If your app requires any custom environment variables, you can set them here. - - - Note: For security reasons, any variables you wish to provide to the [Admin - Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`.  Learn more - [here](../configuration/environment-vars). - - -## Payment - -Payment methods can be set per project and can be updated any time. You can use team’s default payment method, or add a new one. Modify your payment methods in your Project settings / Team settings. - - - **Note:** All Payload Cloud teams that deploy a project require a card on - file. This helps us prevent fraud and abuse on our platform. If you select a - plan with a free trial, you will not be charged until your trial period is - over. We’ll remind you 7 days before your trial ends and you can cancel - anytime. 
- diff --git a/docs/cloud/creating-a-project.mdx b/docs/cloud/creating-a-project.mdx deleted file mode 100644 index cabda09025..0000000000 --- a/docs/cloud/creating-a-project.mdx +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: Getting Started -label: Getting Started -order: 10 -desc: Get started with Payload Cloud, a deployment solution specifically designed for Node + MongoDB applications. -keywords: cloud, hosted, database, storage, email, deployment, serverless, node, mongodb, s3, aws, cloudflare, atlas, resend, payload, cms ---- - -A deployment solution specifically designed for Node.js + MongoDB applications, offering seamless deployment of your entire stack in one place. You can get started in minutes with a one-click template or bring your own codebase with you. - -Payload Cloud offers various plans tailored to meet your specific needs, including a MongoDB Atlas database, S3 file storage, and email delivery powered by [Resend](https://resend.com). To see a full breakdown of features and plans, see our [Cloud Pricing page](https://payloadcms.com/cloud-pricing). - -To get started, you first need to create an account. Head over to [the login screen](https://payloadcms.com/login) and **Register for Free**. - - - To create your first project, you can either select [a - template](#starting-from-a-template) or [import an existing - project](#importing-from-an-existing-codebase) from GitHub. - - -## Starting from a Template - -Templates come preconfigured and provide a one-click solution to quickly deploy a new application. - -![Screen for creating a new project from a template](https://payloadcms.com/images/docs/cloud/create-from-template.jpg) -_Creating a new project from a template._ - -After creating an account, select your desired template from the Projects page. At this point, you need to connect to authorize the Payload Cloud application with your GitHub account. Click Continue with GitHub and follow the prompts to authorize the app. - -Next, select your `GitHub Scope`. If you belong to multiple organizations, they will show up here. If you do not see the organization you are looking for, you may need to adjust your GitHub app permissions. - -After selecting your scope, create a unique `repository name` and select whether you want your repository to be public or private on GitHub. - - - **Note:** Public repositories can be accessed by anyone online, while private - repositories grant access only to you and anyone you explicitly authorize. - - -Once you are ready, click **Create Project**. This will clone the selected template to a new repository in your GitHub account, and take you to the configuration page to set up your project for deployment. - -## Importing from an Existing Codebase - -Payload Cloud works for any Node.js + MongoDB app. From the New Project page, select **import an existing Git codebase**. Choose the organization and select the repository you want to import. From here, you will be taken to the configuration page to set up your project for deployment. - -![Screen for creating a new project from an existing repository](https://payloadcms.com/images/docs/cloud/create-from-existing.jpg) -_Creating a new project from an existing repository._ - - - **Note:** In order to make use of the features of Payload Cloud in your own - codebase, you will need to add the [Cloud - Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) - to your Payload app. 
- diff --git a/docs/cloud/projects.mdx b/docs/cloud/projects.mdx deleted file mode 100644 index 79df6a69bb..0000000000 --- a/docs/cloud/projects.mdx +++ /dev/null @@ -1,137 +0,0 @@ ---- -title: Cloud Projects -label: Projects -order: 40 -desc: Manage your Payload Cloud projects. -keywords: cloud, payload cloud, projects, project, overview, database, file storage, build settings, environment variables, custom domains, email, developing locally ---- - -## Overview - - - The overview tab shows your most recent deployment, along with build and - deployment logs. From here, you can see your live URL, deployment details like - timestamps and commit hash, as well as the status of your deployment. You can - also trigger a redeployment manually, which will rebuild your project using - the current configuration. - - -![Payload Cloud Overview Page](https://payloadcms.com/images/docs/cloud/overview-page.jpg) -_A screenshot of the Overview page for a Cloud project._ - -## Database - -Your Payload Cloud project comes with a MongoDB serverless Atlas DB instance or a Dedicated Atlas cluster, depending on your plan. To interact with your cloud database, you will be provided with a MongoDB connection string. This can be found under the **Database** tab of your project. - -`mongodb+srv://your_connection_string` - -## File Storage - -Payload Cloud gives you S3 file storage backed by Cloudflare as a CDN, and this plugin extends Payload so that all of your media will be stored in S3 rather than locally. - -AWS Cognito is used for authentication to your S3 bucket. The [Payload Cloud Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) will automatically pick up these values. These values are only if you'd like to access your files directly, outside of Payload Cloud. - -### Accessing Files Outside of Payload Cloud - -If you'd like to access your files outside of Payload Cloud, you'll need to retrieve some values from your project's settings and put them into your environment variables. In Payload Cloud, navigate to the File Storage tab and copy the values using the copy button. Put these values in your .env file. Also copy the Cognito Password value separately and put into your .env file as well. - -When you are done, you should have the following values in your .env file: - -```env -PAYLOAD_CLOUD=true -PAYLOAD_CLOUD_ENVIRONMENT=prod -PAYLOAD_CLOUD_COGNITO_USER_POOL_CLIENT_ID= -PAYLOAD_CLOUD_COGNITO_USER_POOL_ID= -PAYLOAD_CLOUD_COGNITO_IDENTITY_POOL_ID= -PAYLOAD_CLOUD_PROJECT_ID= -PAYLOAD_CLOUD_BUCKET= -PAYLOAD_CLOUD_BUCKET_REGION= -PAYLOAD_CLOUD_COGNITO_PASSWORD= -``` - -The plugin will pick up these values and use them to access your files. - -## Build Settings - -You can update settings from your Project’s Settings tab. Changes to your build settings will trigger a redeployment of your project. - -## Environment Variables - -From the Environment Variables page of the Settings tab, you can add, update and delete variables for use in your project. Like build settings, these changes will trigger a redeployment of your project. - - - Note: For security reasons, any variables you wish to provide to the [Admin - Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. [More - details](../configuration/environment-vars). - - -## Custom Domains - -With Payload Cloud, you can add custom domain names to your project. To do so, first go to the Domains page of the Settings tab of your project. Here you can see your default domain. 
To add a new domain, type in the domain name you wish to use. - - - Note: do not include the protocol (http:// or https://) or any paths (/page). - Only include the domain name and extension, and optionally a subdomain. - - your-domain.com - backend.your-domain.com - - -Once you click save, a DNS record will be generated for your domain name to point to your live project. Add this record into your DNS provider’s records, and once the records are resolving properly (this can take 1hr to 48hrs in some cases), your domain will now to point to your live project. - -You will also need to configure your Payload project to use your specified domain. In your `payload.config.ts` file, specify your `serverURL` with your domain: - -```ts -export default buildConfig({ - serverURL: 'https://example.com', - // the rest of your config, -}) -``` - -## Email - -Powered by [Resend](https://resend.com), Payload Cloud comes with integrated email support out of the box. No configuration is needed, and you can use `payload.sendEmail()` to send email right from your Payload app. To learn more about sending email with Payload, checkout the [Email Configuration](../email/overview) overview. - -If you are on the Pro or Enterprise plan, you can add your own custom Email domain name. From the Email page of your project’s Settings, add the domain you wish to use for email delivery. This will generate a set of DNS records. Add these records to your DNS provider and click verify to check that your records are resolving properly. Once verified, your emails will now be sent from your custom domain name. - -## Developing Locally - -To make changes to your project, you will need to clone the repository defined in your project settings to your local machine. In order to run your project locally, you will need configure your local environment first. Refer to your repository’s `README.md` file to see the steps needed for your specific template. - -From there, you are ready to make updates to your project. When you are ready to make your changes live, commit your changes to the branch you specified in your Project settings, and your application will automatically trigger a redeploy and build from your latest commit. - -## Cloud Plugin - -Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add this into your project. To do so, add the plugin to your Payload Config: - -`pnpm add @payloadcms/payload-cloud` - -```js -import { payloadCloudPlugin } from '@payloadcms/payload-cloud' -import { buildConfig } from 'payload' - -export default buildConfig({ - plugins: [payloadCloudPlugin()], - // rest of config -}) -``` - - - **Note:** If your Payload Config already has an email with transport, this - will take precedence over Payload Cloud's email service. - - - - Good to know: the Payload Cloud Plugin was previously named - `@payloadcms/plugin-cloud`. If you are using this plugin, you should update to - the new package name. - - -#### **Optional configuration** - -If you wish to opt-out of any Payload cloud features, the plugin also accepts options to do so. 
- -```js -payloadCloud({ - storage: false, // Disable file storage - email: false, // Disable email delivery -}) -``` diff --git a/docs/cloud/teams.mdx b/docs/cloud/teams.mdx deleted file mode 100644 index a7f5bd97db..0000000000 --- a/docs/cloud/teams.mdx +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Cloud Teams -label: Teams -order: 30 -desc: Manage your Payload Cloud team and billing settings. -keywords: team, teams, billing, subscription, payment, plan, plans, cloud, payload cloud ---- - - - Within Payload Cloud, the team management feature offers you the ability to - manage your organization, team members, billing, and subscription settings. - - -![Payload Cloud Team Settings](https://payloadcms.com/images/docs/cloud/team-settings.jpg) -_A screenshot of the Team Settings page._ - -## Members - -Each team has members that can interact with your projects. You can invite multiple people to your team and each individual can belong to more than one team. You can assign them either `owner` or `user` permissions. Owners are able to make admin-only changes, such as deleting projects, and editing billing information. - -## Adding Members - -To add a new member to your team, visit your Team’s Settings page, and click “Invite Teammate”. You can then add their email address, and assign their role. Press “Save” to send the invitations, which will send an email to the invited team member where they can create a new account. - -## Billing - -Users can update billing settings and subscriptions for any teams where they are designated as an `owner`. To make updates to the team’s payment methods, visit the Billing page under the Team Settings tab. You can add new cards, delete cards, and set a payment method as a default. The default payment method will be used in the event that another payment method fails. - -## Subscriptions - -From the Subscriptions page, a team owner can see all current plans for their team. From here, you can see the price of each plan, if there is an active trial, and when you will be billed next. - -## Invoices - -The Invoices page will you show you the invoices for your account, as well as the status on their payment. diff --git a/docs/production/deployment.mdx b/docs/production/deployment.mdx index e86898f004..1865133ece 100644 --- a/docs/production/deployment.mdx +++ b/docs/production/deployment.mdx @@ -24,16 +24,6 @@ Payload can be deployed _anywhere that Next.js can run_ - including Vercel, Netl But it's important to remember that most Payload projects will also need a database, file storage, an email provider, and a CDN. Make sure you have all of the requirements that your project needs, no matter what deployment platform you choose. -Often, the easiest and fastest way to deploy Payload is to use [Payload Cloud](https://payloadcms.com/new) — where you get everything you need out of the box, including: - -1. A MongoDB Atlas database -1. S3 file storage -1. Resend email service -1. Cloudflare CDN -1. Blue / green deployments -1. Logs -1. And more - ## Basics Payload runs fully in Next.js, so the [Next.js build process](https://nextjs.org/docs/app/building-your-application/deploying) is used for building Payload. 
If you've used `create-payload-app` to create your project, executing the `build` From e7a652f0a8aedf5a3aa148c599709de4625dfbdc Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 13:27:44 -0400 Subject: [PATCH 21/91] build: suppress pnpm update notification (#13241) Suppress pnpm update notification --- pnpm-workspace.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 054555c688..3c8d382cdc 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,9 +1,8 @@ packages: - # all packages in direct subdirs of packages/ - 'packages/*' - 'tools/*' - 'test' - 'templates/blank' - 'templates/website' - # exclude packages that are inside test directories - # - '!**/test/**' + +updateNotifier: false From 246a42b72781231472cbc4acf6f06fa7035c00d7 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Tue, 22 Jul 2025 14:09:04 -0400 Subject: [PATCH 22/91] chore(plugin-import-export): use debug-level logging for createExport process (#13242) ### What? Replaces all `payload.logger.info` calls with `payload.logger.debug` in the `createExport` function. ### Why? info logs are too verbose. Using debug ensures detailed logs. ### How? - Updated all logger calls in `createExport` to use `debug` instead of `info`. --- .../src/export/createExport.ts | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 2b4b05bff2..40e1b954ff 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -64,7 +64,7 @@ export const createExport = async (args: CreateExportArgs) => { } = args if (debug) { - req.payload.logger.info({ + req.payload.logger.debug({ message: 'Starting export process with args:', collectionSlug, drafts, @@ -84,7 +84,7 @@ export const createExport = async (args: CreateExportArgs) => { const select = Array.isArray(fields) && fields.length > 0 ? 
getSelect(fields) : undefined if (debug) { - req.payload.logger.info({ message: 'Export configuration:', name, isCSV, locale }) + req.payload.logger.debug({ message: 'Export configuration:', name, isCSV, locale }) } const findArgs = { @@ -102,7 +102,7 @@ export const createExport = async (args: CreateExportArgs) => { } if (debug) { - req.payload.logger.info({ message: 'Find arguments:', findArgs }) + req.payload.logger.debug({ message: 'Find arguments:', findArgs }) } const toCSVFunctions = getCustomFieldFunctions({ @@ -129,7 +129,7 @@ export const createExport = async (args: CreateExportArgs) => { if (download) { if (debug) { - req.payload.logger.info('Pre-scanning all columns before streaming') + req.payload.logger.debug('Pre-scanning all columns before streaming') } const allColumnsSet = new Set() @@ -155,7 +155,7 @@ export const createExport = async (args: CreateExportArgs) => { } if (debug) { - req.payload.logger.info(`Discovered ${allColumns.length} columns`) + req.payload.logger.debug(`Discovered ${allColumns.length} columns`) } const encoder = new TextEncoder() @@ -167,7 +167,7 @@ export const createExport = async (args: CreateExportArgs) => { const result = await payload.find({ ...findArgs, page: streamPage }) if (debug) { - req.payload.logger.info(`Streaming batch ${streamPage} with ${result.docs.length} docs`) + req.payload.logger.debug(`Streaming batch ${streamPage} with ${result.docs.length} docs`) } if (result.docs.length === 0) { @@ -198,7 +198,7 @@ export const createExport = async (args: CreateExportArgs) => { if (!result.hasNextPage) { if (debug) { - req.payload.logger.info('Stream complete - no more pages') + req.payload.logger.debug('Stream complete - no more pages') } this.push(null) // End the stream } @@ -215,7 +215,7 @@ export const createExport = async (args: CreateExportArgs) => { // Non-download path (buffered export) if (debug) { - req.payload.logger.info('Starting file generation') + req.payload.logger.debug('Starting file generation') } const outputData: string[] = [] @@ -232,7 +232,7 @@ export const createExport = async (args: CreateExportArgs) => { }) if (debug) { - req.payload.logger.info( + req.payload.logger.debug( `Processing batch ${findArgs.page} with ${result.docs.length} documents`, ) } @@ -281,12 +281,12 @@ export const createExport = async (args: CreateExportArgs) => { const buffer = Buffer.from(format === 'json' ? 
`[${outputData.join(',')}]` : outputData.join('')) if (debug) { - req.payload.logger.info(`${format} file generation complete`) + req.payload.logger.debug(`${format} file generation complete`) } if (!id) { if (debug) { - req.payload.logger.info('Creating new export file') + req.payload.logger.debug('Creating new export file') } req.file = { name, @@ -296,7 +296,7 @@ export const createExport = async (args: CreateExportArgs) => { } } else { if (debug) { - req.payload.logger.info(`Updating existing export with id: ${id}`) + req.payload.logger.debug(`Updating existing export with id: ${id}`) } await req.payload.update({ id, @@ -312,6 +312,6 @@ export const createExport = async (args: CreateExportArgs) => { }) } if (debug) { - req.payload.logger.info('Export process completed successfully') + req.payload.logger.debug('Export process completed successfully') } } From 412bf4ff735c5dc317ecaa20913d4270bcd92b29 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Tue, 22 Jul 2025 15:23:02 -0400 Subject: [PATCH 23/91] fix(ui): select all should reset when params change, page, filter, etc (#12612) Fixes #11938 Fixes https://github.com/payloadcms/payload/issues/13154 When select-all is checked and you filter or change the page, the selected documents should reset. --- packages/ui/src/providers/Selection/index.tsx | 9 +++++-- test/admin/e2e/list-view/e2e.spec.ts | 27 +++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/packages/ui/src/providers/Selection/index.tsx b/packages/ui/src/providers/Selection/index.tsx index 921ae8afa2..079866061b 100644 --- a/packages/ui/src/providers/Selection/index.tsx +++ b/packages/ui/src/providers/Selection/index.tsx @@ -6,6 +6,7 @@ import * as qs from 'qs-esm' import React, { createContext, use, useCallback, useEffect, useRef, useState } from 'react' import { parseSearchParams } from '../../utilities/parseSearchParams.js' +import { useListQuery } from '../ListQuery/index.js' import { useLocale } from '../Locale/index.js' export enum SelectAllStatus { @@ -54,6 +55,7 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD const [selectAll, setSelectAll] = useState(SelectAllStatus.None) const [count, setCount] = useState(0) const searchParams = useSearchParams() + const { query } = useListQuery() const toggleAll = useCallback( (allAvailable = false) => { @@ -201,7 +203,11 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD setCount(newCount) }, [selectAll, selected, totalDocs]) - // eslint-disable-next-line react-compiler/react-compiler -- TODO: fix + useEffect(() => { + setSelectAll(SelectAllStatus.None) + setSelected(new Map()) + }, [query]) + contextRef.current = { count, getQueryParams, @@ -213,7 +219,6 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD totalDocs, } - // eslint-disable-next-line react-compiler/react-compiler -- TODO: fix return {children} } diff --git a/test/admin/e2e/list-view/e2e.spec.ts b/test/admin/e2e/list-view/e2e.spec.ts index bff63dd6fd..2e8ec07ccf 100644 --- a/test/admin/e2e/list-view/e2e.spec.ts +++ b/test/admin/e2e/list-view/e2e.spec.ts @@ -1649,6 +1649,33 @@ describe('List View', () => { 'Custom placeholder', ) }) + + test('should reset list selection when query params change', async () => { + await deleteAllPosts() + await Promise.all(Array.from({ length: 12 }, (_, i) => createPost({ title: `post${i + 1}` }))) + await page.goto(postsUrl.list) + + const pageOneButton = 
page.locator('.paginator__page', { hasText: '1' }) + await expect(pageOneButton).toBeVisible() + await pageOneButton.click() + + await page.locator('.checkbox-input:has(#select-all)').locator('input').click() + await expect(page.locator('.checkbox-input:has(#select-all)').locator('input')).toBeChecked() + await expect(page.locator('.list-selection')).toContainText('5 selected') + + const pageTwoButton = page.locator('.paginator__page', { hasText: '2' }) + await expect(pageTwoButton).toBeVisible() + await pageTwoButton.click() + + await expect( + page.locator('.checkbox-input:has(#select-all) input:not([checked])'), + ).toBeVisible() + + await page.locator('.row-1 .cell-_select input').check() + await page.locator('.row-2 .cell-_select input').check() + + await expect(page.locator('.list-selection')).toContainText('2 selected') + }) }) async function createPost(overrides?: Partial): Promise { From 3f8fb6734cc5a8cff9dd529b5f4ba4c9252b8157 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 16:44:56 -0400 Subject: [PATCH 24/91] ci: default audit-dependencies script to high severity (#13244) Default the audit-dependencies workflow to use high severity by default. --- .github/workflows/audit-dependencies.sh | 7 ++++--- .github/workflows/audit-dependencies.yml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/audit-dependencies.sh b/.github/workflows/audit-dependencies.sh index 107c2a34a4..5e16310078 100755 --- a/.github/workflows/audit-dependencies.sh +++ b/.github/workflows/audit-dependencies.sh @@ -1,14 +1,15 @@ #!/bin/bash -severity=${1:-"critical"} -audit_json=$(pnpm audit --prod --json) +severity=${1:-"high"} output_file="audit_output.json" echo "Auditing for ${severity} vulnerabilities..." +audit_json=$(pnpm audit --prod --json) + echo "${audit_json}" | jq --arg severity "${severity}" ' .advisories | to_entries | - map(select(.value.patched_versions != "<0.0.0" and .value.severity == $severity) | + map(select(.value.patched_versions != "<0.0.0" and (.value.severity == $severity or ($severity == "high" and .value.severity == "critical"))) | { package: .value.module_name, vulnerable: .value.vulnerable_versions, diff --git a/.github/workflows/audit-dependencies.yml b/.github/workflows/audit-dependencies.yml index 043ef633e9..df4056691b 100644 --- a/.github/workflows/audit-dependencies.yml +++ b/.github/workflows/audit-dependencies.yml @@ -9,7 +9,7 @@ on: audit-level: description: The level of audit to run (low, moderate, high, critical) required: false - default: critical + default: high debug: description: Enable debug logging required: false From 94f5e790f6ed0fa3727cb830d827e49fae71f399 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 23 Jul 2025 01:45:55 -0700 Subject: [PATCH 25/91] perf(drizzle): single-roundtrip db updates for simple collections (#13186) Currently, an optimized DB update (simple data => no delete-and-create-row) does the following: 1. sql UPDATE 2. sql SELECT This PR reduces this further to one single DB call for simple collections: 1. sql UPDATE with RETURNING() This only works for simple collections that do not have any fields that need to be fetched from other tables. If a collection has fields like relationship or blocks, we'll need that separate SELECT call to join in the other tables. 
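In drizzle terms, the change roughly collapses the previous two-roundtrip pattern into a single statement for simple collections. A simplified sketch (assumed names: `db`, `table`, `tableName`, `row`, and `id` stand in for the adapter's runtime values; the real code also maps selected columns and transforms the result):

```ts
import { eq } from 'drizzle-orm'

// Before: two roundtrips — UPDATE the row, then SELECT it back.
await db.update(table).set(row).where(eq(table.id, id))
const doc = await db.query[tableName].findFirst({ where: eq(table.id, id) })

// After, for collections without joined tables (no blocks, arrays,
// relationships, etc.): a single UPDATE ... RETURNING roundtrip.
const [updated] = await db
  .update(table)
  .set(row)
  .where(eq(table.id, id))
  .returning()
```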
In 4.0, we can remove all "complex" fields from the jobs collection and replace them with a JSON field to make use of this optimization --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210803039809814 --- .../drizzle/src/find/buildFindManyArgs.ts | 9 +- packages/drizzle/src/upsertRow/index.ts | 761 +++++++------- test/database/config.postgreslogs.ts | 19 + test/database/config.ts | 933 +---------------- test/database/getConfig.ts | 942 ++++++++++++++++++ test/database/payload-types.ts | 27 + test/database/postgres-logs.int.spec.ts | 91 ++ test/database/postgres-vector.int.spec.ts | 4 +- test/database/seed.ts | 9 - test/database/shared.ts | 15 - test/select/config.postgreslogs.ts | 19 + test/select/config.ts | 122 +-- test/select/getConfig.ts | 119 +++ test/select/postgreslogs.int.spec.ts | 179 ++++ 14 files changed, 1822 insertions(+), 1427 deletions(-) create mode 100644 test/database/config.postgreslogs.ts create mode 100644 test/database/getConfig.ts create mode 100644 test/database/postgres-logs.int.spec.ts create mode 100644 test/select/config.postgreslogs.ts create mode 100644 test/select/getConfig.ts create mode 100644 test/select/postgreslogs.int.spec.ts diff --git a/packages/drizzle/src/find/buildFindManyArgs.ts b/packages/drizzle/src/find/buildFindManyArgs.ts index 4febf335d1..c45bff699f 100644 --- a/packages/drizzle/src/find/buildFindManyArgs.ts +++ b/packages/drizzle/src/find/buildFindManyArgs.ts @@ -44,7 +44,7 @@ export const buildFindManyArgs = ({ select, tableName, versions, -}: BuildFindQueryArgs): Record => { +}: BuildFindQueryArgs): Result => { const result: Result = { extras: {}, with: {}, @@ -134,5 +134,12 @@ export const buildFindManyArgs = ({ result.with._locales = _locales } + // Delete properties that are empty + for (const key of Object.keys(result)) { + if (!Object.keys(result[key]).length) { + delete result[key] + } + } + return result } diff --git a/packages/drizzle/src/upsertRow/index.ts b/packages/drizzle/src/upsertRow/index.ts index 72f89435ec..52d686a55e 100644 --- a/packages/drizzle/src/upsertRow/index.ts +++ b/packages/drizzle/src/upsertRow/index.ts @@ -1,4 +1,5 @@ import type { LibSQLDatabase } from 'drizzle-orm/libsql' +import type { SelectedFields } from 'drizzle-orm/sqlite-core' import type { TypeWithID } from 'payload' import { eq } from 'drizzle-orm' @@ -53,434 +54,496 @@ export const upsertRow = async | TypeWithID>( const drizzle = db as LibSQLDatabase - await drizzle - .update(adapter.tables[tableName]) - .set(row) - // TODO: we can skip fetching idToUpdate here with using the incoming where - .where(eq(adapter.tables[tableName].id, id)) - } else { - // Split out the incoming data into the corresponding: - // base row, locales, relationships, blocks, and arrays - const rowToInsert = transformForWrite({ + if (ignoreResult) { + await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + return ignoreResult === 'idOnly' ? 
({ id } as T) : null + } + + const findManyArgs = buildFindManyArgs({ adapter, - data, - enableAtomicWrites: false, + depth: 0, fields, - path, + joinQuery: false, + select, tableName, }) - // First, we insert the main row - try { - if (operation === 'update') { - const target = upsertTarget || adapter.tables[tableName].id + const findManyKeysLength = Object.keys(findManyArgs).length + const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0 - if (id) { - rowToInsert.row.id = id - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target }, - tableName, - values: rowToInsert.row, - }) - } else { - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target, where }, - tableName, - values: rowToInsert.row, - }) - } - } else { - if (adapter.allowIDOnCreate && data.id) { - rowToInsert.row.id = data.id + if (findManyKeysLength === 0 || hasOnlyColumns) { + // Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections + // without complex fields that live in separate tables like blocks, arrays, relationships, etc. + + const selectedFields: SelectedFields = {} + if (hasOnlyColumns) { + for (const [column, enabled] of Object.entries(findManyArgs.columns)) { + if (enabled) { + selectedFields[column] = adapter.tables[tableName][column] + } } + } + + const docs = await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + .returning(Object.keys(selectedFields).length ? selectedFields : undefined) + + return transform({ + adapter, + config: adapter.payload.config, + data: docs[0], + fields, + joinQuery: false, + tableName, + }) + } + + // DB Update that needs the result, potentially with joins => need to update first, then find. returning() does not work with joins. 
+ + await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + + findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id) + + const doc = await db.query[tableName].findFirst(findManyArgs) + + return transform({ + adapter, + config: adapter.payload.config, + data: doc, + fields, + joinQuery: false, + tableName, + }) + } + // Split out the incoming data into the corresponding: + // base row, locales, relationships, blocks, and arrays + const rowToInsert = transformForWrite({ + adapter, + data, + enableAtomicWrites: false, + fields, + path, + tableName, + }) + + // First, we insert the main row + try { + if (operation === 'update') { + const target = upsertTarget || adapter.tables[tableName].id + + if (id) { + rowToInsert.row.id = id ;[insertedRow] = await adapter.insert({ db, + onConflictDoUpdate: { set: rowToInsert.row, target }, + tableName, + values: rowToInsert.row, + }) + } else { + ;[insertedRow] = await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target, where }, tableName, values: rowToInsert.row, }) } - - const localesToInsert: Record[] = [] - const relationsToInsert: Record[] = [] - const textsToInsert: Record[] = [] - const numbersToInsert: Record[] = [] - const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} - const selectsToInsert: { [selectTableName: string]: Record[] } = {} - - // If there are locale rows with data, add the parent and locale to each - if (Object.keys(rowToInsert.locales).length > 0) { - Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { - localeRow._parentID = insertedRow.id - localeRow._locale = locale - localesToInsert.push(localeRow) - }) + } else { + if (adapter.allowIDOnCreate && data.id) { + rowToInsert.row.id = data.id } + ;[insertedRow] = await adapter.insert({ + db, + tableName, + values: rowToInsert.row, + }) + } - // If there are relationships, add parent to each - if (rowToInsert.relationships.length > 0) { - rowToInsert.relationships.forEach((relation) => { - relation.parent = insertedRow.id - relationsToInsert.push(relation) - }) - } + const localesToInsert: Record[] = [] + const relationsToInsert: Record[] = [] + const textsToInsert: Record[] = [] + const numbersToInsert: Record[] = [] + const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} + const selectsToInsert: { [selectTableName: string]: Record[] } = {} - // If there are texts, add parent to each - if (rowToInsert.texts.length > 0) { - rowToInsert.texts.forEach((textRow) => { - textRow.parent = insertedRow.id - textsToInsert.push(textRow) - }) - } + // If there are locale rows with data, add the parent and locale to each + if (Object.keys(rowToInsert.locales).length > 0) { + Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { + localeRow._parentID = insertedRow.id + localeRow._locale = locale + localesToInsert.push(localeRow) + }) + } - // If there are numbers, add parent to each - if (rowToInsert.numbers.length > 0) { - rowToInsert.numbers.forEach((numberRow) => { - numberRow.parent = insertedRow.id - numbersToInsert.push(numberRow) - }) - } + // If there are relationships, add parent to each + if (rowToInsert.relationships.length > 0) { + rowToInsert.relationships.forEach((relation) => { + relation.parent = insertedRow.id + relationsToInsert.push(relation) + }) + } - // If there are selects, add parent to each, and then - // store by table name and rows - if (Object.keys(rowToInsert.selects).length > 0) { - 
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { - selectsToInsert[selectTableName] = [] + // If there are texts, add parent to each + if (rowToInsert.texts.length > 0) { + rowToInsert.texts.forEach((textRow) => { + textRow.parent = insertedRow.id + textsToInsert.push(textRow) + }) + } - selectRows.forEach((row) => { - if (typeof row.parent === 'undefined') { - row.parent = insertedRow.id - } + // If there are numbers, add parent to each + if (rowToInsert.numbers.length > 0) { + rowToInsert.numbers.forEach((numberRow) => { + numberRow.parent = insertedRow.id + numbersToInsert.push(numberRow) + }) + } - selectsToInsert[selectTableName].push(row) - }) - }) - } + // If there are selects, add parent to each, and then + // store by table name and rows + if (Object.keys(rowToInsert.selects).length > 0) { + Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { + selectsToInsert[selectTableName] = [] - // If there are blocks, add parent to each, and then - // store by table name and rows - Object.keys(rowToInsert.blocks).forEach((tableName) => { - rowToInsert.blocks[tableName].forEach((blockRow) => { - blockRow.row._parentID = insertedRow.id - if (!blocksToInsert[tableName]) { - blocksToInsert[tableName] = [] + selectRows.forEach((row) => { + if (typeof row.parent === 'undefined') { + row.parent = insertedRow.id } - if (blockRow.row.uuid) { - delete blockRow.row.uuid - } - blocksToInsert[tableName].push(blockRow) + + selectsToInsert[selectTableName].push(row) }) }) + } - // ////////////////////////////////// - // INSERT LOCALES - // ////////////////////////////////// - - if (localesToInsert.length > 0) { - const localeTableName = `${tableName}${adapter.localesSuffix}` - const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] - - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: localeTableName, - where: eq(localeTable._parentID, insertedRow.id), - }) + // If there are blocks, add parent to each, and then + // store by table name and rows + Object.keys(rowToInsert.blocks).forEach((tableName) => { + rowToInsert.blocks[tableName].forEach((blockRow) => { + blockRow.row._parentID = insertedRow.id + if (!blocksToInsert[tableName]) { + blocksToInsert[tableName] = [] } + if (blockRow.row.uuid) { + delete blockRow.row.uuid + } + blocksToInsert[tableName].push(blockRow) + }) + }) - await adapter.insert({ + // ////////////////////////////////// + // INSERT LOCALES + // ////////////////////////////////// + + if (localesToInsert.length > 0) { + const localeTableName = `${tableName}${adapter.localesSuffix}` + const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + + if (operation === 'update') { + await adapter.deleteWhere({ db, tableName: localeTableName, - values: localesToInsert, + where: eq(localeTable._parentID, insertedRow.id), }) } - // ////////////////////////////////// - // INSERT RELATIONSHIPS - // ////////////////////////////////// + await adapter.insert({ + db, + tableName: localeTableName, + values: localesToInsert, + }) + } - const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` + // ////////////////////////////////// + // INSERT RELATIONSHIPS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], - 
tableName: relationshipsTableName, - }) - } + const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` - if (relationsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: relationshipsTableName, - values: relationsToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], + tableName: relationshipsTableName, + }) + } - // ////////////////////////////////// - // INSERT hasMany TEXTS - // ////////////////////////////////// + if (relationsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: relationshipsTableName, + values: relationsToInsert, + }) + } - const textsTableName = `${tableName}_texts` + // ////////////////////////////////// + // INSERT hasMany TEXTS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...textsToInsert, ...rowToInsert.textsToDelete], - tableName: textsTableName, - }) - } + const textsTableName = `${tableName}_texts` - if (textsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: textsTableName, - values: textsToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...textsToInsert, ...rowToInsert.textsToDelete], + tableName: textsTableName, + }) + } - // ////////////////////////////////// - // INSERT hasMany NUMBERS - // ////////////////////////////////// + if (textsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: textsTableName, + values: textsToInsert, + }) + } - const numbersTableName = `${tableName}_numbers` + // ////////////////////////////////// + // INSERT hasMany NUMBERS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], - tableName: numbersTableName, - }) - } + const numbersTableName = `${tableName}_numbers` - if (numbersToInsert.length > 0) { - await adapter.insert({ - db, - tableName: numbersTableName, - values: numbersToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], + tableName: numbersTableName, + }) + } - // ////////////////////////////////// - // INSERT BLOCKS - // ////////////////////////////////// + if (numbersToInsert.length > 0) { + await adapter.insert({ + db, + tableName: numbersTableName, + values: numbersToInsert, + }) + } - const insertedBlockRows: Record[]> = {} + // ////////////////////////////////// + // INSERT BLOCKS + // ////////////////////////////////// - if (operation === 'update') { - for (const tableName of rowToInsert.blocksToDelete) { - const blockTable = adapter.tables[tableName] - await adapter.deleteWhere({ - db, - tableName, - where: eq(blockTable._parentID, insertedRow.id), - }) - } - } + const 
insertedBlockRows: Record[]> = {} - // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. - const arraysBlocksUUIDMap: Record = {} - - for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { - insertedBlockRows[tableName] = await adapter.insert({ + if (operation === 'update') { + for (const tableName of rowToInsert.blocksToDelete) { + const blockTable = adapter.tables[tableName] + await adapter.deleteWhere({ db, tableName, - values: blockRows.map(({ row }) => row), - }) - - insertedBlockRows[tableName].forEach((row, i) => { - blockRows[i].row = row - if ( - typeof row._uuid === 'string' && - (typeof row.id === 'string' || typeof row.id === 'number') - ) { - arraysBlocksUUIDMap[row._uuid] = row.id - } - }) - - const blockLocaleIndexMap: number[] = [] - - const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { - if (Object.entries(blockRow.locales).length > 0) { - Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { - if (Object.keys(blockLocaleData).length > 0) { - blockLocaleData._parentID = blockRow.row.id - blockLocaleData._locale = blockLocale - acc.push(blockLocaleData) - blockLocaleIndexMap.push(i) - } - }) - } - - return acc - }, []) - - if (blockLocaleRowsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: `${tableName}${adapter.localesSuffix}`, - values: blockLocaleRowsToInsert, - }) - } - - await insertArrays({ - adapter, - arrays: blockRows.map(({ arrays }) => arrays), - db, - parentRows: insertedBlockRows[tableName], - uuidMap: arraysBlocksUUIDMap, + where: eq(blockTable._parentID, insertedRow.id), }) } + } - // ////////////////////////////////// - // INSERT ARRAYS RECURSIVELY - // ////////////////////////////////// + // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. 
+ const arraysBlocksUUIDMap: Record = {} - if (operation === 'update') { - for (const arrayTableName of Object.keys(rowToInsert.arrays)) { - await deleteExistingArrayRows({ - adapter, - db, - parentID: insertedRow.id, - tableName: arrayTableName, + for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { + insertedBlockRows[tableName] = await adapter.insert({ + db, + tableName, + values: blockRows.map(({ row }) => row), + }) + + insertedBlockRows[tableName].forEach((row, i) => { + blockRows[i].row = row + if ( + typeof row._uuid === 'string' && + (typeof row.id === 'string' || typeof row.id === 'number') + ) { + arraysBlocksUUIDMap[row._uuid] = row.id + } + }) + + const blockLocaleIndexMap: number[] = [] + + const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { + if (Object.entries(blockRow.locales).length > 0) { + Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { + if (Object.keys(blockLocaleData).length > 0) { + blockLocaleData._parentID = blockRow.row.id + blockLocaleData._locale = blockLocale + acc.push(blockLocaleData) + blockLocaleIndexMap.push(i) + } }) } + + return acc + }, []) + + if (blockLocaleRowsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: `${tableName}${adapter.localesSuffix}`, + values: blockLocaleRowsToInsert, + }) } await insertArrays({ adapter, - arrays: [rowToInsert.arrays], + arrays: blockRows.map(({ arrays }) => arrays), db, - parentRows: [insertedRow], + parentRows: insertedBlockRows[tableName], uuidMap: arraysBlocksUUIDMap, }) + } - // ////////////////////////////////// - // INSERT hasMany SELECTS - // ////////////////////////////////// + // ////////////////////////////////// + // INSERT ARRAYS RECURSIVELY + // ////////////////////////////////// - for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { - const selectTable = adapter.tables[selectTableName] - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: selectTableName, - where: eq(selectTable.parent, insertedRow.id), - }) - } + if (operation === 'update') { + for (const arrayTableName of Object.keys(rowToInsert.arrays)) { + await deleteExistingArrayRows({ + adapter, + db, + parentID: insertedRow.id, + tableName: arrayTableName, + }) + } + } - if (Object.keys(arraysBlocksUUIDMap).length > 0) { - tableRows.forEach((row: any) => { - if (row.parent in arraysBlocksUUIDMap) { - row.parent = arraysBlocksUUIDMap[row.parent] - } - }) - } + await insertArrays({ + adapter, + arrays: [rowToInsert.arrays], + db, + parentRows: [insertedRow], + uuidMap: arraysBlocksUUIDMap, + }) - if (tableRows.length) { - await adapter.insert({ - db, - tableName: selectTableName, - values: tableRows, - }) - } + // ////////////////////////////////// + // INSERT hasMany SELECTS + // ////////////////////////////////// + + for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { + const selectTable = adapter.tables[selectTableName] + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: selectTableName, + where: eq(selectTable.parent, insertedRow.id), + }) } - // ////////////////////////////////// - // Error Handling - // ////////////////////////////////// - } catch (caughtError) { - // Unique constraint violation error - // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite - - let error = caughtError - if (typeof caughtError === 'object' && 'cause' in caughtError) { - error = caughtError.cause + if (Object.keys(arraysBlocksUUIDMap).length 
> 0) { + tableRows.forEach((row: any) => { + if (row.parent in arraysBlocksUUIDMap) { + row.parent = arraysBlocksUUIDMap[row.parent] + } + }) } - if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - let fieldName: null | string = null - // We need to try and find the right constraint for the field but if we can't we fallback to a generic message - if (error.code === '23505') { - // For PostgreSQL, we can try to extract the field name from the error constraint - if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { - fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] - } else { - const replacement = `${tableName}_` + if (tableRows.length) { + await adapter.insert({ + db, + tableName: selectTableName, + values: tableRows, + }) + } + } - if (error.constraint.includes(replacement)) { - const replacedConstraint = error.constraint.replace(replacement, '') + // ////////////////////////////////// + // Error Handling + // ////////////////////////////////// + } catch (caughtError) { + // Unique constraint violation error + // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite - if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { - fieldName = adapter.fieldConstraints[tableName][replacedConstraint] - } + let error = caughtError + if (typeof caughtError === 'object' && 'cause' in caughtError) { + error = caughtError.cause + } + + if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + let fieldName: null | string = null + // We need to try and find the right constraint for the field but if we can't we fallback to a generic message + if (error.code === '23505') { + // For PostgreSQL, we can try to extract the field name from the error constraint + if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { + fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] + } else { + const replacement = `${tableName}_` + + if (error.constraint.includes(replacement)) { + const replacedConstraint = error.constraint.replace(replacement, '') + + if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { + fieldName = adapter.fieldConstraints[tableName][replacedConstraint] } } + } + + if (!fieldName) { + // Last case scenario we extract the key and value from the detail on the error + const detail = error.detail + const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ + const match: string[] = detail.match(regex) + + if (match && match[1]) { + const key = match[1] + + fieldName = key + } + } + } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + /** + * For SQLite, we can try to extract the field name from the error message + * The message typically looks like: + * "UNIQUE constraint failed: table_name.field_name" + */ + const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ + const match: string[] = error.message.match(regex) + + if (match && match[2]) { + if (adapter.fieldConstraints[tableName]) { + fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] + } if (!fieldName) { - // Last case scenario we extract the key and value from the detail on the error - const detail = error.detail - const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ - const match: string[] = detail.match(regex) - - if (match && match[1]) { - const key = match[1] - - fieldName = key - } - } - } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - /** - * For SQLite, we can try to extract the field name from the error message - * The message typically looks 
like: - * "UNIQUE constraint failed: table_name.field_name" - */ - const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ - const match: string[] = error.message.match(regex) - - if (match && match[2]) { - if (adapter.fieldConstraints[tableName]) { - fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] - } - - if (!fieldName) { - fieldName = match[2] - } + fieldName = match[2] } } - - throw new ValidationError( - { - id, - errors: [ - { - message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: fieldName, - }, - ], - req, - }, - req?.t, - ) - } else { - throw error } + + throw new ValidationError( + { + id, + errors: [ + { + message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', + path: fieldName, + }, + ], + req, + }, + req?.t, + ) + } else { + throw error } } diff --git a/test/database/config.postgreslogs.ts b/test/database/config.postgreslogs.ts new file mode 100644 index 0000000000..d47ee88d83 --- /dev/null +++ b/test/database/config.postgreslogs.ts @@ -0,0 +1,19 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' + +const config = getConfig() + +import { postgresAdapter } from '@payloadcms/db-postgres' + +export const databaseAdapter = postgresAdapter({ + pool: { + connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests', + }, + logger: true, +}) + +export default buildConfigWithDefaults({ + ...config, + db: databaseAdapter, +}) diff --git a/test/database/config.ts b/test/database/config.ts index 1027491eae..6c16a4bd2b 100644 --- a/test/database/config.ts +++ b/test/database/config.ts @@ -1,933 +1,4 @@ -import { fileURLToPath } from 'node:url' -import path from 'path' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) -import type { TextField } from 'payload' - -import { randomUUID } from 'crypto' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { seed } from './seed.js' -import { - customIDsSlug, - customSchemaSlug, - defaultValuesSlug, - errorOnUnnamedFieldsSlug, - fakeCustomIDsSlug, - fieldsPersistanceSlug, - pgMigrationSlug, - placesSlug, - postsSlug, - relationASlug, - relationBSlug, - relationshipsMigrationSlug, -} from './shared.js' +import { getConfig } from './getConfig.js' -const defaultValueField: TextField = { - name: 'defaultValue', - type: 'text', - defaultValue: 'default value from database', -} - -export default buildConfigWithDefaults({ - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - }, - collections: [ - { - slug: 'categories', - versions: { drafts: true }, - fields: [ - { - type: 'text', - name: 'title', - }, - ], - }, - { - slug: 'categories-custom-id', - versions: { drafts: true }, - fields: [ - { - type: 'number', - name: 'id', - }, - ], - }, - { - slug: postsSlug, - fields: [ - { - name: 'title', - type: 'text', - required: true, - // access: { read: () => false }, - }, - { - type: 'relationship', - relationTo: 'categories', - name: 'category', - }, - { - type: 'relationship', - relationTo: 'categories-custom-id', - name: 'categoryCustomID', - }, - { - name: 'localized', - type: 'text', - localized: true, - }, - { - name: 'text', - type: 'text', - }, - { - name: 'number', - type: 'number', - }, - { - type: 'blocks', - name: 'blocks', - blocks: [ - { - slug: 'block-third', - fields: [ - { - type: 'blocks', - name: 'nested', - blocks: [ - { - slug: 'block-fourth', - 
fields: [ - { - type: 'blocks', - name: 'nested', - blocks: [], - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - type: 'tabs', - tabs: [ - { - name: 'D1', - fields: [ - { - name: 'D2', - type: 'group', - fields: [ - { - type: 'row', - fields: [ - { - type: 'collapsible', - fields: [ - { - type: 'tabs', - tabs: [ - { - fields: [ - { - name: 'D3', - type: 'group', - fields: [ - { - type: 'row', - fields: [ - { - type: 'collapsible', - fields: [ - { - name: 'D4', - type: 'text', - }, - ], - label: 'Collapsible2', - }, - ], - }, - ], - }, - ], - label: 'Tab1', - }, - ], - }, - ], - label: 'Collapsible2', - }, - ], - }, - ], - }, - ], - label: 'Tab1', - }, - ], - }, - { - name: 'hasTransaction', - type: 'checkbox', - hooks: { - beforeChange: [({ req }) => !!req.transactionID], - }, - admin: { - readOnly: true, - }, - }, - { - name: 'throwAfterChange', - type: 'checkbox', - defaultValue: false, - hooks: { - afterChange: [ - ({ value }) => { - if (value) { - throw new Error('throw after change') - } - }, - ], - }, - }, - { - name: 'arrayWithIDs', - type: 'array', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - name: 'blocksWithIDs', - type: 'blocks', - blocks: [ - { - slug: 'block-first', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - ], - }, - { - type: 'group', - name: 'group', - fields: [{ name: 'text', type: 'text' }], - }, - { - type: 'tabs', - tabs: [ - { - name: 'tab', - fields: [{ name: 'text', type: 'text' }], - }, - ], - }, - ], - hooks: { - beforeOperation: [ - ({ args, operation, req }) => { - if (operation === 'update') { - const defaultIDType = req.payload.db.defaultIDType - - if (defaultIDType === 'number' && typeof args.id === 'string') { - throw new Error('ID was not sanitized to a number properly') - } - } - - return args - }, - ], - }, - }, - { - slug: errorOnUnnamedFieldsSlug, - fields: [ - { - type: 'tabs', - tabs: [ - { - label: 'UnnamedTab', - fields: [ - { - name: 'groupWithinUnnamedTab', - type: 'group', - fields: [ - { - name: 'text', - type: 'text', - required: true, - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - slug: defaultValuesSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - defaultValueField, - { - name: 'array', - type: 'array', - // default array with one object to test subfield defaultValue properties for Mongoose - defaultValue: [{}], - fields: [defaultValueField], - }, - { - name: 'group', - type: 'group', - // we need to have to use as default in order to have subfield defaultValue properties directly for Mongoose - defaultValue: {}, - fields: [defaultValueField], - }, - { - name: 'select', - type: 'select', - defaultValue: 'default', - options: [ - { value: 'option0', label: 'Option 0' }, - { value: 'option1', label: 'Option 1' }, - { value: 'default', label: 'Default' }, - ], - }, - { - name: 'point', - type: 'point', - defaultValue: [10, 20], - }, - { - name: 'escape', - type: 'text', - defaultValue: "Thanks, we're excited for you to join us.", - }, - ], - }, - { - slug: relationASlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'richText', - type: 'richText', - }, - ], - labels: { - plural: 'Relation As', - singular: 'Relation A', - }, - }, - { - slug: relationBSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'relationship', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'richText', - type: 'richText', - }, - ], - labels: { - plural: 'Relation Bs', - singular: 'Relation B', - }, - }, - { - slug: pgMigrationSlug, - fields: [ - { - 
name: 'relation1', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'myArray', - type: 'array', - fields: [ - { - name: 'relation2', - type: 'relationship', - relationTo: 'relation-b', - }, - { - name: 'mySubArray', - type: 'array', - fields: [ - { - name: 'relation3', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - ], - }, - { - name: 'myGroup', - type: 'group', - fields: [ - { - name: 'relation4', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - { - name: 'myBlocks', - type: 'blocks', - blocks: [ - { - slug: 'myBlock', - fields: [ - { - name: 'relation5', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'relation6', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - ], - }, - ], - versions: true, - }, - { - slug: customSchemaSlug, - dbName: 'customs', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - { - name: 'relationship', - type: 'relationship', - hasMany: true, - relationTo: 'relation-a', - }, - { - name: 'select', - type: 'select', - dbName: ({ tableName }) => `${tableName}_customSelect`, - enumName: 'selectEnum', - hasMany: true, - options: ['a', 'b', 'c'], - }, - { - name: 'radio', - type: 'select', - enumName: 'radioEnum', - options: ['a', 'b', 'c'], - }, - { - name: 'array', - type: 'array', - dbName: 'customArrays', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - ], - }, - { - name: 'blocks', - type: 'blocks', - blocks: [ - { - slug: 'block-second', - dbName: 'customBlocks', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - ], - }, - ], - }, - ], - versions: { - drafts: true, - }, - }, - { - slug: placesSlug, - fields: [ - { - name: 'country', - type: 'text', - }, - { - name: 'city', - type: 'text', - }, - ], - }, - { - slug: 'virtual-relations', - admin: { useAsTitle: 'postTitle' }, - access: { read: () => true }, - fields: [ - { - name: 'postTitle', - type: 'text', - virtual: 'post.title', - }, - { - name: 'postTitleHidden', - type: 'text', - virtual: 'post.title', - hidden: true, - }, - { - name: 'postCategoryTitle', - type: 'text', - virtual: 'post.category.title', - }, - { - name: 'postCategoryID', - type: 'json', - virtual: 'post.category.id', - }, - { - name: 'postCategoryCustomID', - type: 'number', - virtual: 'post.categoryCustomID.id', - }, - { - name: 'postID', - type: 'json', - virtual: 'post.id', - }, - { - name: 'postLocalized', - type: 'text', - virtual: 'post.localized', - }, - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - }, - { - name: 'customID', - type: 'relationship', - relationTo: 'custom-ids', - }, - { - name: 'customIDValue', - type: 'text', - virtual: 'customID.id', - }, - ], - versions: { drafts: true }, - }, - { - slug: fieldsPersistanceSlug, - fields: [ - { - name: 'text', - type: 'text', - virtual: true, - }, - { - name: 'textHooked', - type: 'text', - virtual: true, - hooks: { afterRead: [() => 'hooked'] }, - }, - { - name: 'array', - type: 'array', - virtual: true, - fields: [], - }, - { - type: 'row', - fields: [ - { - type: 'text', - name: 'textWithinRow', - virtual: true, - }, - ], - }, - { - type: 'collapsible', - fields: [ - { - type: 'text', - name: 'textWithinCollapsible', - virtual: true, - }, - ], - label: 'Colllapsible', - }, - { - type: 
'tabs', - tabs: [ - { - label: 'tab', - fields: [ - { - type: 'text', - name: 'textWithinTabs', - virtual: true, - }, - ], - }, - ], - }, - ], - }, - { - slug: customIDsSlug, - fields: [ - { - name: 'id', - type: 'text', - admin: { - readOnly: true, - }, - hooks: { - beforeChange: [ - ({ value, operation }) => { - if (operation === 'create') { - return randomUUID() - } - return value - }, - ], - }, - }, - { - name: 'title', - type: 'text', - }, - ], - versions: { drafts: true }, - }, - { - slug: fakeCustomIDsSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'group', - type: 'group', - fields: [ - { - name: 'id', - type: 'text', - }, - ], - }, - { - type: 'tabs', - tabs: [ - { - name: 'myTab', - fields: [ - { - name: 'id', - type: 'text', - }, - ], - }, - ], - }, - ], - }, - { - slug: relationshipsMigrationSlug, - fields: [ - { - type: 'relationship', - relationTo: 'default-values', - name: 'relationship', - }, - { - type: 'relationship', - relationTo: ['default-values'], - name: 'relationship_2', - }, - ], - versions: true, - }, - { - slug: 'compound-indexes', - fields: [ - { - name: 'one', - type: 'text', - }, - { - name: 'two', - type: 'text', - }, - { - name: 'three', - type: 'text', - }, - { - name: 'group', - type: 'group', - fields: [ - { - name: 'four', - type: 'text', - }, - ], - }, - ], - indexes: [ - { - fields: ['one', 'two'], - unique: true, - }, - { - fields: ['three', 'group.four'], - unique: true, - }, - ], - }, - { - slug: 'aliases', - fields: [ - { - name: 'thisIsALongFieldNameThatCanCauseAPostgresErrorEvenThoughWeSetAShorterDBName', - dbName: 'shortname', - type: 'array', - fields: [ - { - name: 'nestedArray', - type: 'array', - dbName: 'short_nested_1', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - }, - ], - }, - { - slug: 'blocks-docs', - fields: [ - { - type: 'blocks', - localized: true, - blocks: [ - { - slug: 'cta', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - name: 'testBlocksLocalized', - }, - { - type: 'blocks', - blocks: [ - { - slug: 'cta', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - name: 'testBlocks', - }, - ], - }, - { - slug: 'unique-fields', - fields: [ - { - name: 'slugField', - type: 'text', - unique: true, - }, - ], - }, - ], - globals: [ - { - slug: 'header', - fields: [ - { - name: 'itemsLvl1', - type: 'array', - dbName: 'header_items_lvl1', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl2', - type: 'array', - dbName: 'header_items_lvl2', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl3', - type: 'array', - dbName: 'header_items_lvl3', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl4', - type: 'array', - dbName: 'header_items_lvl4', - fields: [ - { - name: 'label', - type: 'text', - }, - ], - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - slug: 'global', - dbName: 'customGlobal', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - versions: true, - }, - { - slug: 'global-2', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - slug: 'global-3', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - slug: 'virtual-relation-global', - fields: [ - { - type: 'text', - name: 'postTitle', - virtual: 'post.title', - }, - { - type: 'relationship', - name: 'post', - relationTo: 'posts', - }, - ], - }, - ], - localization: { - defaultLocale: 'en', - locales: ['en', 'es'], - }, - onInit: async (payload) => { - if 
(process.env.SEED_IN_CONFIG_ONINIT !== 'false') { - await seed(payload) - } - }, - typescript: { - outputFile: path.resolve(dirname, 'payload-types.ts'), - }, -}) - -export const postDoc = { - title: 'test post', -} +export default buildConfigWithDefaults(getConfig()) diff --git a/test/database/getConfig.ts b/test/database/getConfig.ts new file mode 100644 index 0000000000..b5ea622a4f --- /dev/null +++ b/test/database/getConfig.ts @@ -0,0 +1,942 @@ +import type { Config, TextField } from 'payload' + +import { randomUUID } from 'crypto' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { seed } from './seed.js' +import { + customIDsSlug, + customSchemaSlug, + defaultValuesSlug, + errorOnUnnamedFieldsSlug, + fakeCustomIDsSlug, + fieldsPersistanceSlug, + pgMigrationSlug, + placesSlug, + postsSlug, + relationASlug, + relationBSlug, + relationshipsMigrationSlug, +} from './shared.js' + +const defaultValueField: TextField = { + name: 'defaultValue', + type: 'text', + defaultValue: 'default value from database', +} + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export const getConfig: () => Partial = () => ({ + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + }, + collections: [ + { + slug: 'categories', + versions: { drafts: true }, + fields: [ + { + type: 'text', + name: 'title', + }, + ], + }, + { + slug: 'simple', + fields: [ + { + type: 'text', + name: 'text', + }, + { + type: 'number', + name: 'number', + }, + ], + }, + { + slug: 'categories-custom-id', + versions: { drafts: true }, + fields: [ + { + type: 'number', + name: 'id', + }, + ], + }, + { + slug: postsSlug, + fields: [ + { + name: 'title', + type: 'text', + required: true, + // access: { read: () => false }, + }, + { + type: 'relationship', + relationTo: 'categories', + name: 'category', + }, + { + type: 'relationship', + relationTo: 'categories-custom-id', + name: 'categoryCustomID', + }, + { + name: 'localized', + type: 'text', + localized: true, + }, + { + name: 'text', + type: 'text', + }, + { + name: 'number', + type: 'number', + }, + { + type: 'blocks', + name: 'blocks', + blocks: [ + { + slug: 'block-third', + fields: [ + { + type: 'blocks', + name: 'nested', + blocks: [ + { + slug: 'block-fourth', + fields: [ + { + type: 'blocks', + name: 'nested', + blocks: [], + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + type: 'tabs', + tabs: [ + { + name: 'D1', + fields: [ + { + name: 'D2', + type: 'group', + fields: [ + { + type: 'row', + fields: [ + { + type: 'collapsible', + fields: [ + { + type: 'tabs', + tabs: [ + { + fields: [ + { + name: 'D3', + type: 'group', + fields: [ + { + type: 'row', + fields: [ + { + type: 'collapsible', + fields: [ + { + name: 'D4', + type: 'text', + }, + ], + label: 'Collapsible2', + }, + ], + }, + ], + }, + ], + label: 'Tab1', + }, + ], + }, + ], + label: 'Collapsible2', + }, + ], + }, + ], + }, + ], + label: 'Tab1', + }, + ], + }, + { + name: 'hasTransaction', + type: 'checkbox', + hooks: { + beforeChange: [({ req }) => !!req.transactionID], + }, + admin: { + readOnly: true, + }, + }, + { + name: 'throwAfterChange', + type: 'checkbox', + defaultValue: false, + hooks: { + afterChange: [ + ({ value }) => { + if (value) { + throw new Error('throw after change') + } + }, + ], + }, + }, + { + name: 'arrayWithIDs', + type: 'array', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + name: 'blocksWithIDs', + type: 'blocks', + blocks: [ + { + slug: 'block-first', + fields: [ + { + name: 'text', + 
type: 'text', + }, + ], + }, + ], + }, + { + type: 'group', + name: 'group', + fields: [{ name: 'text', type: 'text' }], + }, + { + type: 'tabs', + tabs: [ + { + name: 'tab', + fields: [{ name: 'text', type: 'text' }], + }, + ], + }, + ], + hooks: { + beforeOperation: [ + ({ args, operation, req }) => { + if (operation === 'update') { + const defaultIDType = req.payload.db.defaultIDType + + if (defaultIDType === 'number' && typeof args.id === 'string') { + throw new Error('ID was not sanitized to a number properly') + } + } + + return args + }, + ], + }, + }, + { + slug: errorOnUnnamedFieldsSlug, + fields: [ + { + type: 'tabs', + tabs: [ + { + label: 'UnnamedTab', + fields: [ + { + name: 'groupWithinUnnamedTab', + type: 'group', + fields: [ + { + name: 'text', + type: 'text', + required: true, + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + slug: defaultValuesSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + defaultValueField, + { + name: 'array', + type: 'array', + // default array with one object to test subfield defaultValue properties for Mongoose + defaultValue: [{}], + fields: [defaultValueField], + }, + { + name: 'group', + type: 'group', + // we need to have to use as default in order to have subfield defaultValue properties directly for Mongoose + defaultValue: {}, + fields: [defaultValueField], + }, + { + name: 'select', + type: 'select', + defaultValue: 'default', + options: [ + { value: 'option0', label: 'Option 0' }, + { value: 'option1', label: 'Option 1' }, + { value: 'default', label: 'Default' }, + ], + }, + { + name: 'point', + type: 'point', + defaultValue: [10, 20], + }, + { + name: 'escape', + type: 'text', + defaultValue: "Thanks, we're excited for you to join us.", + }, + ], + }, + { + slug: relationASlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'richText', + type: 'richText', + }, + ], + labels: { + plural: 'Relation As', + singular: 'Relation A', + }, + }, + { + slug: relationBSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'relationship', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'richText', + type: 'richText', + }, + ], + labels: { + plural: 'Relation Bs', + singular: 'Relation B', + }, + }, + { + slug: pgMigrationSlug, + fields: [ + { + name: 'relation1', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'myArray', + type: 'array', + fields: [ + { + name: 'relation2', + type: 'relationship', + relationTo: 'relation-b', + }, + { + name: 'mySubArray', + type: 'array', + fields: [ + { + name: 'relation3', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + ], + }, + { + name: 'myGroup', + type: 'group', + fields: [ + { + name: 'relation4', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + { + name: 'myBlocks', + type: 'blocks', + blocks: [ + { + slug: 'myBlock', + fields: [ + { + name: 'relation5', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'relation6', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + ], + }, + ], + versions: true, + }, + { + slug: customSchemaSlug, + dbName: 'customs', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + { + name: 'relationship', + type: 'relationship', + hasMany: true, + relationTo: 'relation-a', + }, + { + name: 'select', + type: 'select', + dbName: ({ tableName }) => `${tableName}_customSelect`, + enumName: 
'selectEnum', + hasMany: true, + options: ['a', 'b', 'c'], + }, + { + name: 'radio', + type: 'select', + enumName: 'radioEnum', + options: ['a', 'b', 'c'], + }, + { + name: 'array', + type: 'array', + dbName: 'customArrays', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + ], + }, + { + name: 'blocks', + type: 'blocks', + blocks: [ + { + slug: 'block-second', + dbName: 'customBlocks', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + ], + }, + ], + }, + ], + versions: { + drafts: true, + }, + }, + { + slug: placesSlug, + fields: [ + { + name: 'country', + type: 'text', + }, + { + name: 'city', + type: 'text', + }, + ], + }, + { + slug: 'virtual-relations', + admin: { useAsTitle: 'postTitle' }, + access: { read: () => true }, + fields: [ + { + name: 'postTitle', + type: 'text', + virtual: 'post.title', + }, + { + name: 'postTitleHidden', + type: 'text', + virtual: 'post.title', + hidden: true, + }, + { + name: 'postCategoryTitle', + type: 'text', + virtual: 'post.category.title', + }, + { + name: 'postCategoryID', + type: 'json', + virtual: 'post.category.id', + }, + { + name: 'postCategoryCustomID', + type: 'number', + virtual: 'post.categoryCustomID.id', + }, + { + name: 'postID', + type: 'json', + virtual: 'post.id', + }, + { + name: 'postLocalized', + type: 'text', + virtual: 'post.localized', + }, + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + }, + { + name: 'customID', + type: 'relationship', + relationTo: 'custom-ids', + }, + { + name: 'customIDValue', + type: 'text', + virtual: 'customID.id', + }, + ], + versions: { drafts: true }, + }, + { + slug: fieldsPersistanceSlug, + fields: [ + { + name: 'text', + type: 'text', + virtual: true, + }, + { + name: 'textHooked', + type: 'text', + virtual: true, + hooks: { afterRead: [() => 'hooked'] }, + }, + { + name: 'array', + type: 'array', + virtual: true, + fields: [], + }, + { + type: 'row', + fields: [ + { + type: 'text', + name: 'textWithinRow', + virtual: true, + }, + ], + }, + { + type: 'collapsible', + fields: [ + { + type: 'text', + name: 'textWithinCollapsible', + virtual: true, + }, + ], + label: 'Colllapsible', + }, + { + type: 'tabs', + tabs: [ + { + label: 'tab', + fields: [ + { + type: 'text', + name: 'textWithinTabs', + virtual: true, + }, + ], + }, + ], + }, + ], + }, + { + slug: customIDsSlug, + fields: [ + { + name: 'id', + type: 'text', + admin: { + readOnly: true, + }, + hooks: { + beforeChange: [ + ({ value, operation }) => { + if (operation === 'create') { + return randomUUID() + } + return value + }, + ], + }, + }, + { + name: 'title', + type: 'text', + }, + ], + versions: { drafts: true }, + }, + { + slug: fakeCustomIDsSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'group', + type: 'group', + fields: [ + { + name: 'id', + type: 'text', + }, + ], + }, + { + type: 'tabs', + tabs: [ + { + name: 'myTab', + fields: [ + { + name: 'id', + type: 'text', + }, + ], + }, + ], + }, + ], + }, + { + slug: relationshipsMigrationSlug, + fields: [ + { + type: 'relationship', + relationTo: 'default-values', + name: 'relationship', + }, + { + type: 'relationship', + relationTo: ['default-values'], + name: 'relationship_2', + }, + ], + versions: true, + }, + { + slug: 'compound-indexes', + fields: [ + { + name: 'one', + type: 'text', + }, + { + name: 'two', + type: 'text', + }, + { + name: 'three', + type: 'text', + }, + { + name: 'group', + 
type: 'group', + fields: [ + { + name: 'four', + type: 'text', + }, + ], + }, + ], + indexes: [ + { + fields: ['one', 'two'], + unique: true, + }, + { + fields: ['three', 'group.four'], + unique: true, + }, + ], + }, + { + slug: 'aliases', + fields: [ + { + name: 'thisIsALongFieldNameThatCanCauseAPostgresErrorEvenThoughWeSetAShorterDBName', + dbName: 'shortname', + type: 'array', + fields: [ + { + name: 'nestedArray', + type: 'array', + dbName: 'short_nested_1', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + }, + ], + }, + { + slug: 'blocks-docs', + fields: [ + { + type: 'blocks', + localized: true, + blocks: [ + { + slug: 'cta', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + name: 'testBlocksLocalized', + }, + { + type: 'blocks', + blocks: [ + { + slug: 'cta', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + name: 'testBlocks', + }, + ], + }, + { + slug: 'unique-fields', + fields: [ + { + name: 'slugField', + type: 'text', + unique: true, + }, + ], + }, + ], + globals: [ + { + slug: 'header', + fields: [ + { + name: 'itemsLvl1', + type: 'array', + dbName: 'header_items_lvl1', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl2', + type: 'array', + dbName: 'header_items_lvl2', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl3', + type: 'array', + dbName: 'header_items_lvl3', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl4', + type: 'array', + dbName: 'header_items_lvl4', + fields: [ + { + name: 'label', + type: 'text', + }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + slug: 'global', + dbName: 'customGlobal', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + versions: true, + }, + { + slug: 'global-2', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + slug: 'global-3', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + slug: 'virtual-relation-global', + fields: [ + { + type: 'text', + name: 'postTitle', + virtual: 'post.title', + }, + { + type: 'relationship', + name: 'post', + relationTo: 'posts', + }, + ], + }, + ], + localization: { + defaultLocale: 'en', + locales: ['en', 'es'], + }, + onInit: async (payload) => { + if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { + await seed(payload) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/database/payload-types.ts b/test/database/payload-types.ts index d1f52cb4a1..18b196b38e 100644 --- a/test/database/payload-types.ts +++ b/test/database/payload-types.ts @@ -68,6 +68,7 @@ export interface Config { blocks: {}; collections: { categories: Category; + simple: Simple; 'categories-custom-id': CategoriesCustomId; posts: Post; 'error-on-unnamed-fields': ErrorOnUnnamedField; @@ -94,6 +95,7 @@ export interface Config { collectionsJoins: {}; collectionsSelect: { categories: CategoriesSelect | CategoriesSelect; + simple: SimpleSelect | SimpleSelect; 'categories-custom-id': CategoriesCustomIdSelect | CategoriesCustomIdSelect; posts: PostsSelect | PostsSelect; 'error-on-unnamed-fields': ErrorOnUnnamedFieldsSelect | ErrorOnUnnamedFieldsSelect; @@ -172,6 +174,17 @@ export interface Category { createdAt: string; _status?: ('draft' | 'published') | null; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple". 
+ */ +export interface Simple { + id: string; + text?: string | null; + number?: number | null; + updatedAt: string; + createdAt: string; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "categories-custom-id". @@ -608,6 +621,10 @@ export interface PayloadLockedDocument { relationTo: 'categories'; value: string | Category; } | null) + | ({ + relationTo: 'simple'; + value: string | Simple; + } | null) | ({ relationTo: 'categories-custom-id'; value: number | CategoriesCustomId; @@ -736,6 +753,16 @@ export interface CategoriesSelect { createdAt?: T; _status?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple_select". + */ +export interface SimpleSelect { + text?: T; + number?: T; + updatedAt?: T; + createdAt?: T; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "categories-custom-id_select". diff --git a/test/database/postgres-logs.int.spec.ts b/test/database/postgres-logs.int.spec.ts new file mode 100644 index 0000000000..a179b64c56 --- /dev/null +++ b/test/database/postgres-logs.int.spec.ts @@ -0,0 +1,91 @@ +import type { Payload } from 'payload' + +/* eslint-disable jest/require-top-level-describe */ +import assert from 'assert' +import path from 'path' +import { fileURLToPath } from 'url' + +import { initPayloadInt } from '../helpers/initPayloadInt.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +const describePostgres = process.env.PAYLOAD_DATABASE?.startsWith('postgres') + ? describe + : describe.skip + +let payload: Payload + +describePostgres('database - postgres logs', () => { + beforeAll(async () => { + const initialized = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.postgreslogs.ts', + ) + assert(initialized.payload) + assert(initialized.restClient) + ;({ payload } = initialized) + }) + + afterAll(async () => { + await payload.destroy() + }) + + it('ensure simple update uses optimized upsertRow with returning()', async () => { + const doc = await payload.create({ + collection: 'simple', + data: { + text: 'Some title', + number: 5, + }, + }) + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const result: any = await payload.db.updateOne({ + collection: 'simple', + id: doc.id, + data: { + text: 'Updated Title', + number: 5, + }, + }) + + expect(result.text).toEqual('Updated Title') + expect(result.number).toEqual(5) // Ensure the update did not reset the number field + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + }) + + it('ensure simple update of complex collection uses optimized upsertRow without returning()', async () => { + const doc = await payload.create({ + collection: 'posts', + data: { + title: 'Some title', + number: 5, + }, + }) + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const result: any = await payload.db.updateOne({ + collection: 'posts', + id: doc.id, + data: { + title: 'Updated Title', + number: 5, + }, + }) + + expect(result.title).toEqual('Updated Title') + expect(result.number).toEqual(5) // Ensure the update did not reset the number field + + expect(consoleCount).toHaveBeenCalledTimes(2) // Should be 2 sql call if the optimization is used (update + find). 
If not, this would be 5 calls + consoleCount.mockRestore() + }) +}) diff --git a/test/database/postgres-vector.int.spec.ts b/test/database/postgres-vector.int.spec.ts index 81d374a108..58a10743fd 100644 --- a/test/database/postgres-vector.int.spec.ts +++ b/test/database/postgres-vector.int.spec.ts @@ -12,11 +12,11 @@ import { fileURLToPath } from 'url' const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) -const describeToUse = process.env.PAYLOAD_DATABASE?.startsWith('postgres') +const describePostgres = process.env.PAYLOAD_DATABASE?.startsWith('postgres') ? describe : describe.skip -describeToUse('postgres vector custom column', () => { +describePostgres('postgres vector custom column', () => { const vectorColumnQueryTest = async (vectorType: string) => { const { databaseAdapter, diff --git a/test/database/seed.ts b/test/database/seed.ts index 921273e4bb..48fd373021 100644 --- a/test/database/seed.ts +++ b/test/database/seed.ts @@ -1,15 +1,6 @@ import type { Payload } from 'payload' -import path from 'path' -import { getFileByPath } from 'payload' -import { fileURLToPath } from 'url' - import { devUser } from '../credentials.js' -import { seedDB } from '../helpers/seed.js' -import { collectionSlugs } from './shared.js' - -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) export const _seed = async (_payload: Payload) => { await _payload.create({ diff --git a/test/database/shared.ts b/test/database/shared.ts index 7600f66547..5932229581 100644 --- a/test/database/shared.ts +++ b/test/database/shared.ts @@ -20,18 +20,3 @@ export const customIDsSlug = 'custom-ids' export const fakeCustomIDsSlug = 'fake-custom-ids' export const relationshipsMigrationSlug = 'relationships-migration' - -export const collectionSlugs = [ - postsSlug, - errorOnUnnamedFieldsSlug, - defaultValuesSlug, - relationASlug, - relationBSlug, - pgMigrationSlug, - customSchemaSlug, - placesSlug, - fieldsPersistanceSlug, - customIDsSlug, - fakeCustomIDsSlug, - relationshipsMigrationSlug, -] diff --git a/test/select/config.postgreslogs.ts b/test/select/config.postgreslogs.ts new file mode 100644 index 0000000000..d47ee88d83 --- /dev/null +++ b/test/select/config.postgreslogs.ts @@ -0,0 +1,19 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' + +const config = getConfig() + +import { postgresAdapter } from '@payloadcms/db-postgres' + +export const databaseAdapter = postgresAdapter({ + pool: { + connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests', + }, + logger: true, +}) + +export default buildConfigWithDefaults({ + ...config, + db: databaseAdapter, +}) diff --git a/test/select/config.ts b/test/select/config.ts index 280946aa51..6c16a4bd2b 100644 --- a/test/select/config.ts +++ b/test/select/config.ts @@ -1,122 +1,4 @@ -import type { GlobalConfig } from 'payload' - -import { lexicalEditor } from '@payloadcms/richtext-lexical' -import { fileURLToPath } from 'node:url' -import path from 'path' - -import type { Post } from './payload-types.js' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { devUser } from '../credentials.js' -import { CustomID } from './collections/CustomID/index.js' -import { DeepPostsCollection } from './collections/DeepPosts/index.js' -import { ForceSelect } from './collections/ForceSelect/index.js' -import { LocalizedPostsCollection } from 
'./collections/LocalizedPosts/index.js' -import { Pages } from './collections/Pages/index.js' -import { Points } from './collections/Points/index.js' -import { PostsCollection } from './collections/Posts/index.js' -import { UsersCollection } from './collections/Users/index.js' -import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' +import { getConfig } from './getConfig.js' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) - -export default buildConfigWithDefaults({ - // ...extend config here - collections: [ - PostsCollection, - LocalizedPostsCollection, - VersionedPostsCollection, - DeepPostsCollection, - Pages, - Points, - ForceSelect, - { - slug: 'upload', - fields: [], - upload: { - staticDir: path.resolve(dirname, 'media'), - }, - }, - { - slug: 'rels', - fields: [], - }, - CustomID, - UsersCollection, - ], - globals: [ - { - slug: 'global-post', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'number', - type: 'number', - }, - ], - }, - { - slug: 'force-select-global', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'forceSelected', - type: 'text', - }, - { - name: 'array', - type: 'array', - fields: [ - { - name: 'forceSelected', - type: 'text', - }, - ], - }, - ], - forceSelect: { array: { forceSelected: true }, forceSelected: true }, - } satisfies GlobalConfig<'force-select-global'>, - ], - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - }, - localization: { - locales: ['en', 'de'], - defaultLocale: 'en', - }, - editor: lexicalEditor({ - features: ({ defaultFeatures }) => [...defaultFeatures], - }), - cors: ['http://localhost:3000', 'http://localhost:3001'], - onInit: async (payload) => { - await payload.create({ - collection: 'users', - data: { - email: devUser.email, - password: devUser.password, - }, - }) - - // // Create image - // const imageFilePath = path.resolve(dirname, '../uploads/image.png') - // const imageFile = await getFileByPath(imageFilePath) - - // await payload.create({ - // collection: 'media', - // data: {}, - // file: imageFile, - // }) - }, - typescript: { - outputFile: path.resolve(dirname, 'payload-types.ts'), - }, -}) +export default buildConfigWithDefaults(getConfig()) diff --git a/test/select/getConfig.ts b/test/select/getConfig.ts new file mode 100644 index 0000000000..7712c3e82d --- /dev/null +++ b/test/select/getConfig.ts @@ -0,0 +1,119 @@ +import type { Config, GlobalConfig } from 'payload' + +import { lexicalEditor } from '@payloadcms/richtext-lexical' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { devUser } from '../credentials.js' +import { CustomID } from './collections/CustomID/index.js' +import { DeepPostsCollection } from './collections/DeepPosts/index.js' +import { ForceSelect } from './collections/ForceSelect/index.js' +import { LocalizedPostsCollection } from './collections/LocalizedPosts/index.js' +import { Pages } from './collections/Pages/index.js' +import { Points } from './collections/Points/index.js' +import { PostsCollection } from './collections/Posts/index.js' +import { UsersCollection } from './collections/Users/index.js' +import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export const getConfig: () => Partial = () => ({ + // ...extend config here + collections: [ + PostsCollection, + LocalizedPostsCollection, + VersionedPostsCollection, + DeepPostsCollection, 
+ Pages, + Points, + ForceSelect, + { + slug: 'upload', + fields: [], + upload: { + staticDir: path.resolve(dirname, 'media'), + }, + }, + { + slug: 'rels', + fields: [], + }, + CustomID, + UsersCollection, + ], + globals: [ + { + slug: 'global-post', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'number', + type: 'number', + }, + ], + }, + { + slug: 'force-select-global', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'forceSelected', + type: 'text', + }, + { + name: 'array', + type: 'array', + fields: [ + { + name: 'forceSelected', + type: 'text', + }, + ], + }, + ], + forceSelect: { array: { forceSelected: true }, forceSelected: true }, + } satisfies GlobalConfig<'force-select-global'>, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + }, + localization: { + locales: ['en', 'de'], + defaultLocale: 'en', + }, + editor: lexicalEditor({ + features: ({ defaultFeatures }) => [...defaultFeatures], + }), + cors: ['http://localhost:3000', 'http://localhost:3001'], + onInit: async (payload) => { + await payload.create({ + collection: 'users', + data: { + email: devUser.email, + password: devUser.password, + }, + }) + + // // Create image + // const imageFilePath = path.resolve(dirname, '../uploads/image.png') + // const imageFile = await getFileByPath(imageFilePath) + + // await payload.create({ + // collection: 'media', + // data: {}, + // file: imageFile, + // }) + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/select/postgreslogs.int.spec.ts b/test/select/postgreslogs.int.spec.ts new file mode 100644 index 0000000000..58517e196e --- /dev/null +++ b/test/select/postgreslogs.int.spec.ts @@ -0,0 +1,179 @@ +/* eslint-disable jest/require-top-level-describe */ +import type { Payload } from 'payload' + +import path from 'path' +import { assert } from 'ts-essentials' +import { fileURLToPath } from 'url' + +import type { Point, Post } from './payload-types.js' + +import { initPayloadInt } from '../helpers/initPayloadInt.js' + +let payload: Payload + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +const describePostgres = process.env.PAYLOAD_DATABASE === 'postgres' ? 
describe : describe.skip + +describePostgres('Select - with postgres logs', () => { + // --__--__--__--__--__--__--__--__--__ + // Boilerplate test setup/teardown + // --__--__--__--__--__--__--__--__--__ + beforeAll(async () => { + const initialized = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.postgreslogs.ts', + ) + assert(initialized.payload) + assert(initialized.restClient) + ;({ payload } = initialized) + }) + + afterAll(async () => { + await payload.destroy() + }) + + describe('Local API - Base', () => { + let post: Post + let postId: number | string + + let point: Point + let pointId: number | string + + beforeEach(async () => { + post = await createPost() + postId = post.id + + point = await createPoint() + pointId = point.id + }) + + // Clean up to safely mutate in each test + afterEach(async () => { + await payload.delete({ id: postId, collection: 'posts' }) + await payload.delete({ id: pointId, collection: 'points' }) + }) + + describe('Local API - operations', () => { + it('ensure optimized db update is still used when using select', async () => { + const post = await createPost() + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { + text: 'new text', + }, + select: { text: true, number: true }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.number).toEqual(1) + expect(res.text).toEqual('new text') + expect(res.id).toEqual(post.id) + expect(Object.keys(res)).toHaveLength(3) + }) + }) + }) +}) + +function removeEmptyAndUndefined(obj: any): any { + if (Array.isArray(obj)) { + const cleanedArray = obj + .map(removeEmptyAndUndefined) + .filter( + (item) => + item !== undefined && !(typeof item === 'object' && Object.keys(item).length === 0), + ) + + return cleanedArray.length > 0 ? cleanedArray : undefined + } + + if (obj !== null && typeof obj === 'object') { + const cleanedEntries = Object.entries(obj) + .map(([key, value]) => [key, removeEmptyAndUndefined(value)]) + .filter( + ([, value]) => + value !== undefined && + !( + typeof value === 'object' && + (Array.isArray(value) ? value.length === 0 : Object.keys(value).length === 0) + ), + ) + + return cleanedEntries.length > 0 ? 
Object.fromEntries(cleanedEntries) : undefined + } + + return obj +} +async function createPost() { + const upload = await payload.create({ + collection: 'upload', + data: {}, + filePath: path.resolve(dirname, 'image.jpg'), + }) + + const relation = await payload.create({ + depth: 0, + collection: 'rels', + data: {}, + }) + + return payload.create({ + collection: 'posts', + depth: 0, + data: { + number: 1, + text: 'text', + select: 'a', + selectMany: ['a'], + group: { + number: 1, + text: 'text', + }, + hasMany: [relation], + hasManyUpload: [upload], + hasOne: relation, + hasManyPoly: [{ relationTo: 'rels', value: relation }], + hasOnePoly: { relationTo: 'rels', value: relation }, + blocks: [ + { + blockType: 'cta', + ctaText: 'cta-text', + text: 'text', + }, + { + blockType: 'intro', + introText: 'intro-text', + text: 'text', + }, + ], + array: [ + { + text: 'text', + number: 1, + }, + ], + tab: { + text: 'text', + number: 1, + }, + unnamedTabNumber: 2, + unnamedTabText: 'text2', + }, + }) +} + +function createPoint() { + return payload.create({ collection: 'points', data: { text: 'some', point: [10, 20] } }) +} From 380ce04d5c2c6b22c41d9998952478796f0e4e01 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Wed, 23 Jul 2025 19:05:31 +0300 Subject: [PATCH 26/91] perf(db-postgres): avoid including `prettier` to the bundle (#13251) This PR optimizes bundle size with drizzle adapters by avoiding including `prettier` to the production bundle --- packages/drizzle/src/utilities/createSchemaGenerator.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/drizzle/src/utilities/createSchemaGenerator.ts b/packages/drizzle/src/utilities/createSchemaGenerator.ts index b979460b26..cc6c85656e 100644 --- a/packages/drizzle/src/utilities/createSchemaGenerator.ts +++ b/packages/drizzle/src/utilities/createSchemaGenerator.ts @@ -296,12 +296,13 @@ declare module '${this.packageName}' { if (prettify) { try { - const prettier = await import('prettier') + const prettier = await eval('import("prettier")') const configPath = await prettier.resolveConfigFile() const config = configPath ? await prettier.resolveConfig(configPath) : {} code = await prettier.format(code, { ...config, parser: 'typescript' }) - // eslint-disable-next-line no-empty - } catch {} + } catch { + /* empty */ + } } await writeFile(outputFile, code, 'utf-8') From 0eac58ed723917399895be8737078416bdfcbba0 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Wed, 23 Jul 2025 15:19:10 -0400 Subject: [PATCH 27/91] fix(next): prevent base list filters from being injected into the url (#13253) Prevents base list filters from being injected into the URL. This is a problem with the multi-tenant plugin, for example, where changing the tenant adds a `baseListFilter` to the query, but should never be exposed to the end user. Introduced in #13200. 
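For context, a minimal sketch of the idea (the helper name `buildEffectiveWhere` is hypothetical; the actual fix lives in `renderListView` below and reuses the existing `combineWhereConstraints` utility): the resolved `baseListFilter` is kept out of the user-facing `query.where` — which round-trips through the URL — and is only combined with it when building the constraint passed to the database.

```ts
import type { Where } from 'payload'

// Hypothetical sketch: combine the user's where clause with an internal base
// list filter at query time, without ever writing the filter back into `query`.
const buildEffectiveWhere = (userWhere?: Where, baseListFilter?: Where): undefined | Where => {
  const constraints = [userWhere, baseListFilter].filter(Boolean) as Where[]

  if (constraints.length === 0) {
    return undefined
  }

  if (constraints.length === 1) {
    return constraints[0]
  }

  // Equivalent in spirit to combineWhereConstraints([userWhere, baseListFilter])
  return { and: constraints }
}
```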
--- packages/next/src/views/List/index.tsx | 23 ++--- tsconfig.base.json | 114 ++++++------------------- 2 files changed, 36 insertions(+), 101 deletions(-) diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index 1a019e48a4..59c0c3dfc5 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -14,6 +14,7 @@ import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerCompo import { renderFilters, renderTable, upsertPreferences } from '@payloadcms/ui/rsc' import { notFound } from 'next/navigation.js' import { + combineWhereConstraints, formatAdminURL, isNumber, mergeListSearchAndWhere, @@ -122,27 +123,17 @@ export const renderListView = async ( throw new Error('not-found') } + let baseListFilter = undefined + if (typeof collectionConfig.admin?.baseListFilter === 'function') { - const baseListFilter = await collectionConfig.admin.baseListFilter({ + baseListFilter = await collectionConfig.admin.baseListFilter({ limit: query.limit, page: query.page, req, sort: query.sort, }) - - if (baseListFilter) { - query.where = { - and: [query.where, baseListFilter].filter(Boolean), - } - } } - const whereWithMergedSearch = mergeListSearchAndWhere({ - collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: query?.where, - }) - let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined @@ -182,7 +173,11 @@ export const renderListView = async ( req, sort: query.sort, user, - where: whereWithMergedSearch, + where: mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: combineWhereConstraints([query?.where, baseListFilter]), + }), }) const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) diff --git a/tsconfig.base.json b/tsconfig.base.json index 153abb8a5f..0898ad390f 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,15 +21,8 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": [ - "DOM", - "DOM.Iterable", - "ES2022" - ], - "types": [ - "node", - "jest" - ], + "lib": ["DOM", "DOM.Iterable", "ES2022"], + "types": ["node", "jest"], "incremental": true, "isolatedModules": true, "plugins": [ @@ -38,72 +31,36 @@ } ], "paths": { - "@payload-config": [ - "./test/fields/config.ts" - ], - "@payloadcms/admin-bar": [ - "./packages/admin-bar/src" - ], - "@payloadcms/live-preview": [ - "./packages/live-preview/src" - ], - "@payloadcms/live-preview-react": [ - "./packages/live-preview-react/src/index.ts" - ], - "@payloadcms/live-preview-vue": [ - "./packages/live-preview-vue/src/index.ts" - ], - "@payloadcms/ui": [ - "./packages/ui/src/exports/client/index.ts" - ], - "@payloadcms/ui/shared": [ - "./packages/ui/src/exports/shared/index.ts" - ], - "@payloadcms/ui/rsc": [ - "./packages/ui/src/exports/rsc/index.ts" - ], - "@payloadcms/ui/scss": [ - "./packages/ui/src/scss.scss" - ], - "@payloadcms/ui/scss/app.scss": [ - "./packages/ui/src/scss/app.scss" - ], - "@payloadcms/next/*": [ - "./packages/next/src/exports/*.ts" - ], + "@payload-config": ["./test/_community/config.ts"], + "@payloadcms/admin-bar": ["./packages/admin-bar/src"], + "@payloadcms/live-preview": ["./packages/live-preview/src"], + "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], + "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], + "@payloadcms/ui": 
["./packages/ui/src/exports/client/index.ts"], + "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], + "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], + "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], + "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], + "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": [ - "./packages/richtext-lexical/src/exports/server/rsc.ts" - ], - "@payloadcms/richtext-slate/rsc": [ - "./packages/richtext-slate/src/exports/server/rsc.ts" - ], + "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], + "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": [ - "./packages/plugin-seo/src/exports/client.ts" - ], - "@payloadcms/plugin-sentry/client": [ - "./packages/plugin-sentry/src/exports/client.ts" - ], - "@payloadcms/plugin-stripe/client": [ - "./packages/plugin-stripe/src/exports/client.ts" - ], - "@payloadcms/plugin-search/client": [ - "./packages/plugin-search/src/exports/client.ts" - ], + "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], + "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], + "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], + "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": [ - "./packages/plugin-multi-tenant/src/exports/rsc.ts" - ], + "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -113,42 +70,25 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": [ - "./packages/plugin-multi-tenant/src/index.ts" - ], + "@payloadcms/plugin-multi-tenant": ["./packages/plugin-multi-tenant/src/index.ts"], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": [ - "./packages/next/src/exports/*" - ], - "@payloadcms/storage-azure/client": [ - "./packages/storage-azure/src/exports/client.ts" - ], - "@payloadcms/storage-s3/client": [ - "./packages/storage-s3/src/exports/client.ts" - ], + "@payloadcms/next": ["./packages/next/src/exports/*"], + "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], + "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": [ - "./packages/storage-gcs/src/exports/client.ts" - ], + "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], 
"@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": [ - "${configDir}/src" - ], - "exclude": [ - "${configDir}/dist", - "${configDir}/build", - "${configDir}/temp", - "**/*.spec.ts" - ] + "include": ["${configDir}/src"], + "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] } From 29fb9ee5b49f11151e88f62df086d284db0bee62 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Wed, 23 Jul 2025 16:31:05 -0400 Subject: [PATCH 28/91] fix(ui): monomorphic relationship fields should not show relationTo option labels (#13245) --- packages/ui/src/fields/Relationship/Input.tsx | 40 +++++++++++-------- packages/ui/src/fields/Relationship/index.tsx | 3 ++ packages/ui/src/fields/Relationship/types.ts | 1 + .../Lexical/e2e/blocks/e2e.spec.ts | 6 +-- 4 files changed, 31 insertions(+), 19 deletions(-) diff --git a/packages/ui/src/fields/Relationship/Input.tsx b/packages/ui/src/fields/Relationship/Input.tsx index 328bfc2e8e..0792911cd8 100644 --- a/packages/ui/src/fields/Relationship/Input.tsx +++ b/packages/ui/src/fields/Relationship/Input.tsx @@ -49,6 +49,7 @@ export const RelationshipInput: React.FC = (props) => { Description, Error, filterOptions, + formatDisplayedOptions, hasMany, initialValue, isSortable = true, @@ -100,9 +101,6 @@ export const RelationshipInput: React.FC = (props) => { const [options, dispatchOptions] = useReducer(optionsReducer, []) const valueRef = useRef(value) - // the line below seems odd - - valueRef.current = value const [DocumentDrawer, , { isDrawerOpen, openDrawer }] = useDocumentDrawer({ id: currentlyOpenRelationship.id, @@ -474,11 +472,7 @@ export const RelationshipInput: React.FC = (props) => { const docID = args.doc.id if (hasMany) { - const currentValue = valueRef.current - ? Array.isArray(valueRef.current) - ? valueRef.current - : [valueRef.current] - : [] + const currentValue = value ? (Array.isArray(value) ? value : [value]) : [] const valuesToSet = currentValue.map((option: ValueWithRelation) => { return { @@ -492,7 +486,7 @@ export const RelationshipInput: React.FC = (props) => { onChange({ relationTo: args.collectionConfig.slug, value: docID }) } }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const onDuplicate = useCallback( @@ -508,8 +502,8 @@ export const RelationshipInput: React.FC = (props) => { if (hasMany) { onChange( - valueRef.current - ? (valueRef.current as ValueWithRelation[]).concat({ + value + ? value.concat({ relationTo: args.collectionConfig.slug, value: args.doc.id, }) @@ -522,7 +516,7 @@ export const RelationshipInput: React.FC = (props) => { }) } }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const onDelete = useCallback( @@ -537,8 +531,8 @@ export const RelationshipInput: React.FC = (props) => { if (hasMany) { onChange( - valueRef.current - ? (valueRef.current as ValueWithRelation[]).filter((option) => { + value + ? 
value.filter((option) => { return option.value !== args.id }) : null, @@ -549,7 +543,7 @@ export const RelationshipInput: React.FC = (props) => { return }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const filterOption = useCallback((item: Option, searchFilter: string) => { @@ -671,6 +665,12 @@ export const RelationshipInput: React.FC = (props) => { } }, [openDrawer, currentlyOpenRelationship]) + useEffect(() => { + // needed to sync the ref value when other fields influence the value + // i.e. when a drawer is opened and the value is set + valueRef.current = value + }, [value]) + const valueToRender = findOptionsByValue({ allowEdit, options, value }) if (!Array.isArray(valueToRender) && valueToRender?.value === 'null') { @@ -742,14 +742,18 @@ export const RelationshipInput: React.FC = (props) => { ? (selected) => { if (hasMany) { if (selected === null) { + valueRef.current = [] onChange([]) } else { + valueRef.current = selected as ValueWithRelation[] onChange(selected as ValueWithRelation[]) } } else if (hasMany === false) { if (selected === null) { + valueRef.current = null onChange(null) } else { + valueRef.current = selected as ValueWithRelation onChange(selected as ValueWithRelation) } } @@ -822,7 +826,11 @@ export const RelationshipInput: React.FC = (props) => { }), }) }} - options={options} + options={ + typeof formatDisplayedOptions === 'function' + ? formatDisplayedOptions(options) + : options + } placeholder={placeholder} showError={showError} value={valueToRender ?? null} diff --git a/packages/ui/src/fields/Relationship/index.tsx b/packages/ui/src/fields/Relationship/index.tsx index a4b4f6dd04..d226727df8 100644 --- a/packages/ui/src/fields/Relationship/index.tsx +++ b/packages/ui/src/fields/Relationship/index.tsx @@ -196,6 +196,9 @@ const RelationshipFieldComponent: RelationshipFieldClientComponent = (props) => description={description} Error={Error} filterOptions={filterOptions} + formatDisplayedOptions={ + isPolymorphic ? 
undefined : (options) => options.map((opt) => opt.options).flat() + } isSortable={isSortable} Label={Label} label={label} diff --git a/packages/ui/src/fields/Relationship/types.ts b/packages/ui/src/fields/Relationship/types.ts index 98c7500cfb..cf48acfa75 100644 --- a/packages/ui/src/fields/Relationship/types.ts +++ b/packages/ui/src/fields/Relationship/types.ts @@ -100,6 +100,7 @@ export type RelationshipInputProps = { readonly description?: StaticDescription readonly Error?: React.ReactNode readonly filterOptions?: FilterOptionsResult + readonly formatDisplayedOptions?: (options: OptionGroup[]) => Option[] | OptionGroup[] readonly isSortable?: boolean readonly Label?: React.ReactNode readonly label?: StaticLabel diff --git a/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts b/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts index 772faf68b6..c12b4264aa 100644 --- a/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts +++ b/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts @@ -240,7 +240,7 @@ describe('lexicalBlocks', () => { ) await dependsOnDocData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnDocData.locator('.rs__control').click() await dependsOnSiblingData.locator('.rs__control').click() @@ -281,7 +281,7 @@ describe('lexicalBlocks', () => { await dependsOnDocData.locator('.rs__control').click() await dependsOnSiblingData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnSiblingData.locator('.rs__control').click() await dependsOnBlockData.locator('.rs__control').click() @@ -322,7 +322,7 @@ describe('lexicalBlocks', () => { await dependsOnSiblingData.locator('.rs__control').click() await dependsOnBlockData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnBlockData.locator('.rs__control').click() await saveDocAndAssert(page) From aeee0704dd640e3a3a522f77b0dda16a4d16d608 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 23 Jul 2025 16:48:25 -0700 Subject: [PATCH 29/91] chore: add new int test verifying that select *improves* performance of new optimization (#13254) https://github.com/payloadcms/payload/pull/13186 actually made the select API _more powerful_, as it can reduce the amount of db calls even for complex collections with blocks down to 1. This PR adds a test that verifies this. 
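For reference, a minimal sketch of the call shape the new test exercises; the `pages` slug and field names mirror the test fixture, and the `@payload-config` import is assumed from the repo's tsconfig paths rather than being part of any public contract:

```ts
import config from '@payload-config'
import { getPayload } from 'payload'

const payload = await getPayload({ config })

// A collection with a blocks field next to plain columns (mirrors the test fixture)
const page = await payload.create({
  collection: 'pages',
  data: { slug: 'test-page', additional: 'value', blocks: [] },
})

// Update and select only simple columns: the adapter can then satisfy the whole
// operation through the optimized upsertRow path with a single UPDATE ... RETURNING,
// because no block or array rows have to be re-read after the write.
const updated = await payload.db.updateOne({
  collection: 'pages',
  id: page.id,
  data: { slug: 'new-slug' },
  select: { slug: true, additional: true }, // blocks intentionally not selected
})
```

Selecting the blocks field (or omitting `select` entirely) would force a second query to hydrate the nested rows, which is exactly what the log-counting assertion below guards against.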
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210871676349303 --- test/select/postgreslogs.int.spec.ts | 120 +++++++++++++++++++++------ 1 file changed, 96 insertions(+), 24 deletions(-) diff --git a/test/select/postgreslogs.int.spec.ts b/test/select/postgreslogs.int.spec.ts index 58517e196e..076d4758e2 100644 --- a/test/select/postgreslogs.int.spec.ts +++ b/test/select/postgreslogs.int.spec.ts @@ -57,31 +57,101 @@ describePostgres('Select - with postgres logs', () => { await payload.delete({ id: pointId, collection: 'points' }) }) - describe('Local API - operations', () => { - it('ensure optimized db update is still used when using select', async () => { - const post = await createPost() + it('ensure optimized db update is still used when using select', async () => { + const post = await createPost() - // Count every console log - const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) - const res = removeEmptyAndUndefined( - (await payload.db.updateOne({ - collection: 'posts', - id: post.id, - data: { - text: 'new text', + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { + text: 'new text', + }, + select: { text: true, number: true }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.number).toEqual(1) + expect(res.text).toEqual('new text') + expect(res.id).toEqual(post.id) + expect(Object.keys(res)).toHaveLength(3) + }) + + // This verifies that select actually improves performance of simple updates for complex collections. + // This is possible as no `with` is returned by buildFindManyArgs for the blocks field, only if we have a select that does not select that blocks field. + it('ensure simple update of complex collection uses optimized upsertRow with optimized returning() if only simple fields are selected', async () => { + const page = await payload.create({ + collection: 'pages', + data: { + slug: 'test-page', + additional: 'value', + blocks: [ + { + id: '123', + blockType: 'some', + other: 'value', + title: 'Test Block', }, - select: { text: true, number: true }, - })) as any, - ) + ], + }, + }) - expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls - consoleCount.mockRestore() + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) - expect(res.number).toEqual(1) - expect(res.text).toEqual('new text') - expect(res.id).toEqual(post.id) - expect(Object.keys(res)).toHaveLength(3) + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'pages', + id: page.id, + select: { + slug: true, + additional: true, + }, + data: { + slug: 'new-slug', + }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. 
If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.slug).toEqual('new-slug') + expect(res.additional).toEqual('value') + expect(res.id).toEqual(page.id) + expect(Object.keys(res)).toHaveLength(3) + + // Do full find without select just to ensure that the update worked + const fullPage: any = await payload.findByID({ + collection: 'pages', + id: page.id, + }) + + delete fullPage.createdAt + delete fullPage.updatedAt + delete fullPage.array + delete fullPage.content + + expect(fullPage).toEqual({ + id: page.id, + slug: 'new-slug', + additional: 'value', + relatedPage: null, + blocks: [ + { + id: '123', + blockType: 'some', + blockName: null, + other: 'value', + title: 'Test Block', + }, + ], }) }) }) @@ -102,14 +172,16 @@ function removeEmptyAndUndefined(obj: any): any { if (obj !== null && typeof obj === 'object') { const cleanedEntries = Object.entries(obj) .map(([key, value]) => [key, removeEmptyAndUndefined(value)]) - .filter( - ([, value]) => + .filter(([, value]) => { + return ( value !== undefined && + value !== null && !( typeof value === 'object' && (Array.isArray(value) ? value.length === 0 : Object.keys(value).length === 0) - ), - ) + ) + ) + }) return cleanedEntries.length > 0 ? Object.fromEntries(cleanedEntries) : undefined } From 1ad7b55e057907c524c5200c843b7d2e56845211 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Thu, 24 Jul 2025 05:04:16 -0700 Subject: [PATCH 30/91] refactor(drizzle): use getTableName utility (#13257) ~~Sometimes, drizzle is adding the same join to the joins array twice (`addJoinTable`), despite the table being the same. This is due to a bug in `getNameFromDrizzleTable` where it would sometimes return a UUID instead of the table name.~~ ~~This PR changes it to read from the drizzle:BaseName symbol instead, which is correctly returning the table name in my testing. It falls back to `getTableName`, which uses drizzle:Name.~~ This for some reason fails the tests. Instead, this PR just uses the getTableName utility now instead of searching for the symbol manually. 
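For context, a short sketch of the utility the adapter now delegates to, using a hypothetical table definition rather than one of Payload's generated tables:

```ts
import { getTableName } from 'drizzle-orm'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'

// Hypothetical table used only to illustrate the helper
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

// Reads the name drizzle stored when the table was defined, instead of probing
// the table's symbols by description (which, per the notes above, could
// sometimes surface a UUID rather than the table name).
const name = getTableName(posts) // 'posts'
```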
--- packages/drizzle/src/utilities/getNameFromDrizzleTable.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts b/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts index 7395c46ab9..e8c4233f9a 100644 --- a/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts +++ b/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts @@ -1,9 +1,7 @@ import type { Table } from 'drizzle-orm' -export const getNameFromDrizzleTable = (table: Table): string => { - const symbol = Object.getOwnPropertySymbols(table).find((symb) => - symb.description.includes('Name'), - ) +import { getTableName } from 'drizzle-orm' - return table[symbol] +export const getNameFromDrizzleTable = (table: Table): string => { + return getTableName(table) } From 7ae4f8c709c429ec8e20bef9eb65dc94b1fe1643 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 24 Jul 2025 16:29:53 +0300 Subject: [PATCH 31/91] docs: add `status` to forbidden field names when using Postgres and drafts are enabled (#13233) Fixes https://github.com/payloadcms/payload/issues/13144 --- docs/fields/overview.mdx | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/fields/overview.mdx b/docs/fields/overview.mdx index 7ecc88d5f2..be4ffe2238 100644 --- a/docs/fields/overview.mdx +++ b/docs/fields/overview.mdx @@ -157,6 +157,7 @@ The following field names are forbidden and cannot be used: - `salt` - `hash` - `file` +- `status` - with Postgres Adapter and when drafts are enabled ### Field-level Hooks From e48427e59a149711edbd76fe178eb348bf6123c4 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Thu, 24 Jul 2025 10:12:45 -0400 Subject: [PATCH 32/91] feat(ui): expose refresh method to list drawer context (#13173) --- .../src/elements/ListDrawer/DrawerContent.tsx | 61 +++++++--- .../ui/src/elements/ListDrawer/Provider.tsx | 12 +- packages/ui/src/elements/ListDrawer/index.tsx | 19 +++ .../CustomListDrawer/Component.tsx | 60 +++++++++ .../collections/CustomListDrawer/index.ts | 16 +++ test/admin/config.ts | 2 + test/admin/e2e/list-view/e2e.spec.ts | 36 ++++++ test/admin/payload-types.ts | 23 ++++ tsconfig.base.json | 114 +++++++++++++----- 9 files changed, 293 insertions(+), 50 deletions(-) create mode 100644 test/admin/collections/CustomListDrawer/Component.tsx create mode 100644 test/admin/collections/CustomListDrawer/index.ts diff --git a/packages/ui/src/elements/ListDrawer/DrawerContent.tsx b/packages/ui/src/elements/ListDrawer/DrawerContent.tsx index 19c7c26c30..37232b8b82 100644 --- a/packages/ui/src/elements/ListDrawer/DrawerContent.tsx +++ b/packages/ui/src/elements/ListDrawer/DrawerContent.tsx @@ -1,10 +1,11 @@ 'use client' -import type { ListQuery } from 'payload' +import type { CollectionSlug, ListQuery } from 'payload' import { useModal } from '@faceless-ui/modal' import { hoistQueryParamsToAnd } from 'payload/shared' import React, { useCallback, useEffect, useState } from 'react' +import type { ListDrawerContextProps, ListDrawerContextType } from '../ListDrawer/Provider.js' import type { ListDrawerProps } from './types.js' import { useDocumentDrawer } from '../../elements/DocumentDrawer/index.js' @@ -25,7 +26,7 @@ export const ListDrawerContent: React.FC = ({ onBulkSelect, onSelect, overrideEntityVisibility = true, - selectedCollection: selectedCollectionFromProps, + selectedCollection: collectionSlugFromProps, }) => { const { closeModal, isModalOpen } = useModal() @@ -45,7 +46,7 @@ export const ListDrawerContent: React.FC = 
({ }) const [selectedOption, setSelectedOption] = useState>(() => { - const initialSelection = selectedCollectionFromProps || enabledCollections[0]?.slug + const initialSelection = collectionSlugFromProps || enabledCollections[0]?.slug const found = getEntityConfig({ collectionSlug: initialSelection }) return found @@ -61,20 +62,25 @@ export const ListDrawerContent: React.FC = ({ collectionSlug: selectedOption.value, }) - const updateSelectedOption = useEffectEvent((selectedCollectionFromProps: string) => { - if (selectedCollectionFromProps && selectedCollectionFromProps !== selectedOption?.value) { + const updateSelectedOption = useEffectEvent((collectionSlug: CollectionSlug) => { + if (collectionSlug && collectionSlug !== selectedOption?.value) { setSelectedOption({ - label: getEntityConfig({ collectionSlug: selectedCollectionFromProps })?.labels, - value: selectedCollectionFromProps, + label: getEntityConfig({ collectionSlug })?.labels, + value: collectionSlug, }) } }) useEffect(() => { - updateSelectedOption(selectedCollectionFromProps) - }, [selectedCollectionFromProps]) + updateSelectedOption(collectionSlugFromProps) + }, [collectionSlugFromProps]) - const renderList = useCallback( + /** + * This performs a full server round trip to get the list view for the selected collection. + * On the server, the data is freshly queried for the list view and all components are fully rendered. + * This work includes building column state, rendering custom components, etc. + */ + const refresh = useCallback( async ({ slug, query }: { query?: ListQuery; slug: string }) => { try { const newQuery: ListQuery = { ...(query || {}), where: { ...(query?.where || {}) } } @@ -129,9 +135,9 @@ export const ListDrawerContent: React.FC = ({ useEffect(() => { if (!ListView) { - void renderList({ slug: selectedOption?.value }) + void refresh({ slug: selectedOption?.value }) } - }, [renderList, ListView, selectedOption.value]) + }, [refresh, ListView, selectedOption.value]) const onCreateNew = useCallback( ({ doc }) => { @@ -149,19 +155,33 @@ export const ListDrawerContent: React.FC = ({ [closeModal, documentDrawerSlug, drawerSlug, onSelect, selectedOption.value], ) - const onQueryChange = useCallback( - (query: ListQuery) => { - void renderList({ slug: selectedOption?.value, query }) + const onQueryChange: ListDrawerContextProps['onQueryChange'] = useCallback( + (query) => { + void refresh({ slug: selectedOption?.value, query }) }, - [renderList, selectedOption.value], + [refresh, selectedOption.value], ) - const setMySelectedOption = useCallback( - (incomingSelection: Option) => { + const setMySelectedOption: ListDrawerContextProps['setSelectedOption'] = useCallback( + (incomingSelection) => { setSelectedOption(incomingSelection) - void renderList({ slug: incomingSelection?.value }) + void refresh({ slug: incomingSelection?.value }) }, - [renderList], + [refresh], + ) + + const refreshSelf: ListDrawerContextType['refresh'] = useCallback( + async (incomingCollectionSlug) => { + if (incomingCollectionSlug) { + setSelectedOption({ + label: getEntityConfig({ collectionSlug: incomingCollectionSlug })?.labels, + value: incomingCollectionSlug, + }) + } + + await refresh({ slug: selectedOption.value || incomingCollectionSlug }) + }, + [getEntityConfig, refresh, selectedOption.value], ) if (isLoading) { @@ -178,6 +198,7 @@ export const ListDrawerContent: React.FC = ({ onBulkSelect={onBulkSelect} onQueryChange={onQueryChange} onSelect={onSelect} + refresh={refreshSelf} selectedOption={selectedOption} 
setSelectedOption={setMySelectedOption} > diff --git a/packages/ui/src/elements/ListDrawer/Provider.tsx b/packages/ui/src/elements/ListDrawer/Provider.tsx index 8aeb4ed7ae..7a0156ed80 100644 --- a/packages/ui/src/elements/ListDrawer/Provider.tsx +++ b/packages/ui/src/elements/ListDrawer/Provider.tsx @@ -24,12 +24,17 @@ export type ListDrawerContextProps = { */ docID: string }) => void - readonly selectedOption?: Option - readonly setSelectedOption?: (option: Option) => void + readonly selectedOption?: Option + readonly setSelectedOption?: (option: Option) => void } export type ListDrawerContextType = { - isInDrawer: boolean + readonly isInDrawer: boolean + /** + * When called, will either refresh the list view with its currently selected collection. + * If an collection slug is provided, will use that instead of the currently selected one. + */ + readonly refresh: (collectionSlug?: CollectionSlug) => Promise } & ListDrawerContextProps export const ListDrawerContext = createContext({} as ListDrawerContextType) @@ -37,6 +42,7 @@ export const ListDrawerContext = createContext({} as ListDrawerContextType) export const ListDrawerContextProvider: React.FC< { children: React.ReactNode + refresh: ListDrawerContextType['refresh'] } & ListDrawerContextProps > = ({ children, ...rest }) => { return ( diff --git a/packages/ui/src/elements/ListDrawer/index.tsx b/packages/ui/src/elements/ListDrawer/index.tsx index c4eaae6188..342bdb5600 100644 --- a/packages/ui/src/elements/ListDrawer/index.tsx +++ b/packages/ui/src/elements/ListDrawer/index.tsx @@ -51,6 +51,25 @@ export const ListDrawer: React.FC = (props) => { ) } +/** + * Returns an array containing the ListDrawer component, the ListDrawerToggler component, and an object with state and methods for controlling the drawer. 
+ * @example + * import { useListDrawer } from '@payloadcms/ui' + * + * // inside a React component + * const [ListDrawer, ListDrawerToggler, { closeDrawer, openDrawer }] = useListDrawer({ + * collectionSlugs: ['users'], + * selectedCollection: 'users', + * }) + * + * // inside the return statement + * return ( + * <> + * + * Open List Drawer + * + * ) + */ export const useListDrawer: UseListDrawer = ({ collectionSlugs: collectionSlugsFromProps, filterOptions, diff --git a/test/admin/collections/CustomListDrawer/Component.tsx b/test/admin/collections/CustomListDrawer/Component.tsx new file mode 100644 index 0000000000..c3b6c09a23 --- /dev/null +++ b/test/admin/collections/CustomListDrawer/Component.tsx @@ -0,0 +1,60 @@ +'use client' +import { toast, useListDrawer, useListDrawerContext, useTranslation } from '@payloadcms/ui' +import React, { useCallback } from 'react' + +export const CustomListDrawer = () => { + const [isCreating, setIsCreating] = React.useState(false) + + // this is the _outer_ drawer context (if any), not the one for the list drawer below + const { refresh } = useListDrawerContext() + const { t } = useTranslation() + + const [ListDrawer, ListDrawerToggler] = useListDrawer({ + collectionSlugs: ['custom-list-drawer'], + }) + + const createDoc = useCallback(async () => { + if (isCreating) { + return + } + + setIsCreating(true) + + try { + await fetch('/api/custom-list-drawer', { + body: JSON.stringify({}), + credentials: 'include', + headers: { + 'Content-Type': 'application/json', + }, + method: 'POST', + }) + + setIsCreating(false) + + toast.success( + t('general:successfullyCreated', { + label: 'Custom List Drawer', + }), + ) + + // In the root document view, there is no outer drawer context, so this will be `undefined` + if (typeof refresh === 'function') { + await refresh() + } + } catch (_err) { + console.error('Error creating document:', _err) // eslint-disable-line no-console + setIsCreating(false) + } + }, [isCreating, refresh, t]) + + return ( +
+ + + Open list drawer +
+ ) +} diff --git a/test/admin/collections/CustomListDrawer/index.ts b/test/admin/collections/CustomListDrawer/index.ts new file mode 100644 index 0000000000..5a8caefc79 --- /dev/null +++ b/test/admin/collections/CustomListDrawer/index.ts @@ -0,0 +1,16 @@ +import type { CollectionConfig } from 'payload' + +export const CustomListDrawer: CollectionConfig = { + slug: 'custom-list-drawer', + fields: [ + { + name: 'customListDrawer', + type: 'ui', + admin: { + components: { + Field: '/collections/CustomListDrawer/Component.js#CustomListDrawer', + }, + }, + }, + ], +} diff --git a/test/admin/config.ts b/test/admin/config.ts index 52d1d84e4e..69ba64555d 100644 --- a/test/admin/config.ts +++ b/test/admin/config.ts @@ -5,6 +5,7 @@ import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' import { Array } from './collections/Array.js' import { BaseListFilter } from './collections/BaseListFilter.js' import { CustomFields } from './collections/CustomFields/index.js' +import { CustomListDrawer } from './collections/CustomListDrawer/index.js' import { CustomViews1 } from './collections/CustomViews1.js' import { CustomViews2 } from './collections/CustomViews2.js' import { DisableBulkEdit } from './collections/DisableBulkEdit.js' @@ -185,6 +186,7 @@ export default buildConfigWithDefaults({ Placeholder, UseAsTitleGroupField, DisableBulkEdit, + CustomListDrawer, ], globals: [ GlobalHidden, diff --git a/test/admin/e2e/list-view/e2e.spec.ts b/test/admin/e2e/list-view/e2e.spec.ts index 2e8ec07ccf..bdcc5ef83d 100644 --- a/test/admin/e2e/list-view/e2e.spec.ts +++ b/test/admin/e2e/list-view/e2e.spec.ts @@ -1676,6 +1676,42 @@ describe('List View', () => { await expect(page.locator('.list-selection')).toContainText('2 selected') }) + + test('should refresh custom list drawer using the refresh method from context', async () => { + const url = new AdminUrlUtil(serverURL, 'custom-list-drawer') + + await payload.delete({ + collection: 'custom-list-drawer', + where: { id: { exists: true } }, + }) + + const { id } = await payload.create({ + collection: 'custom-list-drawer', + data: {}, + }) + + await page.goto(url.list) + + await expect(page.locator('.table > table > tbody > tr')).toHaveCount(1) + + await page.goto(url.edit(id)) + + await page.locator('#open-custom-list-drawer').click() + const drawer = page.locator('[id^=list-drawer_1_]') + await expect(drawer).toBeVisible() + + await expect(drawer.locator('.table > table > tbody > tr')).toHaveCount(1) + + await drawer.locator('.list-header__create-new-button.doc-drawer__toggler').click() + const createNewDrawer = page.locator('[id^=doc-drawer_custom-list-drawer_1_]') + await createNewDrawer.locator('#create-custom-list-drawer-doc').click() + + await expect(page.locator('.payload-toast-container')).toContainText('successfully') + + await createNewDrawer.locator('.doc-drawer__header-close').click() + + await expect(drawer.locator('.table > table > tbody > tr')).toHaveCount(2) + }) }) async function createPost(overrides?: Partial): Promise { diff --git a/test/admin/payload-types.ts b/test/admin/payload-types.ts index 10e34bc1bd..0a739c4fa0 100644 --- a/test/admin/payload-types.ts +++ b/test/admin/payload-types.ts @@ -93,6 +93,7 @@ export interface Config { placeholder: Placeholder; 'use-as-title-group-field': UseAsTitleGroupField; 'disable-bulk-edit': DisableBulkEdit; + 'custom-list-drawer': CustomListDrawer; 'payload-locked-documents': PayloadLockedDocument; 'payload-preferences': PayloadPreference; 'payload-migrations': PayloadMigration; @@ -125,6 +126,7 
@@ export interface Config { placeholder: PlaceholderSelect | PlaceholderSelect; 'use-as-title-group-field': UseAsTitleGroupFieldSelect | UseAsTitleGroupFieldSelect; 'disable-bulk-edit': DisableBulkEditSelect | DisableBulkEditSelect; + 'custom-list-drawer': CustomListDrawerSelect | CustomListDrawerSelect; 'payload-locked-documents': PayloadLockedDocumentsSelect | PayloadLockedDocumentsSelect; 'payload-preferences': PayloadPreferencesSelect | PayloadPreferencesSelect; 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; @@ -565,6 +567,15 @@ export interface DisableBulkEdit { updatedAt: string; createdAt: string; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "custom-list-drawer". + */ +export interface CustomListDrawer { + id: string; + updatedAt: string; + createdAt: string; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "payload-locked-documents". @@ -675,6 +686,10 @@ export interface PayloadLockedDocument { | ({ relationTo: 'disable-bulk-edit'; value: string | DisableBulkEdit; + } | null) + | ({ + relationTo: 'custom-list-drawer'; + value: string | CustomListDrawer; } | null); globalSlug?: string | null; user: { @@ -1074,6 +1089,14 @@ export interface DisableBulkEditSelect { updatedAt?: T; createdAt?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "custom-list-drawer_select". + */ +export interface CustomListDrawerSelect { + updatedAt?: T; + createdAt?: T; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "payload-locked-documents_select". diff --git a/tsconfig.base.json b/tsconfig.base.json index 0898ad390f..5e4e343504 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,8 +21,15 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": ["DOM", "DOM.Iterable", "ES2022"], - "types": ["node", "jest"], + "lib": [ + "DOM", + "DOM.Iterable", + "ES2022" + ], + "types": [ + "node", + "jest" + ], "incremental": true, "isolatedModules": true, "plugins": [ @@ -31,36 +38,72 @@ } ], "paths": { - "@payload-config": ["./test/_community/config.ts"], - "@payloadcms/admin-bar": ["./packages/admin-bar/src"], - "@payloadcms/live-preview": ["./packages/live-preview/src"], - "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], - "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], - "@payloadcms/ui": ["./packages/ui/src/exports/client/index.ts"], - "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], - "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], - "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], - "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], - "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], + "@payload-config": [ + "./test/admin/config.ts" + ], + "@payloadcms/admin-bar": [ + "./packages/admin-bar/src" + ], + "@payloadcms/live-preview": [ + "./packages/live-preview/src" + ], + "@payloadcms/live-preview-react": [ + "./packages/live-preview-react/src/index.ts" + ], + "@payloadcms/live-preview-vue": [ + "./packages/live-preview-vue/src/index.ts" + ], + "@payloadcms/ui": [ + "./packages/ui/src/exports/client/index.ts" + ], + "@payloadcms/ui/shared": [ + "./packages/ui/src/exports/shared/index.ts" + ], + "@payloadcms/ui/rsc": [ + "./packages/ui/src/exports/rsc/index.ts" + ], + "@payloadcms/ui/scss": [ + "./packages/ui/src/scss.scss" + ], + 
"@payloadcms/ui/scss/app.scss": [ + "./packages/ui/src/scss/app.scss" + ], + "@payloadcms/next/*": [ + "./packages/next/src/exports/*.ts" + ], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], - "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], + "@payloadcms/richtext-lexical/rsc": [ + "./packages/richtext-lexical/src/exports/server/rsc.ts" + ], + "@payloadcms/richtext-slate/rsc": [ + "./packages/richtext-slate/src/exports/server/rsc.ts" + ], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], - "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], - "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], - "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], + "@payloadcms/plugin-seo/client": [ + "./packages/plugin-seo/src/exports/client.ts" + ], + "@payloadcms/plugin-sentry/client": [ + "./packages/plugin-sentry/src/exports/client.ts" + ], + "@payloadcms/plugin-stripe/client": [ + "./packages/plugin-stripe/src/exports/client.ts" + ], + "@payloadcms/plugin-search/client": [ + "./packages/plugin-search/src/exports/client.ts" + ], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], + "@payloadcms/plugin-multi-tenant/rsc": [ + "./packages/plugin-multi-tenant/src/exports/rsc.ts" + ], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -70,25 +113,42 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": ["./packages/plugin-multi-tenant/src/index.ts"], + "@payloadcms/plugin-multi-tenant": [ + "./packages/plugin-multi-tenant/src/index.ts" + ], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": ["./packages/next/src/exports/*"], - "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], - "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], + "@payloadcms/next": [ + "./packages/next/src/exports/*" + ], + "@payloadcms/storage-azure/client": [ + "./packages/storage-azure/src/exports/client.ts" + ], + "@payloadcms/storage-s3/client": [ + "./packages/storage-s3/src/exports/client.ts" + ], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], + "@payloadcms/storage-gcs/client": [ + "./packages/storage-gcs/src/exports/client.ts" + ], "@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": ["${configDir}/src"], - "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] + "include": [ + 
"${configDir}/src" + ], + "exclude": [ + "${configDir}/dist", + "${configDir}/build", + "${configDir}/temp", + "**/*.spec.ts" + ] } From 8f85da893135e3b97cbb9d5b94783dd4390f019e Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 24 Jul 2025 11:36:46 -0400 Subject: [PATCH 33/91] fix(plugin-import-export): json preview and downloads preserve nesting and exclude disabled fields (#13210) ### What? Improves both the JSON preview and export functionality in the import-export plugin: - Preserves proper nesting of object and array fields (e.g., groups, tabs, arrays) - Excludes any fields explicitly marked as `disabled` via `custom.plugin-import-export` - Ensures downloaded files use proper JSON formatting when `format` is `json` (no CSV-style flattening) ### Why? Previously: - The JSON preview flattened all fields to a single level and included disabled fields. - Exported files with `format: json` were still CSV-style data encoded as `.json`, rather than real JSON. ### How? - Refactored `/preview-data` JSON handling to preserve original document shape. - Applied `removeDisabledFields` to clean nested fields using dot-notation paths. - Updated `createExport` to skip `flattenObject` for JSON formats, using a nested JSON filter instead. - Fixed streaming and buffered export paths to output valid JSON arrays when `format` is `json`. --- .../src/components/Preview/index.tsx | 11 +- .../src/export/createExport.ts | 124 ++++++++++++------ packages/plugin-import-export/src/index.ts | 67 +++++++--- .../src/utilities/getFlattenedFieldKeys.ts | 42 +++--- .../src/utilities/getvalueAtPath.ts | 59 +++++++++ .../src/utilities/removeDisabledFields.ts | 80 +++++++++++ .../src/utilities/setNestedValue.ts | 65 +++++++++ .../plugin-import-export/collections/Pages.ts | 20 +++ test/plugin-import-export/int.spec.ts | 23 ++++ test/plugin-import-export/payload-types.ts | 2 + 10 files changed, 413 insertions(+), 80 deletions(-) create mode 100644 packages/plugin-import-export/src/utilities/getvalueAtPath.ts create mode 100644 packages/plugin-import-export/src/utilities/removeDisabledFields.ts create mode 100644 packages/plugin-import-export/src/utilities/setNestedValue.ts diff --git a/packages/plugin-import-export/src/components/Preview/index.tsx b/packages/plugin-import-export/src/components/Preview/index.tsx index 4cafe1f4fe..3dbdd9a417 100644 --- a/packages/plugin-import-export/src/components/Preview/index.tsx +++ b/packages/plugin-import-export/src/components/Preview/index.tsx @@ -68,6 +68,7 @@ export const Preview = () => { collectionSlug, draft, fields, + format, limit, locale, sort, @@ -115,8 +116,13 @@ export const Preview = () => { const fieldKeys = Array.isArray(fields) && fields.length > 0 - ? selectedKeys // strictly only what was selected - : [...selectedKeys, ...defaultMetaFields.filter((key) => allKeys.includes(key))] + ? 
selectedKeys // strictly use selected fields only + : [ + ...selectedKeys, + ...defaultMetaFields.filter( + (key) => allKeys.includes(key) && !selectedKeys.includes(key), + ), + ] // Build columns based on flattened keys const newColumns: Column[] = fieldKeys.map((key) => ({ @@ -158,6 +164,7 @@ export const Preview = () => { disabledFieldRegexes, draft, fields, + format, i18n, limit, locale, diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 40e1b954ff..fba5e11a0c 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -114,7 +114,7 @@ export const createExport = async (args: CreateExportArgs) => { const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex) - const filterDisabled = (row: Record): Record => { + const filterDisabledCSV = (row: Record): Record => { const filtered: Record = {} for (const [key, value] of Object.entries(row)) { @@ -127,35 +127,62 @@ export const createExport = async (args: CreateExportArgs) => { return filtered } + const filterDisabledJSON = (doc: any, parentPath = ''): any => { + if (Array.isArray(doc)) { + return doc.map((item) => filterDisabledJSON(item, parentPath)) + } + + if (typeof doc !== 'object' || doc === null) { + return doc + } + + const filtered: Record = {} + for (const [key, value] of Object.entries(doc)) { + const currentPath = parentPath ? `${parentPath}.${key}` : key + + // Only remove if this exact path is disabled + const isDisabled = disabledFields.includes(currentPath) + + if (!isDisabled) { + filtered[key] = filterDisabledJSON(value, currentPath) + } + } + + return filtered + } + if (download) { if (debug) { req.payload.logger.debug('Pre-scanning all columns before streaming') } - const allColumnsSet = new Set() const allColumns: string[] = [] - let scanPage = 1 - let hasMore = true - while (hasMore) { - const result = await payload.find({ ...findArgs, page: scanPage }) + if (isCSV) { + const allColumnsSet = new Set() + let scanPage = 1 + let hasMore = true - result.docs.forEach((doc) => { - const flat = filterDisabled(flattenObject({ doc, fields, toCSVFunctions })) - Object.keys(flat).forEach((key) => { - if (!allColumnsSet.has(key)) { - allColumnsSet.add(key) - allColumns.push(key) - } + while (hasMore) { + const result = await payload.find({ ...findArgs, page: scanPage }) + + result.docs.forEach((doc) => { + const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })) + Object.keys(flat).forEach((key) => { + if (!allColumnsSet.has(key)) { + allColumnsSet.add(key) + allColumns.push(key) + } + }) }) - }) - hasMore = result.hasNextPage - scanPage += 1 - } + hasMore = result.hasNextPage + scanPage += 1 + } - if (debug) { - req.payload.logger.debug(`Discovered ${allColumns.length} columns`) + if (debug) { + req.payload.logger.debug(`Discovered ${allColumns.length} columns`) + } } const encoder = new TextEncoder() @@ -171,28 +198,48 @@ export const createExport = async (args: CreateExportArgs) => { } if (result.docs.length === 0) { + // Close JSON array properly if JSON + if (!isCSV) { + this.push(encoder.encode(']')) + } this.push(null) return } - const batchRows = result.docs.map((doc) => - filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), - ) + if (isCSV) { + // --- CSV Streaming --- + const batchRows = result.docs.map((doc) => + filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })), + ) - const paddedRows = 
batchRows.map((row) => { - const fullRow: Record = {} - for (const col of allColumns) { - fullRow[col] = row[col] ?? '' + const paddedRows = batchRows.map((row) => { + const fullRow: Record = {} + for (const col of allColumns) { + fullRow[col] = row[col] ?? '' + } + return fullRow + }) + + const csvString = stringify(paddedRows, { + header: isFirstBatch, + columns: allColumns, + }) + + this.push(encoder.encode(csvString)) + } else { + // --- JSON Streaming --- + const batchRows = result.docs.map((doc) => filterDisabledJSON(doc)) + + // Convert each filtered/flattened row into JSON string + const batchJSON = batchRows.map((row) => JSON.stringify(row)).join(',') + + if (isFirstBatch) { + this.push(encoder.encode('[' + batchJSON)) + } else { + this.push(encoder.encode(',' + batchJSON)) } - return fullRow - }) + } - const csvString = stringify(paddedRows, { - header: isFirstBatch, - columns: allColumns, - }) - - this.push(encoder.encode(csvString)) isFirstBatch = false streamPage += 1 @@ -200,6 +247,9 @@ export const createExport = async (args: CreateExportArgs) => { if (debug) { req.payload.logger.debug('Stream complete - no more pages') } + if (!isCSV) { + this.push(encoder.encode(']')) + } this.push(null) // End the stream } }, @@ -239,7 +289,7 @@ export const createExport = async (args: CreateExportArgs) => { if (isCSV) { const batchRows = result.docs.map((doc) => - filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), + filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })), ) // Track discovered column keys @@ -254,8 +304,8 @@ export const createExport = async (args: CreateExportArgs) => { rows.push(...batchRows) } else { - const jsonInput = result.docs.map((doc) => JSON.stringify(doc)) - outputData.push(jsonInput.join(',\n')) + const batchRows = result.docs.map((doc) => filterDisabledJSON(doc)) + outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n')) } hasNextPage = result.hasNextPage diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index a64e80bf15..366d444c54 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -13,6 +13,9 @@ import { getExportCollection } from './getExportCollection.js' import { translations } from './translations/index.js' import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js' import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js' +import { getValueAtPath } from './utilities/getvalueAtPath.js' +import { removeDisabledFields } from './utilities/removeDisabledFields.js' +import { setNestedValue } from './utilities/setNestedValue.js' export const importExportPlugin = (pluginConfig: ImportExportPluginConfig) => @@ -91,6 +94,7 @@ export const importExportPlugin = collectionSlug: string draft?: 'no' | 'yes' fields?: string[] + format?: 'csv' | 'json' limit?: number locale?: string sort?: any @@ -120,29 +124,58 @@ export const importExportPlugin = where, }) + const isCSV = req?.data?.format === 'csv' const docs = result.docs - const toCSVFunctions = getCustomFieldFunctions({ - fields: collection.config.fields as FlattenedField[], - }) + let transformed: Record[] = [] - const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[]) - - const transformed = docs.map((doc) => { - const row = flattenObject({ - doc, - fields, - toCSVFunctions, + if (isCSV) { + const toCSVFunctions = getCustomFieldFunctions({ + fields: collection.config.fields as 
FlattenedField[], }) - for (const key of possibleKeys) { - if (!(key in row)) { - row[key] = null - } - } + const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[]) - return row - }) + transformed = docs.map((doc) => { + const row = flattenObject({ + doc, + fields, + toCSVFunctions, + }) + + for (const key of possibleKeys) { + if (!(key in row)) { + row[key] = null + } + } + + return row + }) + } else { + const disabledFields = + collection.config.admin.custom?.['plugin-import-export']?.disabledFields + + transformed = docs.map((doc) => { + let output: Record = { ...doc } + + // Remove disabled fields first + output = removeDisabledFields(output, disabledFields) + + // Then trim to selected fields only (if fields are provided) + if (Array.isArray(fields) && fields.length > 0) { + const trimmed: Record = {} + + for (const key of fields) { + const value = getValueAtPath(output, key) + setNestedValue(trimmed, key, value ?? null) + } + + output = trimmed + } + + return output + }) + } return Response.json({ docs: transformed, diff --git a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts index f124208dc9..db25206b8b 100644 --- a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts +++ b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts @@ -22,21 +22,18 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix 'plugin-import-export' in field.custom && field.custom['plugin-import-export']?.toCSV - if (!('name' in field) || typeof field.name !== 'string' || fieldHasToCSVFunction) { - return - } - - const name = prefix ? `${prefix}_${field.name}` : field.name + const name = 'name' in field && typeof field.name === 'string' ? field.name : undefined + const fullKey = name && prefix ? `${prefix}_${name}` : (name ?? prefix) switch (field.type) { case 'array': { - const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${name}_0`) + const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${fullKey}_0`) keys.push(...subKeys) break } case 'blocks': { field.blocks.forEach((block) => { - const blockPrefix = `${name}_0_${block.slug}` + const blockPrefix = `${fullKey}_0_${block.slug}` keys.push(`${blockPrefix}_blockType`) keys.push(`${blockPrefix}_id`) keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix)) @@ -46,45 +43,42 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix case 'collapsible': case 'group': case 'row': - keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], name)) + keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], fullKey)) break case 'relationship': if (field.hasMany) { if (Array.isArray(field.relationTo)) { // hasMany polymorphic - keys.push(`${name}_0_relationTo`, `${name}_0_id`) + keys.push(`${fullKey}_0_relationTo`, `${fullKey}_0_id`) } else { // hasMany monomorphic - keys.push(`${name}_0`) + keys.push(`${fullKey}_0`) } } else { if (Array.isArray(field.relationTo)) { // hasOne polymorphic - keys.push(`${name}_relationTo`, `${name}_id`) + keys.push(`${fullKey}_relationTo`, `${fullKey}_id`) } else { // hasOne monomorphic - keys.push(name) + keys.push(fullKey) } } break case 'tabs': - if (field.tabs) { - field.tabs.forEach((tab) => { - if (tab.name) { - const tabPrefix = prefix ? 
`${prefix}_${tab.name}` : tab.name - keys.push(...getFlattenedFieldKeys(tab.fields, tabPrefix)) - } else { - keys.push(...getFlattenedFieldKeys(tab.fields, prefix)) - } - }) - } + field.tabs?.forEach((tab) => { + const tabPrefix = tab.name ? `${fullKey}_${tab.name}` : fullKey + keys.push(...getFlattenedFieldKeys(tab.fields || [], tabPrefix)) + }) break default: + if (!name || fieldHasToCSVFunction) { + break + } if ('hasMany' in field && field.hasMany) { // Push placeholder for first index - keys.push(`${name}_0`) + keys.push(`${fullKey}_0`) } else { - keys.push(name) + keys.push(fullKey) } break } diff --git a/packages/plugin-import-export/src/utilities/getvalueAtPath.ts b/packages/plugin-import-export/src/utilities/getvalueAtPath.ts new file mode 100644 index 0000000000..4173b51730 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/getvalueAtPath.ts @@ -0,0 +1,59 @@ +/** + * Safely retrieves a deeply nested value from an object using a dot-notation path. + * + * Supports: + * - Indexed array access (e.g., "array.0.field1") + * - Polymorphic blocks or keyed unions (e.g., "blocks.0.hero.title"), where the block key + * (e.g., "hero") maps to a nested object inside the block item. + * + * + * @param obj - The input object to traverse. + * @param path - A dot-separated string representing the path to retrieve. + * @returns The value at the specified path, or undefined if not found. + */ +export const getValueAtPath = (obj: unknown, path: string): unknown => { + if (!obj || typeof obj !== 'object') { + return undefined + } + + const parts = path.split('.') + let current: any = obj + + for (const part of parts) { + if (current == null) { + return undefined + } + + // If the path part is a number, treat it as an array index + if (!isNaN(Number(part))) { + current = current[Number(part)] + continue + } + + // Special case: if current is an array of blocks like [{ hero: { title: '...' } }] + // and the path is "blocks.0.hero.title", then `part` would be "hero" + if (Array.isArray(current)) { + const idx = Number(parts[parts.indexOf(part) - 1]) + const blockItem = current[idx] + + if (typeof blockItem === 'object') { + const keys = Object.keys(blockItem) + + // Find the key (e.g., "hero") that maps to an object + const matchingBlock = keys.find( + (key) => blockItem[key] && typeof blockItem[key] === 'object', + ) + + if (matchingBlock && part === matchingBlock) { + current = blockItem[matchingBlock] + continue + } + } + } + + // Fallback to plain object key access + current = current[part] + } + + return current +} diff --git a/packages/plugin-import-export/src/utilities/removeDisabledFields.ts b/packages/plugin-import-export/src/utilities/removeDisabledFields.ts new file mode 100644 index 0000000000..4f68799b42 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/removeDisabledFields.ts @@ -0,0 +1,80 @@ +/** + * Recursively removes fields from a deeply nested object based on dot-notation paths. + * + * This utility supports removing: + * - Nested fields in plain objects (e.g., "group.value") + * - Fields inside arrays of objects (e.g., "group.array.field1") + * + * It safely traverses both object and array structures and avoids mutating the original input. + * + * @param obj - The original object to clean. + * @param disabled - An array of dot-separated paths indicating which fields to remove. + * @returns A deep clone of the original object with specified fields removed. 
+ */ + +export const removeDisabledFields = ( + obj: Record, + disabled: string[] = [], +): Record => { + if (!disabled.length) { + return obj + } + + const clone = structuredClone(obj) + + // Process each disabled path independently + for (const path of disabled) { + const parts = path.split('.') + + /** + * Recursively walks the object tree according to the dot path, + * and deletes the field once the full path is reached. + * + * @param target - The current object or array being traversed + * @param i - The index of the current path part + */ + const removeRecursively = (target: any, i = 0): void => { + if (target == null) { + return + } + + const key = parts[i] + + // If at the final part of the path, perform the deletion + if (i === parts.length - 1) { + // If the current level is an array, delete the key from each item + if (Array.isArray(target)) { + for (const item of target) { + if (item && typeof item === 'object' && key !== undefined) { + delete item[key as keyof typeof item] + } + } + } else if (typeof target === 'object' && key !== undefined) { + delete target[key] + } + return + } + + if (key === undefined) { + return + } + + // Traverse to the next level in the path + const next = target[key] + + if (Array.isArray(next)) { + // If the next value is an array, recurse into each item + for (const item of next) { + removeRecursively(item, i + 1) + } + } else { + // Otherwise, continue down the object path + removeRecursively(next, i + 1) + } + } + + removeRecursively(clone) + } + + return clone +} diff --git a/packages/plugin-import-export/src/utilities/setNestedValue.ts b/packages/plugin-import-export/src/utilities/setNestedValue.ts new file mode 100644 index 0000000000..89e5487329 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/setNestedValue.ts @@ -0,0 +1,65 @@ +/** + * Sets a value deeply into a nested object or array, based on a dot-notation path. + * + * This function: + * - Supports array indexing (e.g., "array.0.field1") + * - Creates intermediate arrays/objects as needed + * - Mutates the target object directly + * + * @example + * const obj = {} + * setNestedValue(obj, 'group.array.0.field1', 'hello') + * // Result: { group: { array: [ { field1: 'hello' } ] } } + * + * @param obj - The target object to mutate. + * @param path - A dot-separated string path indicating where to assign the value. + * @param value - The value to set at the specified path. 
+ */ + +export const setNestedValue = ( + obj: Record, + path: string, + value: unknown, +): void => { + const parts = path.split('.') + let current: any = obj + + for (let i = 0; i < parts.length; i++) { + const part = parts[i] + const isLast = i === parts.length - 1 + const isIndex = !Number.isNaN(Number(part)) + + if (isIndex) { + const index = Number(part) + + // Ensure the current target is an array + if (!Array.isArray(current)) { + current = [] + } + + // Ensure the array slot is initialized + if (!current[index]) { + current[index] = {} + } + + if (isLast) { + current[index] = value + } else { + current = current[index] as Record + } + } else { + // Ensure the object key exists + if (isLast) { + if (typeof part === 'string') { + current[part] = value + } + } else { + if (typeof current[part as string] !== 'object' || current[part as string] === null) { + current[part as string] = {} + } + + current = current[part as string] as Record + } + } + } +} diff --git a/test/plugin-import-export/collections/Pages.ts b/test/plugin-import-export/collections/Pages.ts index 35f38032fd..818978b15e 100644 --- a/test/plugin-import-export/collections/Pages.ts +++ b/test/plugin-import-export/collections/Pages.ts @@ -61,6 +61,11 @@ export const Pages: CollectionConfig = { name: 'value', type: 'text', defaultValue: 'group value', + // custom: { + // 'plugin-import-export': { + // disabled: true, + // }, + // }, }, { name: 'ignore', @@ -216,5 +221,20 @@ export const Pages: CollectionConfig = { relationTo: ['users', 'posts'], hasMany: true, }, + { + type: 'collapsible', + label: 'Collapsible Field', + fields: [ + { + name: 'textFieldInCollapsible', + type: 'text', + // custom: { + // 'plugin-import-export': { + // disabled: true, + // }, + // }, + }, + ], + }, ], } diff --git a/test/plugin-import-export/int.spec.ts b/test/plugin-import-export/int.spec.ts index 64d2516de0..caa57f5d2e 100644 --- a/test/plugin-import-export/int.spec.ts +++ b/test/plugin-import-export/int.spec.ts @@ -467,6 +467,29 @@ describe('@payloadcms/plugin-import-export', () => { expect(data[0].title).toStrictEqual('JSON 0') }) + it('should download an existing export JSON file', async () => { + const response = await restClient.POST('/exports/download', { + body: JSON.stringify({ + data: { + collectionSlug: 'pages', + fields: ['id', 'title'], + format: 'json', + sort: 'title', + }, + }), + headers: { 'Content-Type': 'application/json' }, + }) + + expect(response.status).toBe(200) + expect(response.headers.get('content-type')).toMatch(/application\/json/) + + const data = await response.json() + + expect(Array.isArray(data)).toBe(true) + expect(['string', 'number']).toContain(typeof data[0].id) + expect(typeof data[0].title).toBe('string') + }) + it('should create an export with every field when no fields are defined', async () => { let doc = await payload.create({ collection: 'exports', diff --git a/test/plugin-import-export/payload-types.ts b/test/plugin-import-export/payload-types.ts index 83ef2a640d..c598e3b25e 100644 --- a/test/plugin-import-export/payload-types.ts +++ b/test/plugin-import-export/payload-types.ts @@ -242,6 +242,7 @@ export interface Page { } )[] | null; + textFieldInCollapsible?: string | null; updatedAt: string; createdAt: string; _status?: ('draft' | 'published') | null; @@ -579,6 +580,7 @@ export interface PagesSelect { excerpt?: T; hasOnePolymorphic?: T; hasManyPolymorphic?: T; + textFieldInCollapsible?: T; updatedAt?: T; createdAt?: T; _status?: T; From a83ed5ebb5ce6a03ff691c41f24c4b582d51a5f1 Mon Sep 17 
00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 24 Jul 2025 18:42:17 +0300 Subject: [PATCH 34/91] fix(db-postgres): search is broken when `useAsTitle` is not specified (#13232) Fixes https://github.com/payloadcms/payload/issues/13171 --- packages/drizzle/src/queries/parseParams.ts | 5 ++++- test/database/int.spec.ts | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/drizzle/src/queries/parseParams.ts b/packages/drizzle/src/queries/parseParams.ts index b43dad70a4..9c12c69416 100644 --- a/packages/drizzle/src/queries/parseParams.ts +++ b/packages/drizzle/src/queries/parseParams.ts @@ -219,7 +219,10 @@ export function parseParams({ if ( operator === 'like' && - (field.type === 'number' || table[columnName].columnType === 'PgUUID') + (field.type === 'number' || + field.type === 'relationship' || + field.type === 'upload' || + table[columnName].columnType === 'PgUUID') ) { operator = 'equals' } diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 8cc74b84f7..c4cbff4393 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -3001,6 +3001,21 @@ describe('database', () => { } }) + it('should allow to query like by ID with draft: true', async () => { + const category = await payload.create({ + collection: 'categories', + data: { title: 'category123' }, + }) + const res = await payload.find({ + collection: 'categories', + draft: true, + // eslint-disable-next-line jest/no-conditional-in-test + where: { id: { like: typeof category.id === 'number' ? `${category.id}` : category.id } }, + }) + expect(res.docs).toHaveLength(1) + expect(res.docs[0].id).toBe(category.id) + }) + it('should allow incremental number update', async () => { const post = await payload.create({ collection: 'posts', data: { number: 1, title: 'post' } }) From 7e81d30808bf6a9ea3256e060c05d0133ae31b1f Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 24 Jul 2025 12:18:49 -0400 Subject: [PATCH 35/91] fix(ui): ensure document unlocks when logging out from edit view of a locked document (#13142) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What? Refactors the `LeaveWithoutSaving` modal to be generic and delegates document unlock logic back to the `DefaultEditView` component via a callback. ### Why? Previously, `unlockDocument` was triggered in a cleanup `useEffect` in the edit view. When logging out from the edit view, the unlock request would often fail due to the session ending — leaving the document in a locked state. ### How? - Introduced `onConfirm` and `onPrevent` props for `LeaveWithoutSaving`. - Moved all document lock/unlock logic into `DefaultEditView`’s `handleLeaveConfirm`. - Captures the next navigation target via `onPrevent` and evaluates whether to unlock based on: - Locking being enabled. - Current user owning the lock. - Navigation not targeting internal admin views (`/preview`, `/api`, `/versions`). 
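
To make the new contract concrete, here is a condensed sketch of how the edit view is expected to wire the two callbacks (the full change is in the diff below). The wrapper component, its props, and the import path are illustrative simplifications of `DefaultEditView`'s internals, not the exact implementation:

```tsx
import React, { useCallback, useRef } from 'react'

// Import path is illustrative — adjust to wherever LeaveWithoutSaving is exported in your setup.
import { LeaveWithoutSaving } from '../../elements/LeaveWithoutSaving/index.js'

// Sketch only: `unlockDocument`, `id`, `collectionSlug`, `globalSlug`, and
// `lockIsOwnedByCurrentUser` stand in for the real values held by DefaultEditView.
export const EditViewLeaveGuard: React.FC<{
  collectionSlug?: string
  globalSlug?: string
  id?: number | string
  lockIsOwnedByCurrentUser: boolean
  unlockDocument: (id: number | string, slug: string) => Promise<void>
}> = ({ collectionSlug, globalSlug, id, lockIsOwnedByCurrentUser, unlockDocument }) => {
  // Capture where the user is trying to navigate before the confirmation modal opens
  const nextHrefRef = useRef<null | string>(null)

  const handlePrevent = useCallback((nextHref: null | string) => {
    nextHrefRef.current = nextHref
  }, [])

  const handleLeaveConfirm = useCallback(async () => {
    const nextPath = nextHrefRef.current ? new URL(nextHrefRef.current).pathname : ''

    // Keep the lock when only switching to internal document views
    const isInternalView = ['/preview', '/api', '/versions'].some((path) =>
      nextPath.includes(path),
    )

    const entitySlug = collectionSlug ?? globalSlug

    // Only unlock when leaving the document entirely and the current user owns the lock
    if (!isInternalView && lockIsOwnedByCurrentUser && id && entitySlug) {
      await unlockDocument(id, entitySlug)
    }
  }, [collectionSlug, globalSlug, id, lockIsOwnedByCurrentUser, unlockDocument])

  return <LeaveWithoutSaving onConfirm={handleLeaveConfirm} onPrevent={handlePrevent} />
}
```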
--------- Co-authored-by: Jarrod Flesch --- .../views/CreateFirstUser/index.client.tsx | 9 +- .../src/elements/LeaveWithoutSaving/index.tsx | 30 +- .../ui/src/elements/RenderTitle/index.tsx | 1 + .../ui/src/providers/DocumentInfo/index.tsx | 11 + .../ui/src/providers/DocumentInfo/types.ts | 5 + packages/ui/src/views/Edit/index.tsx | 121 +++-- test/auth/config.ts | 31 +- test/auth/e2e.spec.ts | 453 ++++++++++-------- test/auth/payload-types.ts | 36 +- test/auth/seed.ts | 34 ++ test/helpers.ts | 24 +- test/helpers/reInitEndpoint.ts | 5 +- test/locked-documents/payload-types.ts | 14 + 13 files changed, 473 insertions(+), 301 deletions(-) create mode 100644 test/auth/seed.ts diff --git a/packages/next/src/views/CreateFirstUser/index.client.tsx b/packages/next/src/views/CreateFirstUser/index.client.tsx index caaabdaa09..d1462cfae5 100644 --- a/packages/next/src/views/CreateFirstUser/index.client.tsx +++ b/packages/next/src/views/CreateFirstUser/index.client.tsx @@ -85,7 +85,14 @@ export const CreateFirstUserClient: React.FC<{ return (
{ +type LeaveWithoutSavingProps = { + onConfirm?: () => Promise | void + onPrevent?: (nextHref: null | string) => void +} + +export const LeaveWithoutSaving: React.FC = ({ onConfirm, onPrevent }) => { const { closeModal, openModal } = useModal() const modified = useFormModified() const { isValid } = useForm() @@ -22,23 +27,34 @@ export const LeaveWithoutSaving: React.FC = () => { const prevent = Boolean((modified || !isValid) && user) - const onPrevent = useCallback(() => { + const handlePrevent = useCallback(() => { + const activeHref = (document.activeElement as HTMLAnchorElement)?.href || null + if (onPrevent) { + onPrevent(activeHref) + } openModal(modalSlug) - }, [openModal]) + }, [openModal, onPrevent]) const handleAccept = useCallback(() => { closeModal(modalSlug) }, [closeModal]) - usePreventLeave({ hasAccepted, onAccept: handleAccept, onPrevent, prevent }) + usePreventLeave({ hasAccepted, onAccept: handleAccept, onPrevent: handlePrevent, prevent }) const onCancel: OnCancel = useCallback(() => { closeModal(modalSlug) }, [closeModal]) - const onConfirm = useCallback(() => { + const handleConfirm = useCallback(async () => { + if (onConfirm) { + try { + await onConfirm() + } catch (err) { + console.error('Error in LeaveWithoutSaving onConfirm:', err) + } + } setHasAccepted(true) - }, []) + }, [onConfirm]) return ( { heading={t('general:leaveWithoutSaving')} modalSlug={modalSlug} onCancel={onCancel} - onConfirm={onConfirm} + onConfirm={handleConfirm} /> ) } diff --git a/packages/ui/src/elements/RenderTitle/index.tsx b/packages/ui/src/elements/RenderTitle/index.tsx index c0e51e5c2d..cdd105aa0a 100644 --- a/packages/ui/src/elements/RenderTitle/index.tsx +++ b/packages/ui/src/elements/RenderTitle/index.tsx @@ -36,6 +36,7 @@ export const RenderTitle: React.FC = (props) => { className={[className, baseClass, idAsTitle && `${baseClass}--has-id`] .filter(Boolean) .join(' ')} + data-doc-id={id} title={title} > {isInitializing ? 
( diff --git a/packages/ui/src/providers/DocumentInfo/index.tsx b/packages/ui/src/providers/DocumentInfo/index.tsx index b98e4f944d..0fdb4d9324 100644 --- a/packages/ui/src/providers/DocumentInfo/index.tsx +++ b/packages/ui/src/providers/DocumentInfo/index.tsx @@ -113,6 +113,16 @@ const DocumentInfo: React.FC< 'idle', ) + const documentLockState = useRef<{ + hasShownLockedModal: boolean + isLocked: boolean + user: ClientUser | number | string + } | null>({ + hasShownLockedModal: false, + isLocked: false, + user: null, + }) + const updateUploadStatus = useCallback( (status: 'failed' | 'idle' | 'uploading') => { setUploadStatus(status) @@ -344,6 +354,7 @@ const DocumentInfo: React.FC< docConfig, docPermissions, documentIsLocked, + documentLockState, getDocPermissions, getDocPreferences, hasPublishedDoc, diff --git a/packages/ui/src/providers/DocumentInfo/types.ts b/packages/ui/src/providers/DocumentInfo/types.ts index 93ec9674e2..273df0aa7c 100644 --- a/packages/ui/src/providers/DocumentInfo/types.ts +++ b/packages/ui/src/providers/DocumentInfo/types.ts @@ -49,6 +49,11 @@ export type DocumentInfoContext = { currentEditor?: ClientUser | null | number | string docConfig?: ClientCollectionConfig | ClientGlobalConfig documentIsLocked?: boolean + documentLockState: React.RefObject<{ + hasShownLockedModal: boolean + isLocked: boolean + user: ClientUser | number | string + } | null> getDocPermissions: (data?: Data) => Promise getDocPreferences: () => Promise incrementVersionCount: () => void diff --git a/packages/ui/src/views/Edit/index.tsx b/packages/ui/src/views/Edit/index.tsx index 6de54ad36e..b4b5446861 100644 --- a/packages/ui/src/views/Edit/index.tsx +++ b/packages/ui/src/views/Edit/index.tsx @@ -70,6 +70,7 @@ export function DefaultEditView({ disableLeaveWithoutSaving, docPermissions, documentIsLocked, + documentLockState, getDocPermissions, getDocPreferences, globalSlug, @@ -164,16 +165,6 @@ export function DefaultEditView({ const isLockExpired = Date.now() > lockExpiryTime - const documentLockStateRef = useRef<{ - hasShownLockedModal: boolean - isLocked: boolean - user: ClientUser | number | string - } | null>({ - hasShownLockedModal: false, - isLocked: false, - user: null, - }) - const schemaPathSegments = useMemo(() => [entitySlug], [entitySlug]) const [validateBeforeSubmit, setValidateBeforeSubmit] = useState(() => { @@ -184,13 +175,15 @@ export function DefaultEditView({ return false }) + const nextHrefRef = React.useRef(null) + const handleDocumentLocking = useCallback( (lockedState: LockedState) => { setDocumentIsLocked(true) const previousOwnerID = - typeof documentLockStateRef.current?.user === 'object' - ? documentLockStateRef.current?.user?.id - : documentLockStateRef.current?.user + typeof documentLockState.current?.user === 'object' + ? documentLockState.current?.user?.id + : documentLockState.current?.user if (lockedState) { const lockedUserID = @@ -198,14 +191,14 @@ export function DefaultEditView({ ? 
lockedState.user : lockedState.user.id - if (!documentLockStateRef.current || lockedUserID !== previousOwnerID) { + if (!documentLockState.current || lockedUserID !== previousOwnerID) { if (previousOwnerID === user.id && lockedUserID !== user.id) { setShowTakeOverModal(true) - documentLockStateRef.current.hasShownLockedModal = true + documentLockState.current.hasShownLockedModal = true } - documentLockStateRef.current = { - hasShownLockedModal: documentLockStateRef.current?.hasShownLockedModal || false, + documentLockState.current = { + hasShownLockedModal: documentLockState.current?.hasShownLockedModal || false, isLocked: true, user: lockedState.user as ClientUser, } @@ -213,9 +206,52 @@ export function DefaultEditView({ } } }, - [setCurrentEditor, setDocumentIsLocked, user?.id], + [documentLockState, setCurrentEditor, setDocumentIsLocked, user?.id], ) + const handlePrevent = useCallback((nextHref: null | string) => { + nextHrefRef.current = nextHref + }, []) + + const handleLeaveConfirm = useCallback(async () => { + const lockUser = documentLockState.current?.user + + const isLockOwnedByCurrentUser = + typeof lockUser === 'object' ? lockUser?.id === user?.id : lockUser === user?.id + + if (isLockingEnabled && documentIsLocked && (id || globalSlug)) { + // Check where user is trying to go + const nextPath = nextHrefRef.current ? new URL(nextHrefRef.current).pathname : '' + const isInternalView = ['/preview', '/api', '/versions'].some((path) => + nextPath.includes(path), + ) + + // Only retain the lock if the user is still viewing the document + if (!isInternalView) { + if (isLockOwnedByCurrentUser) { + try { + await unlockDocument(id, collectionSlug ?? globalSlug) + setDocumentIsLocked(false) + setCurrentEditor(null) + } catch (err) { + console.error('Failed to unlock before leave', err) + } + } + } + } + }, [ + collectionSlug, + documentIsLocked, + documentLockState, + globalSlug, + id, + isLockingEnabled, + setCurrentEditor, + setDocumentIsLocked, + unlockDocument, + user?.id, + ]) + const onSave = useCallback( async (json): Promise => { const controller = handleAbortRef(abortOnSaveRef) @@ -342,7 +378,7 @@ export function DefaultEditView({ const docPreferences = await getDocPreferences() - const { lockedState, state } = await getFormState({ + const result = await getFormState({ id, collectionSlug, docPermissions, @@ -360,6 +396,12 @@ export function DefaultEditView({ updateLastEdited, }) + if (!result) { + return + } + + const { lockedState, state } = result + if (isLockingEnabled) { handleDocumentLocking(lockedState) } @@ -386,38 +428,9 @@ export function DefaultEditView({ // Clean up when the component unmounts or when the document is unlocked useEffect(() => { return () => { - if (isLockingEnabled && documentIsLocked && (id || globalSlug)) { - // Only retain the lock if the user is still viewing the document - const shouldUnlockDocument = !['preview', 'api', 'versions'].some((path) => - window.location.pathname.includes(path), - ) - if (shouldUnlockDocument) { - // Check if this user is still the current editor - if ( - typeof documentLockStateRef.current?.user === 'object' - ? documentLockStateRef.current?.user?.id === user?.id - : documentLockStateRef.current?.user === user?.id - ) { - void unlockDocument(id, collectionSlug ?? 
globalSlug) - setDocumentIsLocked(false) - setCurrentEditor(null) - } - } - } - setShowTakeOverModal(false) } - }, [ - collectionSlug, - globalSlug, - id, - unlockDocument, - user, - setCurrentEditor, - isLockingEnabled, - documentIsLocked, - setDocumentIsLocked, - ]) + }, []) useEffect(() => { const abortOnChange = abortOnChangeRef.current @@ -437,7 +450,7 @@ export function DefaultEditView({ : currentEditor !== user?.id) && !isReadOnlyForIncomingUser && !showTakeOverModal && - !documentLockStateRef.current?.hasShownLockedModal && + !documentLockState.current?.hasShownLockedModal && !isLockExpired const isFolderCollection = config.folders && collectionSlug === config.folders?.slug @@ -487,7 +500,7 @@ export function DefaultEditView({ false, updateDocumentEditor, setCurrentEditor, - documentLockStateRef, + documentLockState, isLockingEnabled, ) } @@ -505,7 +518,9 @@ export function DefaultEditView({ }} /> )} - {!isReadOnlyForIncomingUser && preventLeaveWithoutSaving && } + {!isReadOnlyForIncomingUser && preventLeaveWithoutSaving && ( + + )} {!isInDrawer && ( { - await payload.create({ - collection: 'users', - data: { - custom: 'Hello, world!', - email: devUser.email, - password: devUser.password, - roles: ['admin'], - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - }, + onInit: seed, typescript: { outputFile: path.resolve(dirname, 'payload-types.ts'), }, diff --git a/test/auth/e2e.spec.ts b/test/auth/e2e.spec.ts index 02253074d5..fd9ec90f0f 100644 --- a/test/auth/e2e.spec.ts +++ b/test/auth/e2e.spec.ts @@ -1,8 +1,8 @@ import type { BrowserContext, Page } from '@playwright/test' -import type { SanitizedConfig } from 'payload' import { expect, test } from '@playwright/test' import { devUser } from 'credentials.js' +import { openNav } from 'helpers/e2e/toggleNav.js' import path from 'path' import { fileURLToPath } from 'url' import { v4 as uuid } from 'uuid' @@ -15,6 +15,7 @@ import { exactText, getRoutes, initPageConsoleErrorCatch, + login, saveDocAndAssert, } from '../helpers.js' import { AdminUrlUtil } from '../helpers/adminUrlUtil.js' @@ -28,59 +29,12 @@ const dirname = path.dirname(filename) let payload: PayloadTestSDK -const { beforeAll, describe } = test +const { beforeAll, afterAll, describe } = test const headers = { 'Content-Type': 'application/json', } -const createFirstUser = async ({ - page, - serverURL, -}: { - customAdminRoutes?: SanitizedConfig['admin']['routes'] - customRoutes?: SanitizedConfig['routes'] - page: Page - serverURL: string -}) => { - const { - admin: { - routes: { createFirstUser: createFirstUserRoute }, - }, - routes: { admin: adminRoute }, - } = getRoutes({}) - - // wait for create first user route - await page.goto(serverURL + `${adminRoute}${createFirstUserRoute}`) - - // forget to fill out confirm password - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill(devUser.password) - await page.locator('.form-submit > button').click() - await expect(page.locator('.field-type.confirm-password .field-error')).toHaveText( - 'This field is required.', - ) - - // make them match, but does not pass password validation - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill('12') - await page.locator('#field-confirm-password').fill('12') - await page.locator('.form-submit > button').click() 
- await expect(page.locator('.field-type.password .field-error')).toHaveText( - 'This value must be longer than the minimum length of 3 characters.', - ) - - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill(devUser.password) - await page.locator('#field-confirm-password').fill(devUser.password) - await page.locator('#field-custom').fill('Hello, world!') - await page.locator('.form-submit > button').click() - - await expect - .poll(() => page.url(), { timeout: POLL_TOPASS_TIMEOUT }) - .not.toContain('create-first-user') -} - describe('Auth', () => { let page: Page let context: BrowserContext @@ -97,169 +51,288 @@ describe('Auth', () => { context = await browser.newContext() page = await context.newPage() initPageConsoleErrorCatch(page) - - await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) - - // Undo onInit seeding, as we need to test this without having a user created, or testing create-first-user - await reInitializeDB({ - serverURL, - snapshotKey: 'auth', - deleteOnly: true, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await createFirstUser({ page, serverURL }) - - await ensureCompilationIsDone({ page, serverURL }) }) - - describe('passwords', () => { - beforeAll(() => { - url = new AdminUrlUtil(serverURL, slug) - }) - - test('should allow change password', async () => { - await page.goto(url.account) - const emailBeforeSave = await page.locator('#field-email').inputValue() - await page.locator('#change-password').click() - await page.locator('#field-password').fill('password') - // should fail to save without confirm password - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('This field is required.'), - }), - ).toBeVisible() - - // should fail to save with incorrect confirm password - await page.locator('#field-confirm-password').fill('wrong password') - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('Passwords do not match.'), - }), - ).toBeVisible() - - // should succeed with matching confirm password - await page.locator('#field-confirm-password').fill('password') - await saveDocAndAssert(page, '#action-save') - - // should still have the same email - await expect(page.locator('#field-email')).toHaveValue(emailBeforeSave) - }) - - test('should prevent new user creation without confirm password', async () => { - await page.goto(url.create) - await page.locator('#field-email').fill('dev2@payloadcms.com') - await page.locator('#field-password').fill('password') - // should fail to save without confirm password - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('This field is required.'), - }), - ).toBeVisible() - - // should succeed with matching confirm password - await page.locator('#field-confirm-password').fill('password') - await saveDocAndAssert(page, '#action-save') - }) - }) - - describe('authenticated users', () => { - beforeAll(() => { - url = new AdminUrlUtil(serverURL, slug) - }) - - test('should have up-to-date user in `useAuth` hook', async () => { - await page.goto(url.account) - await 
expect(page.locator('#users-api-result')).toHaveText('Hello, world!') - await expect(page.locator('#use-auth-result')).toHaveText('Hello, world!') - const field = page.locator('#field-custom') - await field.fill('Goodbye, world!') - await saveDocAndAssert(page) - await expect(page.locator('#users-api-result')).toHaveText('Goodbye, world!') - await expect(page.locator('#use-auth-result')).toHaveText('Goodbye, world!') - }) - }) - - describe('api-keys', () => { - let user - + describe('create first user', () => { beforeAll(async () => { - url = new AdminUrlUtil(serverURL, apiKeysSlug) + await reInitializeDB({ + serverURL, + snapshotKey: 'create-first-user', + deleteOnly: true, + }) - user = await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, + await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) + + await payload.delete({ + collection: slug, + where: { + email: { + exists: true, + }, }, }) }) - test('should enable api key', async () => { - await page.goto(url.create) + async function waitForVisibleAuthFields() { + await expect(page.locator('#field-email')).toBeVisible() + await expect(page.locator('#field-password')).toBeVisible() + await expect(page.locator('#field-confirm-password')).toBeVisible() + } - // click enable api key checkbox - await page.locator('#field-enableAPIKey').click() + test('should create first user and redirect to admin', async () => { + const { + admin: { + routes: { createFirstUser: createFirstUserRoute }, + }, + routes: { admin: adminRoute }, + } = getRoutes({}) + + // wait for create first user route + await page.goto(serverURL + `${adminRoute}${createFirstUserRoute}`) + + await expect(page.locator('.create-first-user')).toBeVisible() + + await waitForVisibleAuthFields() + + // forget to fill out confirm password + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill(devUser.password) + + await page.locator('.form-submit > button').click() + await expect(page.locator('.field-type.confirm-password .field-error')).toHaveText( + 'This field is required.', + ) + + // make them match, but does not pass password validation + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill('12') + await page.locator('#field-confirm-password').fill('12') + + await page.locator('.form-submit > button').click() + await expect(page.locator('.field-type.password .field-error')).toHaveText( + 'This value must be longer than the minimum length of 3 characters.', + ) + + // should fill out all fields correctly + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill(devUser.password) + await page.locator('#field-confirm-password').fill(devUser.password) + await page.locator('#field-custom').fill('Hello, world!') + + await page.locator('.form-submit > button').click() - // assert that the value is set - const apiKeyLocator = page.locator('#apiKey') await expect - .poll(async () => await apiKeyLocator.inputValue(), { timeout: POLL_TOPASS_TIMEOUT }) - .toBeDefined() + .poll(() => page.url(), { timeout: POLL_TOPASS_TIMEOUT }) + .not.toContain('create-first-user') + }) + }) - const apiKey = await apiKeyLocator.inputValue() + describe('non create first user', () => { + beforeAll(async () => { + await reInitializeDB({ + serverURL, + snapshotKey: 'auth', + deleteOnly: false, + }) - await saveDocAndAssert(page) + await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) - await 
expect(async () => { - const apiKeyAfterSave = await apiKeyLocator.inputValue() - expect(apiKey).toStrictEqual(apiKeyAfterSave) - }).toPass({ - timeout: POLL_TOPASS_TIMEOUT, + await login({ page, serverURL }) + }) + + describe('passwords', () => { + beforeAll(() => { + url = new AdminUrlUtil(serverURL, slug) + }) + + afterAll(async () => { + // reset password to original password + await page.goto(url.account) + await page.locator('#change-password').click() + await page.locator('#field-password').fill(devUser.password) + await page.locator('#field-confirm-password').fill(devUser.password) + await saveDocAndAssert(page, '#action-save') + }) + + test('should allow change password', async () => { + await page.goto(url.account) + const emailBeforeSave = await page.locator('#field-email').inputValue() + await page.locator('#change-password').click() + await page.locator('#field-password').fill('password') + // should fail to save without confirm password + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('This field is required.'), + }), + ).toBeVisible() + + // should fail to save with incorrect confirm password + await page.locator('#field-confirm-password').fill('wrong password') + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('Passwords do not match.'), + }), + ).toBeVisible() + + // should succeed with matching confirm password + await page.locator('#field-confirm-password').fill('password') + await saveDocAndAssert(page, '#action-save') + + // should still have the same email + await expect(page.locator('#field-email')).toHaveValue(emailBeforeSave) + }) + + test('should prevent new user creation without confirm password', async () => { + await page.goto(url.create) + await page.locator('#field-email').fill('dev2@payloadcms.com') + await page.locator('#field-password').fill('password') + // should fail to save without confirm password + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('This field is required.'), + }), + ).toBeVisible() + + // should succeed with matching confirm password + await page.locator('#field-confirm-password').fill('password') + await saveDocAndAssert(page, '#action-save') }) }) - test('should disable api key', async () => { - await page.goto(url.edit(user.id)) + describe('authenticated users', () => { + beforeAll(() => { + url = new AdminUrlUtil(serverURL, slug) + }) - // click enable api key checkbox - await page.locator('#field-enableAPIKey').click() + test('should have up-to-date user in `useAuth` hook', async () => { + await page.goto(url.account) + await expect(page.locator('#users-api-result')).toHaveText('Hello, world!') + await expect(page.locator('#use-auth-result')).toHaveText('Hello, world!') + const field = page.locator('#field-custom') + await field.fill('Goodbye, world!') + await saveDocAndAssert(page) + await expect(page.locator('#users-api-result')).toHaveText('Goodbye, world!') + await expect(page.locator('#use-auth-result')).toHaveText('Goodbye, world!') + }) - // assert that the apiKey field is hidden - await expect(page.locator('#apiKey')).toBeHidden() + // Need to test unlocking documents on logout here as this test suite does not auto login users + test('should unlock document on logout after editing without saving', async () => { + await page.goto(url.list) 
- await saveDocAndAssert(page) + await page.locator('.table .row-1 .cell-custom a').click() - // use the api key in a fetch to assert that it is disabled - await expect(async () => { - const response = await fetch(`${apiURL}/${apiKeysSlug}/me`, { - headers: { - ...headers, - Authorization: `${apiKeysSlug} API-Key ${user.apiKey}`, + const textInput = page.locator('#field-namedSaveToJWT') + await expect(textInput).toBeVisible() + const docID = (await page.locator('.render-title').getAttribute('data-doc-id')) as string + + const lockDocRequest = page.waitForResponse( + (response) => + response.request().method() === 'POST' && response.request().url() === url.edit(docID), + ) + await textInput.fill('some text') + await lockDocRequest + + const lockedDocs = await payload.find({ + collection: 'payload-locked-documents', + limit: 1, + pagination: false, + }) + + await expect.poll(() => lockedDocs.docs.length).toBe(1) + + await openNav(page) + + await page.locator('.nav .nav__controls a[href="/admin/logout"]').click() + + // Locate the modal container + const modalContainer = page.locator('.payload__modal-container') + await expect(modalContainer).toBeVisible() + + // Click the "Leave anyway" button + await page + .locator('#leave-without-saving .confirmation-modal__controls .btn--style-primary') + .click() + + await expect(page.locator('.login')).toBeVisible() + + const unlockedDocs = await payload.find({ + collection: 'payload-locked-documents', + limit: 1, + pagination: false, + }) + + await expect.poll(() => unlockedDocs.docs.length).toBe(0) + + // added so tests after this do not need to re-login + await login({ page, serverURL }) + }) + }) + + describe('api-keys', () => { + let user + + beforeAll(async () => { + url = new AdminUrlUtil(serverURL, apiKeysSlug) + + user = await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, }, - }).then((res) => res.json()) + }) + }) - expect(response.user).toBeNull() - }).toPass({ - timeout: POLL_TOPASS_TIMEOUT, + test('should enable api key', async () => { + await page.goto(url.create) + + // click enable api key checkbox + await page.locator('#field-enableAPIKey').click() + + // assert that the value is set + const apiKeyLocator = page.locator('#apiKey') + await expect + .poll(async () => await apiKeyLocator.inputValue(), { timeout: POLL_TOPASS_TIMEOUT }) + .toBeDefined() + + const apiKey = await apiKeyLocator.inputValue() + + await saveDocAndAssert(page) + + await expect(async () => { + const apiKeyAfterSave = await apiKeyLocator.inputValue() + expect(apiKey).toStrictEqual(apiKeyAfterSave) + }).toPass({ + timeout: POLL_TOPASS_TIMEOUT, + }) + }) + + test('should disable api key', async () => { + await page.goto(url.edit(user.id)) + + // click enable api key checkbox + await page.locator('#field-enableAPIKey').click() + + // assert that the apiKey field is hidden + await expect(page.locator('#apiKey')).toBeHidden() + + await saveDocAndAssert(page) + + // use the api key in a fetch to assert that it is disabled + await expect(async () => { + const response = await fetch(`${apiURL}/${apiKeysSlug}/me`, { + headers: { + ...headers, + Authorization: `${apiKeysSlug} API-Key ${user.apiKey}`, + }, + }).then((res) => res.json()) + + expect(response.user).toBeNull() + }).toPass({ + timeout: POLL_TOPASS_TIMEOUT, + }) }) }) }) diff --git a/test/auth/payload-types.ts b/test/auth/payload-types.ts index 176fae2020..f3630e3f5e 100644 --- a/test/auth/payload-types.ts +++ b/test/auth/payload-types.ts @@ -248,11 +248,13 @@ export 
interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -270,11 +272,13 @@ export interface PartialDisableLocalStrategy { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -316,11 +320,13 @@ export interface PublicUser { _verificationToken?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** diff --git a/test/auth/seed.ts b/test/auth/seed.ts new file mode 100644 index 0000000000..57f1930350 --- /dev/null +++ b/test/auth/seed.ts @@ -0,0 +1,34 @@ +import type { Config } from 'payload' + +import { v4 as uuid } from 'uuid' + +import { devUser } from '../credentials.js' +import { apiKeysSlug } from './shared.js' + +export const seed: Config['onInit'] = async (payload) => { + await payload.create({ + collection: 'users', + data: { + custom: 'Hello, world!', + email: devUser.email, + password: devUser.password, + roles: ['admin'], + }, + }) + + await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, + }, + }) + + await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, + }, + }) +} diff --git a/test/helpers.ts b/test/helpers.ts index ed3e73f487..07127f43c9 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -98,10 +98,26 @@ export async function ensureCompilationIsDone({ await page.goto(adminURL) - await page.waitForURL( - readyURL ?? - (noAutoLogin ? `${adminURL + (adminURL.endsWith('/') ? '' : '/')}login` : adminURL), - ) + if (readyURL) { + await page.waitForURL(readyURL) + } else { + await expect + .poll( + () => { + if (noAutoLogin) { + const baseAdminURL = adminURL + (adminURL.endsWith('/') ? '' : '/') + return ( + page.url() === `${baseAdminURL}create-first-user` || + page.url() === `${baseAdminURL}login` + ) + } else { + return page.url() === adminURL + } + }, + { timeout: POLL_TOPASS_TIMEOUT }, + ) + .toBe(true) + } console.log('Successfully compiled') return diff --git a/test/helpers/reInitEndpoint.ts b/test/helpers/reInitEndpoint.ts index a6e98cc6ef..992f10de73 100644 --- a/test/helpers/reInitEndpoint.ts +++ b/test/helpers/reInitEndpoint.ts @@ -15,7 +15,7 @@ const handler: PayloadHandler = async (req) => { } const query: { - deleteOnly?: boolean + deleteOnly?: string snapshotKey?: string uploadsDir?: string | string[] } = qs.parse(req.url.split('?')[1] ?? 
'', { @@ -31,7 +31,8 @@ const handler: PayloadHandler = async (req) => { snapshotKey: String(query.snapshotKey), // uploadsDir can be string or stringlist uploadsDir: query.uploadsDir as string | string[], - deleteOnly: query.deleteOnly, + // query value will be a string of 'true' or 'false' + deleteOnly: query.deleteOnly === 'true', }) return Response.json( diff --git a/test/locked-documents/payload-types.ts b/test/locked-documents/payload-types.ts index 71e1e949cd..0960f2a653 100644 --- a/test/locked-documents/payload-types.ts +++ b/test/locked-documents/payload-types.ts @@ -174,6 +174,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -288,6 +295,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema From 14322a71bbe57d9c27b190779a9a126fc2416987 Mon Sep 17 00:00:00 2001 From: Dan Ribbens Date: Thu, 24 Jul 2025 13:03:21 -0400 Subject: [PATCH 36/91] docs(plugin-import-export): document plugin-import-export (#13243) Add documentation for @payloadcms/plugin-import-export. --- docs/plugins/form-builder.mdx | 2 +- docs/plugins/import-export.mdx | 155 +++++++++++++++++++++ docs/plugins/multi-tenant.mdx | 14 +- docs/plugins/nested-docs.mdx | 2 +- docs/plugins/overview.mdx | 1 + docs/plugins/redirects.mdx | 2 +- docs/plugins/search.mdx | 2 +- docs/plugins/sentry.mdx | 2 +- docs/plugins/seo.mdx | 2 +- docs/plugins/stripe.mdx | 2 +- packages/plugin-import-export/src/index.ts | 3 + 11 files changed, 173 insertions(+), 14 deletions(-) create mode 100644 docs/plugins/import-export.mdx diff --git a/docs/plugins/form-builder.mdx b/docs/plugins/form-builder.mdx index 2643188c5c..5872c887d0 100644 --- a/docs/plugins/form-builder.mdx +++ b/docs/plugins/form-builder.mdx @@ -1,7 +1,7 @@ --- title: Form Builder Plugin label: Form Builder -order: 40 +order: 30 desc: Easily build and manage forms from the Admin Panel. Send dynamic, personalized emails and even accept and process payments. keywords: plugins, plugin, form, forms, form builder --- diff --git a/docs/plugins/import-export.mdx b/docs/plugins/import-export.mdx new file mode 100644 index 0000000000..63c2cb2159 --- /dev/null +++ b/docs/plugins/import-export.mdx @@ -0,0 +1,155 @@ +--- +title: Import Export Plugin +label: Import Export +order: 40 +desc: Add Import and export functionality to create CSV and JSON data exports +keywords: plugins, plugin, import, export, csv, JSON, data, ETL, download +--- + +![https://www.npmjs.com/package/@payloadcms/plugin-import-export](https://img.shields.io/npm/v/@payloadcms/plugin-import-export) + + + **Note**: This plugin is in **beta** as some aspects of it may change on any + minor releases. It is under development and currently only supports exporting + of collection data. + + +This plugin adds features that give admin users the ability to download or create export data as an upload collection and import it back into a project. 
+ +## Core Features + +- Export data as CSV or JSON format via the admin UI +- Download the export directly through the browser +- Create a file upload of the export data +- Use the jobs queue for large exports +- (Coming soon) Import collection data + +## Installation + +Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com): + +```bash +pnpm add @payloadcms/plugin-import-export +``` + +## Basic Usage + +In the `plugins` array of your [Payload Config](https://payloadcms.com/docs/configuration/overview), call the plugin with [options](#options): + +```ts +import { buildConfig } from 'payload' +import { importExportPlugin } from '@payloadcms/plugin-import-export' + +const config = buildConfig({ + collections: [Pages, Media], + plugins: [ + importExportPlugin({ + collections: ['users', 'pages'], + // see below for a list of available options + }), + ], +}) + +export default config +``` + +## Options + +| Property | Type | Description | +| -------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------ | +| `collections` | string[] | Collections to include Import/Export controls in. Defaults to all collections. | +| `debug` | boolean | If true, enables debug logging. | +| `disableDownload` | boolean | If true, disables the download button in the export preview UI. | +| `disableJobsQueue` | boolean | If true, forces the export to run synchronously. | +| `disableSave` | boolean | If true, disables the save button in the export preview UI. | +| `format` | string | Forces a specific export format (`csv` or `json`), hides the format dropdown, and prevents the user from choosing the export format. | +| `overrideExportCollection` | function | Function to override the default export collection; takes the default export collection and allows you to modify and return it. | + +## Field Options + +In addition to the above plugin configuration options, you can granularly set the following field level options using the `custom['plugin-import-export']` properties in any of your collections. + +| Property | Type | Description | +| ---------- | -------- | ----------------------------------------------------------------------------------------------------------------------------- | +| `disabled` | boolean | When `true` the field is completely excluded from the import-export plugin. | +| `toCSV` | function | Custom function used to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value. | + +### Customizing the output of CSV data + +To manipulate the data that a field exports you can add `toCSV` custom functions. This allows you to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value. + +The toCSV function argument is an object with the following properties: + +| Property | Type | Description | +| ------------ | ------- | ----------------------------------------------------------------- | +| `columnName` | string | The CSV column name given to the field. | +| `doc` | object | The top level document | +| `row` | object | The object data that can be manipulated to assign data to the CSV | +| `siblingDoc` | object | The document data at the level where it belongs | +| `value` | unknown | The data for the field. 
| + +Example function: + +```ts +const pages: CollectionConfig = { + slug: 'pages', + fields: [ + { + name: 'author', + type: 'relationship', + relationTo: 'users', + custom: { + 'plugin-import-export': { + toCSV: ({ value, columnName, row }) => { + // add both `author_id` and the `author_email` to the csv export + if ( + value && + typeof value === 'object' && + 'id' in value && + 'email' in value + ) { + row[`${columnName}_id`] = (value as { id: number | string }).id + row[`${columnName}_email`] = (value as { email: string }).email + } + }, + }, + }, + }, + ], +} +``` + +## Exporting Data + +There are four possible ways that the plugin allows for exporting documents, the first two are available in the admin UI from the list view of a collection: + +1. Direct download - Using a `POST` to `/api/exports/download` and streams the response as a file download +2. File storage - Goes to the `exports` collection as an uploads enabled collection +3. Local API - A create call to the uploads collection: `payload.create({ slug: 'uploads', ...parameters })` +4. Jobs Queue - `payload.jobs.queue({ task: 'createCollectionExport', input: parameters })` + +By default, a user can use the Export drawer to create a file download by choosing `Save` or stream a downloadable file directly without persisting it by using the `Download` button. Either option can be disabled to provide the export experience you desire for your use-case. + +The UI for creating exports provides options so that users can be selective about which documents to include and also which columns or fields to include. + +It is necessary to add access control to the uploads collection configuration using the `overrideExportCollection` function if you have enabled this plugin on collections with data that some authenticated users should not have access to. + + + **Note**: Users who have read access to the upload collection may be able to + download data that is normally not readable due to [access + control](../access-control/overview). + + +The following parameters are used by the export function to handle requests: + +| Property | Type | Description | +| ---------------- | -------- | ----------------------------------------------------------------------------------------------------------------- | +| `format` | text | Either `csv` or `json` to determine the shape of data exported | +| `limit` | number | The max number of documents to return | +| `sort` | select | The field to use for ordering documents | +| `locale` | string | The locale code to query documents or `all` | +| `draft` | string | Either `yes` or `no` to return documents with their newest drafts for drafts enabled collections | +| `fields` | string[] | Which collection fields are used to create the export, defaults to all | +| `collectionSlug` | string | The slug to query against | +| `where` | object | The WhereObject used to query documents to export. 
This is set by making selections or filters from the list view | +| `filename` | text | What to call the export being created | diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index 39eab63b8b..32dcfb86c6 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -1,7 +1,7 @@ --- title: Multi-Tenant Plugin label: Multi-Tenant -order: 40 +order: 50 desc: Scaffolds multi-tenancy for your Payload application keywords: plugins, multi-tenant, multi-tenancy, plugin, payload, cms, seo, indexing, search, search engine --- @@ -229,15 +229,15 @@ const config = buildConfig({ { slug: 'tenants', admin: { - useAsTitle: 'name' + useAsTitle: 'name', }, fields: [ // remember, you own these fields // these are merely suggestions/examples { - name: 'name', - type: 'text', - required: true, + name: 'name', + type: 'text', + required: true, }, { name: 'slug', @@ -248,7 +248,7 @@ const config = buildConfig({ name: 'domain', type: 'text', required: true, - } + }, ], }, ], @@ -258,7 +258,7 @@ const config = buildConfig({ pages: {}, navigation: { isGlobal: true, - } + }, }, }), ], diff --git a/docs/plugins/nested-docs.mdx b/docs/plugins/nested-docs.mdx index 5725bdabc2..60c3d43323 100644 --- a/docs/plugins/nested-docs.mdx +++ b/docs/plugins/nested-docs.mdx @@ -1,7 +1,7 @@ --- title: Nested Docs Plugin label: Nested Docs -order: 40 +order: 60 desc: Nested documents in a parent, child, and sibling relationship. keywords: plugins, nested, documents, parent, child, sibling, relationship --- diff --git a/docs/plugins/overview.mdx b/docs/plugins/overview.mdx index 96b2430896..d5bce425fd 100644 --- a/docs/plugins/overview.mdx +++ b/docs/plugins/overview.mdx @@ -55,6 +55,7 @@ Payload maintains a set of Official Plugins that solve for some of the common us - [Sentry](./sentry) - [SEO](./seo) - [Stripe](./stripe) +- [Import/Export](./import-export) You can also [build your own plugin](./build-your-own) to easily extend Payload's functionality in some other way. Once your plugin is ready, consider [sharing it with the community](#community-plugins). diff --git a/docs/plugins/redirects.mdx b/docs/plugins/redirects.mdx index 3fbc624d58..dae099a499 100644 --- a/docs/plugins/redirects.mdx +++ b/docs/plugins/redirects.mdx @@ -1,7 +1,7 @@ --- title: Redirects Plugin label: Redirects -order: 40 +order: 70 desc: Automatically create redirects for your Payload application keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine --- diff --git a/docs/plugins/search.mdx b/docs/plugins/search.mdx index 868e87b5b7..8eee4073aa 100644 --- a/docs/plugins/search.mdx +++ b/docs/plugins/search.mdx @@ -1,7 +1,7 @@ --- title: Search Plugin label: Search -order: 40 +order: 80 desc: Generates records of your documents that are extremely fast to search on. 
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input --- diff --git a/docs/plugins/sentry.mdx b/docs/plugins/sentry.mdx index fc87f2e2de..ecd6826487 100644 --- a/docs/plugins/sentry.mdx +++ b/docs/plugins/sentry.mdx @@ -1,7 +1,7 @@ --- title: Sentry Plugin label: Sentry -order: 40 +order: 90 desc: Integrate Sentry error tracking into your Payload application keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance --- diff --git a/docs/plugins/seo.mdx b/docs/plugins/seo.mdx index c0fa06d0e0..b22e01c829 100644 --- a/docs/plugins/seo.mdx +++ b/docs/plugins/seo.mdx @@ -2,7 +2,7 @@ description: Manage SEO metadata from your Payload admin keywords: plugins, seo, meta, search, engine, ranking, google label: SEO -order: 30 +order: 100 title: SEO Plugin --- diff --git a/docs/plugins/stripe.mdx b/docs/plugins/stripe.mdx index 214267f0a2..79111274d7 100644 --- a/docs/plugins/stripe.mdx +++ b/docs/plugins/stripe.mdx @@ -1,7 +1,7 @@ --- title: Stripe Plugin label: Stripe -order: 40 +order: 110 desc: Easily accept payments with Stripe keywords: plugins, stripe, payments, ecommerce --- diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index 366d444c54..e3b4f99f96 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -216,6 +216,9 @@ declare module 'payload' { * @default false */ disabled?: boolean + /** + * Custom function used to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value + */ toCSV?: ToCSVFunction } } From bccf6ab16f3562bbf2d9e26dc0e2d34d4a3cf732 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Thu, 24 Jul 2025 14:00:52 -0400 Subject: [PATCH 37/91] feat: group by (#13138) Supports grouping documents by specific fields within the list view. For example, imagine having a "posts" collection with a "categories" field. To report on each specific category, you'd traditionally filter for each category, one at a time. This can be quite inefficient, especially with large datasets. Now, you can interact with all categories simultaneously, grouped by distinct values. Here is a simple demonstration: https://github.com/user-attachments/assets/0dcd19d2-e983-47e6-9ea2-cfdd2424d8b5 Enable on any collection by setting the `admin.groupBy` property: ```ts import type { CollectionConfig } from 'payload' const MyCollection: CollectionConfig = { // ... admin: { groupBy: true } } ``` This is currently marked as beta to gather feedback while we reach full stability, and to leave room for API changes and other modifications. Use at your own risk. Note: when using `groupBy`, bulk editing is done group-by-group. In the future we may support cross-group bulk editing. Dependent on #13102 (merged). 
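
As a further illustration of the posts/categories scenario described above, a collection grouped by a relationship field could look like the following. The field names are examples only (they mirror the `test/group-by` collections added in this PR), not a required shape:

```ts
import type { CollectionConfig } from 'payload'

// Illustrative only: a posts collection grouped by its `category` relationship.
// In the list view, each distinct category then renders as its own table group.
export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    groupBy: true,
  },
  fields: [
    { name: 'title', type: 'text' },
    {
      name: 'category',
      type: 'relationship',
      relationTo: 'categories',
    },
  ],
}
```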
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210774523852467 --------- Co-authored-by: Paul Popus --- .github/workflows/main.yml | 2 + docs/admin/overview.mdx | 2 +- docs/configuration/collections.mdx | 1 + packages/next/src/views/List/handleGroupBy.ts | 199 + packages/next/src/views/List/index.tsx | 101 +- packages/payload/src/admin/functions/index.ts | 10 + packages/payload/src/admin/views/list.ts | 2 +- .../payload/src/collections/config/types.ts | 7 + packages/payload/src/index.ts | 1 - packages/payload/src/preferences/types.ts | 1 + .../utilities/transformColumnPreferences.ts | 4 + packages/translations/src/clientKeys.ts | 2 + packages/translations/src/languages/ar.ts | 2 + packages/translations/src/languages/az.ts | 3 + packages/translations/src/languages/bg.ts | 2 + packages/translations/src/languages/bnBd.ts | 3 + packages/translations/src/languages/bnIn.ts | 2 + packages/translations/src/languages/ca.ts | 2 + packages/translations/src/languages/cs.ts | 2 + packages/translations/src/languages/da.ts | 2 + packages/translations/src/languages/de.ts | 3 + packages/translations/src/languages/en.ts | 2 + packages/translations/src/languages/es.ts | 2 + packages/translations/src/languages/et.ts | 2 + packages/translations/src/languages/fa.ts | 2 + packages/translations/src/languages/fr.ts | 2 + packages/translations/src/languages/he.ts | 3 + packages/translations/src/languages/hr.ts | 2 + packages/translations/src/languages/hu.ts | 2 + packages/translations/src/languages/hy.ts | 3 + packages/translations/src/languages/it.ts | 2 + packages/translations/src/languages/ja.ts | 2 + packages/translations/src/languages/ko.ts | 3 + packages/translations/src/languages/lt.ts | 2 + packages/translations/src/languages/lv.ts | 3 + packages/translations/src/languages/my.ts | 2 + packages/translations/src/languages/nb.ts | 2 + packages/translations/src/languages/nl.ts | 2 + packages/translations/src/languages/pl.ts | 2 + packages/translations/src/languages/pt.ts | 2 + packages/translations/src/languages/ro.ts | 2 + packages/translations/src/languages/rs.ts | 2 + .../translations/src/languages/rsLatin.ts | 2 + packages/translations/src/languages/ru.ts | 2 + packages/translations/src/languages/sk.ts | 2 + packages/translations/src/languages/sl.ts | 2 + packages/translations/src/languages/sv.ts | 2 + packages/translations/src/languages/th.ts | 3 + packages/translations/src/languages/tr.ts | 2 + packages/translations/src/languages/uk.ts | 2 + packages/translations/src/languages/vi.ts | 2 + packages/translations/src/languages/zh.ts | 2 + packages/translations/src/languages/zhTw.ts | 2 + .../ui/src/elements/ColumnSelector/index.tsx | 2 +- packages/ui/src/elements/DeleteMany/index.tsx | 50 +- .../src/elements/EditMany/DrawerContent.tsx | 11 +- packages/ui/src/elements/EditMany/index.tsx | 23 +- .../ui/src/elements/GroupByBuilder/index.scss | 39 + .../ui/src/elements/GroupByBuilder/index.tsx | 144 + .../ui/src/elements/ListControls/index.scss | 6 +- .../ui/src/elements/ListControls/index.tsx | 43 +- .../PageControls/GroupByPageControls.tsx | 62 + .../ui/src/elements/PageControls/index.scss | 40 + .../ui/src/elements/PageControls/index.tsx | 94 + .../Pagination/ClickableArrow/index.scss | 6 +- .../ui/src/elements/Pagination/index.scss | 8 +- packages/ui/src/elements/Pagination/index.tsx | 2 +- .../elements/PublishMany/DrawerContent.tsx | 13 +- .../ui/src/elements/PublishMany/index.tsx | 19 +- packages/ui/src/elements/ReactSelect/types.ts | 1 
+ .../src/elements/RelationshipTable/index.tsx | 26 +- .../ui/src/elements/StickyToolbar/index.scss | 27 + .../ui/src/elements/StickyToolbar/index.tsx | 9 + .../ui/src/elements/Table/OrderableTable.tsx | 4 + packages/ui/src/elements/Table/index.tsx | 6 +- .../elements/UnpublishMany/DrawerContent.tsx | 8 +- .../ui/src/elements/UnpublishMany/index.tsx | 18 +- .../ui/src/elements/WhereBuilder/index.tsx | 8 +- packages/ui/src/exports/client/index.ts | 2 + packages/ui/src/fields/Select/Input.tsx | 3 + packages/ui/src/providers/ListQuery/index.tsx | 37 +- .../ui/src/providers/ListQuery/mergeQuery.ts | 31 +- packages/ui/src/providers/ListQuery/types.ts | 12 +- packages/ui/src/providers/Selection/index.tsx | 63 +- packages/ui/src/utilities/buildTableState.ts | 12 +- .../reduceFieldsToOptions.tsx} | 22 +- packages/ui/src/utilities/renderTable.tsx | 81 +- .../CollectionFolder/ListSelection/index.tsx | 1 + .../src/views/List/GroupByHeader/index.scss | 17 + .../ui/src/views/List/GroupByHeader/index.tsx | 31 + .../ui/src/views/List/ListHeader/index.tsx | 5 +- .../ui/src/views/List/ListSelection/index.tsx | 32 +- packages/ui/src/views/List/index.scss | 47 +- packages/ui/src/views/List/index.tsx | 122 +- test/admin/e2e/list-view/e2e.spec.ts | 72 +- test/bulk-edit/e2e.spec.ts | 2 +- test/fields-relationship/e2e.spec.ts | 5 +- test/group-by/.gitignore | 2 + test/group-by/collections/Categories/index.ts | 16 + test/group-by/collections/Media/index.ts | 33 + test/group-by/collections/Posts/index.ts | 48 + test/group-by/config.ts | 30 + test/group-by/e2e.spec.ts | 607 +++ test/group-by/payload-types.ts | 428 ++ test/group-by/schema.graphql | 4271 +++++++++++++++++ test/group-by/seed.ts | 84 + test/group-by/tsconfig.eslint.json | 13 + test/group-by/tsconfig.json | 3 + test/group-by/types.d.ts | 9 + test/helpers.ts | 6 +- test/helpers/e2e/goToNextPage.ts | 49 + test/helpers/e2e/groupBy.ts | 90 + test/helpers/e2e/openListFilters.ts | 8 +- test/helpers/e2e/sortColumn.ts | 36 + test/helpers/e2e/toggleListDrawer.ts | 3 + test/joins/e2e.spec.ts | 3 + test/joins/payload-types.ts | 2 + test/lexical/collections/RichText/e2e.spec.ts | 16 +- test/lexical/payload-types.ts | 14 + test/locked-documents/e2e.spec.ts | 11 +- test/query-presets/e2e.spec.ts | 3 +- test/query-presets/helpers/togglePreset.ts | 3 +- test/sort/payload-types.ts | 38 +- tsconfig.base.json | 114 +- 124 files changed, 7181 insertions(+), 447 deletions(-) create mode 100644 packages/next/src/views/List/handleGroupBy.ts create mode 100644 packages/ui/src/elements/GroupByBuilder/index.scss create mode 100644 packages/ui/src/elements/GroupByBuilder/index.tsx create mode 100644 packages/ui/src/elements/PageControls/GroupByPageControls.tsx create mode 100644 packages/ui/src/elements/PageControls/index.scss create mode 100644 packages/ui/src/elements/PageControls/index.tsx create mode 100644 packages/ui/src/elements/StickyToolbar/index.scss create mode 100644 packages/ui/src/elements/StickyToolbar/index.tsx rename packages/ui/src/{elements/WhereBuilder/reduceFields.tsx => utilities/reduceFieldsToOptions.tsx} (90%) create mode 100644 packages/ui/src/views/List/GroupByHeader/index.scss create mode 100644 packages/ui/src/views/List/GroupByHeader/index.tsx create mode 100644 test/group-by/.gitignore create mode 100644 test/group-by/collections/Categories/index.ts create mode 100644 test/group-by/collections/Media/index.ts create mode 100644 test/group-by/collections/Posts/index.ts create mode 100644 test/group-by/config.ts create mode 100644 
test/group-by/e2e.spec.ts create mode 100644 test/group-by/payload-types.ts create mode 100644 test/group-by/schema.graphql create mode 100644 test/group-by/seed.ts create mode 100644 test/group-by/tsconfig.eslint.json create mode 100644 test/group-by/tsconfig.json create mode 100644 test/group-by/types.d.ts create mode 100644 test/helpers/e2e/goToNextPage.ts create mode 100644 test/helpers/e2e/groupBy.ts create mode 100644 test/helpers/e2e/sortColumn.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e10abad457..17c907c446 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -284,6 +284,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - group-by - folders - hooks - lexical__collections__Lexical__e2e__main @@ -419,6 +420,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - group-by - folders - hooks - lexical__collections__Lexical__e2e__main diff --git a/docs/admin/overview.mdx b/docs/admin/overview.mdx index 069357d585..30be428847 100644 --- a/docs/admin/overview.mdx +++ b/docs/admin/overview.mdx @@ -77,7 +77,7 @@ All auto-generated files will contain the following comments at the top of each ## Admin Options -All options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property: +All root-level options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property: ```ts import { buildConfig } from 'payload' diff --git a/docs/configuration/collections.mdx b/docs/configuration/collections.mdx index f431c925f7..c6a6e1ebd4 100644 --- a/docs/configuration/collections.mdx +++ b/docs/configuration/collections.mdx @@ -130,6 +130,7 @@ The following options are available: | `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). | | `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. | | `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. | +| `groupBy` | Beta. Enable grouping by a field in the list view. | | `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. | | `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. | | `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. 
| diff --git a/packages/next/src/views/List/handleGroupBy.ts b/packages/next/src/views/List/handleGroupBy.ts new file mode 100644 index 0000000000..8d96e5d7e8 --- /dev/null +++ b/packages/next/src/views/List/handleGroupBy.ts @@ -0,0 +1,199 @@ +import type { + ClientConfig, + Column, + ListQuery, + PaginatedDocs, + PayloadRequest, + SanitizedCollectionConfig, + Where, +} from 'payload' + +import { renderTable } from '@payloadcms/ui/rsc' +import { formatDate } from '@payloadcms/ui/shared' +import { flattenAllFields } from 'payload' + +export const handleGroupBy = async ({ + clientConfig, + collectionConfig, + collectionSlug, + columns, + customCellProps, + drawerSlug, + enableRowSelections, + query, + req, + user, + where: whereWithMergedSearch, +}: { + clientConfig: ClientConfig + collectionConfig: SanitizedCollectionConfig + collectionSlug: string + columns: any[] + customCellProps?: Record + drawerSlug?: string + enableRowSelections?: boolean + query?: ListQuery + req: PayloadRequest + user: any + where: Where +}): Promise<{ + columnState: Column[] + data: PaginatedDocs + Table: null | React.ReactNode | React.ReactNode[] +}> => { + let Table: React.ReactNode | React.ReactNode[] = null + let columnState: Column[] + + const dataByGroup: Record = {} + const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) + + // NOTE: is there a faster/better way to do this? + const flattenedFields = flattenAllFields({ fields: collectionConfig.fields }) + + const groupByFieldPath = query.groupBy.replace(/^-/, '') + + const groupByField = flattenedFields.find((f) => f.name === groupByFieldPath) + + const relationshipConfig = + groupByField?.type === 'relationship' + ? clientConfig.collections.find((c) => c.slug === groupByField.relationTo) + : undefined + + let populate + + if (groupByField?.type === 'relationship' && groupByField.relationTo) { + const relationTo = + typeof groupByField.relationTo === 'string' + ? [groupByField.relationTo] + : groupByField.relationTo + + if (Array.isArray(relationTo)) { + relationTo.forEach((rel) => { + if (!populate) { + populate = {} + } + populate[rel] = { [relationshipConfig?.admin.useAsTitle || 'id']: true } + }) + } + } + + const distinct = await req.payload.findDistinct({ + collection: collectionSlug, + depth: 1, + field: groupByFieldPath, + limit: query?.limit ? Number(query.limit) : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.page ? Number(query.page) : undefined, + populate, + req, + sort: query?.groupBy, + where: whereWithMergedSearch, + }) + + const data = { + ...distinct, + docs: distinct.values?.map(() => ({})) || [], + values: undefined, + } + + await Promise.all( + distinct.values.map(async (distinctValue, i) => { + const potentiallyPopulatedRelationship = distinctValue[groupByFieldPath] + + const valueOrRelationshipID = + groupByField?.type === 'relationship' && + potentiallyPopulatedRelationship && + typeof potentiallyPopulatedRelationship === 'object' && + 'id' in potentiallyPopulatedRelationship + ? potentiallyPopulatedRelationship.id + : potentiallyPopulatedRelationship + + const groupData = await req.payload.find({ + collection: collectionSlug, + depth: 0, + draft: true, + fallbackLocale: false, + includeLockStatus: true, + limit: query?.queryByGroup?.[valueOrRelationshipID]?.limit + ? Number(query.queryByGroup[valueOrRelationshipID].limit) + : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.queryByGroup?.[valueOrRelationshipID]?.page + ? 
Number(query.queryByGroup[valueOrRelationshipID].page) + : undefined, + req, + // Note: if we wanted to enable table-by-table sorting, we could use this: + // sort: query?.queryByGroup?.[valueOrRelationshipID]?.sort, + sort: query?.sort, + user, + where: { + ...(whereWithMergedSearch || {}), + [groupByFieldPath]: { + equals: valueOrRelationshipID, + }, + }, + }) + + let heading = valueOrRelationshipID || req.i18n.t('general:noValue') + + if ( + groupByField?.type === 'relationship' && + typeof potentiallyPopulatedRelationship === 'object' + ) { + heading = + potentiallyPopulatedRelationship[relationshipConfig.admin.useAsTitle || 'id'] || + valueOrRelationshipID + } + + if (groupByField.type === 'date') { + heading = formatDate({ + date: String(heading), + i18n: req.i18n, + pattern: clientConfig.admin.dateFormat, + }) + } + + if (groupData.docs && groupData.docs.length > 0) { + const { columnState: newColumnState, Table: NewTable } = renderTable({ + clientCollectionConfig, + collectionConfig, + columns, + customCellProps, + data: groupData, + drawerSlug, + enableRowSelections, + groupByFieldPath, + groupByValue: valueOrRelationshipID, + heading, + i18n: req.i18n, + key: `table-${valueOrRelationshipID}`, + orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, + payload: req.payload, + query, + useAsTitle: collectionConfig.admin.useAsTitle, + }) + + // Only need to set `columnState` once, using the first table's column state + // This will avoid needing to generate column state explicitly for root context that wraps all tables + if (!columnState) { + columnState = newColumnState + } + + if (!Table) { + Table = [] + } + + dataByGroup[valueOrRelationshipID] = groupData + ;(Table as Array)[i] = NewTable + } + }), + ) + + return { + columnState, + data, + Table, + } +} diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index 59c0c3dfc5..41dbbc208e 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -1,10 +1,12 @@ import type { AdminViewServerProps, CollectionPreferences, + Column, ColumnPreference, ListQuery, ListViewClientProps, ListViewServerPropsOnly, + PaginatedDocs, QueryPreset, SanitizedCollectionPermission, } from 'payload' @@ -24,6 +26,7 @@ import { import React, { Fragment } from 'react' import { getDocumentPermissions } from '../Document/getDocumentPermissions.js' +import { handleGroupBy } from './handleGroupBy.js' import { renderListViewSlots } from './renderListViewSlots.js' import { resolveAllFilterOptions } from './resolveAllFilterOptions.js' @@ -74,7 +77,6 @@ export const renderListView = async ( req, req: { i18n, - locale, payload, payload: { config }, query: queryFromReq, @@ -91,11 +93,17 @@ export const renderListView = async ( const columnsFromQuery: ColumnPreference[] = transformColumnsToPreferences(query?.columns) + query.queryByGroup = + query?.queryByGroup && typeof query.queryByGroup === 'string' + ? JSON.parse(query.queryByGroup) + : query?.queryByGroup + const collectionPreferences = await upsertPreferences({ key: `collection-${collectionSlug}`, req, value: { columns: columnsFromQuery, + groupBy: query?.groupBy, limit: isNumber(query?.limit) ? Number(query.limit) : undefined, preset: query?.preset, sort: query?.sort as string, @@ -112,6 +120,8 @@ export const renderListView = async ( collectionPreferences?.sort || (typeof collectionConfig.defaultSort === 'string' ? 
collectionConfig.defaultSort : undefined) + query.groupBy = collectionPreferences?.groupBy + query.columns = transformColumnsToSearchParams(collectionPreferences?.columns || []) const { @@ -137,6 +147,12 @@ export const renderListView = async ( let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined + const whereWithMergedSearch = mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: combineWhereConstraints([query?.where, baseListFilter]), + }) + if (collectionPreferences?.preset) { try { queryPreset = (await payload.findByID({ @@ -160,41 +176,55 @@ export const renderListView = async ( } } - const data = await payload.find({ - collection: collectionSlug, - depth: 0, - draft: true, - fallbackLocale: false, - includeLockStatus: true, - limit: query.limit, - locale, - overrideAccess: false, - page: query.page, - req, - sort: query.sort, - user, - where: mergeListSearchAndWhere({ + let data: PaginatedDocs | undefined + let Table: React.ReactNode | React.ReactNode[] = null + let columnState: Column[] = [] + + if (collectionConfig.admin.groupBy && query.groupBy) { + ;({ columnState, data, Table } = await handleGroupBy({ + clientConfig, collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: combineWhereConstraints([query?.where, baseListFilter]), - }), - }) - - const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) - - const { columnState, Table } = renderTable({ - clientCollectionConfig, - collectionConfig, - columns: collectionPreferences?.columns, - customCellProps, - docs: data.docs, - drawerSlug, - enableRowSelections, - i18n: req.i18n, - orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, - payload, - useAsTitle: collectionConfig.admin.useAsTitle, - }) + collectionSlug, + columns: collectionPreferences?.columns, + customCellProps, + drawerSlug, + enableRowSelections, + query, + req, + user, + where: whereWithMergedSearch, + })) + } else { + data = await req.payload.find({ + collection: collectionSlug, + depth: 0, + draft: true, + fallbackLocale: false, + includeLockStatus: true, + limit: query?.limit ? Number(query.limit) : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.page ? Number(query.page) : undefined, + req, + sort: query?.sort, + user, + where: whereWithMergedSearch, + }) + ;({ columnState, Table } = renderTable({ + clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug), + collectionConfig, + columns: collectionPreferences?.columns, + customCellProps, + data, + drawerSlug, + enableRowSelections, + i18n: req.i18n, + orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, + payload: req.payload, + query, + useAsTitle: collectionConfig.admin.useAsTitle, + })) + } const renderedFilters = renderFilters(collectionConfig.fields, req.payload.importMap) @@ -249,6 +279,7 @@ export const renderListView = async ( const isInDrawer = Boolean(drawerSlug) // Needed to prevent: Only plain objects can be passed to Client Components from Server Components. Objects with toJSON methods are not supported. Convert it manually to a simple value before passing it to props. + // Is there a way to avoid this? The `where` object is already seemingly plain, but is not bc it originates from the params. query.where = query?.where ? 
JSON.parse(JSON.stringify(query?.where || {})) : undefined return { diff --git a/packages/payload/src/admin/functions/index.ts b/packages/payload/src/admin/functions/index.ts index 1d42c1cca0..aaa1106fe5 100644 --- a/packages/payload/src/admin/functions/index.ts +++ b/packages/payload/src/admin/functions/index.ts @@ -45,9 +45,15 @@ export type ListQuery = { * Use `transformColumnsToPreferences` and `transformColumnsToSearchParams` to convert it back and forth */ columns?: ColumnsFromURL + /* + * A string representing the field to group by, e.g. `category` + * A leading hyphen represents descending order, e.g. `-category` + */ + groupBy?: string limit?: number page?: number preset?: number | string + queryByGroup?: Record /* When provided, is automatically injected into the `where` object */ @@ -59,6 +65,10 @@ export type ListQuery = { export type BuildTableStateArgs = { collectionSlug: string | string[] columns?: ColumnPreference[] + data?: PaginatedDocs + /** + * @deprecated Use `data` instead + */ docs?: PaginatedDocs['docs'] enableRowSelections?: boolean orderableFieldName: string diff --git a/packages/payload/src/admin/views/list.ts b/packages/payload/src/admin/views/list.ts index 7097e0bd41..6a3b320aca 100644 --- a/packages/payload/src/admin/views/list.ts +++ b/packages/payload/src/admin/views/list.ts @@ -17,7 +17,7 @@ export type ListViewSlots = { BeforeListTable?: React.ReactNode Description?: React.ReactNode listMenuItems?: React.ReactNode[] - Table: React.ReactNode + Table: React.ReactNode | React.ReactNode[] } /** diff --git a/packages/payload/src/collections/config/types.ts b/packages/payload/src/collections/config/types.ts index 4414715544..97c4004c89 100644 --- a/packages/payload/src/collections/config/types.ts +++ b/packages/payload/src/collections/config/types.ts @@ -367,6 +367,13 @@ export type CollectionAdminOptions = { * - Set to `false` to exclude the entity from the sidebar / dashboard without disabling its routes. */ group?: false | Record | string + /** + * @experimental This option is currently in beta and may change in future releases and/or contain bugs. + * Use at your own risk. + * @description Enable grouping by a field in the list view. + * Uses `payload.findDistinct` under the hood to populate the group-by options. 
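As a reference for the `groupBy` and `queryByGroup` query shape described in the `ListQuery` type above, a hedged sketch — the field name and group key are illustrative, and the per-group `{ page, limit }` shape is inferred from `handleGroupBy` above:

```ts
import type { ListQuery } from 'payload'

// Illustrative only: `category` and `news` are assumed values.
const query: ListQuery = {
  groupBy: '-category', // leading hyphen: distinct group values are sorted descending
  limit: 10,
  page: 1,
  // Per-group pagination, keyed by each distinct value (or relationship ID):
  queryByGroup: {
    news: { limit: 10, page: 2 },
  },
}
```

On the server, `handleGroupBy` first calls `payload.findDistinct` for the grouped field to enumerate the group headings, then issues one `payload.find` per distinct value, constrained by `{ [groupByFieldPath]: { equals: value } }`.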
+ */ + groupBy?: boolean /** * Exclude the collection from the admin nav and routes */ diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 55a5207224..542f18c71d 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -1208,7 +1208,6 @@ export { findVersionsOperation } from './collections/operations/findVersions.js' export { restoreVersionOperation } from './collections/operations/restoreVersion.js' export { updateOperation } from './collections/operations/update.js' export { updateByIDOperation } from './collections/operations/updateByID.js' - export { buildConfig } from './config/build.js' export { type ClientConfig, diff --git a/packages/payload/src/preferences/types.ts b/packages/payload/src/preferences/types.ts index 0e4a137a39..245fd2ad63 100644 --- a/packages/payload/src/preferences/types.ts +++ b/packages/payload/src/preferences/types.ts @@ -37,6 +37,7 @@ export type ColumnPreference = { export type CollectionPreferences = { columns?: ColumnPreference[] editViewType?: 'default' | 'live-preview' + groupBy?: string limit?: number preset?: DefaultDocumentIDType sort?: string diff --git a/packages/payload/src/utilities/transformColumnPreferences.ts b/packages/payload/src/utilities/transformColumnPreferences.ts index d0412df476..b6619c9532 100644 --- a/packages/payload/src/utilities/transformColumnPreferences.ts +++ b/packages/payload/src/utilities/transformColumnPreferences.ts @@ -13,6 +13,10 @@ export type ColumnsFromURL = string[] export const transformColumnsToPreferences = ( columns: Column[] | ColumnPreference[] | ColumnsFromURL | string | undefined, ): ColumnPreference[] | undefined => { + if (!columns) { + return undefined + } + let columnsToTransform = columns // Columns that originate from the URL are a stringified JSON array and need to be parsed first diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index f50aa54f8f..71e84d9b6c 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -185,6 +185,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:confirmReindexDescription', 'general:confirmReindexDescriptionAll', 'general:copied', + 'general:clear', 'general:clearAll', 'general:copy', 'general:copyField', @@ -232,6 +233,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:filterWhere', 'general:globals', 'general:goBack', + 'general:groupByLabel', 'general:isEditing', 'general:item', 'general:items', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index ce23c1780c..300a893294 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -224,6 +224,7 @@ export const arTranslations: DefaultTranslationsObject = { backToDashboard: 'العودة للوحة التّحكّم', cancel: 'إلغاء', changesNotSaved: 'لم يتمّ حفظ التّغييرات. 
إن غادرت الآن ، ستفقد تغييراتك.', + clear: 'واضح', clearAll: 'امسح الكل', close: 'إغلاق', collapse: 'طيّ', @@ -294,6 +295,7 @@ export const arTranslations: DefaultTranslationsObject = { filterWhere: 'تصفية {{label}} حيث', globals: 'عامة', goBack: 'العودة', + groupByLabel: 'التجميع حسب {{label}}', import: 'استيراد', isEditing: 'يحرر', item: 'عنصر', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index bc2ecb7ab7..1efebc54dc 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -229,6 +229,8 @@ export const azTranslations: DefaultTranslationsObject = { cancel: 'Ləğv et', changesNotSaved: 'Dəyişiklikləriniz saxlanılmayıb. İndi çıxsanız, dəyişikliklərinizi itirəcəksiniz.', + clear: + 'Payload kontekstində orijinal mətnin mənasını qoruya. İşte Payload terminləri siyahısıdır ki, onlar üzərində çox xüsusi mənalar gəlir:\n - Kolleksiya: Kolleksiya sənədlərin hamıya ortaq struktur və məqsəd sərbəst olan bir qrupdur. Kolleksiyalar Payload-da məzmunu təşkil etmək və idarə etmək üçün istifadə edilir.\n - Sahə: Sahə', clearAll: 'Hamısını təmizlə', close: 'Bağla', collapse: 'Bağla', @@ -300,6 +302,7 @@ export const azTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} filtrlə', globals: 'Qloballar', goBack: 'Geri qayıt', + groupByLabel: '{{label}} ilə qruplaşdırın', import: 'İdxal', isEditing: 'redaktə edir', item: 'əşya', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index 308778b051..8f99debc9a 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -229,6 +229,7 @@ export const bgTranslations: DefaultTranslationsObject = { backToDashboard: 'Обратно към таблото', cancel: 'Отмени', changesNotSaved: 'Промените ти не са запазени. 
Ако напуснеш сега, ще ги загубиш.', + clear: 'Ясно', clearAll: 'Изчисти всичко', close: 'Затвори', collapse: 'Свий', @@ -300,6 +301,7 @@ export const bgTranslations: DefaultTranslationsObject = { filterWhere: 'Филтрирай {{label}} където', globals: 'Глобални', goBack: 'Върни се', + groupByLabel: 'Групирай по {{label}}', import: 'Внос', isEditing: 'редактира', item: 'артикул', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 9a41c809f9..4a1b06d22e 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -231,6 +231,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { cancel: 'বাতিল করুন', changesNotSaved: 'আপনার পরিবর্তনগুলি সংরক্ষণ করা হয়নি। আপনি যদি এখন চলে যান, তাহলে আপনার পরিবর্তনগুলি হারিয়ে যাবে।', + clear: + 'মূল পাঠের অর্থ সম্মান করুন পেলোড প্রসঙ্গে। এখানে পেলোড নির্দিষ্ট বিশেষ অর্থ বহন করে এরকম একটি সাধারণ টার্মের তালিকা:\n - সংগ্রহ', clearAll: 'সমস্ত সাফ করুন', close: 'বন্ধ করুন', collapse: 'সংকুচিত করুন', @@ -302,6 +304,7 @@ export const bnBdTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} যেখানে ফিল্টার করুন', globals: 'গ্লোবালগুলি', goBack: 'পিছনে যান', + groupByLabel: '{{label}} অনুযায়ী গ্রুপ করুন', import: 'ইম্পোর্ট করুন', isEditing: 'সম্পাদনা করছেন', item: 'আইটেম', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 8c01eb2f78..0c527627b9 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -231,6 +231,7 @@ export const bnInTranslations: DefaultTranslationsObject = { cancel: 'বাতিল করুন', changesNotSaved: 'আপনার পরিবর্তনগুলি সংরক্ষণ করা হয়নি। আপনি যদি এখন চলে যান, তাহলে আপনার পরিবর্তনগুলি হারিয়ে যাবে।', + clear: 'স্পষ্ট', clearAll: 'সমস্ত সাফ করুন', close: 'বন্ধ করুন', collapse: 'সংকুচিত করুন', @@ -302,6 +303,7 @@ export const bnInTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} যেখানে ফিল্টার করুন', globals: 'গ্লোবালগুলি', goBack: 'পিছনে যান', + groupByLabel: '{{label}} দ্বারা গ্রুপ করুন', import: 'ইম্পোর্ট করুন', isEditing: 'সম্পাদনা করছেন', item: 'আইটেম', diff --git a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index c3c2ecead5..5d15a184de 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -230,6 +230,7 @@ export const caTranslations: DefaultTranslationsObject = { backToDashboard: 'Torna al tauler', cancel: 'Cancel·la', changesNotSaved: 'El teu document té canvis no desats. Si continues, els canvis es perdran.', + clear: 'Clar', clearAll: 'Esborra-ho tot', close: 'Tanca', collapse: 'Replegar', @@ -301,6 +302,7 @@ export const caTranslations: DefaultTranslationsObject = { filterWhere: 'Filtra {{label}} on', globals: 'Globals', goBack: 'Torna enrere', + groupByLabel: 'Agrupa per {{label}}', import: 'Importar', isEditing: 'esta editant', item: 'element', diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index 7f8304f59b..6259af3519 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -229,6 +229,7 @@ export const csTranslations: DefaultTranslationsObject = { backToDashboard: 'Zpět na nástěnku', cancel: 'Zrušit', changesNotSaved: 'Vaše změny nebyly uloženy. 
Pokud teď odejdete, ztratíte své změny.', + clear: 'Jasný', clearAll: 'Vymazat vše', close: 'Zavřít', collapse: 'Sbalit', @@ -299,6 +300,7 @@ export const csTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrovat {{label}} kde', globals: 'Globální', goBack: 'Vrátit se', + groupByLabel: 'Seskupit podle {{label}}', import: 'Import', isEditing: 'upravuje', item: 'položka', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index ec1ef4b6ef..ba32797276 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -228,6 +228,7 @@ export const daTranslations: DefaultTranslationsObject = { cancel: 'Anuller', changesNotSaved: 'Dine ændringer er ikke blevet gemt. Hvis du forlader siden, vil din ændringer gå tabt.', + clear: 'Klar', clearAll: 'Ryd alt', close: 'Luk', collapse: 'Skjul', @@ -298,6 +299,7 @@ export const daTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} hvor', globals: 'Globale', goBack: 'Gå tilbage', + groupByLabel: 'Gruppér efter {{label}}', import: 'Import', isEditing: 'redigerer', item: 'vare', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index ae924bb363..5adc816669 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -235,6 +235,8 @@ export const deTranslations: DefaultTranslationsObject = { cancel: 'Abbrechen', changesNotSaved: 'Deine Änderungen wurden nicht gespeichert. Wenn du diese Seite verlässt, gehen deine Änderungen verloren.', + clear: + 'Respektieren Sie die Bedeutung des ursprünglichen Textes im Kontext von Payload. Hier ist eine Liste von gängigen Payload-Begriffen, die sehr spezifische Bedeutungen tragen:\n - Sammlung: Eine Sammlung ist eine Gruppe von Dokumenten, die eine gemeinsame Struktur und Funktion teilen. Sammlungen werden verwendet, um Inhalte in Payload zu organisieren und zu verwalten.\n - Feld: Ein Feld ist ein spezifisches Datenstück innerhalb eines Dokuments in einer Sammlung. Felder definieren die Struktur und den Datentyp, der in einem Dokument gespeichert werden kann.\n -', clearAll: 'Alles löschen', close: 'Schließen', collapse: 'Einklappen', @@ -306,6 +308,7 @@ export const deTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}}, wo', globals: 'Globale Dokumente', goBack: 'Zurück', + groupByLabel: 'Gruppieren nach {{label}}', import: 'Importieren', isEditing: 'bearbeitet gerade', item: 'Artikel', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index e1600e45ae..6e4116e7fe 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -230,6 +230,7 @@ export const enTranslations = { cancel: 'Cancel', changesNotSaved: 'Your changes have not been saved. 
If you leave now, you will lose your changes.', + clear: 'Clear', clearAll: 'Clear All', close: 'Close', collapse: 'Collapse', @@ -301,6 +302,7 @@ export const enTranslations = { filterWhere: 'Filter {{label}} where', globals: 'Globals', goBack: 'Go back', + groupByLabel: 'Group by {{label}}', import: 'Import', isEditing: 'is editing', item: 'item', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index d91a45c21e..e61ad47f6e 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -234,6 +234,7 @@ export const esTranslations: DefaultTranslationsObject = { cancel: 'Cancelar', changesNotSaved: 'Tus cambios no han sido guardados. Si te sales ahora, se perderán tus cambios.', + clear: 'Claro', clearAll: 'Limpiar todo', close: 'Cerrar', collapse: 'Contraer', @@ -305,6 +306,7 @@ export const esTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrar {{label}} donde', globals: 'Globales', goBack: 'Volver', + groupByLabel: 'Agrupar por {{label}}', import: 'Importar', isEditing: 'está editando', item: 'artículo', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 15c77ebea4..f15b3adc54 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -227,6 +227,7 @@ export const etTranslations: DefaultTranslationsObject = { backToDashboard: 'Tagasi töölaua juurde', cancel: 'Tühista', changesNotSaved: 'Teie muudatusi pole salvestatud. Kui lahkute praegu, kaotate oma muudatused.', + clear: 'Selge', clearAll: 'Tühjenda kõik', close: 'Sulge', collapse: 'Ahenda', @@ -297,6 +298,7 @@ export const etTranslations: DefaultTranslationsObject = { filterWhere: 'Filtreeri {{label}} kus', globals: 'Globaalsed', goBack: 'Mine tagasi', + groupByLabel: 'Rühmita {{label}} järgi', import: 'Importimine', isEditing: 'muudab', item: 'üksus', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index 1284b1d942..4b6d8db058 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -227,6 +227,7 @@ export const faTranslations: DefaultTranslationsObject = { cancel: 'لغو', changesNotSaved: 'تغییرات شما ذخیره نشده، اگر این برگه را ترک کنید. تمام تغییرات از دست خواهد رفت.', + clear: 'روشن', clearAll: 'همه را پاک کنید', close: 'بستن', collapse: 'بستن', @@ -298,6 +299,7 @@ export const faTranslations: DefaultTranslationsObject = { filterWhere: 'علامت گذاری کردن {{label}} جایی که', globals: 'سراسری', goBack: 'برگشت', + groupByLabel: 'گروه بندی بر اساس {{label}}', import: 'واردات', isEditing: 'در حال ویرایش است', item: 'مورد', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index c5eab55fda..926a9917b5 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -237,6 +237,7 @@ export const frTranslations: DefaultTranslationsObject = { cancel: 'Annuler', changesNotSaved: 'Vos modifications n’ont pas été enregistrées. 
Vous perdrez vos modifications si vous quittez maintenant.', + clear: 'Clair', clearAll: 'Tout effacer', close: 'Fermer', collapse: 'Réduire', @@ -308,6 +309,7 @@ export const frTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrer {{label}} où', globals: 'Globals(es)', goBack: 'Retourner', + groupByLabel: 'Regrouper par {{label}}', import: 'Importation', isEditing: 'est en train de modifier', item: 'article', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index f7a8d4ff93..394335db64 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -222,6 +222,8 @@ export const heTranslations: DefaultTranslationsObject = { backToDashboard: 'חזרה ללוח המחוונים', cancel: 'ביטול', changesNotSaved: 'השינויים שלך לא נשמרו. אם תצא כעת, תאבד את השינויים שלך.', + clear: + 'בהתחשב במשמעות של הטקסט המקורי בהקשר של Payload. הנה רשימה של מונחים מקוריים של Payload שנושאים משמעויות מסוימות:\n- אוסף: אוסף הוא קבוצה של מסמכים ששותפים למבנה ולמטרה משות', clearAll: 'נקה הכל', close: 'סגור', collapse: 'כווץ', @@ -292,6 +294,7 @@ export const heTranslations: DefaultTranslationsObject = { filterWhere: 'סנן {{label}} בהם', globals: 'גלובלים', goBack: 'חזור', + groupByLabel: 'קבץ לפי {{label}}', import: 'יבוא', isEditing: 'עורך', item: 'פריט', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index 320217c8ef..5f2b7d7db2 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -230,6 +230,7 @@ export const hrTranslations: DefaultTranslationsObject = { backToDashboard: 'Natrag na nadzornu ploču', cancel: 'Otkaži', changesNotSaved: 'Vaše promjene nisu spremljene. Ako izađete sada, izgubit ćete promjene.', + clear: 'Jasan', clearAll: 'Očisti sve', close: 'Zatvori', collapse: 'Sažmi', @@ -301,6 +302,7 @@ export const hrTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} gdje', globals: 'Globali', goBack: 'Vrati se', + groupByLabel: 'Grupiraj po {{label}}', import: 'Uvoz', isEditing: 'uređuje', item: 'stavka', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index 8aaa81144b..7cad548bb0 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -232,6 +232,7 @@ export const huTranslations: DefaultTranslationsObject = { cancel: 'Mégsem', changesNotSaved: 'A módosítások nem lettek mentve. 
Ha most távozik, elveszíti a változtatásokat.', + clear: 'Tiszta', clearAll: 'Törölj mindent', close: 'Bezárás', collapse: 'Összecsukás', @@ -303,6 +304,7 @@ export const huTranslations: DefaultTranslationsObject = { filterWhere: 'Szűrő {{label}} ahol', globals: 'Globálisok', goBack: 'Vissza', + groupByLabel: 'Csoportosítás {{label}} szerint', import: 'Behozatal', isEditing: 'szerkeszt', item: 'tétel', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 704b20d8e1..c35a995178 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -230,6 +230,8 @@ export const hyTranslations: DefaultTranslationsObject = { cancel: 'Չեղարկել', changesNotSaved: 'Ձեր փոփոխությունները չեն պահպանվել։ Եթե հիմա հեռանաք, կկորցնեք չպահպանված փոփոխությունները։', + clear: + 'Հիմնական տեքստի իմաստը պետք է պահպանվի Payload կոնտեքստի մեջ: Այս այս այստեղ են հաճախակի', clearAll: 'Մաքրել բոլորը', close: 'Փակել', collapse: 'Փակել', @@ -301,6 +303,7 @@ export const hyTranslations: DefaultTranslationsObject = { filterWhere: 'Ֆիլտրել {{label}}-ը, որտեղ', globals: 'Համընդհանուրներ', goBack: 'Հետ գնալ', + groupByLabel: 'Խմբավորել {{label}}-ով', import: 'Ներմուծում', isEditing: 'խմբագրում է', item: 'տարր', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index 3a51ef09b2..86e0d42fb5 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -234,6 +234,7 @@ export const itTranslations: DefaultTranslationsObject = { backToDashboard: 'Torna alla Dashboard', cancel: 'Cancella', changesNotSaved: 'Le tue modifiche non sono state salvate. Se esci ora, verranno perse.', + clear: 'Chiara', clearAll: 'Cancella Tutto', close: 'Chiudere', collapse: 'Comprimi', @@ -304,6 +305,7 @@ export const itTranslations: DefaultTranslationsObject = { filterWhere: 'Filtra {{label}} se', globals: 'Globali', goBack: 'Torna indietro', + groupByLabel: 'Raggruppa per {{label}}', import: 'Importare', isEditing: 'sta modificando', item: 'articolo', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 024cf4e1fe..128252db06 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -230,6 +230,7 @@ export const jaTranslations: DefaultTranslationsObject = { backToDashboard: 'ダッシュボードに戻る', cancel: 'キャンセル', changesNotSaved: '未保存の変更があります。このまま画面を離れると内容が失われます。', + clear: 'クリア', clearAll: 'すべてクリア', close: '閉じる', collapse: '閉じる', @@ -301,6 +302,7 @@ export const jaTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} の絞り込み', globals: 'グローバル', goBack: '戻る', + groupByLabel: '{{label}}でグループ化する', import: '輸入', isEditing: '編集中', item: 'アイテム', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index 0d5af0445e..f093b2d0cd 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -227,6 +227,8 @@ export const koTranslations: DefaultTranslationsObject = { backToDashboard: '대시보드로 돌아가기', cancel: '취소', changesNotSaved: '변경 사항이 저장되지 않았습니다. 지금 떠나면 변경 사항을 잃게 됩니다.', + clear: + '페이로드의 맥락 내에서 원문의 의미를 존중하십시오. 다음은 페이로드에서 사용되는 특정 의미를 내포하는 일반적인 페이로드 용어 목록입니다: \n- Collection: 컬렉션은 공통의 구조와 목적을 공유하는 문서의 그룹입니다. 
컬렉션은 페이로드에서 콘텐츠를 정리하고 관리하는 데 사용됩니다.\n- Field: 필드는 컬렉', clearAll: '모두 지우기', close: '닫기', collapse: '접기', @@ -297,6 +299,7 @@ export const koTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} 필터링 조건', globals: '글로벌', goBack: '돌아가기', + groupByLabel: '{{label}}로 그룹화', import: '수입', isEditing: '편집 중', item: '항목', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 94048058cd..ac2afd6c62 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -232,6 +232,7 @@ export const ltTranslations: DefaultTranslationsObject = { cancel: 'Atšaukti', changesNotSaved: 'Jūsų pakeitimai nebuvo išsaugoti. Jei dabar išeisite, prarasite savo pakeitimus.', + clear: 'Aišku', clearAll: 'Išvalyti viską', close: 'Uždaryti', collapse: 'Susikolimas', @@ -303,6 +304,7 @@ export const ltTranslations: DefaultTranslationsObject = { filterWhere: 'Filtruoti {{label}}, kur', globals: 'Globalai', goBack: 'Grįžkite', + groupByLabel: 'Grupuoti pagal {{label}}', import: 'Importas', isEditing: 'redaguoja', item: 'daiktas', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index 0dcd973687..62c907615b 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -229,6 +229,8 @@ export const lvTranslations: DefaultTranslationsObject = { backToDashboard: 'Atpakaļ uz paneli', cancel: 'Atcelt', changesNotSaved: 'Jūsu izmaiņas nav saglabātas. Ja tagad pametīsiet, izmaiņas tiks zaudētas.', + clear: + 'Izpratiet oriģinālteksta nozīmi Payload kontekstā. Šeit ir saraksts ar Payload terminiem, kas ir ļoti specifiskas nozīmes:\n - Kolekcija: Kolekcija ir dokumentu grupa, kuriem ir kopīga struktūra un mērķis. Kolekcijas tiek izmantotas saturu organizēšanai un pārvaldīšanai Payload.\n - Lauks: Lauks ir konkrēts datu fragments dokumentā iekš kolekcijas. Lauki definē struktūru un dat', clearAll: 'Notīrīt visu', close: 'Aizvērt', collapse: 'Sakļaut', @@ -300,6 +302,7 @@ export const lvTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrēt {{label}} kur', globals: 'Globālie', goBack: 'Doties atpakaļ', + groupByLabel: 'Grupēt pēc {{label}}', import: 'Imports', isEditing: 'redzē', item: 'vienība', diff --git a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index 78c87fa725..ec822f8359 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -231,6 +231,7 @@ export const myTranslations: DefaultTranslationsObject = { cancel: 'မလုပ်တော့ပါ။', changesNotSaved: 'သင်၏ပြောင်းလဲမှုများကို မသိမ်းဆည်းရသေးပါ။ ယခု စာမျက်နှာက ထွက်လိုက်ပါက သင်၏ပြောင်းလဲမှုများ အကုန် ဆုံးရှုံးသွားပါမည်။ အကုန်နော်။', + clear: 'Jelas', clearAll: 'အားလုံးကိုရှင်းလင်းပါ', close: 'ပိတ်', collapse: 'ခေါက်သိမ်းပါ။', @@ -302,6 +303,7 @@ export const myTranslations: DefaultTranslationsObject = { filterWhere: 'နေရာတွင် စစ်ထုတ်ပါ။', globals: 'Globals', goBack: 'နောက်သို့', + groupByLabel: 'Berkumpulkan mengikut {{label}}', import: 'သွင်းကုန်', isEditing: 'ပြင်ဆင်နေသည်', item: 'barang', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index 291b85b6f7..c454ab3e7d 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -229,6 +229,7 @@ export const nbTranslations: DefaultTranslationsObject = { cancel: 'Avbryt', changesNotSaved: 'Endringene dine er ikke lagret. 
Hvis du forlater nå, vil du miste endringene dine.', + clear: 'Tydelig', clearAll: 'Tøm alt', close: 'Lukk', collapse: 'Skjul', @@ -300,6 +301,7 @@ export const nbTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrer {{label}} der', globals: 'Globale variabler', goBack: 'Gå tilbake', + groupByLabel: 'Grupper etter {{label}}', import: 'Import', isEditing: 'redigerer', item: 'vare', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index 1ba7d51a26..33ab436238 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -233,6 +233,7 @@ export const nlTranslations: DefaultTranslationsObject = { cancel: 'Annuleren', changesNotSaved: 'Uw wijzigingen zijn niet bewaard. Als u weggaat zullen de wijzigingen verloren gaan.', + clear: 'Duidelijk', clearAll: 'Alles wissen', close: 'Dichtbij', collapse: 'Samenvouwen', @@ -304,6 +305,7 @@ export const nlTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} waar', globals: 'Globalen', goBack: 'Ga terug', + groupByLabel: 'Groepeer op {{label}}', import: 'Importeren', isEditing: 'is aan het bewerken', item: 'artikel', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index 1e60b6ac79..dd5d4ab4fe 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -229,6 +229,7 @@ export const plTranslations: DefaultTranslationsObject = { cancel: 'Anuluj', changesNotSaved: 'Twoje zmiany nie zostały zapisane. Jeśli teraz wyjdziesz, stracisz swoje zmiany.', + clear: 'Jasne', clearAll: 'Wyczyść wszystko', close: 'Zamknij', collapse: 'Zwiń', @@ -300,6 +301,7 @@ export const plTranslations: DefaultTranslationsObject = { filterWhere: 'Filtruj gdzie', globals: 'Globalne', goBack: 'Wróć', + groupByLabel: 'Grupuj według {{label}}', import: 'Import', isEditing: 'edytuje', item: 'przedmiot', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index ac01e47c00..6a26c458b1 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -230,6 +230,7 @@ export const ptTranslations: DefaultTranslationsObject = { cancel: 'Cancelar', changesNotSaved: 'Suas alterações não foram salvas. Se você sair agora, essas alterações serão perdidas.', + clear: 'Claro', clearAll: 'Limpar Tudo', close: 'Fechar', collapse: 'Recolher', @@ -301,6 +302,7 @@ export const ptTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrar {{label}} em que', globals: 'Globais', goBack: 'Voltar', + groupByLabel: 'Agrupar por {{label}}', import: 'Importar', isEditing: 'está editando', item: 'item', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 34bae916f4..8d58ed276b 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -234,6 +234,7 @@ export const roTranslations: DefaultTranslationsObject = { cancel: 'Anulați', changesNotSaved: 'Modificările dvs. nu au fost salvate. 
Dacă plecați acum, vă veți pierde modificările.', + clear: 'Clar', clearAll: 'Șterge tot', close: 'Închide', collapse: 'Colaps', @@ -305,6 +306,7 @@ export const roTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrează {{label}} unde', globals: 'Globale', goBack: 'Înapoi', + groupByLabel: 'Grupare după {{label}}', import: 'Import', isEditing: 'editează', item: 'articol', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index 1f0701c3f4..d8a2e38249 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -230,6 +230,7 @@ export const rsTranslations: DefaultTranslationsObject = { backToDashboard: 'Назад на контролни панел', cancel: 'Откажи', changesNotSaved: 'Ваше промене нису сачуване. Ако изађете сада, изгубићете промене.', + clear: 'Jasno', clearAll: 'Obriši sve', close: 'Затвори', collapse: 'Скупи', @@ -301,6 +302,7 @@ export const rsTranslations: DefaultTranslationsObject = { filterWhere: 'Филтер {{label}} где', globals: 'Глобали', goBack: 'Врати се', + groupByLabel: 'Grupiši po {{label}}', import: 'Uvoz', isEditing: 'уређује', item: 'artikal', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index 2ae83c93db..1207d5cfb4 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -230,6 +230,7 @@ export const rsLatinTranslations: DefaultTranslationsObject = { backToDashboard: 'Nazad na kontrolni panel', cancel: 'Otkaži', changesNotSaved: 'Vaše promene nisu sačuvane. Ako izađete sada, izgubićete promene.', + clear: 'Jasno', clearAll: 'Očisti sve', close: 'Zatvori', collapse: 'Skupi', @@ -301,6 +302,7 @@ export const rsLatinTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} gde', globals: 'Globali', goBack: 'Vrati se', + groupByLabel: 'Grupiši po {{label}}', import: 'Uvoz', isEditing: 'uređuje', item: 'stavka', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index b23b2ef9fd..4eba3f49cc 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -232,6 +232,7 @@ export const ruTranslations: DefaultTranslationsObject = { cancel: 'Отмена', changesNotSaved: 'Ваши изменения не были сохранены. Если вы сейчас уйдете, то потеряете свои изменения.', + clear: 'Четкий', clearAll: 'Очистить все', close: 'Закрыть', collapse: 'Свернуть', @@ -303,6 +304,7 @@ export const ruTranslations: DefaultTranslationsObject = { filterWhere: 'Где фильтровать', globals: 'Глобальные', goBack: 'Назад', + groupByLabel: 'Группировать по {{label}}', import: 'Импорт', isEditing: 'редактирует', item: 'предмет', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index 4c24b250d3..13a6c5d5aa 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -232,6 +232,7 @@ export const skTranslations: DefaultTranslationsObject = { backToDashboard: 'Späť na nástenku', cancel: 'Zrušiť', changesNotSaved: 'Vaše zmeny neboli uložené. 
Ak teraz odídete, stratíte svoje zmeny.', + clear: 'Jasný', clearAll: 'Vymazať všetko', close: 'Zavrieť', collapse: 'Zbaliť', @@ -302,6 +303,7 @@ export const skTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrovat kde je {{label}}', globals: 'Globalné', goBack: 'Vrátiť sa', + groupByLabel: 'Zoskupiť podľa {{label}}', import: 'Dovoz', isEditing: 'upravuje', item: 'položka', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index 02e046a58b..6880c5a6e8 100644 --- a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -230,6 +230,7 @@ export const slTranslations: DefaultTranslationsObject = { cancel: 'Prekliči', changesNotSaved: 'Vaše spremembe niso shranjene. Če zapustite zdaj, boste izgubili svoje spremembe.', + clear: 'Čisto', clearAll: 'Počisti vse', close: 'Zapri', collapse: 'Strni', @@ -300,6 +301,7 @@ export const slTranslations: DefaultTranslationsObject = { filterWhere: 'Filtriraj {{label}} kjer', globals: 'Globalne nastavitve', goBack: 'Nazaj', + groupByLabel: 'Razvrsti po {{label}}', import: 'Uvoz', isEditing: 'ureja', item: 'predmet', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index caef27df3b..bad3e2435e 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -229,6 +229,7 @@ export const svTranslations: DefaultTranslationsObject = { cancel: 'Avbryt', changesNotSaved: 'Dina ändringar har inte sparats. Om du lämnar nu kommer du att förlora dina ändringar.', + clear: 'Tydlig', clearAll: 'Rensa alla', close: 'Stänga', collapse: 'Kollapsa', @@ -300,6 +301,7 @@ export const svTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrera {{label}} där', globals: 'Globala', goBack: 'Gå tillbaka', + groupByLabel: 'Gruppera efter {{label}}', import: 'Importera', isEditing: 'redigerar', item: 'artikel', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index 41cb9878b8..3346be921f 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -224,6 +224,8 @@ export const thTranslations: DefaultTranslationsObject = { backToDashboard: 'กลับไปหน้าแดชบอร์ด', cancel: 'ยกเลิก', changesNotSaved: 'การเปลี่ยนแปลงยังไม่ได้ถูกบันทึก ถ้าคุณออกตอนนี้ สิ่งที่แก้ไขไว้จะหายไป', + clear: + 'ให้เคารพความหมายของข้อความต้นฉบับภายในบริบทของ Payload นี่คือรายการของคำที่มักใช้ใน Payload ที่มีความหมายที่เฉพาะเจาะจงมาก:\n - Collection: Collection คือกลุ่มของเอกสารที่มีโครงสร้างและจุดประสงค์ท', clearAll: 'ล้างทั้งหมด', close: 'ปิด', collapse: 'ยุบ', @@ -295,6 +297,7 @@ export const thTranslations: DefaultTranslationsObject = { filterWhere: 'กรอง {{label}} เฉพาะ', globals: 'Globals', goBack: 'กลับไป', + groupByLabel: 'จัดกลุ่มตาม {{label}}', import: 'นำเข้า', isEditing: 'กำลังแก้ไข', item: 'รายการ', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index 1daaae2925..b9d9551601 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -233,6 +233,7 @@ export const trTranslations: DefaultTranslationsObject = { cancel: 'İptal', changesNotSaved: 'Değişiklikleriniz henüz kaydedilmedi. 
Eğer bu sayfayı terk ederseniz değişiklikleri kaybedeceksiniz.', + clear: 'Temiz', clearAll: 'Hepsini Temizle', close: 'Kapat', collapse: 'Daralt', @@ -304,6 +305,7 @@ export const trTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} filtrele:', globals: 'Globaller', goBack: 'Geri dön', + groupByLabel: "{{label}}'ye göre grupla", import: 'İthalat', isEditing: 'düzenliyor', item: 'öğe', diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index eb33c1daac..1d84f1eb7b 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -230,6 +230,7 @@ export const ukTranslations: DefaultTranslationsObject = { backToDashboard: 'Повернутись до головної сторінки', cancel: 'Скасувати', changesNotSaved: 'Ваши зміни не були збережені. Якщо ви вийдете зараз, то втратите свої зміни.', + clear: 'Чітко', clearAll: 'Очистити все', close: 'Закрити', collapse: 'Згорнути', @@ -300,6 +301,7 @@ export const ukTranslations: DefaultTranslationsObject = { filterWhere: 'Де фільтрувати {{label}}', globals: 'Глобальні', goBack: 'Повернутися', + groupByLabel: 'Групувати за {{label}}', import: 'Імпорт', isEditing: 'редагує', item: 'предмет', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index 7f747ef15f..1af0493b64 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -228,6 +228,7 @@ export const viTranslations: DefaultTranslationsObject = { backToDashboard: 'Quay lại bảng điều khiển', cancel: 'Hủy', changesNotSaved: 'Thay đổi chưa được lưu lại. Bạn sẽ mất bản chỉnh sửa nếu thoát bây giờ.', + clear: 'Rõ ràng', clearAll: 'Xóa tất cả', close: 'Gần', collapse: 'Thu gọn', @@ -299,6 +300,7 @@ export const viTranslations: DefaultTranslationsObject = { filterWhere: 'Lọc {{label}} với điều kiện:', globals: 'Toàn thể (globals)', goBack: 'Quay lại', + groupByLabel: 'Nhóm theo {{label}}', import: 'Nhập khẩu', isEditing: 'đang chỉnh sửa', item: 'mặt hàng', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index 84a477ba71..b8849294c8 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -218,6 +218,7 @@ export const zhTranslations: DefaultTranslationsObject = { backToDashboard: '返回到仪表板', cancel: '取消', changesNotSaved: '您的更改尚未保存。您确定要离开吗?', + clear: '清晰', clearAll: '清除全部', close: '关闭', collapse: '折叠', @@ -286,6 +287,7 @@ export const zhTranslations: DefaultTranslationsObject = { filterWhere: '过滤{{label}}', globals: '全局', goBack: '返回', + groupByLabel: '按{{label}}分组', import: '导入', isEditing: '正在编辑', item: '条目', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index e659462f6b..451bc4fd21 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -217,6 +217,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { backToDashboard: '返回到控制面板', cancel: '取消', changesNotSaved: '您還有尚未儲存的變更。您確定要離開嗎?', + clear: '清晰', clearAll: '清除全部', close: '關閉', collapse: '折疊', @@ -285,6 +286,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { filterWhere: '過濾{{label}}', globals: '全域', goBack: '返回', + groupByLabel: '按照 {{label}} 分類', import: '進口', isEditing: '正在編輯', item: '物品', diff --git a/packages/ui/src/elements/ColumnSelector/index.tsx b/packages/ui/src/elements/ColumnSelector/index.tsx index caffabe9d7..f208d87e4c 100644 
--- a/packages/ui/src/elements/ColumnSelector/index.tsx +++ b/packages/ui/src/elements/ColumnSelector/index.tsx @@ -21,7 +21,7 @@ export const ColumnSelector: React.FC = ({ collectionSlug }) => { const filteredColumns = useMemo( () => - columns.filter( + columns?.filter( (col) => !(fieldIsHiddenOrDisabled(col.field) && !fieldIsID(col.field)) && !col?.field?.admin?.disableListColumn, diff --git a/packages/ui/src/elements/DeleteMany/index.tsx b/packages/ui/src/elements/DeleteMany/index.tsx index 561cf39645..0c1a6d63c4 100644 --- a/packages/ui/src/elements/DeleteMany/index.tsx +++ b/packages/ui/src/elements/DeleteMany/index.tsx @@ -20,18 +20,23 @@ import { parseSearchParams } from '../../utilities/parseSearchParams.js' import { ConfirmationModal } from '../ConfirmationModal/index.js' import { ListSelectionButton } from '../ListSelection/index.js' -const confirmManyDeleteDrawerSlug = `confirm-delete-many-docs` - export type Props = { collection: ClientCollectionConfig + /** + * When multiple DeleteMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string + /** + * When multiple PublishMany components are rendered on the page, this will differentiate them. + */ title?: string } export const DeleteMany: React.FC = (props) => { - const { collection: { slug } = {} } = props + const { collection: { slug } = {}, modalPrefix } = props const { permissions } = useAuth() - const { count, getSelectedIds, selectAll, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() const router = useRouter() const searchParams = useSearchParams() const { clearRouteCache } = useRouteCache() @@ -39,13 +44,14 @@ export const DeleteMany: React.FC = (props) => { const collectionPermissions = permissions?.collections?.[slug] const hasDeletePermission = collectionPermissions?.delete + const selectingAll = selectAll === SelectAllStatus.AllAvailable + + const ids = selectingAll ? [] : selectedIDs + if (selectAll === SelectAllStatus.None || !hasDeletePermission) { return null } - const selectingAll = selectAll === SelectAllStatus.AllAvailable - const selectedIDs = !selectingAll ? getSelectedIds() : [] - return ( = (props) => { clearRouteCache() }} + modalPrefix={modalPrefix} search={parseSearchParams(searchParams)?.search as string} selections={{ [slug]: { all: selectAll === SelectAllStatus.AllAvailable, - ids: selectedIDs, - totalCount: selectingAll ? count : selectedIDs.length, + ids, + totalCount: selectingAll ? count : ids.length, }, }} where={parseSearchParams(searchParams)?.where as Where} @@ -91,6 +98,10 @@ type DeleteMany_v4Props = { * A callback function to be called after the delete request is completed. */ afterDelete?: (result: AfterDeleteResult) => void + /** + * When multiple DeleteMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string /** * Optionally pass a search string to filter the documents to be deleted. * @@ -126,8 +137,15 @@ type DeleteMany_v4Props = { * * If you are deleting monomorphic documents, shape your `selections` to match the polymorphic structure. 
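A rough usage sketch for the new `modalPrefix` and polymorphic `selections` props — the import path, collection slug, and IDs are assumptions for illustration, mirroring the internal usage in `DeleteMany` above:

```tsx
// Hypothetical usage; not taken verbatim from this PR.
import { DeleteMany_v4 } from '@payloadcms/ui'

export const DeleteSelectedPosts = ({ ids }: { ids: (number | string)[] }) => (
  <DeleteMany_v4
    // Differentiates the confirmation modal when several DeleteMany instances render at once
    modalPrefix="posts-group-table"
    selections={{
      posts: { all: false, ids, totalCount: ids.length },
    }}
    afterDelete={(result) => {
      console.log(result)
    }}
  />
)
```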
*/ -export function DeleteMany_v4({ afterDelete, search, selections, where }: DeleteMany_v4Props) { +export function DeleteMany_v4({ + afterDelete, + modalPrefix, + search, + selections, + where, +}: DeleteMany_v4Props) { const { t } = useTranslation() + const { config: { collections, @@ -135,15 +153,20 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete serverURL, }, } = useConfig() + const { code: locale } = useLocale() const { i18n } = useTranslation() const { openModal } = useModal() + const confirmManyDeleteDrawerSlug = `${modalPrefix ? `${modalPrefix}-` : ''}confirm-delete-many-docs` + const handleDelete = React.useCallback(async () => { const deletingOneCollection = Object.keys(selections).length === 1 const result: AfterDeleteResult = {} + for (const [relationTo, { all, ids = [] }] of Object.entries(selections)) { const collectionConfig = collections.find(({ slug }) => slug === relationTo) + if (collectionConfig) { let whereConstraint: Where @@ -153,7 +176,9 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete whereConstraint = where } else { whereConstraint = { - id: { not_equals: '' }, + id: { + not_equals: '', + }, } } } else { @@ -219,6 +244,7 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete toast.error(t('error:unknown')) result[relationTo].errors = [t('error:unknown')] } + continue } catch (_err) { toast.error(t('error:unknown')) @@ -247,7 +273,9 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete value.totalCount > 1 ? collectionConfig.labels.plural : collectionConfig.labels.singular, i18n, )}` + let newLabel + if (index === array.length - 1 && index !== 0) { newLabel = `${acc.label} and ${collectionLabel}` } else if (index > 0) { diff --git a/packages/ui/src/elements/EditMany/DrawerContent.tsx b/packages/ui/src/elements/EditMany/DrawerContent.tsx index 74b6c50ff3..1a072db26f 100644 --- a/packages/ui/src/elements/EditMany/DrawerContent.tsx +++ b/packages/ui/src/elements/EditMany/DrawerContent.tsx @@ -139,7 +139,9 @@ type EditManyDrawerContentProps = { * The function to set the selected fields to bulk edit */ setSelectedFields: (fields: FieldOption[]) => void + where?: Where } & EditManyProps + export const EditManyDrawerContent: React.FC = (props) => { const { collection, @@ -151,6 +153,7 @@ export const EditManyDrawerContent: React.FC = (prop selectAll, selectedFields, setSelectedFields, + where, } = props const { permissions, user } = useAuth() @@ -220,6 +223,10 @@ export const EditManyDrawerContent: React.FC = (prop const queryString = useMemo((): string => { const whereConstraints: Where[] = [] + if (where) { + whereConstraints.push(where) + } + const queryWithSearch = mergeListSearchAndWhere({ collectionConfig: collection, search: searchParams.get('search'), @@ -234,7 +241,7 @@ export const EditManyDrawerContent: React.FC = (prop whereConstraints.push( (parseSearchParams(searchParams)?.where as Where) || { id: { - exists: true, + not_equals: '', }, }, ) @@ -254,7 +261,7 @@ export const EditManyDrawerContent: React.FC = (prop }, { addQueryPrefix: true }, ) - }, [collection, searchParams, selectAll, ids, locale]) + }, [collection, searchParams, selectAll, ids, locale, where]) const onSuccess = () => { router.replace( diff --git a/packages/ui/src/elements/EditMany/index.tsx b/packages/ui/src/elements/EditMany/index.tsx index b9aa3a11ae..6f1220bdc5 100644 --- a/packages/ui/src/elements/EditMany/index.tsx +++ 
b/packages/ui/src/elements/EditMany/index.tsx @@ -1,5 +1,5 @@ 'use client' -import type { ClientCollectionConfig } from 'payload' +import type { ClientCollectionConfig, Where } from 'payload' import { useModal } from '@faceless-ui/modal' import React, { useState } from 'react' @@ -11,9 +11,9 @@ import { EditDepthProvider } from '../../providers/EditDepth/index.js' import { SelectAllStatus, useSelection } from '../../providers/Selection/index.js' import { useTranslation } from '../../providers/Translation/index.js' import { Drawer } from '../Drawer/index.js' -import './index.scss' import { ListSelectionButton } from '../ListSelection/index.js' import { EditManyDrawerContent } from './DrawerContent.js' +import './index.scss' export const baseClass = 'edit-many' @@ -22,13 +22,14 @@ export type EditManyProps = { } export const EditMany: React.FC = (props) => { - const { count, selectAll, selected, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() + return ( toggleAll(false)} + ids={selectedIDs} + onSuccess={() => toggleAll()} selectAll={selectAll === SelectAllStatus.AllAvailable} /> ) @@ -38,10 +39,15 @@ export const EditMany_v4: React.FC< { count: number ids: (number | string)[] + /** + * When multiple EditMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string onSuccess?: () => void selectAll: boolean - } & EditManyProps -> = ({ collection, count, ids, onSuccess, selectAll }) => { + where?: Where + } & Omit +> = ({ collection, count, ids, modalPrefix, onSuccess, selectAll, where }) => { const { permissions } = useAuth() const { openModal } = useModal() @@ -51,7 +57,7 @@ export const EditMany_v4: React.FC< const collectionPermissions = permissions?.collections?.[collection.slug] - const drawerSlug = `edit-${collection.slug}` + const drawerSlug = `${modalPrefix ? 
`${modalPrefix}-` : ''}edit-${collection.slug}`
 
   if (count === 0 || !collectionPermissions?.update) {
     return null
@@ -79,6 +85,7 @@ export const EditMany_v4: React.FC<
             selectAll={selectAll}
             selectedFields={selectedFields}
             setSelectedFields={setSelectedFields}
+            where={where}
           />
diff --git a/packages/ui/src/elements/GroupByBuilder/index.scss b/packages/ui/src/elements/GroupByBuilder/index.scss
new file mode 100644
index 0000000000..f05f1c3fc9
--- /dev/null
+++ b/packages/ui/src/elements/GroupByBuilder/index.scss
@@ -0,0 +1,39 @@
+@import '../../scss/styles.scss';
+
+@layer payload-default {
+  .group-by-builder {
+    background: var(--theme-elevation-50);
+    padding: var(--base);
+    display: flex;
+    flex-direction: column;
+    gap: calc(var(--base) / 2);
+
+    &__header {
+      width: 100%;
+      display: flex;
+      justify-content: space-between;
+    }
+
+    &__clear-button {
+      background: transparent;
+      border: none;
+      color: var(--theme-elevation-500);
+      line-height: inherit;
+      cursor: pointer;
+      font-size: inherit;
+      padding: 0;
+      text-decoration: underline;
+    }
+
+    &__inputs {
+      width: 100%;
+      display: flex;
+      gap: base(1);
+
+      & > * {
+        flex-grow: 1;
+        width: 50%;
+      }
+    }
+  }
+}
diff --git a/packages/ui/src/elements/GroupByBuilder/index.tsx b/packages/ui/src/elements/GroupByBuilder/index.tsx
new file mode 100644
index 0000000000..390666cb97
--- /dev/null
+++ b/packages/ui/src/elements/GroupByBuilder/index.tsx
@@ -0,0 +1,144 @@
+'use client'
+import type { ClientField, Field, SanitizedCollectionConfig } from 'payload'
+
+import './index.scss'
+
+import React, { useMemo } from 'react'
+
+import { SelectInput } from '../../fields/Select/Input.js'
+import { useListQuery } from '../../providers/ListQuery/index.js'
+import { useTranslation } from '../../providers/Translation/index.js'
+import { reduceFieldsToOptions } from '../../utilities/reduceFieldsToOptions.js'
+import { ReactSelect } from '../ReactSelect/index.js'
+
+export type Props = {
+  readonly collectionSlug: SanitizedCollectionConfig['slug']
+  fields: ClientField[]
+}
+
+const baseClass = 'group-by-builder'
+
+/**
+ * Note: Some fields are already omitted from the list of fields:
+ * - fields with nested fields, e.g. `tabs`, `groups`, etc.
+ * - fields that don't affect data, e.g. `row`, `collapsible`, `ui`, etc.
+ * So we don't technically need to omit them here, but we do anyway.
+ * Some remaining field types still need an additional check, e.g. `richText`.
+ */
+const supportedFieldTypes: Field['type'][] = [
+  'text',
+  'textarea',
+  'number',
+  'select',
+  'relationship',
+  'date',
+  'checkbox',
+  'radio',
+  'email',
+  'upload',
+]
+
+export const GroupByBuilder: React.FC<Props> = ({ collectionSlug, fields }) => {
+  const { i18n, t } = useTranslation()
+
+  const reducedFields = useMemo(() => reduceFieldsToOptions({ fields, i18n }), [fields, i18n])
+
+  const { query, refineListData } = useListQuery()
+
+  const groupByFieldName = query.groupBy?.replace(/^-/, '')
+
+  const groupByField = reducedFields.find((field) => field.value === groupByFieldName)
+
+  return (
+    <div className={baseClass}>
+      <div className={`${baseClass}__header`}>
+        {t('general:groupByLabel', {
+          label: '',
+        })}
+        {query.groupBy && (
+
+        )}
+      </div>
+      <div className={`${baseClass}__inputs`}>
+        <ReactSelect
+          filterOption={(option, inputValue) =>
+            ((option?.data?.plainTextLabel as string) || option.label)
+              .toLowerCase()
+              .includes(inputValue.toLowerCase())
+          }
+          id="group-by--field-select"
+          isClearable
+          isMulti={false}
+          onChange={async (v: { value: string } | null) => {
+            const value = v === null ? undefined : v.value
+
+            // value is being cleared
+            if (v === null) {
+              await refineListData({
+                groupBy: '',
+                page: 1,
+              })
+            }
+
+            await refineListData({
+              groupBy: value ? (query.groupBy?.startsWith('-') ? `-${value}` : value) : undefined,
+              page: 1,
+            })
+          }}
+          options={reducedFields.filter(
+            (field) =>
+              !field.field.admin.disableListFilter &&
+              field.value !== 'id' &&
+              supportedFieldTypes.includes(field.field.type),
+          )}
+          value={{
+            label: groupByField?.label || t('general:selectValue'),
+            value: groupByFieldName || '',
+          }}
+        />
+        <SelectInput
+          onChange={async ({ value }: { value: string }) => {
+            if (!groupByFieldName) {
+              return
+            }
+
+            await refineListData({
+              groupBy: value === 'asc' ? groupByFieldName : `-${groupByFieldName}`,
+              page: 1,
+            })
+          }}
+          options={[
+            { label: t('general:ascending'), value: 'asc' },
+            { label: t('general:descending'), value: 'desc' },
+          ]}
+          path="direction"
+          readOnly={!groupByFieldName}
+          value={
+            !query.groupBy
+              ? 'asc'
+              : typeof query.groupBy === 'string'
+                ? `${query.groupBy.startsWith('-') ? 'desc' : 'asc'}`
+                : ''
+          }
+        />
+      </div>
+    </div>
+ ) +} diff --git a/packages/ui/src/elements/ListControls/index.scss b/packages/ui/src/elements/ListControls/index.scss index 6443e5f162..86c2009f05 100644 --- a/packages/ui/src/elements/ListControls/index.scss +++ b/packages/ui/src/elements/ListControls/index.scss @@ -36,7 +36,8 @@ .pill-selector, .where-builder, - .sort-complex { + .sort-complex, + .group-by-builder { margin-top: base(1); } @@ -90,7 +91,8 @@ &__toggle-columns, &__toggle-where, - &__toggle-sort { + &__toggle-sort, + &__toggle-group-by { flex: 1; } } diff --git a/packages/ui/src/elements/ListControls/index.tsx b/packages/ui/src/elements/ListControls/index.tsx index a5adc88ce5..ebdb31d6ae 100644 --- a/packages/ui/src/elements/ListControls/index.tsx +++ b/packages/ui/src/elements/ListControls/index.tsx @@ -16,6 +16,7 @@ import { useListQuery } from '../../providers/ListQuery/index.js' import { useTranslation } from '../../providers/Translation/index.js' import { AnimateHeight } from '../AnimateHeight/index.js' import { ColumnSelector } from '../ColumnSelector/index.js' +import { GroupByBuilder } from '../GroupByBuilder/index.js' import { Pill } from '../Pill/index.js' import { SearchFilter } from '../SearchFilter/index.js' import { WhereBuilder } from '../WhereBuilder/index.js' @@ -97,7 +98,8 @@ export const ListControls: React.FC = (props) => { const hasWhereParam = useRef(Boolean(query?.where)) const shouldInitializeWhereOpened = validateWhereQuery(query?.where) - const [visibleDrawer, setVisibleDrawer] = useState<'columns' | 'sort' | 'where'>( + + const [visibleDrawer, setVisibleDrawer] = useState<'columns' | 'group-by' | 'sort' | 'where'>( shouldInitializeWhereOpened ? 'where' : undefined, ) @@ -140,7 +142,7 @@ export const ListControls: React.FC = (props) => { let listMenuItems: React.ReactNode[] = listMenuItemsFromProps if ( - collectionConfig?.enableQueryPresets && + collectionConfig.enableQueryPresets && !disableQueryPresets && queryPresetMenuItems?.length > 0 ) { @@ -160,7 +162,6 @@ export const ListControls: React.FC = (props) => { @@ -176,6 +177,7 @@ export const ListControls: React.FC = (props) => { aria-expanded={visibleDrawer === 'columns'} className={`${baseClass}__toggle-columns`} icon={} + id="toggle-columns" onClick={() => setVisibleDrawer(visibleDrawer !== 'columns' ? 'columns' : undefined) } @@ -191,6 +193,7 @@ export const ListControls: React.FC = (props) => { aria-expanded={visibleDrawer === 'where'} className={`${baseClass}__toggle-where`} icon={} + id="toggle-list-filters" onClick={() => setVisibleDrawer(visibleDrawer !== 'where' ? 'where' : undefined)} pillStyle="light" size="small" @@ -218,6 +221,24 @@ export const ListControls: React.FC = (props) => { resetPreset={resetPreset} /> )} + {collectionConfig.admin.groupBy && ( + } + id="toggle-group-by" + onClick={() => + setVisibleDrawer(visibleDrawer !== 'group-by' ? 'group-by' : undefined) + } + pillStyle="light" + size="small" + > + {t('general:groupByLabel', { + label: '', + })} + + )} {listMenuItems && Array.isArray(listMenuItems) && listMenuItems.length > 0 && ( } @@ -250,13 +271,25 @@ export const ListControls: React.FC = (props) => { id={`${baseClass}-where`} > + {collectionConfig.admin.groupBy && ( + + + + )}
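For context, the new `GroupByBuilder` and the `toggle-group-by` pill only render when `collectionConfig.admin.groupBy` is set. A minimal sketch of opting a collection into this UI follows; it assumes `admin.groupBy` accepts a simple boolean flag, since the exact config shape is not shown in this diff:

```ts
import type { CollectionConfig } from 'payload'

// Hypothetical collection config: `admin.groupBy` is assumed to be a boolean
// opt-in; the list view then exposes the "Group By" pill and drawer added here.
export const Orders: CollectionConfig = {
  slug: 'orders',
  admin: {
    groupBy: true,
  },
  fields: [
    { name: 'status', type: 'select', options: ['pending', 'shipped'] },
    { name: 'total', type: 'number' },
  ],
}
```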
{PresetListDrawer} {EditPresetDrawer} diff --git a/packages/ui/src/elements/PageControls/GroupByPageControls.tsx b/packages/ui/src/elements/PageControls/GroupByPageControls.tsx new file mode 100644 index 0000000000..e9ec5c878f --- /dev/null +++ b/packages/ui/src/elements/PageControls/GroupByPageControls.tsx @@ -0,0 +1,62 @@ +'use client' +import type { ClientCollectionConfig, PaginatedDocs } from 'payload' + +import React, { useCallback } from 'react' + +import type { IListQueryContext } from '../../providers/ListQuery/types.js' + +import { useListQuery } from '../../providers/ListQuery/context.js' +import { PageControlsComponent } from './index.js' + +/** + * If `groupBy` is set in the query, multiple tables will render, one for each group. + * In this case, each table needs its own `PageControls` to handle pagination. + * These page controls, however, should not modify the global `ListQuery` state. + * Instead, they should only handle the pagination for the current group. + * To do this, build a wrapper around `PageControlsComponent` that handles the pagination logic for the current group. + */ +export const GroupByPageControls: React.FC<{ + AfterPageControls?: React.ReactNode + collectionConfig: ClientCollectionConfig + data: PaginatedDocs + groupByValue?: number | string +}> = ({ AfterPageControls, collectionConfig, data, groupByValue }) => { + const { refineListData } = useListQuery() + + const handlePageChange: IListQueryContext['handlePageChange'] = useCallback( + async (page) => { + await refineListData({ + queryByGroup: { + [groupByValue]: { + page, + }, + }, + }) + }, + [refineListData, groupByValue], + ) + + const handlePerPageChange: IListQueryContext['handlePerPageChange'] = useCallback( + async (limit) => { + await refineListData({ + queryByGroup: { + [groupByValue]: { + limit, + page: 1, + }, + }, + }) + }, + [refineListData, groupByValue], + ) + + return ( + + ) +} diff --git a/packages/ui/src/elements/PageControls/index.scss b/packages/ui/src/elements/PageControls/index.scss new file mode 100644 index 0000000000..70be0db966 --- /dev/null +++ b/packages/ui/src/elements/PageControls/index.scss @@ -0,0 +1,40 @@ +@import '../../scss/styles.scss'; + +@layer payload-default { + .page-controls { + width: 100%; + display: flex; + align-items: center; + + &__page-info { + [dir='ltr'] & { + margin-right: base(1); + margin-left: auto; + } + + [dir='rtl'] & { + margin-left: base(1); + margin-right: auto; + } + } + + @include small-break { + flex-wrap: wrap; + + &__page-info { + [dir='ltr'] & { + margin-left: base(0.5); + } + + [dir='rtl'] & { + margin-right: 0; + } + } + + .paginator { + width: 100%; + margin-bottom: base(0.5); + } + } + } +} diff --git a/packages/ui/src/elements/PageControls/index.tsx b/packages/ui/src/elements/PageControls/index.tsx new file mode 100644 index 0000000000..a0ea41745c --- /dev/null +++ b/packages/ui/src/elements/PageControls/index.tsx @@ -0,0 +1,94 @@ +import type { ClientCollectionConfig, PaginatedDocs } from 'payload' + +import { isNumber } from 'payload/shared' +import React, { Fragment } from 'react' + +import type { IListQueryContext } from '../../providers/ListQuery/types.js' + +import { Pagination } from '../../elements/Pagination/index.js' +import { PerPage } from '../../elements/PerPage/index.js' +import { useListQuery } from '../../providers/ListQuery/context.js' +import { useTranslation } from '../../providers/Translation/index.js' +import './index.scss' + +const baseClass = 'page-controls' + +export const PageControlsComponent: React.FC<{ + 
AfterPageControls?: React.ReactNode
+  collectionConfig: ClientCollectionConfig
+  data: PaginatedDocs
+  handlePageChange?: IListQueryContext['handlePageChange']
+  handlePerPageChange?: IListQueryContext['handlePerPageChange']
+  limit?: number
+}> = ({
+  AfterPageControls,
+  collectionConfig,
+  data,
+  handlePageChange,
+  handlePerPageChange,
+  limit,
+}) => {
+  const { i18n } = useTranslation()
+
+  return (
+    <div className={baseClass}>
+
+      {data.totalDocs > 0 && (
+        <Fragment>
+          <div className={`${baseClass}__page-info`}>
+            {data.page * data.limit - (data.limit - 1)}-
+            {data.totalPages > 1 && data.totalPages !== data.page
+              ? data.limit * data.page
+              : data.totalDocs}{' '}
+            {i18n.t('general:of')} {data.totalDocs}
+          </div>
+
+          {AfterPageControls}
+        </Fragment>
+      )}
+    </div>
+ ) +} + +/* + * These page controls are controlled by the global ListQuery state. + * To override thi behavior, build your own wrapper around PageControlsComponent. + */ +export const PageControls: React.FC<{ + AfterPageControls?: React.ReactNode + collectionConfig: ClientCollectionConfig +}> = ({ AfterPageControls, collectionConfig }) => { + const { + data, + defaultLimit: initialLimit, + handlePageChange, + handlePerPageChange, + query, + } = useListQuery() + + return ( + + ) +} diff --git a/packages/ui/src/elements/Pagination/ClickableArrow/index.scss b/packages/ui/src/elements/Pagination/ClickableArrow/index.scss index 4cb8c6812e..e8c103d791 100644 --- a/packages/ui/src/elements/Pagination/ClickableArrow/index.scss +++ b/packages/ui/src/elements/Pagination/ClickableArrow/index.scss @@ -4,14 +4,14 @@ .clickable-arrow { cursor: pointer; @extend %btn-reset; - width: base(2); - height: base(2); + width: base(1.5); + height: base(1.5); display: flex; justify-content: center; align-content: center; align-items: center; outline: 0; - padding: base(0.5); + padding: base(0.25); color: var(--theme-elevation-800); line-height: base(1); diff --git a/packages/ui/src/elements/Pagination/index.scss b/packages/ui/src/elements/Pagination/index.scss index e7cb22ceb0..1bdac9dea2 100644 --- a/packages/ui/src/elements/Pagination/index.scss +++ b/packages/ui/src/elements/Pagination/index.scss @@ -3,7 +3,6 @@ @layer payload-default { .paginator { display: flex; - margin-bottom: $baseline; &__page { cursor: pointer; @@ -25,15 +24,16 @@ &__page { @extend %btn-reset; - width: base(2); - height: base(2); + width: base(1.5); + height: base(1.5); display: flex; justify-content: center; align-content: center; outline: 0; + border-radius: var(--style-radius-s); padding: base(0.5); color: var(--theme-elevation-800); - line-height: base(1); + line-height: 0.9; &:focus-visible { outline: var(--accessibility-outline); diff --git a/packages/ui/src/elements/Pagination/index.tsx b/packages/ui/src/elements/Pagination/index.tsx index fb6ef55a2e..e2e7ba3010 100644 --- a/packages/ui/src/elements/Pagination/index.tsx +++ b/packages/ui/src/elements/Pagination/index.tsx @@ -52,7 +52,7 @@ export const Pagination: React.FC = (props) => { totalPages = null, } = props - if (!hasNextPage && !hasPrevPage) { + if (!hasPrevPage && !hasNextPage) { return null } diff --git a/packages/ui/src/elements/PublishMany/DrawerContent.tsx b/packages/ui/src/elements/PublishMany/DrawerContent.tsx index 7d81e3dbdf..6e73afc8b8 100644 --- a/packages/ui/src/elements/PublishMany/DrawerContent.tsx +++ b/packages/ui/src/elements/PublishMany/DrawerContent.tsx @@ -22,7 +22,9 @@ type PublishManyDrawerContentProps = { ids: (number | string)[] onSuccess?: () => void selectAll: boolean + where?: Where } & PublishManyProps + export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { const { collection, @@ -31,15 +33,18 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { ids, onSuccess, selectAll, + where, } = props const { clearRouteCache } = useRouteCache() + const { config: { routes: { api }, serverURL, }, } = useConfig() + const { code: locale } = useLocale() const router = useRouter() @@ -59,6 +64,10 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { }, ] + if (where) { + whereConstraints.push(where) + } + const queryWithSearch = mergeListSearchAndWhere({ collectionConfig: collection, search: searchParams.get('search'), @@ -73,7 +82,7 @@ export function 
PublishManyDrawerContent(props: PublishManyDrawerContentProps) { whereConstraints.push( (parseSearchParams(searchParams)?.where as Where) || { id: { - exists: true, + not_equals: '', }, }, ) @@ -93,7 +102,7 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { }, { addQueryPrefix: true }, ) - }, [collection, searchParams, selectAll, ids, locale]) + }, [collection, searchParams, selectAll, ids, locale, where]) const handlePublish = useCallback(async () => { await requests diff --git a/packages/ui/src/elements/PublishMany/index.tsx b/packages/ui/src/elements/PublishMany/index.tsx index 0df29b1856..33dca8d276 100644 --- a/packages/ui/src/elements/PublishMany/index.tsx +++ b/packages/ui/src/elements/PublishMany/index.tsx @@ -1,5 +1,5 @@ 'use client' -import type { ClientCollectionConfig } from 'payload' +import type { ClientCollectionConfig, Where } from 'payload' import { useModal } from '@faceless-ui/modal' import React from 'react' @@ -15,14 +15,14 @@ export type PublishManyProps = { } export const PublishMany: React.FC = (props) => { - const { count, selectAll, selected, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() return ( toggleAll(false)} + ids={selectedIDs} + onSuccess={() => toggleAll()} selectAll={selectAll === SelectAllStatus.AllAvailable} /> ) @@ -31,17 +31,25 @@ export const PublishMany: React.FC = (props) => { type PublishMany_v4Props = { count: number ids: (number | string)[] + /** + * When multiple PublishMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string onSuccess?: () => void selectAll: boolean + where?: Where } & PublishManyProps + export const PublishMany_v4: React.FC = (props) => { const { collection, collection: { slug, versions } = {}, count, ids, + modalPrefix, onSuccess, selectAll, + where, } = props const { permissions } = useAuth() @@ -52,7 +60,7 @@ export const PublishMany_v4: React.FC = (props) => { const collectionPermissions = permissions?.collections?.[slug] const hasPermission = collectionPermissions?.update - const drawerSlug = `publish-${slug}` + const drawerSlug = `${modalPrefix ? `${modalPrefix}-` : ''}publish-${slug}` if (!versions?.drafts || count === 0 || !hasPermission) { return null @@ -74,6 +82,7 @@ export const PublishMany_v4: React.FC = (props) => { ids={ids} onSuccess={onSuccess} selectAll={selectAll} + where={where} /> ) diff --git a/packages/ui/src/elements/ReactSelect/types.ts b/packages/ui/src/elements/ReactSelect/types.ts index a2a2e7ca9d..72dd352b18 100644 --- a/packages/ui/src/elements/ReactSelect/types.ts +++ b/packages/ui/src/elements/ReactSelect/types.ts @@ -84,6 +84,7 @@ export type ReactSelectAdapterProps = { boolean, GroupBase
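The new `modalPrefix` and `where` props on the `_v4` bulk-action components are easiest to understand with a usage sketch. The example below is illustrative only: the `GroupToolbar` wrapper and its props are hypothetical, and the import path is assumed, but the props passed to `PublishMany_v4` mirror the signature added in this diff (the same pattern applies to `EditMany_v4` and `DeleteMany_v4`).

```tsx
import type { ClientCollectionConfig, Where } from 'payload'
import React from 'react'

import { PublishMany_v4 } from '@payloadcms/ui'

type GroupToolbarProps = {
  collection: ClientCollectionConfig
  group: { ids: (number | string)[]; value: string; where: Where }
  onSuccess: () => void
}

// Hypothetical consumer: one bulk-action toolbar per group of documents.
// `modalPrefix` keeps each rendered instance's confirmation drawer slug unique,
// and `where` scopes the bulk publish to the current group's documents.
export const GroupToolbar: React.FC<GroupToolbarProps> = ({ collection, group, onSuccess }) => (
  <PublishMany_v4
    collection={collection}
    count={group.ids.length}
    ids={group.ids}
    modalPrefix={`group-${group.value}`}
    onSuccess={onSuccess}
    selectAll={false}
    where={group.where}
  />
)
```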