feat(db-*): support atomic array $push db updates (#13453)
This PR adds **atomic** `$push` **support for array fields**. It makes it possible to safely append new items to arrays, which is especially useful when running tasks in parallel (like job queues) where multiple processes might update the same record at the same time. By handling pushes atomically, we avoid race conditions and keep data consistent - especially on postgres, where the current implementation would nuke the entire array table before re-inserting every single array item. The feature works for both localized and unlocalized arrays, and supports pushing either single or multiple items at once. This PR is a requirement for reliably running parallel tasks in the job queue - see https://github.com/payloadcms/payload/pull/13452. Alongside documenting `$push`, this PR also adds documentation for `$inc`. ## Changes to updatedAt behavior https://github.com/payloadcms/payload/pull/13335 allows us to override the updatedAt property instead of the db always setting it to the current date. However, we are not able to skip updating the updatedAt property completely. This means, usage of $push results in 2 postgres db calls: 1. set updatedAt in main row 2. append array row in arrays table This PR changes the behavior to only automatically set updatedAt if it's undefined. If you explicitly set it to `null`, this now allows you to skip the db adapter automatically setting updatedAt. 
=> This allows us to use $push in just one single db call ## Usage Examples ### Pushing a single item to an array ```ts const post = (await payload.db.updateOne({ data: { array: { $push: { text: 'some text 2', id: new mongoose.Types.ObjectId().toHexString(), }, }, }, collection: 'posts', id: post.id, })) ``` ### Pushing a single item to a localized array ```ts const post = (await payload.db.updateOne({ data: { arrayLocalized: { $push: { en: { text: 'some text 2', id: new mongoose.Types.ObjectId().toHexString(), }, es: { text: 'some text 2 es', id: new mongoose.Types.ObjectId().toHexString(), }, }, }, }, collection: 'posts', id: post.id, })) ``` ### Pushing multiple items to an array ```ts const post = (await payload.db.updateOne({ data: { array: { $push: [ { text: 'some text 2', id: new mongoose.Types.ObjectId().toHexString(), }, { text: 'some text 3', id: new mongoose.Types.ObjectId().toHexString(), }, ], }, }, collection: 'posts', id: post.id, })) ``` ### Pushing multiple items to a localized array ```ts const post = (await payload.db.updateOne({ data: { arrayLocalized: { $push: { en: { text: 'some text 2', id: new mongoose.Types.ObjectId().toHexString(), }, es: [ { text: 'some text 2 es', id: new mongoose.Types.ObjectId().toHexString(), }, { text: 'some text 3 es', id: new mongoose.Types.ObjectId().toHexString(), }, ], }, }, }, collection: 'posts', id: post.id, })) ``` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1211110462564647
This commit is contained in:
@@ -52,11 +52,24 @@ export const updateOne: UpdateOne = async function updateOne(
|
||||
|
||||
let result
|
||||
|
||||
const $inc: Record<string, number> = {}
|
||||
let updateData: UpdateQuery<any> = data
|
||||
transform({ $inc, adapter: this, data, fields, operation: 'write' })
|
||||
|
||||
const $inc: Record<string, number> = {}
|
||||
const $push: Record<string, { $each: any[] } | any> = {}
|
||||
|
||||
transform({ $inc, $push, adapter: this, data, fields, operation: 'write' })
|
||||
|
||||
const updateOps: UpdateQuery<any> = {}
|
||||
|
||||
if (Object.keys($inc).length) {
|
||||
updateData = { $inc, $set: updateData }
|
||||
updateOps.$inc = $inc
|
||||
}
|
||||
if (Object.keys($push).length) {
|
||||
updateOps.$push = $push
|
||||
}
|
||||
if (Object.keys(updateOps).length) {
|
||||
updateOps.$set = updateData
|
||||
updateData = updateOps
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
@@ -209,6 +209,7 @@ const sanitizeDate = ({
|
||||
|
||||
type Args = {
|
||||
$inc?: Record<string, number>
|
||||
$push?: Record<string, { $each: any[] } | any>
|
||||
/** instance of the adapter */
|
||||
adapter: MongooseAdapter
|
||||
/** data to transform, can be an array of documents or a single document */
|
||||
@@ -398,6 +399,7 @@ const stripFields = ({
|
||||
|
||||
export const transform = ({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data,
|
||||
fields,
|
||||
@@ -412,7 +414,16 @@ export const transform = ({
|
||||
|
||||
if (Array.isArray(data)) {
|
||||
for (const item of data) {
|
||||
transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
|
||||
transform({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data: item,
|
||||
fields,
|
||||
globalSlug,
|
||||
operation,
|
||||
validateRelationships,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -470,6 +481,39 @@ export const transform = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
$push &&
|
||||
field.type === 'array' &&
|
||||
operation === 'write' &&
|
||||
field.name in ref &&
|
||||
ref[field.name]
|
||||
) {
|
||||
const value = ref[field.name]
|
||||
if (value && typeof value === 'object' && '$push' in value) {
|
||||
const push = value.$push
|
||||
|
||||
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
if (typeof push === 'object' && push !== null) {
|
||||
Object.entries(push).forEach(([localeKey, localeData]) => {
|
||||
if (Array.isArray(localeData)) {
|
||||
$push[`${parentPath}${field.name}.${localeKey}`] = { $each: localeData }
|
||||
} else if (typeof localeData === 'object') {
|
||||
$push[`${parentPath}${field.name}.${localeKey}`] = localeData
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if (Array.isArray(push)) {
|
||||
$push[`${parentPath}${field.name}`] = { $each: push }
|
||||
} else if (typeof push === 'object') {
|
||||
$push[`${parentPath}${field.name}`] = push
|
||||
}
|
||||
}
|
||||
|
||||
delete ref[field.name]
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
|
||||
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
const fieldRef = ref[field.name] as Record<string, unknown>
|
||||
@@ -550,8 +594,13 @@ export const transform = ({
|
||||
})
|
||||
|
||||
if (operation === 'write') {
|
||||
if (!data.updatedAt) {
|
||||
if (typeof data.updatedAt === 'undefined') {
|
||||
// If data.updatedAt is explicitly set to `null` we should not set it - this means we don't want to change the value of updatedAt.
|
||||
data.updatedAt = new Date().toISOString()
|
||||
} else if (data.updatedAt === null) {
|
||||
// `updatedAt` may be explicitly set to null to disable updating it - if that is the case, we need to delete the property. Keeping it as null will
|
||||
// cause the database to think we want to set it to null, which we don't.
|
||||
delete data.updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -71,6 +71,7 @@ export const transformArray = ({
|
||||
data.forEach((arrayRow, i) => {
|
||||
const newRow: ArrayRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -104,6 +105,7 @@ export const transformArray = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -78,6 +78,7 @@ export const transformBlocks = ({
|
||||
|
||||
const newRow: BlockRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -116,6 +117,7 @@ export const transformBlocks = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -27,6 +27,7 @@ export const transformForWrite = ({
|
||||
// Split out the incoming data into rows to insert / delete
|
||||
const rowToInsert: RowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
blocks: {},
|
||||
blocksToDelete: new Set(),
|
||||
locales: {},
|
||||
@@ -45,6 +46,7 @@ export const transformForWrite = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: rowToInsert.arrays,
|
||||
arraysToPush: rowToInsert.arraysToPush,
|
||||
baseTableName: tableName,
|
||||
blocks: rowToInsert.blocks,
|
||||
blocksToDelete: rowToInsert.blocksToDelete,
|
||||
|
||||
@@ -4,13 +4,7 @@ import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from '../../types.js'
|
||||
import type {
|
||||
ArrayRowToInsert,
|
||||
BlockRowToInsert,
|
||||
NumberToDelete,
|
||||
RelationshipToDelete,
|
||||
TextToDelete,
|
||||
} from './types.js'
|
||||
import type { NumberToDelete, RelationshipToDelete, RowToInsert, TextToDelete } from './types.js'
|
||||
|
||||
import { isArrayOfRows } from '../../utilities/isArrayOfRows.js'
|
||||
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
|
||||
@@ -23,16 +17,20 @@ import { transformTexts } from './texts.js'
|
||||
|
||||
type Args = {
|
||||
adapter: DrizzleAdapter
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
/**
|
||||
* This will delete the array table and then re-insert all the new array rows.
|
||||
*/
|
||||
arrays: RowToInsert['arrays']
|
||||
/**
|
||||
* Array rows to push to the existing array. This will simply create
|
||||
* a new row in the array table.
|
||||
*/
|
||||
arraysToPush: RowToInsert['arraysToPush']
|
||||
/**
|
||||
* This is the name of the base table
|
||||
*/
|
||||
baseTableName: string
|
||||
blocks: {
|
||||
[blockType: string]: BlockRowToInsert[]
|
||||
}
|
||||
blocks: RowToInsert['blocks']
|
||||
blocksToDelete: Set<string>
|
||||
/**
|
||||
* A snake-case field prefix, representing prior fields
|
||||
@@ -82,6 +80,7 @@ type Args = {
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -129,13 +128,24 @@ export const traverseFields = ({
|
||||
if (field.type === 'array') {
|
||||
const arrayTableName = adapter.tableNameMap.get(`${parentTableName}_${columnName}`)
|
||||
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
if (isLocalized) {
|
||||
let value: {
|
||||
[locale: string]: unknown[]
|
||||
} = data[field.name] as any
|
||||
|
||||
let push = false
|
||||
if (typeof value === 'object' && '$push' in value) {
|
||||
value = value.$push as any
|
||||
push = true
|
||||
}
|
||||
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
Object.entries(value).forEach(([localeKey, _localeData]) => {
|
||||
let localeData = _localeData
|
||||
if (push && !Array.isArray(localeData)) {
|
||||
localeData = [localeData]
|
||||
}
|
||||
|
||||
if (isLocalized) {
|
||||
if (typeof data[field.name] === 'object' && data[field.name] !== null) {
|
||||
Object.entries(data[field.name]).forEach(([localeKey, localeData]) => {
|
||||
if (Array.isArray(localeData)) {
|
||||
const newRows = transformArray({
|
||||
adapter,
|
||||
@@ -158,18 +168,35 @@ export const traverseFields = ({
|
||||
withinArrayOrBlockLocale: localeKey,
|
||||
})
|
||||
|
||||
if (push) {
|
||||
if (!arraysToPush[arrayTableName]) {
|
||||
arraysToPush[arrayTableName] = []
|
||||
}
|
||||
arraysToPush[arrayTableName] = arraysToPush[arrayTableName].concat(newRows)
|
||||
} else {
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let value = data[field.name]
|
||||
let push = false
|
||||
if (typeof value === 'object' && '$push' in value) {
|
||||
value = Array.isArray(value.$push) ? value.$push : [value.$push]
|
||||
push = true
|
||||
}
|
||||
|
||||
const newRows = transformArray({
|
||||
adapter,
|
||||
arrayTableName,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
data: data[field.name],
|
||||
data: value,
|
||||
field,
|
||||
numbers,
|
||||
numbersToDelete,
|
||||
@@ -183,8 +210,18 @@ export const traverseFields = ({
|
||||
withinArrayOrBlockLocale,
|
||||
})
|
||||
|
||||
if (push) {
|
||||
if (!arraysToPush[arrayTableName]) {
|
||||
arraysToPush[arrayTableName] = []
|
||||
}
|
||||
arraysToPush[arrayTableName] = arraysToPush[arrayTableName].concat(newRows)
|
||||
} else {
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
@@ -264,6 +301,7 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -298,6 +336,7 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -547,8 +586,8 @@ export const traverseFields = ({
|
||||
let formattedValue = value
|
||||
|
||||
if (field.type === 'date') {
|
||||
if (fieldName === 'updatedAt' && !formattedValue) {
|
||||
// let the db handle this
|
||||
if (fieldName === 'updatedAt' && typeof formattedValue === 'undefined') {
|
||||
// let the db handle this. If formattedValue is explicitly set to `null` we should not set it - this means we don't want to change the value of updatedAt.
|
||||
formattedValue = new Date().toISOString()
|
||||
} else {
|
||||
if (typeof value === 'number' && !Number.isNaN(value)) {
|
||||
|
||||
@@ -2,6 +2,9 @@ export type ArrayRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -12,6 +15,9 @@ export type BlockRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -37,6 +43,9 @@ export type RowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
blocks: {
|
||||
[tableName: string]: BlockRowToInsert[]
|
||||
}
|
||||
|
||||
@@ -48,21 +48,48 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
let insertedRow: Record<string, unknown> = { id }
|
||||
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
|
||||
const { row } = transformForWrite({
|
||||
const transformedForWrite = transformForWrite({
|
||||
adapter,
|
||||
data,
|
||||
enableAtomicWrites: true,
|
||||
fields,
|
||||
tableName,
|
||||
})
|
||||
const { row } = transformedForWrite
|
||||
const { arraysToPush } = transformedForWrite
|
||||
|
||||
const drizzle = db as LibSQLDatabase
|
||||
|
||||
// First, handle $push arrays
|
||||
|
||||
if (arraysToPush && Object.keys(arraysToPush)?.length) {
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [arraysToPush],
|
||||
db,
|
||||
parentRows: [insertedRow],
|
||||
uuidMap: {},
|
||||
})
|
||||
}
|
||||
|
||||
// If row.updatedAt is not set, delete it to avoid triggering hasDataToUpdate. `updatedAt` may be explicitly set to null to
|
||||
// disable triggering hasDataToUpdate.
|
||||
if (typeof row.updatedAt === 'undefined' || row.updatedAt === null) {
|
||||
delete row.updatedAt
|
||||
}
|
||||
|
||||
const hasDataToUpdate = row && Object.keys(row)?.length
|
||||
|
||||
// Then, handle regular row update
|
||||
if (ignoreResult) {
|
||||
if (hasDataToUpdate) {
|
||||
// Only update row if there is something to update.
|
||||
// Example: if the data only consists of a single $push, calling insertArrays is enough - we don't need to update the row.
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
.where(eq(adapter.tables[tableName].id, id))
|
||||
}
|
||||
return ignoreResult === 'idOnly' ? ({ id } as T) : null
|
||||
}
|
||||
|
||||
@@ -78,6 +105,22 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
const findManyKeysLength = Object.keys(findManyArgs).length
|
||||
const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0
|
||||
|
||||
if (!hasDataToUpdate) {
|
||||
// Nothing to update => just fetch current row and return
|
||||
findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id)
|
||||
|
||||
const doc = await db.query[tableName].findFirst(findManyArgs)
|
||||
|
||||
return transform<T>({
|
||||
adapter,
|
||||
config: adapter.payload.config,
|
||||
data: doc,
|
||||
fields,
|
||||
joinQuery: false,
|
||||
tableName,
|
||||
})
|
||||
}
|
||||
|
||||
if (findManyKeysLength === 0 || hasOnlyColumns) {
|
||||
// Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections
|
||||
// without complex fields that live in separate tables like blocks, arrays, relationships, etc.
|
||||
@@ -433,9 +476,9 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [rowToInsert.arrays],
|
||||
arrays: [rowToInsert.arrays, rowToInsert.arraysToPush],
|
||||
db,
|
||||
parentRows: [insertedRow],
|
||||
parentRows: [insertedRow, insertedRow],
|
||||
uuidMap: arraysBlocksUUIDMap,
|
||||
})
|
||||
|
||||
|
||||
@@ -32,6 +32,9 @@ export const insertArrays = async ({
|
||||
const rowsByTable: RowsByTable = {}
|
||||
|
||||
arrays.forEach((arraysByTable, parentRowIndex) => {
|
||||
if (!arraysByTable || Object.keys(arraysByTable).length === 0) {
|
||||
return
|
||||
}
|
||||
Object.entries(arraysByTable).forEach(([tableName, arrayRows]) => {
|
||||
// If the table doesn't exist in map, initialize it
|
||||
if (!rowsByTable[tableName]) {
|
||||
|
||||
@@ -20,7 +20,6 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
}
|
||||
|
||||
if (
|
||||
field.type === 'array' ||
|
||||
field.type === 'blocks' ||
|
||||
((field.type === 'text' ||
|
||||
field.type === 'relationship' ||
|
||||
@@ -35,6 +34,17 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
return false
|
||||
}
|
||||
|
||||
if (field.type === 'array') {
|
||||
if (typeof value === 'object' && '$push' in value && value.$push) {
|
||||
return shouldUseOptimizedUpsertRow({
|
||||
// Only check first row - this function cares about field definitions. Each array row will have the same field definitions.
|
||||
data: Array.isArray(value.$push) ? value.$push?.[0] : value.$push,
|
||||
fields: field.flattenedFields,
|
||||
})
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if (
|
||||
(field.type === 'group' || field.type === 'tab') &&
|
||||
value &&
|
||||
|
||||
@@ -257,6 +257,22 @@ export const getConfig: () => Partial<Config> = () => ({
|
||||
{
|
||||
name: 'arrayWithIDs',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'text',
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'textLocalized',
|
||||
type: 'text',
|
||||
localized: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'arrayWithIDsLocalized',
|
||||
type: 'array',
|
||||
localized: true,
|
||||
fields: [
|
||||
{
|
||||
name: 'text',
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { MongooseAdapter } from '@payloadcms/db-mongodb'
|
||||
import type { PostgresAdapter } from '@payloadcms/db-postgres/types'
|
||||
import type { PostgresAdapter } from '@payloadcms/db-postgres'
|
||||
import type { NextRESTClient } from 'helpers/NextRESTClient.js'
|
||||
import type {
|
||||
DataFromCollectionSlug,
|
||||
@@ -29,7 +29,7 @@ import {
|
||||
import { assert } from 'ts-essentials'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
import type { Global2 } from './payload-types.js'
|
||||
import type { Global2, Post } from './payload-types.js'
|
||||
|
||||
import { devUser } from '../credentials.js'
|
||||
import { initPayloadInt } from '../helpers/initPayloadInt.js'
|
||||
@@ -339,6 +339,57 @@ describe('database', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('ensure updatedAt is automatically set when using db.updateOne', async () => {
|
||||
const post = await payload.create({
|
||||
collection: postsSlug,
|
||||
data: {
|
||||
title: 'hello',
|
||||
},
|
||||
})
|
||||
|
||||
const result: any = await payload.db.updateOne({
|
||||
collection: postsSlug,
|
||||
id: post.id,
|
||||
data: {
|
||||
title: 'hello2',
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.updatedAt).not.toStrictEqual(post.updatedAt)
|
||||
|
||||
// Cleanup, as this test suite does not use clearAndSeedEverything
|
||||
await payload.db.deleteMany({
|
||||
collection: postsSlug,
|
||||
where: {},
|
||||
})
|
||||
})
|
||||
|
||||
it('ensure updatedAt is not automatically set when using db.updateOne if it is explicitly set to `null`', async () => {
|
||||
const post = await payload.create({
|
||||
collection: postsSlug,
|
||||
data: {
|
||||
title: 'hello',
|
||||
},
|
||||
})
|
||||
|
||||
const result: any = await payload.db.updateOne({
|
||||
collection: postsSlug,
|
||||
id: post.id,
|
||||
data: {
|
||||
updatedAt: null,
|
||||
title: 'hello2',
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.updatedAt).toStrictEqual(post.updatedAt)
|
||||
|
||||
// Cleanup, as this test suite does not use clearAndSeedEverything
|
||||
await payload.db.deleteMany({
|
||||
collection: postsSlug,
|
||||
where: {},
|
||||
})
|
||||
})
|
||||
|
||||
it('should allow createdAt to be set in updateVersion', async () => {
|
||||
const category = await payload.create({
|
||||
collection: 'categories',
|
||||
@@ -3327,7 +3378,7 @@ describe('database', () => {
|
||||
it('should allow incremental number update', async () => {
|
||||
const post = await payload.create({ collection: 'posts', data: { number: 1, title: 'post' } })
|
||||
|
||||
const res = await payload.db.updateOne({
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
number: {
|
||||
$inc: 10,
|
||||
@@ -3335,11 +3386,11 @@ describe('database', () => {
|
||||
},
|
||||
collection: 'posts',
|
||||
where: { id: { equals: post.id } },
|
||||
})
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.number).toBe(11)
|
||||
|
||||
const res2 = await payload.db.updateOne({
|
||||
const res2 = (await payload.db.updateOne({
|
||||
data: {
|
||||
number: {
|
||||
$inc: -3,
|
||||
@@ -3347,11 +3398,314 @@ describe('database', () => {
|
||||
},
|
||||
collection: 'posts',
|
||||
where: { id: { equals: post.id } },
|
||||
})
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res2.number).toBe(8)
|
||||
})
|
||||
|
||||
describe('array $push', () => {
|
||||
it('should allow atomic array updates and $inc', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
number: 10,
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
arrayWithIDs: {
|
||||
$push: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
},
|
||||
number: {
|
||||
$inc: 5,
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.arrayWithIDs).toHaveLength(2)
|
||||
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
expect(res.number).toBe(15)
|
||||
})
|
||||
|
||||
it('should allow atomic array updates using $push with single value, unlocalized', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
arrayWithIDs: {
|
||||
$push: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.arrayWithIDs).toHaveLength(2)
|
||||
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
})
|
||||
it('should allow atomic array updates using $push with single value, localized field within array', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
textLocalized: 'Some text localized',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
// Locales used => no optimized row update => need to pass full data, including title
|
||||
title: 'post',
|
||||
arrayWithIDs: {
|
||||
$push: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
textLocalized: {
|
||||
en: 'Some text 2 localized',
|
||||
es: 'Algun texto 2 localizado',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.arrayWithIDs).toHaveLength(2)
|
||||
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDs?.[0]?.textLocalized).toEqual({
|
||||
en: 'Some text localized',
|
||||
})
|
||||
expect(res.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
expect(res.arrayWithIDs?.[1]?.textLocalized).toEqual({
|
||||
en: 'Some text 2 localized',
|
||||
es: 'Algun texto 2 localizado',
|
||||
})
|
||||
})
|
||||
|
||||
it('should allow atomic array updates using $push with single value, localized array', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDsLocalized: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
// Locales used => no optimized row update => need to pass full data, including title
|
||||
title: 'post',
|
||||
arrayWithIDsLocalized: {
|
||||
$push: {
|
||||
en: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
es: {
|
||||
text: 'some text 2 es',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as any
|
||||
|
||||
expect(res.arrayWithIDsLocalized?.en).toHaveLength(2)
|
||||
expect(res.arrayWithIDsLocalized?.en?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDsLocalized?.en?.[1]?.text).toBe('some text 2')
|
||||
|
||||
expect(res.arrayWithIDsLocalized?.es).toHaveLength(1)
|
||||
expect(res.arrayWithIDsLocalized?.es?.[0]?.text).toBe('some text 2 es')
|
||||
})
|
||||
|
||||
it('should allow atomic array updates using $push with multiple values, unlocalized', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
arrayWithIDs: {
|
||||
$push: [
|
||||
{
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
text: 'some text 2',
|
||||
},
|
||||
{
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
text: 'some text 3',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.arrayWithIDs).toHaveLength(3)
|
||||
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
expect(res.arrayWithIDs?.[2]?.text).toBe('some text 3')
|
||||
})
|
||||
|
||||
it('should allow atomic array updates using $push with multiple values, localized field within array', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
textLocalized: 'Some text localized',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
// Locales used => no optimized row update => need to pass full data, including title
|
||||
title: 'post',
|
||||
arrayWithIDs: {
|
||||
$push: [
|
||||
{
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
text: 'some text 2',
|
||||
textLocalized: {
|
||||
en: 'Some text 2 localized',
|
||||
es: 'Algun texto 2 localizado',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
text: 'some text 3',
|
||||
textLocalized: {
|
||||
en: 'Some text 3 localized',
|
||||
es: 'Algun texto 3 localizado',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as Post
|
||||
|
||||
expect(res.arrayWithIDs).toHaveLength(3)
|
||||
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
expect(res.arrayWithIDs?.[2]?.text).toBe('some text 3')
|
||||
|
||||
expect(res.arrayWithIDs?.[0]?.textLocalized).toEqual({
|
||||
en: 'Some text localized',
|
||||
})
|
||||
expect(res.arrayWithIDs?.[1]?.textLocalized).toEqual({
|
||||
en: 'Some text 2 localized',
|
||||
es: 'Algun texto 2 localizado',
|
||||
})
|
||||
expect(res.arrayWithIDs?.[2]?.textLocalized).toEqual({
|
||||
en: 'Some text 3 localized',
|
||||
es: 'Algun texto 3 localizado',
|
||||
})
|
||||
})
|
||||
|
||||
it('should allow atomic array updates using $push with multiple values, localized array', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDsLocalized: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
|
||||
const res = (await payload.db.updateOne({
|
||||
data: {
|
||||
// Locales used => no optimized row update => need to pass full data, including title
|
||||
title: 'post',
|
||||
arrayWithIDsLocalized: {
|
||||
$push: {
|
||||
en: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
es: [
|
||||
{
|
||||
text: 'some text 2 es',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
{
|
||||
text: 'some text 3 es',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
})) as unknown as any
|
||||
|
||||
expect(res.arrayWithIDsLocalized?.en).toHaveLength(2)
|
||||
expect(res.arrayWithIDsLocalized?.en?.[0]?.text).toBe('some text')
|
||||
expect(res.arrayWithIDsLocalized?.en?.[1]?.text).toBe('some text 2')
|
||||
|
||||
expect(res.arrayWithIDsLocalized?.es).toHaveLength(2)
|
||||
expect(res.arrayWithIDsLocalized?.es?.[0]?.text).toBe('some text 2 es')
|
||||
expect(res.arrayWithIDsLocalized?.es?.[1]?.text).toBe('some text 3 es')
|
||||
})
|
||||
})
|
||||
|
||||
it('should support x3 nesting blocks', async () => {
|
||||
const res = await payload.create({
|
||||
collection: 'posts',
|
||||
|
||||
@@ -180,27 +180,11 @@ export interface NoTimeStamp {
|
||||
export interface Category {
|
||||
id: string;
|
||||
title?: string | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
_status?: ('draft' | 'published') | null;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "simple".
|
||||
*/
|
||||
export interface Simple {
|
||||
id: string;
|
||||
text?: string | null;
|
||||
number?: number | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "categories-custom-id".
|
||||
*/
|
||||
export interface CategoriesCustomId {
|
||||
id: number;
|
||||
hideout?: {
|
||||
camera1?: {
|
||||
time1Image?: (string | null) | Post;
|
||||
};
|
||||
};
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
_status?: ('draft' | 'published') | null;
|
||||
@@ -242,6 +226,13 @@ export interface Post {
|
||||
hasTransaction?: boolean | null;
|
||||
throwAfterChange?: boolean | null;
|
||||
arrayWithIDs?:
|
||||
| {
|
||||
text?: string | null;
|
||||
textLocalized?: string | null;
|
||||
id?: string | null;
|
||||
}[]
|
||||
| null;
|
||||
arrayWithIDsLocalized?:
|
||||
| {
|
||||
text?: string | null;
|
||||
id?: string | null;
|
||||
@@ -264,6 +255,27 @@ export interface Post {
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "categories-custom-id".
|
||||
*/
|
||||
export interface CategoriesCustomId {
|
||||
id: number;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
_status?: ('draft' | 'published') | null;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "simple".
|
||||
*/
|
||||
export interface Simple {
|
||||
id: string;
|
||||
text?: string | null;
|
||||
number?: number | null;
|
||||
updatedAt: string;
|
||||
createdAt: string;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `Config`'s JSON-Schema
|
||||
* via the `definition` "error-on-unnamed-fields".
|
||||
@@ -770,6 +782,15 @@ export interface NoTimeStampsSelect<T extends boolean = true> {
|
||||
*/
|
||||
export interface CategoriesSelect<T extends boolean = true> {
|
||||
title?: T;
|
||||
hideout?:
|
||||
| T
|
||||
| {
|
||||
camera1?:
|
||||
| T
|
||||
| {
|
||||
time1Image?: T;
|
||||
};
|
||||
};
|
||||
updatedAt?: T;
|
||||
createdAt?: T;
|
||||
_status?: T;
|
||||
@@ -842,6 +863,13 @@ export interface PostsSelect<T extends boolean = true> {
|
||||
hasTransaction?: T;
|
||||
throwAfterChange?: T;
|
||||
arrayWithIDs?:
|
||||
| T
|
||||
| {
|
||||
text?: T;
|
||||
textLocalized?: T;
|
||||
id?: T;
|
||||
};
|
||||
arrayWithIDsLocalized?:
|
||||
| T
|
||||
| {
|
||||
text?: T;
|
||||
|
||||
@@ -2,9 +2,12 @@ import type { Payload } from 'payload'
|
||||
|
||||
/* eslint-disable jest/require-top-level-describe */
|
||||
import assert from 'assert'
|
||||
import mongoose from 'mongoose'
|
||||
import path from 'path'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
import type { Post } from './payload-types.js'
|
||||
|
||||
import { initPayloadInt } from '../helpers/initPayloadInt.js'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
@@ -169,6 +172,53 @@ describePostgres('database - postgres logs', () => {
|
||||
})
|
||||
|
||||
expect(allPosts.docs).toHaveLength(1)
|
||||
expect(allPosts.docs[0].id).toEqual(doc1.id)
|
||||
expect(allPosts.docs[0]?.id).toEqual(doc1.id)
|
||||
})
|
||||
|
||||
it('ensure array update using $push is done in single db call', async () => {
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
arrayWithIDs: [
|
||||
{
|
||||
text: 'some text',
|
||||
},
|
||||
],
|
||||
title: 'post',
|
||||
},
|
||||
})
|
||||
const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
|
||||
await payload.db.updateOne({
|
||||
data: {
|
||||
// Ensure db adapter does not automatically set updatedAt - one less db call
|
||||
updatedAt: null,
|
||||
arrayWithIDs: {
|
||||
$push: {
|
||||
text: 'some text 2',
|
||||
id: new mongoose.Types.ObjectId().toHexString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
collection: 'posts',
|
||||
id: post.id,
|
||||
returning: false,
|
||||
})
|
||||
|
||||
// 1 Update:
|
||||
// 1. (updatedAt for posts row.) - skipped because we explicitly set updatedAt to null
|
||||
// 2. arrayWithIDs.$push for posts row
|
||||
expect(consoleCount).toHaveBeenCalledTimes(1)
|
||||
consoleCount.mockRestore()
|
||||
|
||||
const updatedPost = (await payload.db.findOne({
|
||||
collection: 'posts',
|
||||
where: { id: { equals: post.id } },
|
||||
})) as unknown as Post
|
||||
|
||||
expect(updatedPost.title).toBe('post')
|
||||
expect(updatedPost.arrayWithIDs).toHaveLength(2)
|
||||
expect(updatedPost.arrayWithIDs?.[0]?.text).toBe('some text')
|
||||
expect(updatedPost.arrayWithIDs?.[1]?.text).toBe('some text 2')
|
||||
})
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user