perf(drizzle): further optimize postgres row updates (#13184)

This is a follow-up to https://github.com/payloadcms/payload/pull/13060.

There are a bunch of other db adapter methods that use `upsertRow` for
updates: `updateGlobal`, `updateGlobalVersion`, `updateJobs`,
`updateMany`, `updateVersion`.

The previous PR had the logic for using the optimized row updating logic
inside the `updateOne` adapter. This PR moves that logic to the original
`upsertRow` function. Benefits:
- all the other db methods will benefit from this massive optimization
as well. This will be especially relevant for optimizing postgres job
queue initial updates - we should be able to close
https://github.com/payloadcms/payload/pull/11865 after another follow-up
PR
- easier to read db adapter methods due to less code.

---
- To see the specific tasks where the Asana app for GitHub is being
used, see below:
  - https://app.asana.com/0/0/1210803039809810
This commit is contained in:
Alessio Gravili
2025-07-16 09:45:02 -07:00
committed by GitHub
parent be8e8d9c7f
commit 7cd682c66a
4 changed files with 518 additions and 495 deletions

View File

@@ -1,67 +1,15 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { FlattenedField, UpdateOne } from 'payload'
import type { UpdateOne } from 'payload'
import { eq } from 'drizzle-orm'
import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { transform } from './transform/read/index.js'
import { transformForWrite } from './transform/write/index.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'
/**
 * Checks whether we should use the full upsertRow function for the passed data, as opposed to a
 * simple SQL UPDATE ... SET call.
 * upsertRow is required only when the data touches fields stored outside the main row: arrays,
 * blocks, hasMany select/text/number fields, localized fields, or polymorphic relationships.
 */
const shouldUseUpsertRow = ({
data,
fields,
}: {
data: Record<string, unknown>
fields: FlattenedField[]
}) => {
// Only inspect keys actually present in the (possibly partial) update data.
for (const key in data) {
const value = data[key]
const field = fields.find((each) => each.name === key)
// Keys that don't match a known field can't require sub-table handling — skip them.
if (!field) {
continue
}
if (
// Arrays and blocks are stored in their own tables.
field.type === 'array' ||
field.type === 'blocks' ||
// hasMany text/relationship/upload/select/number values live in relational side tables.
((field.type === 'text' ||
field.type === 'relationship' ||
field.type === 'upload' ||
field.type === 'select' ||
field.type === 'number') &&
field.hasMany) ||
// Polymorphic relationships (array relationTo) go through the relationships table.
((field.type === 'relationship' || field.type === 'upload') &&
Array.isArray(field.relationTo)) ||
// Localized fields are stored in the locales table.
field.localized
) {
return true
}
// Groups and tabs flatten their children — recurse into the nested data.
if (
(field.type === 'group' || field.type === 'tab') &&
value &&
typeof value === 'object' &&
shouldUseUpsertRow({ data: value as Record<string, unknown>, fields: field.flattenedFields })
) {
return true
}
}
// No complex fields touched — a plain SQL UPDATE is sufficient.
return false
}
export const updateOne: UpdateOne = async function updateOne(
this: DrizzleAdapter,
{
@@ -126,72 +74,23 @@ export const updateOne: UpdateOne = async function updateOne(
return null
}
if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) {
const result = await upsertRow({
id: idToUpdate,
adapter: this,
data,
db,
fields: collection.flattenedFields,
ignoreResult: returning === false,
joinQuery,
operation: 'update',
req,
select,
tableName,
})
if (returning === false) {
return null
}
return result
}
const { row } = transformForWrite({
const result = await upsertRow({
id: idToUpdate,
adapter: this,
data,
enableAtomicWrites: true,
db,
fields: collection.flattenedFields,
ignoreResult: returning === false,
joinQuery,
operation: 'update',
req,
select,
tableName,
})
const drizzle = db as LibSQLDatabase
await drizzle
.update(this.tables[tableName])
.set(row)
// TODO: we can skip fetching idToUpdate here with using the incoming where
.where(eq(this.tables[tableName].id, idToUpdate))
if (returning === false) {
return null
}
const findManyArgs = buildFindManyArgs({
adapter: this,
depth: 0,
fields: collection.flattenedFields,
joinQuery: false,
select,
tableName,
})
findManyArgs.where = eq(this.tables[tableName].id, idToUpdate)
const doc = await db.query[tableName].findFirst(findManyArgs)
// //////////////////////////////////
// TRANSFORM DATA
// //////////////////////////////////
const result = transform({
adapter: this,
config: this.payload.config,
data: doc,
fields: collection.flattenedFields,
joinQuery: false,
tableName,
})
return result
}

View File

@@ -1,3 +1,4 @@
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { TypeWithID } from 'payload'
import { eq } from 'drizzle-orm'
@@ -12,13 +13,14 @@ import { transformForWrite } from '../transform/write/index.js'
import { deleteExistingArrayRows } from './deleteExistingArrayRows.js'
import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js'
import { insertArrays } from './insertArrays.js'
import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'
/**
* If `id` is provided, it will update the row with that ID.
* If `where` is provided, it will update the row that matches the `where`
* If neither `id` nor `where` is provided, it will create a new row.
*
* This function replaces the entire row and does not support partial updates.
* This function replaces the entire row and does not support partial updates.
*/
export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>({
id,
@@ -39,429 +41,446 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
upsertTarget,
where,
}: Args): Promise<T> => {
// Split out the incoming data into the corresponding:
// base row, locales, relationships, blocks, and arrays
const rowToInsert = transformForWrite({
adapter,
data,
enableAtomicWrites: false,
fields,
path,
tableName,
})
// First, we insert the main row
let insertedRow: Record<string, unknown>
try {
if (operation === 'update') {
const target = upsertTarget || adapter.tables[tableName].id
if (id) {
rowToInsert.row.id = id
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target },
tableName,
values: rowToInsert.row,
})
} else {
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target, where },
tableName,
values: rowToInsert.row,
})
}
} else {
if (adapter.allowIDOnCreate && data.id) {
rowToInsert.row.id = data.id
}
;[insertedRow] = await adapter.insert({
db,
tableName,
values: rowToInsert.row,
})
}
const localesToInsert: Record<string, unknown>[] = []
const relationsToInsert: Record<string, unknown>[] = []
const textsToInsert: Record<string, unknown>[] = []
const numbersToInsert: Record<string, unknown>[] = []
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
// If there are locale rows with data, add the parent and locale to each
if (Object.keys(rowToInsert.locales).length > 0) {
Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
localeRow._parentID = insertedRow.id
localeRow._locale = locale
localesToInsert.push(localeRow)
})
}
// If there are relationships, add parent to each
if (rowToInsert.relationships.length > 0) {
rowToInsert.relationships.forEach((relation) => {
relation.parent = insertedRow.id
relationsToInsert.push(relation)
})
}
// If there are texts, add parent to each
if (rowToInsert.texts.length > 0) {
rowToInsert.texts.forEach((textRow) => {
textRow.parent = insertedRow.id
textsToInsert.push(textRow)
})
}
// If there are numbers, add parent to each
if (rowToInsert.numbers.length > 0) {
rowToInsert.numbers.forEach((numberRow) => {
numberRow.parent = insertedRow.id
numbersToInsert.push(numberRow)
})
}
// If there are selects, add parent to each, and then
// store by table name and rows
if (Object.keys(rowToInsert.selects).length > 0) {
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
selectsToInsert[selectTableName] = []
selectRows.forEach((row) => {
if (typeof row.parent === 'undefined') {
row.parent = insertedRow.id
}
selectsToInsert[selectTableName].push(row)
})
})
}
// If there are blocks, add parent to each, and then
// store by table name and rows
Object.keys(rowToInsert.blocks).forEach((tableName) => {
rowToInsert.blocks[tableName].forEach((blockRow) => {
blockRow.row._parentID = insertedRow.id
if (!blocksToInsert[tableName]) {
blocksToInsert[tableName] = []
}
if (blockRow.row.uuid) {
delete blockRow.row.uuid
}
blocksToInsert[tableName].push(blockRow)
})
let insertedRow: Record<string, unknown> = { id }
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
const { row } = transformForWrite({
adapter,
data,
enableAtomicWrites: true,
fields,
tableName,
})
// //////////////////////////////////
// INSERT LOCALES
// //////////////////////////////////
const drizzle = db as LibSQLDatabase
if (localesToInsert.length > 0) {
const localeTableName = `${tableName}${adapter.localesSuffix}`
const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]
await drizzle
.update(adapter.tables[tableName])
.set(row)
// TODO: we can skip fetching idToUpdate here with using the incoming where
.where(eq(adapter.tables[tableName].id, id))
} else {
// Split out the incoming data into the corresponding:
// base row, locales, relationships, blocks, and arrays
const rowToInsert = transformForWrite({
adapter,
data,
enableAtomicWrites: false,
fields,
path,
tableName,
})
// First, we insert the main row
try {
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: localeTableName,
where: eq(localeTable._parentID, insertedRow.id),
})
}
const target = upsertTarget || adapter.tables[tableName].id
await adapter.insert({
db,
tableName: localeTableName,
values: localesToInsert,
})
}
// //////////////////////////////////
// INSERT RELATIONSHIPS
// //////////////////////////////////
const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
if (relationsToInsert.length > 0) {
await adapter.insert({
db,
tableName: relationshipsTableName,
values: relationsToInsert,
})
}
// //////////////////////////////////
// INSERT hasMany TEXTS
// //////////////////////////////////
const textsTableName = `${tableName}_texts`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...textsToInsert, ...rowToInsert.textsToDelete],
tableName: textsTableName,
})
}
if (textsToInsert.length > 0) {
await adapter.insert({
db,
tableName: textsTableName,
values: textsToInsert,
})
}
// //////////////////////////////////
// INSERT hasMany NUMBERS
// //////////////////////////////////
const numbersTableName = `${tableName}_numbers`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
tableName: numbersTableName,
})
}
if (numbersToInsert.length > 0) {
await adapter.insert({
db,
tableName: numbersTableName,
values: numbersToInsert,
})
}
// //////////////////////////////////
// INSERT BLOCKS
// //////////////////////////////////
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
if (operation === 'update') {
for (const tableName of rowToInsert.blocksToDelete) {
const blockTable = adapter.tables[tableName]
await adapter.deleteWhere({
if (id) {
rowToInsert.row.id = id
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target },
tableName,
values: rowToInsert.row,
})
} else {
;[insertedRow] = await adapter.insert({
db,
onConflictDoUpdate: { set: rowToInsert.row, target, where },
tableName,
values: rowToInsert.row,
})
}
} else {
if (adapter.allowIDOnCreate && data.id) {
rowToInsert.row.id = data.id
}
;[insertedRow] = await adapter.insert({
db,
tableName,
where: eq(blockTable._parentID, insertedRow.id),
values: rowToInsert.row,
})
}
}
// When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
const arraysBlocksUUIDMap: Record<string, number | string> = {}
const localesToInsert: Record<string, unknown>[] = []
const relationsToInsert: Record<string, unknown>[] = []
const textsToInsert: Record<string, unknown>[] = []
const numbersToInsert: Record<string, unknown>[] = []
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
insertedBlockRows[tableName] = await adapter.insert({
db,
tableName,
values: blockRows.map(({ row }) => row),
})
// If there are locale rows with data, add the parent and locale to each
if (Object.keys(rowToInsert.locales).length > 0) {
Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
localeRow._parentID = insertedRow.id
localeRow._locale = locale
localesToInsert.push(localeRow)
})
}
insertedBlockRows[tableName].forEach((row, i) => {
blockRows[i].row = row
if (
typeof row._uuid === 'string' &&
(typeof row.id === 'string' || typeof row.id === 'number')
) {
arraysBlocksUUIDMap[row._uuid] = row.id
}
})
// If there are relationships, add parent to each
if (rowToInsert.relationships.length > 0) {
rowToInsert.relationships.forEach((relation) => {
relation.parent = insertedRow.id
relationsToInsert.push(relation)
})
}
const blockLocaleIndexMap: number[] = []
// If there are texts, add parent to each
if (rowToInsert.texts.length > 0) {
rowToInsert.texts.forEach((textRow) => {
textRow.parent = insertedRow.id
textsToInsert.push(textRow)
})
}
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
// If there are numbers, add parent to each
if (rowToInsert.numbers.length > 0) {
rowToInsert.numbers.forEach((numberRow) => {
numberRow.parent = insertedRow.id
numbersToInsert.push(numberRow)
})
}
// If there are selects, add parent to each, and then
// store by table name and rows
if (Object.keys(rowToInsert.selects).length > 0) {
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => {
selectsToInsert[selectTableName] = []
selectRows.forEach((row) => {
if (typeof row.parent === 'undefined') {
row.parent = insertedRow.id
}
selectsToInsert[selectTableName].push(row)
})
})
}
// If there are blocks, add parent to each, and then
// store by table name and rows
Object.keys(rowToInsert.blocks).forEach((tableName) => {
rowToInsert.blocks[tableName].forEach((blockRow) => {
blockRow.row._parentID = insertedRow.id
if (!blocksToInsert[tableName]) {
blocksToInsert[tableName] = []
}
if (blockRow.row.uuid) {
delete blockRow.row.uuid
}
blocksToInsert[tableName].push(blockRow)
})
})
// //////////////////////////////////
// INSERT LOCALES
// //////////////////////////////////
if (localesToInsert.length > 0) {
const localeTableName = `${tableName}${adapter.localesSuffix}`
const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: localeTableName,
where: eq(localeTable._parentID, insertedRow.id),
})
}
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await adapter.insert({
db,
tableName: `${tableName}${adapter.localesSuffix}`,
values: blockLocaleRowsToInsert,
tableName: localeTableName,
values: localesToInsert,
})
}
// //////////////////////////////////
// INSERT RELATIONSHIPS
// //////////////////////////////////
const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
if (relationsToInsert.length > 0) {
await adapter.insert({
db,
tableName: relationshipsTableName,
values: relationsToInsert,
})
}
// //////////////////////////////////
// INSERT hasMany TEXTS
// //////////////////////////////////
const textsTableName = `${tableName}_texts`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...textsToInsert, ...rowToInsert.textsToDelete],
tableName: textsTableName,
})
}
if (textsToInsert.length > 0) {
await adapter.insert({
db,
tableName: textsTableName,
values: textsToInsert,
})
}
// //////////////////////////////////
// INSERT hasMany NUMBERS
// //////////////////////////////////
const numbersTableName = `${tableName}_numbers`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...numbersToInsert, ...rowToInsert.numbersToDelete],
tableName: numbersTableName,
})
}
if (numbersToInsert.length > 0) {
await adapter.insert({
db,
tableName: numbersTableName,
values: numbersToInsert,
})
}
// //////////////////////////////////
// INSERT BLOCKS
// //////////////////////////////////
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
if (operation === 'update') {
for (const tableName of rowToInsert.blocksToDelete) {
const blockTable = adapter.tables[tableName]
await adapter.deleteWhere({
db,
tableName,
where: eq(blockTable._parentID, insertedRow.id),
})
}
}
// When versions are enabled, this is used to track the mapping between block/array ObjectIDs and their generated numeric representation, which is then used for hasMany selects nested inside arrays/blocks in versions.
const arraysBlocksUUIDMap: Record<string, number | string> = {}
for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
insertedBlockRows[tableName] = await adapter.insert({
db,
tableName,
values: blockRows.map(({ row }) => row),
})
insertedBlockRows[tableName].forEach((row, i) => {
blockRows[i].row = row
if (
typeof row._uuid === 'string' &&
(typeof row.id === 'string' || typeof row.id === 'number')
) {
arraysBlocksUUIDMap[row._uuid] = row.id
}
})
const blockLocaleIndexMap: number[] = []
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
}
})
}
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await adapter.insert({
db,
tableName: `${tableName}${adapter.localesSuffix}`,
values: blockLocaleRowsToInsert,
})
}
await insertArrays({
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: insertedBlockRows[tableName],
uuidMap: arraysBlocksUUIDMap,
})
}
// //////////////////////////////////
// INSERT ARRAYS RECURSIVELY
// //////////////////////////////////
if (operation === 'update') {
for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
})
}
}
await insertArrays({
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
arrays: [rowToInsert.arrays],
db,
parentRows: insertedBlockRows[tableName],
parentRows: [insertedRow],
uuidMap: arraysBlocksUUIDMap,
})
}
// //////////////////////////////////
// INSERT ARRAYS RECURSIVELY
// //////////////////////////////////
// //////////////////////////////////
// INSERT hasMany SELECTS
// //////////////////////////////////
if (operation === 'update') {
for (const arrayTableName of Object.keys(rowToInsert.arrays)) {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
})
}
}
for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: selectTableName,
where: eq(selectTable.parent, insertedRow.id),
})
}
await insertArrays({
adapter,
arrays: [rowToInsert.arrays],
db,
parentRows: [insertedRow],
uuidMap: arraysBlocksUUIDMap,
})
if (Object.keys(arraysBlocksUUIDMap).length > 0) {
tableRows.forEach((row: any) => {
if (row.parent in arraysBlocksUUIDMap) {
row.parent = arraysBlocksUUIDMap[row.parent]
}
})
}
// //////////////////////////////////
// INSERT hasMany SELECTS
// //////////////////////////////////
for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await adapter.deleteWhere({
db,
tableName: selectTableName,
where: eq(selectTable.parent, insertedRow.id),
})
if (tableRows.length) {
await adapter.insert({
db,
tableName: selectTableName,
values: tableRows,
})
}
}
if (Object.keys(arraysBlocksUUIDMap).length > 0) {
tableRows.forEach((row: any) => {
if (row.parent in arraysBlocksUUIDMap) {
row.parent = arraysBlocksUUIDMap[row.parent]
// //////////////////////////////////
// Error Handling
// //////////////////////////////////
} catch (caughtError) {
// Unique constraint violation error
// '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite
let error = caughtError
if (typeof caughtError === 'object' && 'cause' in caughtError) {
error = caughtError.cause
}
if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
let fieldName: null | string = null
// We need to try and find the right constraint for the field but if we can't we fallback to a generic message
if (error.code === '23505') {
// For PostgreSQL, we can try to extract the field name from the error constraint
if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
} else {
const replacement = `${tableName}_`
if (error.constraint.includes(replacement)) {
const replacedConstraint = error.constraint.replace(replacement, '')
if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
}
}
}
})
}
if (tableRows.length) {
await adapter.insert({
db,
tableName: selectTableName,
values: tableRows,
})
}
}
if (!fieldName) {
// Last case scenario we extract the key and value from the detail on the error
const detail = error.detail
const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
const match: string[] = detail.match(regex)
// //////////////////////////////////
// Error Handling
// //////////////////////////////////
} catch (caughtError) {
// Unique constraint violation error
// '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite
if (match && match[1]) {
const key = match[1]
let error = caughtError
if (typeof caughtError === 'object' && 'cause' in caughtError) {
error = caughtError.cause
}
fieldName = key
}
}
} else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
/**
* For SQLite, we can try to extract the field name from the error message
* The message typically looks like:
* "UNIQUE constraint failed: table_name.field_name"
*/
const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
const match: string[] = error.message.match(regex)
if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
let fieldName: null | string = null
// We need to try and find the right constraint for the field but if we can't we fallback to a generic message
if (error.code === '23505') {
// For PostgreSQL, we can try to extract the field name from the error constraint
if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) {
fieldName = adapter.fieldConstraints[tableName]?.[error.constraint]
} else {
const replacement = `${tableName}_`
if (match && match[2]) {
if (adapter.fieldConstraints[tableName]) {
fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
}
if (error.constraint.includes(replacement)) {
const replacedConstraint = error.constraint.replace(replacement, '')
if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) {
fieldName = adapter.fieldConstraints[tableName][replacedConstraint]
if (!fieldName) {
fieldName = match[2]
}
}
}
if (!fieldName) {
// Last case scenario we extract the key and value from the detail on the error
const detail = error.detail
const regex = /Key \(([^)]+)\)=\(([^)]+)\)/
const match: string[] = detail.match(regex)
if (match && match[1]) {
const key = match[1]
fieldName = key
}
}
} else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
/**
* For SQLite, we can try to extract the field name from the error message
* The message typically looks like:
* "UNIQUE constraint failed: table_name.field_name"
*/
const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/
const match: string[] = error.message.match(regex)
if (match && match[2]) {
if (adapter.fieldConstraints[tableName]) {
fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`]
}
if (!fieldName) {
fieldName = match[2]
}
}
throw new ValidationError(
{
id,
errors: [
{
message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
path: fieldName,
},
],
req,
},
req?.t,
)
} else {
throw error
}
throw new ValidationError(
{
id,
errors: [
{
message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
path: fieldName,
},
],
req,
},
req?.t,
)
} else {
throw error
}
}

View File

@@ -0,0 +1,52 @@
import type { FlattenedField } from 'payload'
/**
 * Decides whether the optimized row-update path (a simple SQL `UPDATE ... SET` call) can be used
 * for the passed data, instead of the full upsertRow flow.
 *
 * Returns `true` when every updated field maps to a plain column on the main table, so the row
 * can be updated with a single SET statement. Returns `false` when the data touches fields stored
 * outside the main row — arrays, blocks, hasMany select/text/number fields, localized fields, or
 * polymorphic relationships — which require the full upsertRow sub-table handling.
 *
 * @param data - The incoming (possibly partial) update data, keyed by field name.
 * @param fields - The flattened field config used to classify each data key.
 * @returns `true` if the optimized single-statement update is safe, `false` otherwise.
 */
export const shouldUseOptimizedUpsertRow = ({
  data,
  fields,
}: {
  data: Record<string, unknown>
  fields: FlattenedField[]
}): boolean => {
  for (const key in data) {
    const value = data[key]
    const field = fields.find((each) => each.name === key)

    // Keys that don't match a known field can't require sub-table handling — skip them.
    if (!field) {
      continue
    }

    if (
      // Arrays and blocks are stored in their own tables.
      field.type === 'array' ||
      field.type === 'blocks' ||
      // hasMany text/relationship/upload/select/number values live in relational side tables.
      ((field.type === 'text' ||
        field.type === 'relationship' ||
        field.type === 'upload' ||
        field.type === 'select' ||
        field.type === 'number') &&
        field.hasMany) ||
      // Polymorphic relationships (array relationTo) go through the relationships table.
      ((field.type === 'relationship' || field.type === 'upload') &&
        Array.isArray(field.relationTo)) ||
      // Localized fields are stored in the locales table.
      field.localized
    ) {
      return false
    }

    // Groups and tabs flatten their children — recurse into the nested data.
    if (
      (field.type === 'group' || field.type === 'tab') &&
      value &&
      typeof value === 'object' &&
      !shouldUseOptimizedUpsertRow({
        data: value as Record<string, unknown>,
        fields: field.flattenedFields,
      })
    ) {
      return false
    }
  }

  // Every updated field is a plain column — a single SQL SET is sufficient.
  return true
}

View File

@@ -1,7 +1,13 @@
import type { MongooseAdapter } from '@payloadcms/db-mongodb'
import type { PostgresAdapter } from '@payloadcms/db-postgres/types'
import type { NextRESTClient } from 'helpers/NextRESTClient.js'
import type { Payload, PayloadRequest, TypeWithID, ValidationError } from 'payload'
import type {
DataFromCollectionSlug,
Payload,
PayloadRequest,
TypeWithID,
ValidationError,
} from 'payload'
import {
migrateRelationshipsV2_V3,
@@ -2807,7 +2813,7 @@ describe('database', () => {
}
})
it('should update simple', async () => {
it('should use optimized updateOne', async () => {
const post = await payload.create({
collection: 'posts',
data: {
@@ -2818,7 +2824,7 @@ describe('database', () => {
arrayWithIDs: [{ text: 'some text' }],
},
})
const res = await payload.db.updateOne({
const res = (await payload.db.updateOne({
where: { id: { equals: post.id } },
data: {
title: 'hello updated',
@@ -2826,14 +2832,61 @@ describe('database', () => {
tab: { text: 'in tab updated' },
},
collection: 'posts',
})
})) as unknown as DataFromCollectionSlug<'posts'>
expect(res.title).toBe('hello updated')
expect(res.text).toBe('other text (should not be nuked)')
expect(res.group.text).toBe('in group updated')
expect(res.tab.text).toBe('in tab updated')
expect(res.group?.text).toBe('in group updated')
expect(res.tab?.text).toBe('in tab updated')
expect(res.arrayWithIDs).toHaveLength(1)
expect(res.arrayWithIDs[0].text).toBe('some text')
expect(res.arrayWithIDs?.[0]?.text).toBe('some text')
})
it('should use optimized updateMany', async () => {
const post1 = await payload.create({
collection: 'posts',
data: {
text: 'other text (should not be nuked)',
title: 'hello',
group: { text: 'in group' },
tab: { text: 'in tab' },
arrayWithIDs: [{ text: 'some text' }],
},
})
const post2 = await payload.create({
collection: 'posts',
data: {
text: 'other text 2 (should not be nuked)',
title: 'hello',
group: { text: 'in group' },
tab: { text: 'in tab' },
arrayWithIDs: [{ text: 'some text' }],
},
})
const res = (await payload.db.updateMany({
where: { id: { in: [post1.id, post2.id] } },
data: {
title: 'hello updated',
group: { text: 'in group updated' },
tab: { text: 'in tab updated' },
},
collection: 'posts',
})) as unknown as Array<DataFromCollectionSlug<'posts'>>
expect(res).toHaveLength(2)
const resPost1 = res?.find((r) => r.id === post1.id)
const resPost2 = res?.find((r) => r.id === post2.id)
expect(resPost1?.text).toBe('other text (should not be nuked)')
expect(resPost2?.text).toBe('other text 2 (should not be nuked)')
for (const post of res) {
expect(post.title).toBe('hello updated')
expect(post.group?.text).toBe('in group updated')
expect(post.tab?.text).toBe('in tab updated')
expect(post.arrayWithIDs).toHaveLength(1)
expect(post.arrayWithIDs?.[0]?.text).toBe('some text')
}
})
it('should allow incremental number update', async () => {