chore: convert code that awaited many promises in parallel to sequential execution

This commit is contained in:
James
2023-09-24 16:31:23 -07:00
parent 8155902476
commit 4df19cbdf6
28 changed files with 767 additions and 185 deletions

View File

@@ -21,6 +21,7 @@
"@libsql/client": "^0.3.1",
"drizzle-kit": "0.19.13-e99bac1",
"drizzle-orm": "0.28.5",
"mongo-query-to-postgres-jsonb": "^0.2.15",
"pg": "8.11.3",
"prompts": "2.4.2",
"to-snake-case": "1.0.0"

View File

@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
import { upsertRow } from '../upsertRow'
export const create: Create = async function create({ collection: collectionSlug, data, req }) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collection = this.payload.collections[collectionSlug].config
const result = await upsertRow({

View File

@@ -11,7 +11,7 @@ export async function createGlobal<T extends TypeWithID>(
this: PostgresAdapter,
{ data, req = {} as PayloadRequest, slug }: CreateGlobalArgs,
): Promise<T> {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
const result = await upsertRow<T>({

View File

@@ -13,7 +13,7 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
this: PostgresAdapter,
{ autosave, globalSlug, req = {} as PayloadRequest, versionData },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const global = this.payload.globals.config.find(({ slug }) => slug === globalSlug)
const globalTableName = toSnakeCase(globalSlug)
const tableName = `_${globalTableName}_v`

View File

@@ -13,7 +13,7 @@ export const createVersion: CreateVersion = async function createVersion(
this: PostgresAdapter,
{ autosave, collectionSlug, parent, req = {} as PayloadRequest, versionData },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collection = this.payload.collections[collectionSlug].config
const collectionTableName = toSnakeCase(collectionSlug)
const tableName = `_${collectionTableName}_v`

View File

@@ -12,7 +12,7 @@ export const deleteMany: DeleteMany = async function deleteMany(
this: PostgresAdapter,
{ collection, req = {} as PayloadRequest, where },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collectionConfig = this.payload.collections[collection].config
const tableName = toSnakeCase(collection)

View File

@@ -13,7 +13,7 @@ export const deleteOne: DeleteOne = async function deleteOne(
this: PostgresAdapter,
{ collection, req = {} as PayloadRequest, where: incomingWhere },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collectionConfig = this.payload.collections[collection].config
const tableName = toSnakeCase(collection)

View File

@@ -13,7 +13,7 @@ export const deleteVersions: DeleteVersions = async function deleteVersion(
this: PostgresAdapter,
{ collection, locale, req = {} as PayloadRequest, where: where },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
const tableName = `_${toSnakeCase(collection)}_v`

View File

@@ -10,7 +10,7 @@ export const findGlobal: FindGlobal = async function findGlobal(
this: PostgresAdapter,
{ locale, req, slug, where },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
const tableName = toSnakeCase(slug)

View File

@@ -13,7 +13,7 @@ export async function findOne<T extends TypeWithID>(
this: PostgresAdapter,
{ collection, locale, req = {} as PayloadRequest, where: incomingWhere }: FindOneArgs,
): Promise<T> {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
const tableName = toSnakeCase(collection)

View File

@@ -0,0 +1,337 @@
import * as util from './util'
// Maps the simple Mongo comparison operators directly onto their SQL equivalents.
// Note that "IS DISTINCT FROM" needs to be used for $ne to ensure rows where the
// field is NULL are still returned as expected, see https://modern-sql.com/feature/is-distinct-from
const OPS = {
  $eq: '=',
  $gt: '>',
  $gte: '>=',
  $lt: '<',
  $lte: '<=',
  $ne: ' IS DISTINCT FROM ',
}
// Operators that are recognized but need dedicated handling in convertOp()
// rather than a direct textual substitution from OPS.
const OTHER_OPS = {
  $all: true,
  $and: true,
  $elemMatch: true,
  $exists: true,
  $in: true,
  $mod: true,
  $nin: true,
  $not: true,
  $or: true,
  $regex: true,
  $size: true,
  $text: true,
  $type: true,
}
/**
 * Resolve which configured array path (if any) the dotted key `op` falls under.
 * Returns `true` when every path is treated as an array, the matching path
 * string when one is found, `undefined` when none matches, or `false` when no
 * array-path configuration was provided.
 */
function getMatchingArrayPath(op, arrayPaths) {
  // `true` means "always treat the path as an array path".
  if (arrayPaths === true) {
    return true
  }
  if (Array.isArray(arrayPaths)) {
    return arrayPaths.find((candidate) => op.startsWith(candidate))
  }
  return false
}
/**
 * Build SQL that matches when either the value at the path matches directly,
 * OR the value is a jsonb array and ANY of its elements matches.
 * @param path array path current key
 * @param op current key, might be a dotted path
 * @param value the query value for this key
 * @param parent object that holds `value` (parent[op] === value)
 * @param arrayPathStr the matched array path (string), or `true` when every path is treated as an array
 * @param options conversion options passed through to convert()/convertOp()
 * @returns {string|string|*} a SQL predicate fragment
 */
function createElementOrArrayQuery(path, op, value, parent, arrayPathStr, options) {
  const arrayPath = typeof arrayPathStr === 'string' ? arrayPathStr?.split('.') : []
  // Portion of the dotted key *below* the array; evaluated against each element.
  const deeperPath = op.split('.').slice(arrayPath.length)
  // jsonb_array_elements exposes each element under the column name "value".
  const innerPath = ['value', ...deeperPath]
  const pathToMaybeArray = path.concat(arrayPath)
  // TODO: nested array paths are not yet supported.
  const singleElementQuery = convertOp(path, op, value, parent, [], options)
  const text = util.pathToText(pathToMaybeArray, false)
  // Guard so jsonb_array_elements is only ever applied to actual arrays.
  const safeArray = `jsonb_typeof(${text})='array' AND`
  let arrayQuery = ''
  const specialKeys = getSpecialKeys(path, value, true)
  if (typeof value === 'object' && !Array.isArray(value) && value !== null) {
    if (typeof value['$size'] !== 'undefined') {
      // size does not support array element based matching
    } else if (value['$elemMatch']) {
      const sub = convert(innerPath, value['$elemMatch'], [], false, options)
      arrayQuery = `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
      // $elemMatch only ever matches array elements, so skip the
      // single-element OR fallback at the bottom of this function.
      return arrayQuery
    } else if (value['$in']) {
      const sub = convert(innerPath, value, [], true, options)
      arrayQuery = `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
    } else if (value['$all']) {
      // Every listed value must be present: one EXISTS per value, ANDed together.
      const cleanedValue = value['$all'].filter((v) => v !== null && typeof v !== 'undefined')
      arrayQuery =
        '(' +
        cleanedValue
          .map(function (subquery) {
            const sub = convert(innerPath, subquery, [], false, options)
            return `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
          })
          .join(' AND ') +
        ')'
    } else if (specialKeys.length === 0) {
      // Plain object with no operators: exact-match it against each element.
      const sub = convert(innerPath, value, [], true, options)
      arrayQuery = `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
    } else {
      // Operator object (e.g. {$gt: …, $lt: …}): each operator gets its own EXISTS.
      const params = value
      arrayQuery =
        '(' +
        Object.keys(params)
          .map(function (subKey) {
            const sub = convert(innerPath, { [subKey]: params[subKey] }, [], true, options)
            return `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
          })
          .join(' AND ') +
        ')'
    }
  } else {
    // Primitive (or array literal) value: equality match against any element.
    const sub = convert(innerPath, value, [], true, options)
    arrayQuery = `EXISTS (SELECT * FROM jsonb_array_elements(${text}) WHERE ${safeArray} ${sub})`
  }
  if (!arrayQuery || arrayQuery === '()') {
    return singleElementQuery
  }
  // Match either the scalar interpretation or any array element.
  return `(${singleElementQuery} OR ${arrayQuery})`
}
/**
 * Convert a single Mongo operator (or dotted sub-path) into a SQL predicate.
 * @param path {string} a dotted path
 * @param op {string} sub path, especially the current operation to convert, e.g. $in
 * @param value {mixed} the operand for `op`
 * @param parent {mixed} parent[path] = value
 * @param arrayPaths {Array} List of dotted paths that possibly need to be handled as arrays.
 * @param options conversion options (e.g. disableContainmentQuery)
 */
function convertOp(path, op, value, parent, arrayPaths, options) {
  const arrayPath = getMatchingArrayPath(op, arrayPaths)
  // It seems like direct matches shouldn't be array fields, but 2D arrays are possible in MongoDB
  // I will need to do more testing to see if we should handle this case differently.
  // const arrayDirectMatch = !isSpecialOp(op) && Array.isArray(value)
  if (arrayPath) {
    return createElementOrArrayQuery(path, op, value, parent, arrayPath, options)
  }
  switch (op) {
    case '$not':
      return '(NOT ' + convert(path, value, undefined, false, options) + ')'
    case '$nor': {
      // $nor = NOT(a OR b ...) = AND of the negations (De Morgan),
      // so rewrite as $and over $not-wrapped entries.
      for (const v of value) {
        if (typeof v !== 'object') {
          throw new Error('$or/$and/$nor entries need to be full objects')
        }
      }
      const notted = value.map((e) => ({ $not: e }))
      return convertOp(path, '$and', notted, value, arrayPaths, options)
    }
    case '$or':
    case '$and':
      if (!Array.isArray(value)) {
        throw new Error('$and or $or requires an array.')
      }
      if (value.length == 0) {
        throw new Error('$and/$or/$nor must be a nonempty array')
      } else {
        for (const v of value) {
          if (typeof v !== 'object') {
            throw new Error('$or/$and/$nor entries need to be full objects')
          }
        }
        return (
          '(' +
          value
            .map((subquery) => convert(path, subquery, arrayPaths, false, options))
            .join(op === '$or' ? ' OR ' : ' AND ') +
          ')'
        )
      }
    // TODO (make sure this handles multiple elements correctly)
    case '$elemMatch':
      return convert(path, value, arrayPaths, false, options)
    //return util.pathToText(path, false) + ' @> \'' + util.stringEscape(JSON.stringify(value)) + '\'::jsonb'
    case '$in':
    case '$nin': {
      if (value.length === 0) {
        // Nothing can match an empty $in/$nin list.
        return 'FALSE'
      }
      if (value.length === 1) {
        // Single-element list degrades to a plain equality match.
        return convert(path, value[0], arrayPaths, false, options)
      }
      // nulls/undefined are stripped from the IN list and handled separately below.
      const cleanedValue = value.filter((v) => v !== null && typeof v !== 'undefined')
      const partial =
        util.pathToText(path, typeof value[0] == 'string') +
        (op == '$nin' ? ' NOT' : '') +
        ' IN (' +
        cleanedValue.map(util.quote).join(', ') +
        ')'
      if (value.length != cleanedValue.length) {
        // NOTE(review): 'OR IS NULL' / 'AND IS NOT NULL' carry no column
        // reference and look like invalid SQL — confirm against upstream
        // mongo-query-to-postgres-jsonb behavior.
        return op === '$in' ? '(' + partial + ' OR IS NULL)' : '(' + partial + ' AND IS NOT NULL)'
      }
      return partial
    }
    case '$text': {
      // Regex match; case-insensitive (~*) unless $caseSensitive was requested.
      const newOp = '~' + (!value['$caseSensitive'] ? '*' : '')
      return (
        util.pathToText(path, true) + ' ' + newOp + " '" + util.stringEscape(value['$search']) + "'"
      )
    }
    case '$regex': {
      let regexOp = '~'
      let op2 = ''
      if (parent['$options'] && parent['$options'].includes('i')) {
        // 'i' flag → case-insensitive operator.
        regexOp += '*'
      }
      if (!parent['$options'] || !parent['$options'].includes('s')) {
        // partial newline-sensitive matching ((?p) is a Postgres embedded option)
        op2 += '(?p)'
      }
      if (value instanceof RegExp) {
        value = value.source
      }
      const result =
        util.pathToText(path, true) + ' ' + regexOp + " '" + op2 + util.stringEscape(value) + "'"
      return result
    }
    case '$gt':
    case '$gte':
    case '$lt':
    case '$lte':
    case '$ne':
    case '$eq': {
      const isSimpleComparision = op === '$eq' || op === '$ne'
      // Purely-numeric path segments are array indices; containment can't express those.
      const pathContainsArrayAccess = path.some((key) => /^\d+$/.test(key))
      if (isSimpleComparision && !pathContainsArrayAccess && !options.disableContainmentQuery) {
        // create containment query since these can use GIN indexes
        // See docs here, https://www.postgresql.org/docs/9.4/datatype-json.html#JSON-INDEXING
        const [head, ...tail] = path
        return `${op == '$ne' ? 'NOT ' : ''}${head} @> ` + util.pathToObject([...tail, value])
      } else {
        const text = util.pathToText(path, typeof value == 'string')
        return text + OPS[op] + util.quote(value)
      }
    }
    case '$type': {
      const text = util.pathToText(path, false)
      const type = util.getPostgresTypeName(value)
      return 'jsonb_typeof(' + text + ')=' + util.quote(type)
    }
    case '$size': {
      if (typeof value !== 'number' || value < 0 || !Number.isInteger(value)) {
        throw new Error('$size only supports positive integer')
      }
      const text = util.pathToText(path, false)
      return 'jsonb_array_length(' + text + ')=' + value
    }
    case '$exists': {
      if (path.length > 1) {
        // Use the `?` key-existence operator on the parent object.
        // NOTE(review): path.pop() mutates the caller's path array —
        // confirm no caller reuses `path` after this call.
        const key = path.pop()
        const text = util.pathToText(path, false)
        return (value ? '' : ' NOT ') + text + ' ? ' + util.quote(key)
      } else {
        // Top-level column: existence is just a NULL check.
        const text = util.pathToText(path, false)
        return text + ' IS ' + (value ? 'NOT ' : '') + 'NULL'
      }
    }
    case '$mod': {
      const text = util.pathToText(path, true)
      if (typeof value[0] != 'number' || typeof value[1] != 'number') {
        throw new Error('$mod requires numeric inputs')
      }
      return 'cast(' + text + ' AS numeric) % ' + value[0] + '=' + value[1]
    }
    default:
      // this is likely a top level field, recurse
      return convert(path.concat(op.split('.')), value, undefined, false, options)
  }
}
// True when `op` is one of the recognized Mongo operator keys ($eq, $in, …).
function isSpecialOp(op) {
  return [OPS, OTHER_OPS].some((table) => op in table)
}
// top level keys are always special, since you never exact match the whole object
function getSpecialKeys(path, query, forceExact) {
  const everyKeyIsSpecial = path.length === 1 && !forceExact
  return Object.keys(query).filter((key) => everyKeyIsSpecial || isSpecialOp(key))
}
/**
 * Convert a filter expression to the corresponding PostgreSQL text.
 * @param path {Array} The current path
 * @param query {Mixed} Any value
 * @param arrayPaths {Array} List of dotted paths that possibly need to be handled as arrays.
 * @param forceExact {Boolean} When true, an exact match will be required.
 * @returns The corresponding PSQL expression
 */
const convert = function (path, query, arrayPaths, forceExact, options) {
  // Primitive values and array literals are equality matches against the path.
  if (
    typeof query === 'string' ||
    typeof query === 'boolean' ||
    typeof query == 'number' ||
    Array.isArray(query)
  ) {
    return convertOp(path, '$eq', query, {}, arrayPaths, options)
  }
  if (query === null) {
    // Match both a missing key (SQL NULL) and an explicit JSON null.
    const text = util.pathToText(path, false)
    return '(' + text + ' IS NULL OR ' + text + " = 'null'::jsonb)"
  }
  if (query instanceof RegExp) {
    const op = query.ignoreCase ? '~*' : '~'
    return util.pathToText(path, true) + ' ' + op + " '" + util.stringEscape(query.source) + "'"
  }
  if (typeof query === 'object') {
    // Check for an empty object
    if (Object.keys(query).length === 0) {
      return 'TRUE'
    }
    const specialKeys = getSpecialKeys(path, query, forceExact)
    switch (specialKeys.length) {
      case 0: {
        // Plain object with no operator keys: exact equality on the whole jsonb value.
        const text = util.pathToText(path, typeof query == 'string')
        return text + '=' + util.quote(query)
      }
      case 1: {
        // Exactly one operator: delegate straight to convertOp.
        const key = specialKeys[0]
        return convertOp(path, key, query[key], query, arrayPaths, options)
      }
      default:
        // Multiple operators on one path are ANDed together.
        return (
          '(' +
          specialKeys
            .map(function (key) {
              return convertOp(path, key, query[key], query, arrayPaths, options)
            })
            .join(' and ') +
          ')'
        )
    }
  }
  // NOTE(review): any other query type (undefined, function, symbol) falls
  // through and returns undefined — confirm callers never pass these.
}
/**
 * Entry point: convert a Mongo-style query on `fieldName` into a PostgreSQL
 * jsonb predicate.
 * @param fieldName the jsonb column name used as the root of every path
 * @param query the Mongo-style filter object
 * @param arraysOrOptions either the list of dotted paths to treat as arrays,
 *   or an options object whose `arrays` property carries that list
 * @returns the SQL predicate text
 */
export default function (
  fieldName: string,
  query: Record<string, unknown>,
  arraysOrOptions: { arrays: boolean } | string[],
) {
  let arrays: boolean | string[] = []
  let options = {}
  if (Array.isArray(arraysOrOptions)) {
    arrays = arraysOrOptions
  } else if (
    // Guard against null: `typeof null === 'object'`, and `'arrays' in null`
    // throws a TypeError.
    arraysOrOptions &&
    typeof arraysOrOptions === 'object' &&
    'arrays' in arraysOrOptions
  ) {
    arrays = arraysOrOptions.arrays || []
    options = arraysOrOptions
  }
  return convert([fieldName], query, arrays || [], false, options)
}

View File

@@ -0,0 +1,137 @@
// The Mongo update operators recognized at the top level of an update document.
export const updateSpecialKeys = [
  '$currentDate',
  '$inc',
  '$min',
  '$max',
  '$mul',
  '$rename',
  '$set',
  '$setOnInsert',
  '$unset',
  '$push',
  '$pull',
  '$pullAll',
  '$addToSet',
]
// Number of top-level keys in `doc` that are Mongo update operators.
export const countUpdateSpecialKeys = function (doc) {
  let count = 0
  for (const key of Object.keys(doc)) {
    if (updateSpecialKeys.includes(key)) {
      count += 1
    }
  }
  return count
}
// JSON.stringify replacer that SQL-escapes embedded string values as they
// are serialized (used by quote2 for jsonb literals).
function quoteReplacer(key, value) {
  return typeof value === 'string' ? stringEscape(value) : value
}
// Quote a value for interpolation into SQL: strings become escaped string
// literals, anything else becomes a jsonb literal.
export const quote = function (data) {
  if (typeof data === 'string') {
    return `'${stringEscape(data)}'`
  }
  return `'${JSON.stringify(data)}'::jsonb`
}
// Like quote, but strings are wrapped as JSON strings ('"…"') so they compare
// against jsonb values; objects get their embedded strings SQL-escaped too.
export const quote2 = function (data) {
  if (typeof data === 'string') {
    return `'"${stringEscape(data)}"'`
  }
  return `'${JSON.stringify(data, quoteReplacer)}'::jsonb`
}
// Escape embedded single quotes by doubling them (standard SQL escaping).
export const stringEscape = function (str) {
  return str.split("'").join("''")
}
/**
 * Render a JSON path as a PostgreSQL -> / ->> accessor chain.
 * When `isString` is true the final accessor extracts text (->> or #>>)
 * instead of jsonb.
 */
export const pathToText = function (path, isString) {
  const root = stringEscape(path[0])
  // A bare column asked for as text: #>>'{}' unwraps the jsonb scalar.
  if (isString && path.length === 1) {
    return `${root} #>>'{}'`
  }
  let text = root
  const lastIndex = path.length - 1
  for (let i = 1; i < path.length; i++) {
    const segment = path[i]
    text += isString && i === lastIndex ? '->>' : '->'
    // Purely-numeric segments index arrays and must stay unquoted.
    text += /^\d+$/.test(segment) ? segment : `'${stringEscape(segment)}'`
  }
  return text
}
// Render a path (with its trailing value) as a quoted JSON object literal for
// containment (@>) queries; a single-segment path degrades to a bare quoted value.
export const pathToObject = function (path) {
  return path.length === 1 ? quote2(path[0]) : `'${pathToObjectHelper(path)}'`
}
/**
 * Recursively build the nested JSON text for a containment query,
 * e.g. ['a', 'b'] → '{ "a": "b" }'. The last element is the leaf value:
 * strings are emitted as JSON strings, anything else is JSON-serialized.
 */
export const pathToObjectHelper = function (path) {
  const [head, ...rest] = path
  if (rest.length === 0) {
    return typeof head === 'string' ? `"${head}"` : JSON.stringify(head)
  }
  return `{ "${head}": ${pathToObjectHelper(rest)} }`
}
// Convert "a.b.c" dot notation (rooted at column `path`) into a Postgres
// text-extracting accessor chain.
export const convertDotNotation = function (path, pathDotNotation) {
  const fullPath = [path, ...pathDotNotation.split('.')]
  return pathToText(fullPath, true)
}
// Render a path array as a quoted Postgres path literal, e.g. ['a','b'] → '{a,b}'.
export const toPostgresPath = function (path) {
  return `'{${path.join(',')}}'`
}
// Cast an accessor expression to numeric, treating non-numeric/missing as 0.
export const toNumeric = function (path) {
  return `COALESCE(Cast(${path} as numeric),0)`
}
// Mongo $type numeric BSON codes → names understood by jsonb_typeof();
// all the numeric BSON types collapse to 'number'.
const typeMapping = {
  1: 'number',
  2: 'string',
  3: 'object',
  4: 'array',
  8: 'boolean',
  10: 'null',
  16: 'number',
  18: 'number',
  19: 'number',
}
// Resolve a $type argument (numeric BSON code or type-name string) to a
// jsonb_typeof() type name; unknown values pass through unchanged.
export const getPostgresTypeName = function (type) {
  const argKind = typeof type
  if (argKind !== 'number' && argKind !== 'string') {
    // Mirrors MongoDB's error shape and code for an invalid $type argument.
    throw { code: 14, errmsg: 'argument to $type is not a number or a string' }
  }
  return typeMapping[type] || type
}
// True when `val` is the canonical string form of an integer
// (so '7' qualifies but '07' and 'abc' do not).
function isIntegerStrict(val) {
  return val != 'NaN' && String(parseInt(val)) == val
}
/**
 * Sort dotted key paths so that sibling keys (and array indices) come out in
 * descending order, comparing segment by segment; numeric segments compare
 * numerically. A path that is a strict prefix of another sorts after it.
 * Note: sorts `keys` in place and returns the same array.
 */
export const getPathSortedArray = function (keys) {
  return keys.sort((left, right) => {
    if (left == right) {
      return 0
    }
    const leftParts = left.split('.')
    const rightParts = right.split('.')
    for (let i = 0; i < leftParts.length; i++) {
      if (i >= rightParts.length) {
        // `right` ran out of segments: the deeper path (`left`) sorts first.
        return -1
      }
      if (leftParts[i] == rightParts[i]) {
        continue
      }
      const l = isIntegerStrict(leftParts[i]) ? parseInt(leftParts[i]) : leftParts[i]
      const r = isIntegerStrict(rightParts[i]) ? parseInt(rightParts[i]) : rightParts[i]
      return l > r ? -1 : 1
    }
    return 1
  })
}

View File

@@ -0,0 +1,11 @@
/**
 * Build a Postgres JSON accessor chain (-> / ->>) from a dotted path, dropping
 * the first segment (the column itself). The final segment uses ->> so the
 * value is extracted as text; numeric segments are left unquoted array indices.
 */
export const convertPathToJSONQuery = (path: string) => {
  const [, ...segments] = path.split('.')
  let accessor = ''
  segments.forEach((segment, index) => {
    const isLast = index === segments.length - 1
    const key = Number.isNaN(parseInt(segment)) ? `'${segment}'` : segment
    accessor += isLast ? `->>${key}` : `->${key}`
  })
  return accessor
}

View File

@@ -0,0 +1,73 @@
import { v4 as uuid } from 'uuid'
// TARGET:
// SELECT COUNT(*)
// FROM "rich_text_fields"
// WHERE EXISTS (
// SELECT 1
// FROM jsonb_array_elements(rich_text) AS rt
// WHERE EXISTS (
// SELECT 1
// FROM jsonb_array_elements(rt -> 'children') AS child
// WHERE child ->> 'text' ~* 'Hello'
// )
// );
type FromArrayArgs = {
  operator: string
  pathSegments: string[]
  treatAsArray?: string[]
  value: unknown
}
/**
 * Wrap the remaining constraint in an EXISTS over jsonb_array_elements so a
 * match against ANY element of the array at pathSegments[0] satisfies the query.
 */
const fromArray = (args: FromArrayArgs) => {
  // `uuid` must be *called* — interpolating the function itself would embed its
  // source code in the SQL. Strip dashes and prefix a letter so the alias is a
  // legal unquoted SQL identifier.
  const alias = `a_${uuid().replace(/-/g, '')}`
  // NOTE(review): the nested query does not yet reference `alias` (cf. the
  // TARGET SQL above, where the inner EXISTS reads rt -> 'children') —
  // still to be wired up.
  return `EXISTS (
    SELECT 1
    FROM jsonb_array_elements(${args.pathSegments[0]}) AS ${alias}
    ${createJSONQuery({
      ...args,
      pathSegments: args.pathSegments.slice(1),
    })}
  )`
}
// TODO(review): placeholder — should render the leaf comparison (e.g.
// child ->> 'text' ~* 'Hello' in the TARGET SQL above); currently always
// yields an empty string and ignores `args`.
const createConstraint = (args) => ``
type Args = {
  operator: string
  pathSegments: string[]
  treatAsArray?: string[]
  treatRootAsArray?: boolean
  value: unknown
}
type CreateJSONQuery = ({ operator, pathSegments, treatAsArray, treatRootAsArray, value }) => string
/**
 * Recursively build a SQL predicate for a dotted path into a json/richText
 * column. Segments listed in `treatAsArray` (or the root, when
 * `treatRootAsArray` is set) are unwrapped with jsonb_array_elements so the
 * constraint can match any element; the leaf comparison is delegated to
 * createConstraint.
 */
export const createJSONQuery = ({
  operator,
  pathSegments,
  // `treatAsArray` is optional in Args; default it so the `.includes` call
  // below cannot throw when callers omit it.
  treatAsArray = [],
  treatRootAsArray,
  value,
}: Args): string => {
  if (treatRootAsArray || treatAsArray.includes(pathSegments[0])) {
    return fromArray({
      operator,
      pathSegments,
      treatAsArray,
      value,
    })
  }
  // TODO(review): createConstraint is still a stub and receives no arguments.
  return createConstraint()
}
// myNestedProperty.myArray.myGroup.myArray.text

View File

@@ -2,7 +2,8 @@
import type { SQL } from 'drizzle-orm'
import type { Field, Operator, Where } from 'payload/types'
import { and, ilike, isNotNull } from 'drizzle-orm'
import { and, ilike, isNotNull, sql } from 'drizzle-orm'
// import createJSONQuery from 'mongo-query-to-postgres-jsonb'
import { QueryError } from 'payload/errors'
import { validOperators } from 'payload/types'
@@ -10,6 +11,9 @@ import type { GenericColumn, PostgresAdapter } from '../types'
import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery'
import { buildAndOrConditions } from './buildAndOrConditions'
import { convertPathToJSONQuery } from './convertPathToJSONQuery'
import { createJSONQuery } from './createJSONQuery'
// import convertJSONQuery from './convertJSONQuery'
import { getTableColumnFromPath } from './getTableColumnFromPath'
import { operatorMap } from './operatorMap'
import { sanitizeQueryValue } from './sanitizeQueryValue'
@@ -99,6 +103,31 @@ export async function parseParams({
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
constraints.push(operatorMap.equals(constraintTable[col], value))
})
if (['json', 'richText'].includes(field.type)) {
const pathSegments = relationOrPath.split('.').slice(1)
pathSegments.unshift(table[columnName].name)
if (field.type === 'richText') {
const jsonQuery = createJSONQuery({
operator,
pathSegments,
treatAsArray: ['children'],
treatRootAsArray: true,
value: val,
})
// constraints.push(sql.raw(jsonQuery))
}
if (field.type === 'json') {
const jsonQuery = convertPathToJSONQuery(jsonPath)
constraints.push(sql.raw(`${table[columnName].name}${jsonQuery} = '%${val}%'`))
}
break
}
if (getNotNullColumnByValue) {
const columnName = getNotNullColumnByValue(val)
if (columnName) {
@@ -106,21 +135,25 @@ export async function parseParams({
} else {
throw new QueryError([{ path: relationOrPath }])
}
} else if (operator === 'like') {
break
}
if (operator === 'like') {
constraints.push(
and(...val.split(' ').map((word) => ilike(table[columnName], `%${word}%`))),
)
} else {
const { operator: queryOperator, value: queryValue } = sanitizeQueryValue({
field,
operator,
val,
})
constraints.push(
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
)
break
}
const { operator: queryOperator, value: queryValue } = sanitizeQueryValue({
field,
operator,
val,
})
constraints.push(
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
)
}
}
}

View File

@@ -7,13 +7,13 @@ import { v4 as uuid } from 'uuid'
import type { PostgresAdapter } from '../types'
export const beginTransaction: BeginTransaction = async function beginTransaction(
this: PostgresAdapter
this: PostgresAdapter,
) {
let id
try {
id = uuid()
this.sessions[id] = drizzle(this.pool, { schema: this.schema });
await this.sessions[id].execute(sql`BEGIN`);
this.sessions[id] = drizzle(this.pool, { schema: this.schema })
await this.sessions[id].execute(sql`BEGIN`)
} catch (err) {
this.payload.logger.error(`Error: cannot begin transaction: ${err.message}`, err)
process.exit(1)

View File

@@ -11,7 +11,7 @@ export const updateOne: UpdateOne = async function updateOne(
this: PostgresAdapter,
{ id, collection: collectionSlug, data, draft, locale, req, where: whereArg },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collection = this.payload.collections[collectionSlug].config
const tableName = toSnakeCase(collectionSlug)
const whereToUse = whereArg || { id: { equals: id } }

View File

@@ -11,7 +11,7 @@ export const updateGlobal: UpdateGlobal = async function updateGlobal(
this: PostgresAdapter,
{ data, req = {} as PayloadRequest, slug },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
const tableName = toSnakeCase(slug)

View File

@@ -13,7 +13,7 @@ export const updateGlobalVersion: UpdateGlobalVersion = async function updateVer
this: PostgresAdapter,
{ id, global, locale, req = {} as PayloadRequest, versionData, where: whereArg },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const globalConfig: SanitizedGlobalConfig = this.payload.globals.config.find(
({ slug }) => slug === global,
)

View File

@@ -13,7 +13,7 @@ export const updateVersion: UpdateVersion = async function updateVersion(
this: PostgresAdapter,
{ id, collection, locale, req = {} as PayloadRequest, versionData, where: whereArg },
) {
const db = this.sessions?.[req.transactionID] || this.db
const db = this.sessions[req.transactionID] || this.db
const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
const whereToUse = whereArg || { id: { equals: id } }
const tableName = `_${toSnakeCase(collection)}_v`

View File

@@ -64,10 +64,6 @@ export const upsertRow = async <T extends TypeWithID>({
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
// Maintain a list of promises to run locale, blocks, and relationships
// all in parallel
const promises = []
// If there are locale rows with data, add the parent and locale to each
if (Object.keys(rowToInsert.locales).length > 0) {
Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => {
@@ -122,67 +118,58 @@ export const upsertRow = async <T extends TypeWithID>({
if (localesToInsert.length > 0) {
const localeTable = adapter.tables[`${tableName}_locales`]
promises.push(async () => {
if (operation === 'update') {
await db.delete(localeTable).where(eq(localeTable._parentID, insertedRow.id))
}
if (operation === 'update') {
await db.delete(localeTable).where(eq(localeTable._parentID, insertedRow.id))
}
await db.insert(localeTable).values(localesToInsert)
})
await db.insert(localeTable).values(localesToInsert)
}
// //////////////////////////////////
// INSERT RELATIONSHIPS
// //////////////////////////////////
promises.push(async () => {
const relationshipsTableName = `${tableName}_relationships`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
const relationshipsTableName = `${tableName}_relationships`
if (relationsToInsert.length > 0) {
try {
await db.insert(adapter.tables[relationshipsTableName]).values(relationsToInsert).returning()
} catch (e) {
console.log('ok');
}
}
})
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete],
tableName: relationshipsTableName,
})
}
if (relationsToInsert.length > 0) {
await db.insert(adapter.tables[relationshipsTableName]).values(relationsToInsert)
}
// //////////////////////////////////
// INSERT hasMany NUMBERS
// //////////////////////////////////
promises.push(async () => {
const numbersTableName = `${tableName}_numbers`
const numbersTableName = `${tableName}_numbers`
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: numbersToInsert,
tableName: numbersTableName,
})
}
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
localeColumnName: 'locale',
parentColumnName: 'parent',
parentID: insertedRow.id,
pathColumnName: 'path',
rows: numbersToInsert,
tableName: numbersTableName,
})
}
if (numbersToInsert.length > 0) {
await db.insert(adapter.tables[numbersTableName]).values(numbersToInsert).returning()
}
})
if (numbersToInsert.length > 0) {
await db.insert(adapter.tables[numbersTableName]).values(numbersToInsert).returning()
}
// //////////////////////////////////
// INSERT BLOCKS
@@ -190,105 +177,92 @@ export const upsertRow = async <T extends TypeWithID>({
const insertedBlockRows: Record<string, Record<string, unknown>[]> = {}
Object.entries(blocksToInsert).forEach(([blockName, blockRows]) => {
// For each block, push insert into promises to run parallel
promises.push(async () => {
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
parentID: insertedRow.id,
pathColumnName: '_path',
rows: blockRows.map(({ row }) => row),
tableName: `${tableName}_${blockName}`,
for (const [blockName, blockRows] of Object.entries(blocksToInsert)) {
if (operation === 'update') {
await deleteExistingRowsByPath({
adapter,
db,
parentID: insertedRow.id,
pathColumnName: '_path',
rows: blockRows.map(({ row }) => row),
tableName: `${tableName}_${blockName}`,
})
}
insertedBlockRows[blockName] = await db
.insert(adapter.tables[`${tableName}_${blockName}`])
.values(blockRows.map(({ row }) => row))
.returning()
insertedBlockRows[blockName].forEach((row, i) => {
blockRows[i].row = row
})
const blockLocaleIndexMap: number[] = []
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
}
})
}
insertedBlockRows[blockName] = await db
.insert(adapter.tables[`${tableName}_${blockName}`])
.values(blockRows.map(({ row }) => row))
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await db
.insert(adapter.tables[`${tableName}_${blockName}_locales`])
.values(blockLocaleRowsToInsert)
.returning()
}
insertedBlockRows[blockName].forEach((row, i) => {
blockRows[i].row = row
})
const blockLocaleIndexMap: number[] = []
const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => {
if (Object.entries(blockRow.locales).length > 0) {
Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => {
if (Object.keys(blockLocaleData).length > 0) {
blockLocaleData._parentID = blockRow.row.id
blockLocaleData._locale = blockLocale
acc.push(blockLocaleData)
blockLocaleIndexMap.push(i)
}
})
}
return acc
}, [])
if (blockLocaleRowsToInsert.length > 0) {
await db
.insert(adapter.tables[`${tableName}_${blockName}_locales`])
.values(blockLocaleRowsToInsert)
.returning()
}
await insertArrays({
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: insertedBlockRows[blockName],
})
await insertArrays({
adapter,
arrays: blockRows.map(({ arrays }) => arrays),
db,
parentRows: insertedBlockRows[blockName],
})
})
}
// //////////////////////////////////
// INSERT ARRAYS RECURSIVELY
// //////////////////////////////////
promises.push(async () => {
if (operation === 'update') {
await Promise.all(
Object.entries(rowToInsert.arrays).map(async ([arrayTableName]) => {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
})
}),
)
if (operation === 'update') {
for (const [arrayTableName] of Object.keys(rowToInsert.arrays)) {
await deleteExistingArrayRows({
adapter,
db,
parentID: insertedRow.id,
tableName: arrayTableName,
})
}
}
await insertArrays({
adapter,
arrays: [rowToInsert.arrays],
db,
parentRows: [insertedRow],
})
await insertArrays({
adapter,
arrays: [rowToInsert.arrays],
db,
parentRows: [insertedRow],
})
// //////////////////////////////////
// INSERT hasMany SELECTS
// //////////////////////////////////
promises.push(async () => {
await Promise.all(
Object.entries(selectsToInsert).map(async ([selectTableName, tableRows]) => {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await db.delete(selectTable).where(eq(selectTable.parent, insertedRow.id))
}
await db.insert(selectTable).values(tableRows).returning()
}),
)
})
await Promise.all(promises.map((promise) => promise()))
for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) {
const selectTable = adapter.tables[selectTableName]
if (operation === 'update') {
await db.delete(selectTable).where(eq(selectTable.parent, insertedRow.id))
}
await db.insert(selectTable).values(tableRows).returning()
}
// //////////////////////////////////
// RETRIEVE NEWLY UPDATED ROW

View File

@@ -58,28 +58,26 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
})
})
// Insert all corresponding arrays in parallel
// Insert all corresponding arrays
// (one insert per array table)
await Promise.all(
Object.entries(rowsByTable).map(async ([tableName, row]) => {
if (row.rows.length > 0) {
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
}
for (const [tableName, row] of Object.entries(rowsByTable)) {
if (row.rows.length > 0) {
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
}
// Insert locale rows
if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
}
// Insert locale rows
if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
}
// If there are sub arrays, call this function recursively
if (row.arrays.length > 0) {
await insertArrays({
adapter,
arrays: row.arrays,
db,
parentRows: row.rows,
})
}
}),
)
// If there are sub arrays, call this function recursively
if (row.arrays.length > 0) {
await insertArrays({
adapter,
arrays: row.arrays,
db,
parentRows: row.rows,
})
}
}
}

View File

@@ -7,7 +7,12 @@ import { promisify } from 'util'
import type { PayloadRequest } from '../../express/types'
import type { Document } from '../../types'
import type { AfterChangeHook, BeforeOperationHook, BeforeValidateHook, Collection, } from '../config/types'
import type {
AfterChangeHook,
BeforeOperationHook,
BeforeValidateHook,
Collection,
} from '../config/types'
import executeAccess from '../../auth/executeAccess'
import sendVerificationEmail from '../../auth/sendVerificationEmail'

View File

@@ -142,7 +142,7 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
// Delete Preferences
// /////////////////////////////////////
deleteUserPreferences({
await deleteUserPreferences({
collectionConfig,
ids: [id],
payload,
@@ -154,7 +154,7 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
// /////////////////////////////////////
if (collectionConfig.versions) {
deleteCollectionVersions({
await deleteCollectionVersions({
id,
payload,
req,

View File

@@ -11,9 +11,9 @@ type Args = {
payload: Payload
req: PayloadRequest
}
export const deleteUserPreferences = ({ collectionConfig, ids, payload, req }: Args) => {
export const deleteUserPreferences = async ({ collectionConfig, ids, payload, req }: Args) => {
if (collectionConfig.auth) {
payload.db.deleteMany({
await payload.db.deleteMany({
collection: 'payload-preferences',
req,
where: {
@@ -24,7 +24,7 @@ export const deleteUserPreferences = ({ collectionConfig, ids, payload, req }: A
},
})
}
payload.db.deleteMany({
await payload.db.deleteMany({
collection: 'payload-preferences',
req,
where: {

7
pnpm-lock.yaml generated
View File

@@ -306,6 +306,9 @@ importers:
drizzle-orm:
specifier: 0.28.5
version: 0.28.5(@libsql/client@0.3.4)(@types/pg@8.10.2)(better-sqlite3@8.6.0)(pg@8.11.3)
mongo-query-to-postgres-jsonb:
specifier: ^0.2.15
version: 0.2.15
pg:
specifier: 8.11.3
version: 8.11.3
@@ -10438,6 +10441,10 @@ packages:
resolution: {integrity: sha512-11Fkh6yzEmwx7O0YoLxeae0qEGFwmyPRlVxpg7oF9czOOCB/iCjdJrG5I67da5WiXK3YJCxoz9TJFE8Tfq/v9A==}
dev: false
/mongo-query-to-postgres-jsonb@0.2.15:
resolution: {integrity: sha512-LK7M85vnUz8j2EMy72Z3vfxRPGd+TM4leA0gBLccNgnNltfCNzM0FUUUH0wQPT/U3H2NtYKSe2RRHboSKzgbJw==}
dev: false
/mongodb-connection-string-url@2.6.0:
resolution: {integrity: sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==}
dependencies:

View File

@@ -8,6 +8,9 @@ type JSONField = {
}
const JSON: CollectionConfig = {
access: {
read: () => true,
},
fields: [
{
name: 'json',

View File

@@ -8,6 +8,9 @@ const RichTextFields: CollectionConfig = {
admin: {
useAsTitle: 'title',
},
access: {
read: () => true,
},
fields: [
{
name: 'title',