Chore/next poc merge main (#5204)
* wip moves payload, user and data into partial req
* chore: adjust req type
* chore(next): installs sass and resolves type errors
* feat: working login route/view
* fix: me route
* chore(next): scaffolds access routes (#4562)
* chore(next): scaffolds admin layout and dashboard view (#4566)
* chore(next): builds initPage utility (#4589)
* feat(3.0): next route handlers (#4590)
* chore: removes old files
* chore(next): ssr list view (#4594)
* chore: removes old files
* chore: adjusts graphql file imports to align with new operation exports
* chore: allows for custom endpoints
* chore: cleanup
* chore(next): ssr edit view (#4614)
* chore(ui): ssr main nav (#4619)
* chore(next): ssr account view (#4620)
* chore(next): ssr auth views and document create (#4631)
* chore(next): ssr globals view (#4640)
* chore(next): scaffolds document layout (#4644)
* chore(next): ssr versions view (#4645)
* chore(next): ssr field conditions (#4675)
* chore(next): ssr field validations (#4700)
* chore(next): moves dashboard view into next dir
* chore(next): moves account view into next dir
* chore(next): moves global edit view into next dir
* chore(next): returns isolated configs and locale from initPage
* chore(next): ssr api view (#4721)
* feat: adds i18n functionality within Rest API, Local and Client contexts (#4749)
* chore: separate client translation groups with empty line
* chore: add missing translation used in db adapters
* chore: simplify next/routes export and import paths
* chore: renames PayloadT to Payload
* chore(next): custom views (#4748)
* chore: fix translation tsconfig
* chore: adjust other package ts-configs that rely on translations
* chore(next): installs @payloadcms/ui as direct dependency
* chore(next): progress to build
* chore(next): migrates types (#4792)
* fixes accept-language detection
* chore(next): moves remaining components out from payload core (#4794)
* chore(deps): removes all unused dependencies from payload core (#4797)
* chore(next): achieves buildable state (#4803)
* adds Translation component and removes more react-i18next
* fixes up remaining translation strings
* fixes a few i18n TODO's
* chore: remaining translation strings without colons
* chore: adds missing ja translations
* chore(next): ssr group field (#4830)
* chore: removes placeholder t function
* chore: removes old file
* chore(bundler-webpack): removes webpack bundler
* chore(bundler-vite): removes vite bundler
* chore(next): ssr tabs field (#4863)
* chore(next): ssr row field
* chore(next): ssr textarea field
* chore(next): wires server action into document edit view (#4873)
* chore(next): conditional logic (#4880)
* chore(next): ssr radio, point, code, json, ui, and hidden fields (#4891)
* chore(next): ssr collapsible field (#4894)
* chore: remove findByID from req
* chore: adjusts file property on request type
* comment clarification
* chore: wires up busboy with Request readstream
* chore: ports over express-fileupload into a NextJS compatible format
* chore: adjust upload file structure
* chore: adds try/catch around routes, corrects a few route responses
* chore: renames file/function
* chore: improve req type safety in local operations, misc req.files replacements
* chore: misc type and fn export changes
* chore: ensures root routes pass the unmodified request to root routes
* chore: improve types
* chore: consolidates locale api req initialization (#4922)
* chore(next): overhauls field rendering strategy (#4924)
* chore(next): ssr array field (#4937)
* chore(next): ssr blocks field (#4942)
* chore(next): ssr upload field and document drawer (#4957)
* chore(next): wires form submissions (#4982)
* chore: api handler adjustments
* feat: adds graphql playground handler
* adds credentials include setting to playground
* remove old playground init, stub graphql handler location
* fix: allow for null fallbackLocale
* fix: correctly prioritize locales passed as null
* chore: move all graphql code into next package
* graphql changes
* chore: semi-working version of graphql http layer
* gql fix attempts
* rm console log
* chore: partial gql changes
* chore: adds gql and gql-http back into payload
* chore: removes collection from req
* chore: separates graphql package out for schema generation
* chore: dep cleanup
* chore: move graphql handlers
* chore: removes unused deps
* chore(next): ssr list view (#5032)
* chore: refactor response handler order for custom endpoints
* chore: add back in condition for collection GET path with 2 slugs
* chore: rm optional chain
* chore: import sort route file
* chore: allows custom endpoints to attempt before erroring
* feat: adds memoization to translation functions (#5036)
* chore: fix APIError import
* chore: return attemptCustomEndpointBeforeError responses
* chore(next): properly instantiates table columns
* fix(next): attaches params to req and properly assigns prefs key (#5042)
* chore: reorganize next route order
* chore(next): adds RouteError handler to next routes
* chore: builds payload successfully
* chore: misc file omissions
* fix(ui): maintains proper column order
* fix(ui): ensures first cell is a link
* fix(next): properly copies url object in createPayloadRequest (#5064)
* fix(ui): bumps react-toastify to v10.0.4 to fix hydration warnings
* feat: add route for static file GET requests (#5065)
* chore(next): allows resolved config promise to be threaded through initPage (#5071)
* chore(ui): conditionally renders field label from props
* feat(next): next install script
* chore: pass config to route handlers
* feat: initial test suite framework (#4929)
* chore(next): renderable account, api, and create first user views (#5084)
* fix(next): properly parses search params in find, update, and delete handlers (#5088)
* chore(next): ssr versions view (#5085)
* chore: adds homepage for scss testing
* chore: moves dev folder to top, establishes new test pattern
* chore: working turbopack
* chore: sets up working dynamic payload-config imports
* remove unused code
* chore: rm console log
* misc
* feat: correctly subs out ability to boot REST API within same process
* chore: WIP dev suites
* chore: removes need for REST_API folder in test dir
* removes duplicate bootAdminPanel fn
* misc
* specify default export
* chore: sets up jest to work with next/jest
* chore: progress to mongodb and sharp builds
* chore: passing community tests
* chore: sorta workin
* chore: adjust payload-config import
* chore: adds rest client for Next handlers
* chore: removes test garb
* chore: restores payload-config tsconfig path temporarily
* chore: establishes pattern for memory db during tests
* chore: bumps mongoose to 7
* chore(next): 404s on nested create urls
* chore: functional _community e2e
* chore: increases e2e expect timeout
* fix(next): sanitizes locale toString from client config
* chore: type fixes
* chore: pulls mongodb from main
* chore: uses graphql to log user in
* feat: passing auth test suite
* chore(ui): threads params through context and conditionally renders document tabs (#5094)
* feat(ui): adds params context (#5095)
* chore: removes unnecessary memory allocation for urlPropertiesObject object
* chore: passing graphql test suite
* chore: removes references to bson
* chore: re-enables mongodb memory server for auth test suite
* chore: replace bson with bson-objectid
* feat: passing collections-rest int suite
* chore: fixes bad imports
* chore: more passing int suites
* feat: passing globals int tests
* feat: passing hooks int test suite
* chore: remove last express file
* chore: start live-preview int test migration
* chore: passing localization int tests
* passing relationships int tests
* chore: partial passing upload int tests
* chore: fixes scss imports
* chore(ui): renders document info provider at root (#5106)
* chore: adds schema path to useFieldPath provider, more passing tests
* chore: begins work to optimize translation imports
* chore: add translations to ui ts-config references
* chore: add exports folder to package json exports
* chore: adds readme how-to-use instructions
* chore: attempts refactor of translation imports
* chore: adds authentication:account translation key to server keys
* chore: finishes translation optimization
* chore: ignores warnings from mongodb
* chore(ui): renders live document title (#5115)
* chore(ui): ssr document tabs (#5116)
* chore: handles redirecting from login
* chore: handle redirect with no searchParams
* chore: handle missing segments
* chore(next): migrates server action into standalone api endpoint (#5122)
* chore: adjust dashboard collection segments
* test: update e2e suites
* fix(ui): prevents unnecessary calls to form state
* chore: fix finding global config fields from schema path
* fix(next): executes root POST endpoints
* chore(ui): ignores values returned by form state polling
* chore: scaffolds ssr rte
* chore: renders client leaves
* chore: server-side rendered rich text elements
* chore: defines ClientFunction pattern
* chore(ui): migrates relationship field
* chore: adds translations, cleans up slate
* chore: functional slate link
* chore: slate upload ssr
* chore: relationship slate ssr
* chore: remaining slate ssr
* chore: fixes circular workspace dep
* chore: correct broken int test import paths
* chore: remove media files from root
* chore: server renders custom edit view
* fix(ui): resolves infinite loading in versions view
* fix(next): resolves global edit view lookup
* chore: payload builds
* chore: delete unused files
* chore: removes local property from payload
* chore: adds mongodb as dev dep in db-mongodb package
* chore: hide deprecation warnings for tempfile and jest-environment-jsdom
* chore: remove all translations from translations dist
* chore: clean ts-config files
* chore: simple type fixes
* chore(ui): server renders custom list view
* chore: fix next config payload-config alias
* chore: adds turbo alias paths
* chore: adjusts translation generation
* chore: improve auth function
* chore: eslint config for packages/ui
* chore(ui): exports FormState
* chore(next): migrates account view to latest patterns
* chore: disable barbie mode
* chore(ui): lints
* chore(next): lints
* chore: for alexical
* chore: custom handler type signature adjustment
* fix: non-boolean condition result causes infinite looping (#4579)
* chore(richtext-lexical): upgrade lexical from v0.12.5 to v0.12.6 (#4732)
* chore(richtext-lexical): upgrade all lexical packages from 0.12.5 to 0.12.6
* fix(richtext-lexical): fix TypeScript errors
* fix indenting
* feat(richtext-lexical): Blocks: generate type definitions for blocks fields (#4529)
* feat(richtext-lexical)!: Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
* feat(richtext-lexical): Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground
* chore: upgrade lexical version used in monorepo
* chore: remove the 3
* chore: upgrade nodemon versions (#5059)
* feat: add more options to addFieldStatePromise so that it can be used for field flattening (#4799)
* feat(plugin-seo)!: remove support for payload <2.7.0 (#4765)
* chore(plugin-seo): remove test script from package.json (#4762)
* chore: upgrade @types/nodemailer from v6.4.8 to v6.4.14 (#4733)
* chore: revert auth and initPage changes
* chore(next): moves edit and list views (#5170)
* fix: "The punycode module is deprecated" warning by updating nodemailer
* chore: adjust translations tsconfig paths in root
* chore: fix merge build

---------

Co-authored-by: Jarrod Flesch <jarrodmflesch@gmail.com>
Co-authored-by: Jacob Fletcher <jacobsfletch@gmail.com>
Co-authored-by: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com>
Co-authored-by: Elliot DeNolf <denolfe@gmail.com>
Co-authored-by: James <james@trbl.design>
Co-authored-by: Alessio Gravili <alessio@gravili.de>
Co-authored-by: Alessio Gravili <70709113+AlessioGr@users.noreply.github.com>
@@ -1 +0,0 @@
export const postgresAdapter = () => ({})
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "0.2.1",
"version": "0.7.0",
"description": "The officially supported Postgres database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -22,8 +22,8 @@
"dependencies": {
"@libsql/client": "^0.3.1",
"console-table-printer": "2.11.2",
"drizzle-kit": "0.19.13-e99bac1",
"drizzle-orm": "0.28.5",
"drizzle-kit": "0.20.14-1f2c838",
"drizzle-orm": "0.29.3",
"pg": "8.11.3",
"prompts": "2.4.2",
"to-snake-case": "1.0.0",

@@ -1,13 +1,51 @@
import type { Payload } from 'payload'
import type { Connect } from 'payload/database'

import { eq, sql } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { numeric, pgTable, timestamp, varchar } from 'drizzle-orm/pg-core'
import { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'
import { Pool } from 'pg'
import prompts from 'prompts'

import type { PostgresAdapter } from './types'

const connectWithReconnect = async function ({
adapter,
payload,
reconnect = false,
}: {
adapter: PostgresAdapter
payload: Payload
reconnect?: boolean
}) {
let result

if (!reconnect) {
result = await adapter.pool.connect()
} else {
try {
result = await adapter.pool.connect()
} catch (err) {
setTimeout(() => {
payload.logger.info('Reconnecting to postgres')
void connectWithReconnect({ adapter, payload, reconnect: true })
}, 1000)
}
}
if (!result) {
return
}
result.prependListener('error', (err) => {
try {
if (err.code === 'ECONNRESET') {
void connectWithReconnect({ adapter, payload, reconnect: true })
}
} catch (err) {
// swallow error
}
})
}

export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
this.schema = {
...this.tables,
@@ -17,13 +55,19 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa

try {
this.pool = new Pool(this.poolOptions)
await this.pool.connect()
await connectWithReconnect({ adapter: this, payload })

this.drizzle = drizzle(this.pool, { schema: this.schema })
const logger = this.logger || false

this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING TABLES ----')
await this.drizzle.execute(sql`drop schema public cascade;
create schema public;`)
this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
await this.drizzle.execute(
sql.raw(`
drop schema if exists ${this.schemaName || 'public'} cascade;
create schema ${this.schemaName || 'public'};
`),
)
this.payload.logger.info('---- DROPPED TABLES ----')
}
} catch (err) {
@@ -39,7 +83,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
)
return

const { pushSchema } = require('drizzle-kit/utils')
const { pushSchema } = require('drizzle-kit/payload')

// This will prompt if clarifications are needed for Drizzle to push new schema
const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(
@@ -59,9 +103,9 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
const { confirm: acceptWarnings } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message,
type: 'confirm',
},
{
onCancel: () => {
@@ -80,7 +124,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
await apply()

// Migration table def in order to use query using drizzle
const migrationsSchema = pgTable('payload_migrations', {
const migrationsSchema = this.pgSchema.table('payload_migrations', {
name: varchar('name'),
batch: numeric('batch'),
created_at: timestamp('created_at'),

@@ -1,5 +1,5 @@
/* eslint-disable no-restricted-syntax, no-await-in-loop */
import type { DrizzleSnapshotJSON } from 'drizzle-kit/utils'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'
import type { CreateMigration } from 'payload/database'

import fs from 'fs'
@@ -53,14 +53,14 @@ const getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({

export const createMigration: CreateMigration = async function createMigration(
this: PostgresAdapter,
{ migrationName, payload },
{ forceAcceptWarning, migrationName, payload },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}

const { generateDrizzleJson, generateMigration } = require('drizzle-kit/utils')
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')

const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
@@ -95,13 +95,13 @@ export const createMigration: CreateMigration = async function createMigration(
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)

if (!sqlStatementsUp.length && !sqlStatementsDown.length) {
if (!sqlStatementsUp.length && !sqlStatementsDown.length && !forceAcceptWarning) {
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
type: 'confirm',
},
{
onCancel: () => {

@@ -33,6 +33,16 @@ export const buildFindManyArgs = ({
},
}

if (adapter.tables[`${tableName}_texts`]) {
result.with._texts = {
columns: {
id: false,
parent: false,
},
orderBy: ({ order }, { asc: ASC }) => [ASC(order)],
}
}

if (adapter.tables[`${tableName}_numbers`]) {
result.with._numbers = {
columns: {

@@ -158,13 +158,13 @@ export const findMany = async function find({
query: db
.select({
count: sql<number>`count
(*)`,
(DISTINCT ${adapter.tables[tableName].id})`,
})
.from(table)
.where(where),
})
totalDocs = Number(countResult[0].count)
totalPages = typeof limit === 'number' ? Math.ceil(totalDocs / limit) : 1
totalPages = typeof limit === 'number' && limit !== 0 ? Math.ceil(totalDocs / limit) : 1
hasPrevPage = page > 1
hasNextPage = totalPages > page
pagingCounter = (page - 1) * limit + 1

@@ -78,7 +78,7 @@ export const traverseFields = ({
with: {},
}

const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`
const arrayTableName = `${currentTableName}_${path}${toSnakeCase(field.name)}`

if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales
currentArgs.with[`${path}${field.name}`] = withArray
@@ -128,15 +128,16 @@ export const traverseFields = ({
with: {},
}

if (adapter.tables[`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}_locales`])
withBlock.with._locales = _locales
const tableName = `${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`

if (adapter.tables[`${tableName}_locales`]) withBlock.with._locales = _locales
topLevelArgs.with[blockKey] = withBlock

traverseFields({
_locales,
adapter,
currentArgs: withBlock,
currentTableName,
currentTableName: tableName,
depth,
fields: block.fields,
path: '',

@@ -43,6 +43,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter> {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(args.migrationDir)
const idType = args.idType || 'serial'

return createDatabaseAdapter<PostgresAdapter>({
name: 'postgres',
@@ -51,11 +52,15 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
drizzle: undefined,
enums: {},
fieldConstraints: {},
idType,
logger: args.logger,
pgSchema: undefined,
pool: undefined,
poolOptions: args.pool,
push: args.push,
relations: {},
schema: {},
schemaName: args.schemaName,
sessions: {},
tables: {},

@@ -68,7 +73,10 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
createGlobalVersion,
createMigration,
createVersion,
defaultIDType: 'number',
/**
* This represents how a default ID is treated in Payload as were a field type
*/
defaultIDType: idType === 'serial' ? 'number' : 'text',
deleteMany,
deleteOne,
deleteVersions,

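For orientation, a minimal configuration sketch showing how the adapter options surfaced in this diff (idType, schemaName, logger, push) might be passed from a Payload config. The exact option shapes live in src/types.ts, so treat the names and values below as assumptions to verify, not as the definitive API.

import { postgresAdapter } from '@payloadcms/db-postgres'
import { buildConfig } from 'payload/config'

export default buildConfig({
  // ...collections, admin, etc.
  db: postgresAdapter({
    pool: { connectionString: process.env.DATABASE_URI }, // forwarded to the pg Pool via poolOptions
    idType: 'serial', // or 'uuid'; drives defaultIDType ('number' vs 'text')
    schemaName: 'payload', // optional Postgres schema; falls back to 'public'
    logger: true, // passed through to drizzle({ logger })
    push: true, // dev-mode schema push via drizzle-kit
  }),
})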
@@ -2,7 +2,7 @@
import type { Init } from 'payload/database'
import type { SanitizedCollectionConfig } from 'payload/types'

import { pgEnum } from 'drizzle-orm/pg-core'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'
import toSnakeCase from 'to-snake-case'

@@ -11,6 +11,12 @@ import type { PostgresAdapter } from './types'
import { buildTable } from './schema/build'

export const init: Init = async function init(this: PostgresAdapter) {
if (this.schemaName) {
this.pgSchema = pgSchema(this.schemaName)
} else {
this.pgSchema = { table: pgTable }
}

if (this.payload.config.localization) {
this.enums.enum__locales = pgEnum(
'_locales',
@@ -25,6 +31,7 @@ export const init: Init = async function init(this: PostgresAdapter) {
adapter: this,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection?.versions?.drafts,
disableUnique: false,
fields: collection.fields,
@@ -40,6 +47,7 @@ export const init: Init = async function init(this: PostgresAdapter) {
adapter: this,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection.versions?.drafts,
disableUnique: true,
fields: versionFields,
@@ -56,6 +64,7 @@ export const init: Init = async function init(this: PostgresAdapter) {
adapter: this,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global?.versions?.drafts,
disableUnique: false,
fields: global.fields,
@@ -71,6 +80,7 @@ export const init: Init = async function init(this: PostgresAdapter) {
adapter: this,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global.versions?.drafts,
disableUnique: true,
fields: versionFields,

@@ -1,8 +1,12 @@
/* eslint-disable no-restricted-syntax, no-await-in-loop */
import type { Payload } from 'payload'
import type { Migration } from 'payload/database'
import type { PayloadRequest } from 'payload/dist/express/types'

import { readMigrationFiles } from 'payload/database'
import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
import { initTransaction } from 'payload/dist/utilities/initTransaction'
import { killTransaction } from 'payload/dist/utilities/killTransaction'
import prompts from 'prompts'

import type { PostgresAdapter } from './types'
@@ -35,18 +39,18 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
latestBatch = Number(migrationsInDB[0]?.batch)
}
} else {
await createMigrationTable(this.drizzle)
await createMigrationTable(this)
}

if (migrationsInDB.find((m) => m.batch === -1)) {
const { confirm: runMigrations } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message:
"It looks like you've run Payload in dev mode, meaning you've dynamically pushed changes to your database.\n\n" +
"If you'd like to run migrations, data loss will occur. Would you like to proceed?",
type: 'confirm',
},
{
onCancel: () => {
@@ -76,9 +80,10 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
}

async function runMigrationFile(payload: Payload, migration: Migration, batch: number) {
const { generateDrizzleJson } = require('drizzle-kit/utils')
const { generateDrizzleJson } = require('drizzle-kit/payload')

const start = Date.now()
const req = { payload } as PayloadRequest

payload.logger.info({ msg: `Migrating: ${migration.name}` })

@@ -86,7 +91,8 @@ async function runMigrationFile(payload: Payload, migration: Migration, batch: n
const drizzleJSON = generateDrizzleJson(pgAdapter.schema)

try {
await migration.up({ payload })
await initTransaction(req)
await migration.up({ payload, req })
payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })
await payload.create({
collection: 'payload-migrations',
@@ -95,8 +101,11 @@ async function runMigrationFile(payload: Payload, migration: Migration, batch: n
batch,
schema: drizzleJSON,
},
req,
})
await commitTransaction(req)
} catch (err: unknown) {
await killTransaction(req)
payload.logger.error({
err,
msg: parseError(err, `Error running migration ${migration.name}`),

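Because migrations now run inside initTransaction/commitTransaction and are invoked as migration.up({ payload, req }), a migration file can thread that same req through Local API calls so its writes join the surrounding transaction. A minimal sketch, assuming the MigrateUpArgs/MigrateDownArgs types re-exported by this package and a hypothetical collection slug:

import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // passing req keeps this write inside the migration's transaction
  await payload.create({
    collection: 'examples', // hypothetical slug
    data: { title: 'seeded during migration' },
    req,
  })
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  await payload.delete({
    collection: 'examples', // hypothetical slug
    req,
    where: { title: { equals: 'seeded during migration' } },
  })
}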
@@ -2,6 +2,9 @@
import type { PayloadRequest } from 'payload/types'

import { getMigrations, readMigrationFiles } from 'payload/database'
import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
import { initTransaction } from 'payload/dist/utilities/initTransaction'
import { killTransaction } from 'payload/dist/utilities/killTransaction'

import type { PostgresAdapter } from './types'

@@ -25,19 +28,21 @@ export async function migrateDown(this: PostgresAdapter): Promise<void> {
msg: `Rolling back batch ${latestBatch} consisting of ${existingMigrations.length} migration(s).`,
})

for (const migration of existingMigrations) {
const latestBatchMigrations = existingMigrations.filter(({ batch }) => batch === latestBatch)

for (const migration of latestBatchMigrations) {
const migrationFile = migrationFiles.find((m) => m.name === migration.name)
if (!migrationFile) {
throw new Error(`Migration ${migration.name} not found locally.`)
}

const start = Date.now()
let transactionID
const req = { payload } as PayloadRequest

try {
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
transactionID = await this.beginTransaction()
await migrationFile.down({ payload })
await initTransaction(req)
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})
@@ -47,15 +52,13 @@ export async function migrateDown(this: PostgresAdapter): Promise<void> {
await payload.delete({
id: migration.id,
collection: 'payload-migrations',
req: {
transactionID,
} as PayloadRequest,
req,
})
}

await this.commitTransaction(transactionID)
await commitTransaction(req)
} catch (err: unknown) {
await this.rollbackTransaction(transactionID)
await killTransaction(req)

payload.logger.error({
err,

@@ -2,6 +2,9 @@ import type { PayloadRequest } from 'payload/types'

import { sql } from 'drizzle-orm'
import { readMigrationFiles } from 'payload/database'
import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
import { initTransaction } from 'payload/dist/utilities/initTransaction'
import { killTransaction } from 'payload/dist/utilities/killTransaction'
import prompts from 'prompts'

import type { PostgresAdapter } from './types'
@@ -11,61 +14,67 @@ import { parseError } from './utilities/parseError'
/**
* Drop the current database and run all migrate up functions
*/
export async function migrateFresh(this: PostgresAdapter): Promise<void> {
export async function migrateFresh(
this: PostgresAdapter,
{ forceAcceptWarning = false },
): Promise<void> {
const { payload } = this

const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
type: 'confirm',
},
{
onCancel: () => {
process.exit(0)
if (forceAcceptWarning === false) {
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
},
)
{
onCancel: () => {
process.exit(0)
},
},
)

if (!acceptWarning) {
process.exit(0)
if (!acceptWarning) {
process.exit(0)
}
}

payload.logger.info({
msg: `Dropping database.`,
})

await this.drizzle.execute(sql`drop schema public cascade;\ncreate schema public;`)
await this.drizzle.execute(
sql.raw(`drop schema ${this.schemaName || 'public'} cascade;
create schema ${this.schemaName || 'public'};`),
)

const migrationFiles = await readMigrationFiles({ payload })
payload.logger.debug({
msg: `Found ${migrationFiles.length} migration files.`,
})

let transactionID
const req = { payload } as PayloadRequest
// Run all migrate up
for (const migration of migrationFiles) {
payload.logger.info({ msg: `Migrating: ${migration.name}` })
try {
const start = Date.now()
transactionID = await this.beginTransaction()
await migration.up({ payload })
await initTransaction(req)
await migration.up({ payload, req })
await payload.create({
collection: 'payload-migrations',
data: {
name: migration.name,
batch: 1,
},
req: {
transactionID,
} as PayloadRequest,
req,
})
await this.commitTransaction(transactionID)
await commitTransaction(req)

payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })
} catch (err: unknown) {
await this.rollbackTransaction(transactionID)
await killTransaction(req)
payload.logger.error({
err,
msg: parseError(err, `Error running migration ${migration.name}. Rolling back`),

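migrateFresh now accepts { forceAcceptWarning } so the drop-and-rerun can skip the interactive prompt. A hedged sketch of a programmatic call, assuming the adapter method is reached through payload.db; verify how your own scripts or CLI wrappers expose it:

// drops the configured schema and reruns every migration without prompting
await payload.db.migrateFresh({ forceAcceptWarning: true })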
@@ -2,7 +2,9 @@
import type { PayloadRequest } from 'payload/types'

import { getMigrations, readMigrationFiles } from 'payload/database'
import { DatabaseError } from 'pg'
import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
import { initTransaction } from 'payload/dist/utilities/initTransaction'
import { killTransaction } from 'payload/dist/utilities/killTransaction'

import type { PostgresAdapter } from './types'

@@ -29,7 +31,7 @@ export async function migrateRefresh(this: PostgresAdapter) {
msg: `Rolling back batch ${latestBatch} consisting of ${existingMigrations.length} migration(s).`,
})

let transactionID
const req = { payload } as PayloadRequest

// Reverse order of migrations to rollback
existingMigrations.reverse()
@@ -43,8 +45,8 @@ export async function migrateRefresh(this: PostgresAdapter) {

payload.logger.info({ msg: `Migrating down: ${migration.name}` })
const start = Date.now()
transactionID = await this.beginTransaction()
await migrationFile.down({ payload })
await initTransaction(req)
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migration.name} (${Date.now() - start}ms)`,
})
@@ -53,9 +55,7 @@ export async function migrateRefresh(this: PostgresAdapter) {
if (tableExists) {
await payload.delete({
collection: 'payload-migrations',
req: {
transactionID,
} as PayloadRequest,
req,
where: {
name: {
equals: migration.name,
@@ -63,8 +63,9 @@ export async function migrateRefresh(this: PostgresAdapter) {
},
})
}
await commitTransaction(req)
} catch (err: unknown) {
await this.rollbackTransaction(transactionID)
await killTransaction(req)
payload.logger.error({
err,
msg: parseError(err, `Error running migration ${migration.name}. Rolling back.`),
@@ -78,23 +79,21 @@ export async function migrateRefresh(this: PostgresAdapter) {
payload.logger.info({ msg: `Migrating: ${migration.name}` })
try {
const start = Date.now()
transactionID = await this.beginTransaction()
await migration.up({ payload })
await initTransaction(req)
await migration.up({ payload, req })
await payload.create({
collection: 'payload-migrations',
data: {
name: migration.name,
executed: true,
},
req: {
transactionID,
} as PayloadRequest,
req,
})
await this.commitTransaction(transactionID)
await commitTransaction(req)

payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })
} catch (err: unknown) {
await this.rollbackTransaction(transactionID)
await killTransaction(req)
payload.logger.error({
err,
msg: parseError(err, `Error running migration ${migration.name}. Rolling back.`),

@@ -2,6 +2,9 @@
import type { PayloadRequest } from 'payload/types'

import { getMigrations, readMigrationFiles } from 'payload/database'
import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
import { initTransaction } from 'payload/dist/utilities/initTransaction'
import { killTransaction } from 'payload/dist/utilities/killTransaction'

import type { PostgresAdapter } from './types'

@@ -21,10 +24,10 @@ export async function migrateReset(this: PostgresAdapter): Promise<void> {
return
}

const req = { payload } as PayloadRequest

// Rollback all migrations in order
for (const migration of existingMigrations) {
let transactionID

const migrationFile = migrationFiles.find((m) => m.name === migration.name)
try {
if (!migrationFile) {
@@ -33,8 +36,8 @@ export async function migrateReset(this: PostgresAdapter): Promise<void> {

const start = Date.now()
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
transactionID = await this.beginTransaction()
await migrationFile.down({ payload })
await initTransaction(req)
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})
@@ -44,19 +47,17 @@ export async function migrateReset(this: PostgresAdapter): Promise<void> {
await payload.delete({
id: migration.id,
collection: 'payload-migrations',
req: {
transactionID,
} as PayloadRequest,
req,
})
}

await this.commitTransaction(transactionID)
await commitTransaction(req)
} catch (err: unknown) {
let msg = `Error running migration ${migrationFile.name}.`

if (err instanceof Error) msg += ` ${err.message}`

await this.rollbackTransaction(transactionID)
await killTransaction(req)
payload.logger.error({
err,
msg,

@@ -1,4 +1,5 @@
import type { SQL } from 'drizzle-orm'
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
import type { Field, Where } from 'payload/types'

import { asc, desc } from 'drizzle-orm'
@@ -12,7 +13,7 @@ export type BuildQueryJoins = Record<string, SQL>

export type BuildQueryJoinAliases = {
condition: SQL
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
}[]

type BuildQueryArgs = {
@@ -75,6 +76,7 @@ const buildQuery = async function buildQuery({
pathSegments: sortPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: sortPath,
})
orderBy.column = sortTable?.[sortTableColumnName]
} catch (err) {

@@ -1,8 +1,9 @@
/* eslint-disable no-param-reassign */
import type { SQL } from 'drizzle-orm'
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'

import { and, eq, sql } from 'drizzle-orm'
import { and, eq, like, sql } from 'drizzle-orm'
import { alias } from 'drizzle-orm/pg-core'
import { APIError } from 'payload/errors'
import { fieldAffectsData, tabHasName } from 'payload/types'
@@ -15,7 +16,7 @@ import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery'

type Constraint = {
columnName: string
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
value: unknown
}

@@ -26,12 +27,12 @@ type TableColumn = {
getNotNullColumnByValue?: (val: unknown) => string
pathSegments?: string[]
rawColumn?: SQL
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
}

type Args = {
adapter: PostgresAdapter
aliasTable?: GenericTable
aliasTable?: GenericTable | PgTableWithColumns<any>
collectionPath: string
columnPrefix?: string
constraintPath?: string
@@ -44,6 +45,14 @@ type Args = {
rootTableName?: string
selectFields: Record<string, GenericColumn>
tableName: string
/**
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
*/
tableNameSuffix?: string
/**
* The raw value of the query before sanitization
*/
value: unknown
}
/**
* Transforms path to table and column name
@@ -65,6 +74,8 @@ export const getTableColumnFromPath = ({
rootTableName: incomingRootTableName,
selectFields,
tableName,
tableNameSuffix = '',
value,
}: Args): TableColumn => {
const fieldPath = incomingSegments[0]
let locale = incomingLocale
@@ -83,8 +94,8 @@ export const getTableColumnFromPath = ({
constraints,
field: {
name: 'id',
type: 'number',
},
type: adapter.idType === 'uuid' ? 'text' : 'number',
} as TextField | NumberField,
table: adapter.tables[newTableName],
}
}
@@ -125,6 +136,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}
case 'tab': {
@@ -134,7 +147,7 @@ export const getTableColumnFromPath = ({
aliasTable,
collectionPath,
columnPrefix: `${columnPrefix}${field.name}_`,
constraintPath,
constraintPath: `${constraintPath}${field.name}.`,
constraints,
fields: field.fields,
joinAliases,
@@ -144,6 +157,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}
return getTableColumnFromPath({
@@ -161,6 +176,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}

@@ -185,7 +202,7 @@ export const getTableColumnFromPath = ({
aliasTable,
collectionPath,
columnPrefix: `${columnPrefix}${field.name}_`,
constraintPath,
constraintPath: `${constraintPath}${field.name}.`,
constraints,
fields: field.fields,
joinAliases,
@@ -195,11 +212,13 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}

case 'array': {
newTableName = `${tableName}_${toSnakeCase(field.name)}`
newTableName = `${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`
constraintPath = `${constraintPath}${field.name}.%.`
if (locale && field.localized && adapter.payload.config.localization) {
joins[newTableName] = and(
@@ -232,12 +251,39 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
value,
})
}

case 'blocks': {
let blockTableColumn: TableColumn
let newTableName: string

// handle blockType queries
if (pathSegments[1] === 'blockType') {
// find the block config using the value
const blockTypes = Array.isArray(value) ? value : [value]
blockTypes.forEach((blockType) => {
const block = field.blocks.find((block) => block.slug === blockType)
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
joins[newTableName] = eq(
adapter.tables[tableName].id,
adapter.tables[newTableName]._parentID,
)
constraints.push({
columnName: '_path',
table: adapter.tables[newTableName],
value: pathSegments[0],
})
})
return {
constraints,
field,
getNotNullColumnByValue: () => 'id',
table: adapter.tables[tableName],
}
}

const hasBlockField = field.blocks.some((block) => {
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
constraintPath = `${constraintPath}${field.name}.%.`
@@ -258,6 +304,7 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields: blockSelectFields,
tableName: newTableName,
value,
})
} catch (error) {
// this is fine, not every block will have the field
@@ -298,9 +345,6 @@ export const getTableColumnFromPath = ({
table: blockTableColumn.table,
}
}
if (pathSegments[1] === 'blockType') {
throw new APIError('Querying on blockType is not supported')
}
break
}

@@ -317,21 +361,15 @@ export const getTableColumnFromPath = ({

// Join in the relationships table
joinAliases.push({
condition: eq(
(aliasTable || adapter.tables[rootTableName]).id,
aliasRelationshipTable.parent,
condition: and(
eq((aliasTable || adapter.tables[rootTableName]).id, aliasRelationshipTable.parent),
like(aliasRelationshipTable.path, `${constraintPath}${field.name}`),
),
table: aliasRelationshipTable,
})

selectFields[`${relationTableName}.path`] = aliasRelationshipTable.path

constraints.push({
columnName: 'path',
table: aliasRelationshipTable,
value: `${constraintPath}${field.name}`,
})

let newAliasTable

if (typeof field.relationTo === 'string') {
@@ -346,7 +384,7 @@ export const getTableColumnFromPath = ({
table: newAliasTable,
})

if (newCollectionPath === '') {
if (newCollectionPath === '' || newCollectionPath === 'id') {
return {
columnName: `${field.relationTo}ID`,
constraints,
@@ -394,6 +432,7 @@ export const getTableColumnFromPath = ({
rootTableName: newTableName,
selectFields,
tableName: newTableName,
value,
})
}

@@ -428,7 +467,7 @@ export const getTableColumnFromPath = ({
columnName: `${columnPrefix}${field.name}`,
constraints,
field,
pathSegments: pathSegments,
pathSegments,
table: targetTable,
}
}

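The blocks case above now resolves pathSegments[1] === 'blockType' into joins and constraints instead of throwing "Querying on blockType is not supported", so a where clause can filter documents by which block is present. A hedged example with hypothetical collection and field names:

// find pages whose `layout` blocks field contains a `cta` block
const pages = await payload.find({
  collection: 'pages', // hypothetical slug
  where: {
    'layout.blockType': { equals: 'cta' }, // value matches a block slug configured on the field
  },
})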
@@ -63,11 +63,7 @@ export async function parseParams({
where: condition,
})
if (builtConditions.length > 0) {
if (result) {
result = operatorMap[conditionOperator](result, ...builtConditions)
} else {
result = operatorMap[conditionOperator](...builtConditions)
}
result = operatorMap[conditionOperator](...builtConditions)
}
} else {
// It's a path - and there can be multiple comparisons on a single path.
@@ -77,6 +73,7 @@ export async function parseParams({
if (typeof pathOperators === 'object') {
for (const operator of Object.keys(pathOperators)) {
if (validOperators.includes(operator as Operator)) {
const val = where[relationOrPath][operator]
const {
columnName,
constraints: queryConstraints,
@@ -95,10 +92,9 @@ export async function parseParams({
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: val,
})

const val = where[relationOrPath][operator]

queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
if (typeof value === 'string' && value.indexOf('%') > -1) {
constraints.push(operatorMap.like(constraintTable[col], value))
@@ -107,7 +103,11 @@ export async function parseParams({
}
})

if (['json', 'richText'].includes(field.type) && Array.isArray(pathSegments)) {
if (
['json', 'richText'].includes(field.type) &&
Array.isArray(pathSegments) &&
pathSegments.length > 1
) {
const segments = pathSegments.slice(1)
segments.unshift(table[columnName].name)

@@ -121,12 +121,28 @@ export async function parseParams({
})

constraints.push(sql.raw(jsonQuery))
break
}

if (field.type === 'json') {
const jsonQuery = convertPathToJSONTraversal(pathSegments)
constraints.push(sql.raw(`${table[columnName].name}${jsonQuery} = '%${val}%'`))
const jsonQuery = convertPathToJSONTraversal(pathSegments)
const operatorKeys = {
contains: { operator: 'ilike', wildcard: '%' },
equals: { operator: '=', wildcard: '' },
exists: { operator: val === true ? 'is not null' : 'is null' },
like: { operator: 'like', wildcard: '%' },
not_equals: { operator: '<>', wildcard: '' },
}
let formattedValue = `'${operatorKeys[operator].wildcard}${val}${operatorKeys[operator].wildcard}'`

if (operator === 'exists') {
formattedValue = ''
}

constraints.push(
sql.raw(
`${table[columnName].name}${jsonQuery} ${operatorKeys[operator].operator} ${formattedValue}`,
),
)

break
}
@@ -149,6 +165,7 @@ export async function parseParams({
}

const sanitizedQueryValue = sanitizeQueryValue({
adapter,
field,
operator,
relationOrPath,
@@ -187,6 +204,16 @@ export async function parseParams({
break
}

if (operator === 'equals' && queryValue === null) {
constraints.push(isNull(rawColumn || table[columnName]))
break
}

if (operator === 'not_equals' && queryValue === null) {
constraints.push(isNotNull(rawColumn || table[columnName]))
break
}

constraints.push(
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
)

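The json branch above replaces the single hard-coded `= '%val%'` comparison with an operator map (contains → ilike, equals → =, exists → is [not] null, like → like, not_equals → <>) applied to the JSON traversal of the queried path. A sketch of the kind of queries this enables, using hypothetical field names; the rendered SQL comments are rough approximations of what the traversal produces:

// `meta` is a hypothetical field of type `json` on a `posts` collection
await payload.find({
  collection: 'posts',
  where: {
    'meta.author.name': { contains: 'jane' }, // roughly: <column>->'author'->>'name' ilike '%jane%'
  },
})

await payload.find({
  collection: 'posts',
  where: {
    'meta.publishedAt': { exists: true }, // roughly: <column>->>'publishedAt' is not null
  },
})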
@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
import { createArrayFromCommaDelineated } from 'payload/utilities'

import type { PostgresAdapter } from '../types'

type SanitizeQueryValueArgs = {
adapter: PostgresAdapter
field: Field | TabAsField
operator: string
relationOrPath: string
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
}

export const sanitizeQueryValue = ({
adapter,
field,
operator: operatorArg,
relationOrPath,
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
) {
const allPossibleIDTypes: (number | string)[] = []
formattedValue.forEach((val) => {
if (typeof val === 'string') {
if (adapter.idType !== 'uuid' && typeof val === 'string') {
allPossibleIDTypes.push(val, parseInt(val))
} else if (typeof val === 'string') {
allPossibleIDTypes.push(val)
} else {
allPossibleIDTypes.push(val, String(val))
}

@@ -1,46 +0,0 @@
// type GenerateMigration = (before: DrizzleSnapshotJSON, after: DrizzleSnapshotJSON) => string[]

// type GenerateDrizzleJSON = (schema: DrizzleSchemaExports) => DrizzleSnapshotJSON

// type PushDiff = (schema: DrizzleSchemaExports) => Promise<{ warnings: string[], apply: () => Promise<void> }>

// drizzle-kit@utils

import { drizzle } from 'drizzle-orm/node-postgres'
import { Pool } from 'pg'

async function generateUsage() {
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/utils')

// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
const schema = await import('./data/users')
// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
const schemaAfter = await import('./data/users-after')

const drizzleJsonBefore = generateDrizzleJson(schema)
const drizzleJsonAfter = generateDrizzleJson(schemaAfter)

const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)

console.log(sqlStatements)
}

async function pushUsage() {
const { pushSchema } = require('drizzle-kit/utils')

// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
const schemaAfter = await import('./data/users-after')

const db = drizzle(new Pool({ connectionString: '' }))

const response = await pushSchema(schemaAfter, db)

console.log('\n')
console.log('hasDataLoss: ', response.hasDataLoss)
console.log('warnings: ', response.warnings)
console.log('statements: ', response.statementsToExecute)

await response.apply()

process.exit(0)
}
@@ -1,25 +1,22 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { Relation } from 'drizzle-orm'
|
||||
import type { IndexBuilder, PgColumnBuilder, UniqueConstraintBuilder } from 'drizzle-orm/pg-core'
|
||||
import type {
|
||||
IndexBuilder,
|
||||
PgColumnBuilder,
|
||||
PgTableWithColumns,
|
||||
UniqueConstraintBuilder,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
index,
|
||||
integer,
|
||||
numeric,
|
||||
pgTable,
|
||||
serial,
|
||||
timestamp,
|
||||
unique,
|
||||
varchar,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import { index, integer, numeric, serial, timestamp, unique, varchar } from 'drizzle-orm/pg-core'
|
||||
import { fieldAffectsData } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { parentIDColumnMap } from './parentIDColumnMap'
|
||||
import { setColumnID } from './setColumnID'
|
||||
import { traverseFields } from './traverseFields'
|
||||
|
||||
type Args = {
|
||||
@@ -28,6 +25,7 @@ type Args = {
|
||||
baseExtraConfig?: Record<string, (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder>
|
||||
buildNumbers?: boolean
|
||||
buildRelationships?: boolean
|
||||
buildTexts?: boolean
|
||||
disableNotNull: boolean
|
||||
disableUnique: boolean
|
||||
fields: Field[]
|
||||
@@ -41,6 +39,7 @@ type Args = {
|
||||
|
||||
type Result = {
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
relationsToBuild: Map<string, string>
|
||||
}
|
||||
|
||||
@@ -50,6 +49,7 @@ export const buildTable = ({
|
||||
baseExtraConfig = {},
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
disableNotNull,
|
||||
disableUnique = false,
|
||||
fields,
|
||||
@@ -66,48 +66,39 @@ export const buildTable = ({
|
||||
|
||||
let hasLocalizedField = false
|
||||
let hasLocalizedRelationshipField = false
|
||||
let hasManyTextField: 'index' | boolean = false
|
||||
let hasManyNumberField: 'index' | boolean = false
|
||||
let hasLocalizedManyTextField = false
|
||||
let hasLocalizedManyNumberField = false
|
||||
|
||||
const localesColumns: Record<string, PgColumnBuilder> = {}
|
||||
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {}
|
||||
let localesTable: GenericTable
|
||||
let numbersTable: GenericTable
|
||||
let localesTable: GenericTable | PgTableWithColumns<any>
|
||||
let textsTable: GenericTable | PgTableWithColumns<any>
|
||||
let numbersTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Relationships to the base collection
|
||||
const relationships: Set<string> = rootRelationships || new Set()
|
||||
|
||||
let relationshipsTable: GenericTable
|
||||
let relationshipsTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Drizzle relations
|
||||
const relationsToBuild: Map<string, string> = new Map()
|
||||
|
||||
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
let idColType = 'integer'
|
||||
|
||||
if (idField) {
|
||||
if (idField.type === 'number') {
|
||||
idColType = 'numeric'
|
||||
columns.id = numeric('id').primaryKey()
|
||||
}
|
||||
|
||||
if (idField.type === 'text') {
|
||||
idColType = 'varchar'
|
||||
columns.id = varchar('id').primaryKey()
|
||||
}
|
||||
} else {
|
||||
columns.id = serial('id').primaryKey()
|
||||
}
|
||||
const idColType: IDType = setColumnID({ adapter, columns, fields })
|
||||
|
||||
;({
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columns,
|
||||
disableNotNull,
|
||||
disableUnique,
|
||||
@@ -141,7 +132,7 @@ export const buildTable = ({
|
||||
.notNull()
|
||||
}
|
||||
|
||||
const table = pgTable(tableName, columns, (cols) => {
|
||||
const table = adapter.pgSchema.table(tableName, columns, (cols) => {
|
||||
const extraConfig = Object.entries(baseExtraConfig).reduce((config, [key, func]) => {
|
||||
config[key] = func(cols)
|
||||
return config
|
||||
@@ -163,7 +154,7 @@ export const buildTable = ({
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull()
|
||||
|
||||
localesTable = pgTable(localeTableName, localesColumns, (cols) => {
|
||||
localesTable = adapter.pgSchema.table(localeTableName, localesColumns, (cols) => {
|
||||
return Object.entries(localesIndexes).reduce(
|
||||
(acc, [colName, func]) => {
|
||||
acc[colName] = func(cols)
|
||||
@@ -190,6 +181,50 @@ export const buildTable = ({
|
||||
adapter.relations[`relations_${localeTableName}`] = localesTableRelations
|
||||
}
|
||||
|
||||
if (hasManyTextField && buildTexts) {
|
||||
const textsTableName = `${rootTableName}_texts`
|
||||
const columns: Record<string, PgColumnBuilder> = {
|
||||
id: serial('id').primaryKey(),
|
||||
order: integer('order').notNull(),
|
||||
parent: parentIDColumnMap[idColType]('parent_id')
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull(),
|
||||
path: varchar('path').notNull(),
|
||||
text: varchar('text'),
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
textsTable = adapter.pgSchema.table(textsTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyTextField === 'index') {
|
||||
indexes.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
indexes.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
|
||||
}
|
||||
|
||||
return indexes
|
||||
})
|
||||
|
||||
adapter.tables[textsTableName] = textsTable
|
||||
|
||||
const textsTableRelations = relations(textsTable, ({ one }) => ({
|
||||
parent: one(table, {
|
||||
fields: [textsTable.parent],
|
||||
references: [table.id],
|
||||
}),
|
||||
}))
|
||||
|
||||
adapter.relations[`relations_${textsTableName}`] = textsTableRelations
|
||||
}
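For reference, this is roughly the shape of the `_texts` table the block above produces for a collection with an indexed `hasMany` text field. A hedged, standalone Drizzle sketch — the `posts`/`posts_texts` names, the integer parent key, and the omission of the locale column are assumptions for illustration:

```ts
import { index, integer, pgTable, serial, varchar } from 'drizzle-orm/pg-core'

// Parent collection table (placeholder).
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
})

// Approximate shape of `<rootTableName>_texts` as built above.
export const postsTexts = pgTable(
  'posts_texts',
  {
    id: serial('id').primaryKey(),
    order: integer('order').notNull(),
    parent: integer('parent_id')
      .references(() => posts.id, { onDelete: 'cascade' })
      .notNull(),
    path: varchar('path').notNull(),
    text: varchar('text'),
  },
  (cols) => ({
    orderParentIdx: index('posts_texts_order_parent_idx').on(cols.order, cols.parent),
    textIdx: index('posts_texts_text_idx').on(cols.text),
  }),
)
```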
|
||||
|
||||
if (hasManyNumberField && buildNumbers) {
|
||||
const numbersTableName = `${rootTableName}_numbers`
|
||||
const columns: Record<string, PgColumnBuilder> = {
|
||||
@@ -206,17 +241,20 @@ export const buildTable = ({
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
numbersTable = pgTable(numbersTableName, columns, (cols) => {
|
||||
numbersTable = adapter.pgSchema.table(numbersTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
|
||||
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyNumberField === 'index') {
|
||||
indexes.numberIdx = index('number_idx').on(cols.number)
|
||||
indexes.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyNumberField) {
|
||||
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
|
||||
indexes.localeParent = index(`${numbersTableName}_locale_parent`).on(
|
||||
cols.locale,
|
||||
cols.parent,
|
||||
)
|
||||
}
|
||||
|
||||
return indexes
|
||||
@@ -251,7 +289,7 @@ export const buildTable = ({
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const formattedRelationTo = toSnakeCase(relationTo)
|
||||
let colType = 'integer'
|
||||
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
|
||||
const relatedCollectionCustomID = adapter.payload.collections[
|
||||
relationTo
|
||||
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
@@ -265,19 +303,23 @@ export const buildTable = ({
|
||||
|
||||
const relationshipsTableName = `${tableName}_rels`
|
||||
|
||||
relationshipsTable = pgTable(relationshipsTableName, relationshipColumns, (cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index('order_idx').on(cols.order),
|
||||
parentIdx: index('parent_idx').on(cols.parent),
|
||||
pathIdx: index('path_idx').on(cols.path),
|
||||
}
|
||||
relationshipsTable = adapter.pgSchema.table(
|
||||
relationshipsTableName,
|
||||
relationshipColumns,
|
||||
(cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
|
||||
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
|
||||
}
|
||||
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index('locale_idx').on(cols.locale)
|
||||
}
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
adapter.tables[relationshipsTableName] = relationshipsTable
|
||||
|
||||
@@ -317,6 +359,9 @@ export const buildTable = ({
|
||||
result._locales = many(localesTable)
|
||||
}
|
||||
|
||||
if (hasManyTextField) {
|
||||
result._texts = many(textsTable)
|
||||
}
|
||||
if (hasManyNumberField) {
|
||||
result._numbers = many(numbersTable)
|
||||
}
|
||||
@@ -332,5 +377,5 @@ export const buildTable = ({
|
||||
|
||||
adapter.relations[`relations_${tableName}`] = tableRelations
|
||||
|
||||
return { hasManyNumberField, relationsToBuild }
|
||||
return { hasManyNumberField, hasManyTextField, relationsToBuild }
|
||||
}
|
||||
|
||||
@@ -6,10 +6,11 @@ import type { GenericColumn } from '../types'
|
||||
type CreateIndexArgs = {
|
||||
columnName: string
|
||||
name: string | string[]
|
||||
tableName: string
|
||||
unique?: boolean
|
||||
}
|
||||
|
||||
export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
export const createIndex = ({ name, columnName, tableName, unique }: CreateIndexArgs) => {
|
||||
return (table: { [x: string]: GenericColumn }) => {
|
||||
let columns
|
||||
if (Array.isArray(name)) {
|
||||
@@ -20,7 +21,8 @@ export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
} else {
|
||||
columns = [table[name]]
|
||||
}
|
||||
if (unique) return uniqueIndex(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
if (unique)
|
||||
return uniqueIndex(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
}
|
||||
}
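The motivation for threading `tableName` through `createIndex`: Postgres index names are unique per schema, not per table, so unprefixed names like `order_idx` collide as soon as two tables want the same index. A minimal sketch of the resulting naming, with hypothetical `posts` and `pages` tables:

```ts
import { index, pgTable, serial, varchar } from 'drizzle-orm/pg-core'

// Before this change both tables would have tried to create `title_idx`,
// which fails because the name is taken schema-wide after the first table.
export const posts = pgTable(
  'posts',
  {
    id: serial('id').primaryKey(),
    title: varchar('title'),
  },
  (cols) => ({
    titleIdx: index('posts_title_idx').on(cols.title),
  }),
)

export const pages = pgTable(
  'pages',
  {
    id: serial('id').primaryKey(),
    title: varchar('title'),
  },
  (cols) => ({
    titleIdx: index('pages_title_idx').on(cols.title),
  }),
)
```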
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
|
||||
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'
|
||||
|
||||
export const parentIDColumnMap = {
|
||||
import type { IDType } from '../types'
|
||||
|
||||
export const parentIDColumnMap: Record<
|
||||
IDType,
|
||||
typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
|
||||
> = {
|
||||
integer,
|
||||
numeric,
|
||||
uuid,
|
||||
varchar,
|
||||
}
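A standalone sketch of how the widened map is used: the `IDType` computed for the parent table selects the column builder for `parent_id` columns on child tables (`_locales`, `_texts`, `_numbers`, `_rels`). The `'uuid'` key below is just an example value:

```ts
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'

// Same lookup shape as the map above, reproduced for illustration.
const parentIDColumnMap = { integer, numeric, uuid, varchar }

// e.g. the value setColumnID would return for an adapter with idType: 'uuid'
const parentColumn = parentIDColumnMap['uuid']('parent_id').notNull()
```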
|
||||
|
||||
packages/db-postgres/src/schema/setColumnID.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'

import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
import { type Field, fieldAffectsData } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities'

import type { IDType, PostgresAdapter } from '../types'

type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
  const idField = flattenTopLevelFields(fields).find(
    (field) => fieldAffectsData(field) && field.name === 'id',
  )
  if (idField) {
    if (idField.type === 'number') {
      columns.id = numeric('id').primaryKey()
      return 'numeric'
    }

    if (idField.type === 'text') {
      columns.id = varchar('id').primaryKey()
      return 'varchar'
    }
  }

  if (adapter.idType === 'uuid') {
    columns.id = uuid('id').defaultRandom().primaryKey()
    return 'uuid'
  }

  columns.id = serial('id').primaryKey()
  return 'integer'
}
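The precedence implemented above: an explicit top-level `id` field still wins, then the adapter-level `idType`, then the `serial` default. A hypothetical collection config illustrating the first case:

```ts
import type { CollectionConfig } from 'payload/types'

// An explicit top-level `id` field wins over the adapter's `idType`, so this
// collection gets a varchar primary key even if the adapter is configured
// with `idType: 'uuid'`. The collection itself is a placeholder.
export const Orders: CollectionConfig = {
  slug: 'orders',
  fields: [
    { name: 'id', type: 'text' },
    { name: 'total', type: 'number' },
  ],
}
```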
@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
PgNumericBuilder,
|
||||
PgUUIDBuilder,
|
||||
PgVarcharBuilder,
|
||||
boolean,
|
||||
index,
|
||||
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
|
||||
import { fieldAffectsData, optionIsObject } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { hasLocalesTable } from '../utilities/hasLocalesTable'
|
||||
import { buildTable } from './build'
|
||||
@@ -34,6 +35,7 @@ type Args = {
|
||||
adapter: PostgresAdapter
|
||||
buildNumbers: boolean
|
||||
buildRelationships: boolean
|
||||
buildTexts: boolean
|
||||
columnPrefix?: string
|
||||
columns: Record<string, PgColumnBuilder>
|
||||
disableNotNull: boolean
|
||||
@@ -56,14 +58,17 @@ type Args = {
|
||||
type Result = {
|
||||
hasLocalizedField: boolean
|
||||
hasLocalizedManyNumberField: boolean
|
||||
hasLocalizedManyTextField: boolean
|
||||
hasLocalizedRelationshipField: boolean
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
}
|
||||
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull,
|
||||
@@ -84,10 +89,13 @@ export const traverseFields = ({
|
||||
}: Args): Result => {
|
||||
let hasLocalizedField = false
|
||||
let hasLocalizedRelationshipField = false
|
||||
let hasManyTextField: 'index' | boolean = false
|
||||
let hasLocalizedManyTextField = false
|
||||
let hasManyNumberField: 'index' | boolean = false
|
||||
let hasLocalizedManyNumberField = false
|
||||
|
||||
let parentIDColType = 'integer'
|
||||
let parentIDColType: IDType = 'integer'
|
||||
if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
|
||||
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
|
||||
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'
|
||||
|
||||
@@ -116,7 +124,7 @@ export const traverseFields = ({
|
||||
if (
|
||||
(field.unique || field.index) &&
|
||||
!['array', 'blocks', 'group', 'point', 'relationship', 'upload'].includes(field.type) &&
|
||||
!(field.type === 'number' && field.hasMany === true)
|
||||
!('hasMany' in field && field.hasMany === true)
|
||||
) {
|
||||
const unique = disableUnique !== true && field.unique
|
||||
if (unique) {
|
||||
@@ -126,16 +134,38 @@ export const traverseFields = ({
|
||||
}
|
||||
adapter.fieldConstraints[rootTableName][`${columnName}_idx`] = constraintValue
|
||||
}
|
||||
targetIndexes[`${field.name}Idx`] = createIndex({
|
||||
targetIndexes[`${newTableName}_${field.name}Idx`] = createIndex({
|
||||
name: fieldName,
|
||||
columnName,
|
||||
tableName: newTableName,
|
||||
unique,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
switch (field.type) {
|
||||
case 'text':
|
||||
case 'text': {
|
||||
if (field.hasMany) {
|
||||
if (field.localized) {
|
||||
hasLocalizedManyTextField = true
|
||||
}
|
||||
|
||||
if (field.index) {
|
||||
hasManyTextField = 'index'
|
||||
} else if (!hasManyTextField) {
|
||||
hasManyTextField = true
|
||||
}
|
||||
|
||||
if (field.unique) {
|
||||
throw new InvalidConfiguration(
|
||||
'Unique is not supported in Postgres for hasMany text fields.',
|
||||
)
|
||||
}
|
||||
} else {
|
||||
targetTable[fieldName] = varchar(columnName)
|
||||
}
|
||||
break
|
||||
}
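A hypothetical field config that exercises the new branch — assuming the `TextField` type now carries `hasMany`, as the checks above rely on. Values are written to the `<table>_texts` table, `index: true` becomes `<table>_texts_text_idx`, and `unique: true` would throw the `InvalidConfiguration` error shown above:

```ts
import type { Field } from 'payload/types'

// Placeholder field; only the combination of options is the point here.
export const keywords: Field = {
  name: 'keywords',
  type: 'text',
  hasMany: true,
  index: true,
  localized: true,
}
```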
|
||||
case 'email':
|
||||
case 'code':
|
||||
case 'textarea': {
|
||||
@@ -214,17 +244,18 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
orderIdx: (cols) => index('order_idx').on(cols.order),
|
||||
parentIdx: (cols) => index('parent_idx').on(cols.parent),
|
||||
orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
|
||||
}
|
||||
|
||||
if (field.localized) {
|
||||
baseColumns.locale = adapter.enums.enum__locales('locale').notNull()
|
||||
baseExtraConfig.localeIdx = (cols) => index('locale_idx').on(cols.locale)
|
||||
baseExtraConfig.localeIdx = (cols) =>
|
||||
index(`${selectTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
if (field.index) {
|
||||
baseExtraConfig.value = (cols) => index('value_idx').on(cols.value)
|
||||
baseExtraConfig.value = (cols) => index(`${selectTableName}_value_idx`).on(cols.value)
|
||||
}
|
||||
|
||||
buildTable({
|
||||
@@ -277,30 +308,38 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
_orderIdx: (cols) => index('_order_idx').on(cols._order),
|
||||
_parentIDIdx: (cols) => index('_parent_id_idx').on(cols._parentID),
|
||||
_orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order),
|
||||
_parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
|
||||
baseExtraConfig._localeIdx = (cols) => index('_locale_idx').on(cols._locale)
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${arrayTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const { hasManyNumberField: subHasManyNumberField, relationsToBuild: subRelationsToBuild } =
|
||||
buildTable({
|
||||
adapter,
|
||||
baseColumns,
|
||||
baseExtraConfig,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
disableUnique,
|
||||
fields: disableUnique ? idToUUID(field.fields) : field.fields,
|
||||
rootRelationsToBuild,
|
||||
rootRelationships: relationships,
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
tableName: arrayTableName,
|
||||
})
|
||||
const {
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
} = buildTable({
|
||||
adapter,
|
||||
baseColumns,
|
||||
baseExtraConfig,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
disableUnique,
|
||||
fields: disableUnique ? idToUUID(field.fields) : field.fields,
|
||||
rootRelationsToBuild,
|
||||
rootRelationships: relationships,
|
||||
rootTableIDColType,
|
||||
rootTableName,
|
||||
tableName: arrayTableName,
|
||||
})
|
||||
|
||||
if (subHasManyTextField) {
|
||||
if (!hasManyTextField || subHasManyTextField === 'index')
|
||||
hasManyTextField = subHasManyTextField
|
||||
}
|
||||
if (subHasManyNumberField) {
|
||||
if (!hasManyNumberField || subHasManyNumberField === 'index')
|
||||
hasManyNumberField = subHasManyNumberField
|
||||
@@ -350,18 +389,20 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
_orderIdx: (cols) => index('order_idx').on(cols._order),
|
||||
_parentIDIdx: (cols) => index('parent_id_idx').on(cols._parentID),
|
||||
_pathIdx: (cols) => index('path_idx').on(cols._path),
|
||||
_orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order),
|
||||
_parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID),
|
||||
_pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
|
||||
baseExtraConfig._localeIdx = (cols) => index('locale_idx').on(cols._locale)
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${blockTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const {
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
} = buildTable({
|
||||
adapter,
|
||||
@@ -377,6 +418,11 @@ export const traverseFields = ({
|
||||
tableName: blockTableName,
|
||||
})
|
||||
|
||||
if (subHasManyTextField) {
|
||||
if (!hasManyTextField || subHasManyTextField === 'index')
|
||||
hasManyTextField = subHasManyTextField
|
||||
}
|
||||
|
||||
if (subHasManyNumberField) {
|
||||
if (!hasManyNumberField || subHasManyNumberField === 'index')
|
||||
hasManyNumberField = subHasManyNumberField
|
||||
@@ -426,12 +472,15 @@ export const traverseFields = ({
|
||||
const {
|
||||
hasLocalizedField: groupHasLocalizedField,
|
||||
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
|
||||
hasManyNumberField: groupHasManyNumberField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull,
|
||||
@@ -453,6 +502,8 @@ export const traverseFields = ({
|
||||
|
||||
if (groupHasLocalizedField) hasLocalizedField = true
|
||||
if (groupHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
if (groupHasManyTextField) hasManyTextField = true
|
||||
if (groupHasLocalizedManyTextField) hasLocalizedManyTextField = true
|
||||
if (groupHasManyNumberField) hasManyNumberField = true
|
||||
if (groupHasLocalizedManyNumberField) hasLocalizedManyNumberField = true
|
||||
break
|
||||
@@ -463,12 +514,15 @@ export const traverseFields = ({
|
||||
const {
|
||||
hasLocalizedField: groupHasLocalizedField,
|
||||
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
|
||||
hasManyNumberField: groupHasManyNumberField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix: `${columnName}_`,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -490,6 +544,8 @@ export const traverseFields = ({
|
||||
|
||||
if (groupHasLocalizedField) hasLocalizedField = true
|
||||
if (groupHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
if (groupHasManyTextField) hasManyTextField = true
|
||||
if (groupHasLocalizedManyTextField) hasLocalizedManyTextField = true
|
||||
if (groupHasManyNumberField) hasManyNumberField = true
|
||||
if (groupHasLocalizedManyNumberField) hasLocalizedManyNumberField = true
|
||||
break
|
||||
@@ -501,12 +557,15 @@ export const traverseFields = ({
|
||||
const {
|
||||
hasLocalizedField: tabHasLocalizedField,
|
||||
hasLocalizedManyNumberField: tabHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: tabHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: tabHasLocalizedRelationshipField,
|
||||
hasManyNumberField: tabHasManyNumberField,
|
||||
hasManyTextField: tabHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -528,9 +587,10 @@ export const traverseFields = ({
|
||||
|
||||
if (tabHasLocalizedField) hasLocalizedField = true
|
||||
if (tabHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
if (tabHasManyTextField) hasManyTextField = true
|
||||
if (tabHasLocalizedManyTextField) hasLocalizedManyTextField = true
|
||||
if (tabHasManyNumberField) hasManyNumberField = true
|
||||
if (tabHasLocalizedManyNumberField) hasLocalizedManyNumberField = true
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
@@ -540,12 +600,15 @@ export const traverseFields = ({
|
||||
const {
|
||||
hasLocalizedField: rowHasLocalizedField,
|
||||
hasLocalizedManyNumberField: rowHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: rowHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: rowHasLocalizedRelationshipField,
|
||||
hasManyNumberField: rowHasManyNumberField,
|
||||
hasManyTextField: rowHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -567,6 +630,8 @@ export const traverseFields = ({
|
||||
|
||||
if (rowHasLocalizedField) hasLocalizedField = true
|
||||
if (rowHasLocalizedRelationshipField) hasLocalizedRelationshipField = true
|
||||
if (rowHasManyTextField) hasManyTextField = true
|
||||
if (rowHasLocalizedManyTextField) hasLocalizedManyTextField = true
|
||||
if (rowHasManyNumberField) hasManyNumberField = true
|
||||
if (rowHasLocalizedManyNumberField) hasLocalizedManyNumberField = true
|
||||
break
|
||||
@@ -605,7 +670,9 @@ export const traverseFields = ({
|
||||
return {
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { Block } from 'payload/types'
|
||||
import type { Block, Field } from 'payload/types'
|
||||
|
||||
import { InvalidConfiguration } from 'payload/errors'
|
||||
import { flattenTopLevelFields } from 'payload/utilities'
|
||||
import { fieldAffectsData, fieldHasSubFields, tabHasName } from 'payload/types'
|
||||
|
||||
import type { GenericTable } from '../types'
|
||||
|
||||
@@ -12,29 +12,72 @@ type Args = {
|
||||
table: GenericTable
|
||||
}
|
||||
|
||||
const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[] => {
|
||||
return fields.reduce((fieldsToUse, field) => {
|
||||
let fieldPrefix = prefix
|
||||
|
||||
if (
|
||||
['array', 'blocks', 'relationship', 'upload'].includes(field.type) ||
|
||||
('hasMany' in field && field.hasMany === true)
|
||||
) {
|
||||
return fieldsToUse
|
||||
}
|
||||
|
||||
if (fieldHasSubFields(field)) {
|
||||
fieldPrefix = 'name' in field ? `${prefix}${field.name}.` : prefix
|
||||
return [...fieldsToUse, ...getFlattenedFieldNames(field.fields, fieldPrefix)]
|
||||
}
|
||||
|
||||
if (field.type === 'tabs') {
|
||||
return [
|
||||
...fieldsToUse,
|
||||
...field.tabs.reduce((tabFields, tab) => {
|
||||
fieldPrefix = 'name' in tab ? `${prefix}.${tab.name}` : prefix
|
||||
return [
|
||||
...tabFields,
|
||||
...(tabHasName(tab)
|
||||
? [{ ...tab, type: 'tab' }]
|
||||
: getFlattenedFieldNames(tab.fields, fieldPrefix)),
|
||||
]
|
||||
}, []),
|
||||
]
|
||||
}
|
||||
|
||||
if (fieldAffectsData(field)) {
|
||||
return [...fieldsToUse, `${fieldPrefix?.replace('.', '_') || ''}${field.name}`]
|
||||
}
|
||||
|
||||
return fieldsToUse
|
||||
}, [])
|
||||
}
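Illustrative input/output for the helper above (it is module-private, so this is a hypothetical call rather than an importable example):

```ts
getFlattenedFieldNames([
  { name: 'title', type: 'text' },
  { name: 'meta', type: 'group', fields: [{ name: 'description', type: 'textarea' }] },
  { name: 'tags', type: 'array', fields: [{ name: 'tag', type: 'text' }] },
])
// -> ['title', 'meta_description']
// `tags` is skipped: array (and blocks/relationship/upload/hasMany) data lives
// in its own table, so it takes no part in the column-by-column comparison.
```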
|
||||
|
||||
export const validateExistingBlockIsIdentical = ({
|
||||
block,
|
||||
localized,
|
||||
rootTableName,
|
||||
table,
|
||||
}: Args): void => {
|
||||
if (table) {
|
||||
const fieldNames = flattenTopLevelFields(block.fields).flatMap((field) => field.name)
|
||||
const fieldNames = getFlattenedFieldNames(block.fields)
|
||||
|
||||
Object.keys(table).forEach((fieldName) => {
|
||||
const missingField =
|
||||
// ensure every field from the config is in the matching table
|
||||
fieldNames.find((name) => Object.keys(table).indexOf(name) === -1) ||
|
||||
// ensure every table column is matched for every field from the config
|
||||
Object.keys(table).find((fieldName) => {
|
||||
if (!['_locale', '_order', '_parentID', '_path', '_uuid'].includes(fieldName)) {
|
||||
if (fieldNames.indexOf(fieldName) === -1) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One block includes the field ${fieldName}, while the other block does not.`,
|
||||
)
|
||||
}
|
||||
return fieldNames.indexOf(fieldName) === -1
|
||||
}
|
||||
})
|
||||
|
||||
if (Boolean(localized) !== Boolean(table._locale)) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One is localized, but another is not. Block schemas of the same name must match exactly.`,
|
||||
)
|
||||
}
|
||||
if (missingField) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One block includes the field ${missingField}, while the other block does not.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (Boolean(localized) !== Boolean(table._locale)) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One is localized, but another is not. Block schemas of the same name must match exactly.`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,11 +11,11 @@ export const beginTransaction: BeginTransaction = async function beginTransactio
|
||||
try {
|
||||
id = uuid()
|
||||
|
||||
let reject: (value?: unknown) => void
|
||||
let resolve: (value?: unknown) => void
|
||||
let reject: () => Promise<void>
|
||||
let resolve: () => Promise<void>
|
||||
let transaction: DrizzleTransaction
|
||||
|
||||
let transactionReady: (value?: unknown) => void
|
||||
let transactionReady: () => void
|
||||
|
||||
// Drizzle only exposes a transactions API that is sufficient if you
|
||||
// can directly pass around the `tx` argument. But our operations are spread
|
||||
@@ -24,13 +24,19 @@ export const beginTransaction: BeginTransaction = async function beginTransactio
|
||||
// and will call them in our respective transaction methods
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.drizzle
|
||||
const done = this.drizzle
|
||||
.transaction(async (tx) => {
|
||||
transaction = tx
|
||||
await new Promise((res, rej) => {
|
||||
await new Promise<void>((res, rej) => {
|
||||
resolve = () => {
|
||||
res()
|
||||
return done
|
||||
}
|
||||
reject = () => {
|
||||
rej()
|
||||
return done
|
||||
}
|
||||
transactionReady()
|
||||
resolve = res
|
||||
reject = rej
|
||||
})
|
||||
})
|
||||
.catch(() => {
|
||||
@@ -39,7 +45,7 @@ export const beginTransaction: BeginTransaction = async function beginTransactio
|
||||
|
||||
// Need to wait until the transaction is ready
|
||||
// before binding its `resolve` and `reject` methods below
|
||||
await new Promise((resolve) => (transactionReady = resolve))
|
||||
await new Promise<void>((resolve) => (transactionReady = resolve))
|
||||
|
||||
this.sessions[id] = {
|
||||
db: transaction,
|
||||
|
||||
@@ -7,9 +7,9 @@ export const commitTransaction: CommitTransaction = async function commitTransac
|
||||
}
|
||||
|
||||
try {
|
||||
this.sessions[id].resolve()
|
||||
await this.sessions[id].resolve()
|
||||
} catch (err: unknown) {
|
||||
this.sessions[id].reject()
|
||||
await this.sessions[id].reject()
|
||||
}
|
||||
|
||||
delete this.sessions[id]
|
||||
|
||||
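A minimal standalone sketch of the pattern these two changes share, not the adapter's exact code: the transaction body is parked on a promise, and `resolve`/`reject` hand back the outer promise so `commitTransaction` can await the COMMIT or ROLLBACK having settled before deleting the session. The real adapter additionally waits for the body to start (`transactionReady`) before exposing these callbacks, which is omitted here:

```ts
type TransactionRunner = (body: () => Promise<void>) => Promise<void>

export const beginHeldTransaction = (run: TransactionRunner) => {
  let release!: () => void
  let abort!: (err?: unknown) => void

  const done = run(
    () =>
      new Promise<void>((res, rej) => {
        release = res
        abort = rej
      }),
  ).catch(() => {
    // rollback rejection is swallowed here; callers observe completion by awaiting reject()
  })

  return {
    reject: () => {
      abort()
      return done
    },
    resolve: () => {
      release()
      return done
    },
  }
}
```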
packages/db-postgres/src/transform/read/hasManyText.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
/* eslint-disable no-param-reassign */
import type { TextField } from 'payload/types'

type Args = {
  field: TextField
  locale?: string
  textRows: Record<string, unknown>[]
  ref: Record<string, unknown>
}

export const transformHasManyText = ({ field, locale, textRows, ref }: Args) => {
  const result = textRows.map(({ text }) => text)

  if (locale) {
    ref[field.name][locale] = result
  } else {
    ref[field.name] = result
  }
}
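Usage example for the new read-side transform, with made-up rows in the shape a `<table>_texts` query returns them; the abbreviated field object is a placeholder:

```ts
import type { TextField } from 'payload/types'

import { transformHasManyText } from './hasManyText'

const doc: Record<string, unknown> = {}

transformHasManyText({
  field: { name: 'keywords', type: 'text', hasMany: true } as TextField,
  ref: doc,
  textRows: [
    { order: 1, text: 'alpha' },
    { order: 2, text: 'beta' },
  ],
})

// doc.keywords === ['alpha', 'beta']
```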
@@ -18,6 +18,7 @@ type TransformArgs = {
|
||||
// into the shape Payload expects based on field schema
|
||||
export const transform = <T extends TypeWithID>({ config, data, fields }: TransformArgs): T => {
|
||||
let relationships: Record<string, Record<string, unknown>[]> = {}
|
||||
let texts: Record<string, Record<string, unknown>[]> = {}
|
||||
let numbers: Record<string, Record<string, unknown>[]> = {}
|
||||
|
||||
if ('_rels' in data) {
|
||||
@@ -25,6 +26,11 @@ export const transform = <T extends TypeWithID>({ config, data, fields }: Transf
|
||||
delete data._rels
|
||||
}
|
||||
|
||||
if ('_texts' in data) {
|
||||
texts = createPathMap(data._texts)
|
||||
delete data._texts
|
||||
}
|
||||
|
||||
if ('_numbers' in data) {
|
||||
numbers = createPathMap(data._numbers)
|
||||
delete data._numbers
|
||||
@@ -42,6 +48,7 @@ export const transform = <T extends TypeWithID>({ config, data, fields }: Transf
|
||||
deletions,
|
||||
fieldPrefix: '',
|
||||
fields,
|
||||
texts,
|
||||
numbers,
|
||||
path: '',
|
||||
relationships,
|
||||
|
||||
@@ -7,6 +7,7 @@ import { fieldAffectsData } from 'payload/types'
|
||||
import type { BlocksMap } from '../../utilities/createBlocksMap'
|
||||
|
||||
import { transformHasManyNumber } from './hasManyNumber'
|
||||
import { transformHasManyText } from './hasManyText'
|
||||
import { transformRelationship } from './relationship'
|
||||
|
||||
type TraverseFieldsArgs = {
|
||||
@@ -50,6 +51,10 @@ type TraverseFieldsArgs = {
|
||||
* Data structure representing the nearest table from db
|
||||
*/
|
||||
table: Record<string, unknown>
|
||||
/**
|
||||
* All hasMany text fields, as returned by Drizzle, keyed on an object by field path
|
||||
*/
|
||||
texts: Record<string, Record<string, unknown>[]>
|
||||
}
|
||||
|
||||
// Traverse fields recursively, transforming data
|
||||
@@ -65,6 +70,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
}: TraverseFieldsArgs): T => {
|
||||
const sanitizedPath = path ? `${path}.` : path
|
||||
|
||||
@@ -81,6 +87,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -100,12 +107,18 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
|
||||
if (fieldAffectsData(field)) {
|
||||
const fieldName = `${fieldPrefix || ''}${field.name}`
|
||||
const fieldData = table[fieldName]
|
||||
|
||||
if (fieldPrefix) {
|
||||
deletions.push(() => delete table[fieldName])
|
||||
}
|
||||
|
||||
if (field.type === 'array') {
|
||||
if (Array.isArray(fieldData)) {
|
||||
if (field.localized) {
|
||||
@@ -131,8 +144,13 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${sanitizedPath}${field.name}.${row._order - 1}`,
|
||||
relationships,
|
||||
table: row,
|
||||
texts,
|
||||
})
|
||||
|
||||
if ('_order' in rowResult) {
|
||||
delete rowResult._order
|
||||
}
|
||||
|
||||
arrayResult[locale].push(rowResult)
|
||||
}
|
||||
|
||||
@@ -144,6 +162,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
row.id = row._uuid
|
||||
delete row._uuid
|
||||
}
|
||||
|
||||
if ('_order' in row) {
|
||||
delete row._order
|
||||
}
|
||||
|
||||
return traverseFields<T>({
|
||||
blocks,
|
||||
config,
|
||||
@@ -155,6 +178,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${sanitizedPath}${field.name}.${i}`,
|
||||
relationships,
|
||||
table: row,
|
||||
texts,
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -198,6 +222,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${blockFieldPath}.${row._order - 1}`,
|
||||
relationships,
|
||||
table: row,
|
||||
texts,
|
||||
})
|
||||
|
||||
delete blockResult._order
|
||||
@@ -228,6 +253,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${blockFieldPath}.${i}`,
|
||||
relationships,
|
||||
table: row,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -285,6 +311,40 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
return result
|
||||
}
|
||||
|
||||
if (field.type === 'text' && field?.hasMany) {
|
||||
const textPathMatch = texts[`${sanitizedPath}${field.name}`]
|
||||
if (!textPathMatch) return result
|
||||
|
||||
if (field.localized) {
|
||||
result[field.name] = {}
|
||||
const textsByLocale: Record<string, Record<string, unknown>[]> = {}
|
||||
|
||||
textPathMatch.forEach((row) => {
|
||||
if (typeof row.locale === 'string') {
|
||||
if (!textsByLocale[row.locale]) textsByLocale[row.locale] = []
|
||||
textsByLocale[row.locale].push(row)
|
||||
}
|
||||
})
|
||||
|
||||
Object.entries(textsByLocale).forEach(([locale, texts]) => {
|
||||
transformHasManyText({
|
||||
field,
|
||||
locale,
|
||||
ref: result,
|
||||
textRows: texts,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
transformHasManyText({
|
||||
field,
|
||||
ref: result,
|
||||
textRows: textPathMatch,
|
||||
})
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
if (field.type === 'number' && field.hasMany) {
|
||||
const numberPathMatch = numbers[`${sanitizedPath}${field.name}`]
|
||||
if (!numberPathMatch) return result
|
||||
@@ -378,8 +438,12 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${sanitizedPath}${field.name}`,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
})
|
||||
})
|
||||
if ('_order' in ref) {
|
||||
delete ref._order
|
||||
}
|
||||
} else {
|
||||
const groupData = {}
|
||||
|
||||
@@ -394,7 +458,26 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
path: `${sanitizedPath}${field.name}`,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
})
|
||||
if ('_order' in ref) {
|
||||
delete ref._order
|
||||
}
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'text': {
|
||||
let val = fieldData
|
||||
if (typeof fieldData === 'string') {
|
||||
val = String(fieldData)
|
||||
}
|
||||
|
||||
if (typeof locale === 'string') {
|
||||
ref[locale] = val
|
||||
} else {
|
||||
result[field.name] = val
|
||||
}
|
||||
|
||||
break
|
||||
|
||||
@@ -18,6 +18,7 @@ type Args = {
|
||||
data: unknown
|
||||
field: ArrayField
|
||||
locale?: string
|
||||
texts: Record<string, unknown>[]
|
||||
numbers: Record<string, unknown>[]
|
||||
path: string
|
||||
relationships: Record<string, unknown>[]
|
||||
@@ -36,6 +37,7 @@ export const transformArray = ({
|
||||
data,
|
||||
field,
|
||||
locale,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -86,6 +88,7 @@ export const transformArray = ({
|
||||
fieldPrefix: '',
|
||||
fields: field.fields,
|
||||
locales: newRow.locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName: arrayTableName,
|
||||
path: `${path || ''}${field.name}.${i}.`,
|
||||
|
||||
@@ -18,6 +18,7 @@ type Args = {
|
||||
data: Record<string, unknown>[]
|
||||
field: BlockField
|
||||
locale?: string
|
||||
texts: Record<string, unknown>[]
|
||||
numbers: Record<string, unknown>[]
|
||||
path: string
|
||||
relationships: Record<string, unknown>[]
|
||||
@@ -34,6 +35,7 @@ export const transformBlocks = ({
|
||||
data,
|
||||
field,
|
||||
locale,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -84,6 +86,7 @@ export const transformBlocks = ({
|
||||
fieldPrefix: '',
|
||||
fields: matchedBlock.fields,
|
||||
locales: newRow.locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName: blockTableName,
|
||||
path: `${path || ''}${field.name}.${i}.`,
|
||||
|
||||
@@ -27,6 +27,7 @@ export const transformForWrite = ({
|
||||
blocks: {},
|
||||
blocksToDelete: new Set(),
|
||||
locales: {},
|
||||
texts: [],
|
||||
numbers: [],
|
||||
relationships: [],
|
||||
relationshipsToDelete: [],
|
||||
@@ -47,6 +48,7 @@ export const transformForWrite = ({
|
||||
fieldPrefix: '',
|
||||
fields,
|
||||
locales: rowToInsert.locales,
|
||||
texts: rowToInsert.texts,
|
||||
numbers: rowToInsert.numbers,
|
||||
parentTableName: tableName,
|
||||
path,
|
||||
|
||||
packages/db-postgres/src/transform/write/texts.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
type Args = {
  baseRow: Record<string, unknown>
  data: unknown[]
  texts: Record<string, unknown>[]
}

export const transformTexts = ({ baseRow, data, texts }: Args) => {
  data.forEach((val, i) => {
    texts.push({
      ...baseRow,
      text: val,
      order: i + 1,
    })
  })
}
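Example input/output, mirroring how the write traversal calls this for a localized `hasMany` text field at the document root (`keywords` is a placeholder path):

```ts
import { transformTexts } from './texts'

const texts: Record<string, unknown>[] = []

transformTexts({
  baseRow: { locale: 'en', path: 'keywords' },
  data: ['alpha', 'beta'],
  texts,
})

// texts === [
//   { locale: 'en', path: 'keywords', text: 'alpha', order: 1 },
//   { locale: 'en', path: 'keywords', text: 'beta', order: 2 },
// ]
```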
@@ -13,6 +13,7 @@ import { transformBlocks } from './blocks'
|
||||
import { transformNumbers } from './numbers'
|
||||
import { transformRelationship } from './relationships'
|
||||
import { transformSelects } from './selects'
|
||||
import { transformTexts } from './texts'
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
@@ -44,6 +45,7 @@ type Args = {
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
texts: Record<string, unknown>[]
|
||||
numbers: Record<string, unknown>[]
|
||||
/**
|
||||
* This is the name of the parent table
|
||||
@@ -71,6 +73,7 @@ export const traverseFields = ({
|
||||
fields,
|
||||
forcedLocale,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path,
|
||||
@@ -108,6 +111,7 @@ export const traverseFields = ({
|
||||
data: localeData,
|
||||
field,
|
||||
locale: localeKey,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -128,6 +132,7 @@ export const traverseFields = ({
|
||||
blocksToDelete,
|
||||
data: data[field.name],
|
||||
field,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -158,6 +163,7 @@ export const traverseFields = ({
|
||||
data: localeData,
|
||||
field,
|
||||
locale: localeKey,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -175,6 +181,7 @@ export const traverseFields = ({
|
||||
blocksToDelete,
|
||||
data: fieldData,
|
||||
field,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
@@ -203,6 +210,7 @@ export const traverseFields = ({
|
||||
fields: field.fields,
|
||||
forcedLocale: localeKey,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path: `${path || ''}${field.name}.`,
|
||||
@@ -225,6 +233,7 @@ export const traverseFields = ({
|
||||
fieldPrefix: `${fieldName}_`,
|
||||
fields: field.fields,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path: `${path || ''}${field.name}.`,
|
||||
@@ -258,6 +267,7 @@ export const traverseFields = ({
|
||||
fields: tab.fields,
|
||||
forcedLocale: localeKey,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path: `${path || ''}${tab.name}.`,
|
||||
@@ -280,6 +290,7 @@ export const traverseFields = ({
|
||||
fieldPrefix: `${fieldPrefix || ''}${tab.name}_`,
|
||||
fields: tab.fields,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path: `${path || ''}${tab.name}.`,
|
||||
@@ -303,6 +314,7 @@ export const traverseFields = ({
|
||||
fieldPrefix,
|
||||
fields: tab.fields,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path,
|
||||
@@ -328,6 +340,7 @@ export const traverseFields = ({
|
||||
fieldPrefix,
|
||||
fields: field.fields,
|
||||
locales,
|
||||
texts,
|
||||
numbers,
|
||||
parentTableName,
|
||||
path,
|
||||
@@ -382,6 +395,37 @@ export const traverseFields = ({
|
||||
return
|
||||
}
|
||||
|
||||
if (field.type === 'text' && field.hasMany) {
|
||||
const textPath = `${path || ''}${field.name}`
|
||||
|
||||
if (field.localized) {
|
||||
if (typeof fieldData === 'object') {
|
||||
Object.entries(fieldData).forEach(([localeKey, localeData]) => {
|
||||
if (Array.isArray(localeData)) {
|
||||
transformTexts({
|
||||
baseRow: {
|
||||
locale: localeKey,
|
||||
path: textPath,
|
||||
},
|
||||
data: localeData,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
} else if (Array.isArray(fieldData)) {
|
||||
transformTexts({
|
||||
baseRow: {
|
||||
path: textPath,
|
||||
},
|
||||
data: fieldData,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if (field.type === 'number' && field.hasMany) {
|
||||
const numberPath = `${path || ''}${field.name}`
|
||||
|
||||
|
||||
@@ -34,6 +34,7 @@ export type RowToInsert = {
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
texts: Record<string, unknown>[]
|
||||
numbers: Record<string, unknown>[]
|
||||
relationships: Record<string, unknown>[]
|
||||
relationshipsToDelete: RelationshipToDelete[]
|
||||
|
||||
@@ -1,22 +1,34 @@
|
||||
import type {
|
||||
ColumnBaseConfig,
|
||||
ColumnDataType,
|
||||
DrizzleConfig,
|
||||
ExtractTablesWithRelations,
|
||||
Relation,
|
||||
Relations,
|
||||
} from 'drizzle-orm'
|
||||
import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-postgres'
|
||||
import type { PgColumn, PgEnum, PgTableWithColumns, PgTransaction } from 'drizzle-orm/pg-core'
|
||||
import type {
|
||||
PgColumn,
|
||||
PgEnum,
|
||||
PgSchema,
|
||||
PgTableWithColumns,
|
||||
PgTransaction,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
|
||||
import type { Payload } from 'payload'
|
||||
import type { BaseDatabaseAdapter } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
import type { Pool, PoolConfig } from 'pg'
|
||||
|
||||
export type DrizzleDB = NodePgDatabase<Record<string, unknown>>
|
||||
|
||||
export type Args = {
|
||||
idType?: 'serial' | 'uuid'
|
||||
logger?: DrizzleConfig['logger']
|
||||
migrationDir?: string
|
||||
pool: PoolConfig
|
||||
push?: boolean
|
||||
schemaName?: string
|
||||
}
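A hedged sketch of wiring the two new `Args`; the connection string is a placeholder:

```ts
import { postgresAdapter } from '@payloadcms/db-postgres'

// `idType: 'uuid'` switches generated primary keys from serial to uuid (see
// setColumnID), and `schemaName` makes the adapter create its tables through
// `pgSchema(schemaName).table(...)` instead of the bare `pgTable(...)`.
export const db = postgresAdapter({
  idType: 'uuid',
  pool: { connectionString: process.env.DATABASE_URI },
  schemaName: 'payload',
})
```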
|
||||
|
||||
export type GenericColumn = PgColumn<
|
||||
@@ -53,23 +65,31 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
|
||||
* Used for returning properly formed errors from unique fields
|
||||
*/
|
||||
fieldConstraints: Record<string, Record<string, string>>
|
||||
idType: Args['idType']
|
||||
logger: DrizzleConfig['logger']
|
||||
pgSchema?: { table: PgTableFn } | PgSchema
|
||||
pool: Pool
|
||||
poolOptions: Args['pool']
|
||||
push: boolean
|
||||
relations: Record<string, GenericRelation>
|
||||
schema: Record<string, GenericEnum | GenericRelation | GenericTable>
|
||||
schemaName?: Args['schemaName']
|
||||
sessions: {
|
||||
[id: string]: {
|
||||
db: DrizzleTransaction
|
||||
reject: () => void
|
||||
resolve: () => void
|
||||
reject: () => Promise<void>
|
||||
resolve: () => Promise<void>
|
||||
}
|
||||
}
|
||||
tables: Record<string, GenericTable>
|
||||
tables: Record<string, GenericTable | PgTableWithColumns<any>>
|
||||
}
|
||||
|
||||
export type MigrateUpArgs = { payload: Payload }
|
||||
export type MigrateDownArgs = { payload: Payload }
|
||||
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
|
||||
|
||||
export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter
|
||||
|
||||
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }
|
||||
export type MigrateDownArgs = { payload: Payload; req?: Partial<PayloadRequest> }
|
||||
|
||||
declare module 'payload' {
|
||||
export interface DatabaseAdapter
|
||||
@@ -85,8 +105,8 @@ declare module 'payload' {
|
||||
sessions: {
|
||||
[id: string]: {
|
||||
db: DrizzleTransaction
|
||||
reject: () => void
|
||||
resolve: () => void
|
||||
reject: () => Promise<void>
|
||||
resolve: () => Promise<void>
|
||||
}
|
||||
}
|
||||
tables: Record<string, GenericTable>
|
||||
|
||||
@@ -67,6 +67,7 @@ export const upsertRow = async <T extends TypeWithID>({
|
||||
|
||||
const localesToInsert: Record<string, unknown>[] = []
|
||||
const relationsToInsert: Record<string, unknown>[] = []
|
||||
const textsToInsert: Record<string, unknown>[] = []
|
||||
const numbersToInsert: Record<string, unknown>[] = []
|
||||
const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {}
|
||||
const selectsToInsert: { [selectTableName: string]: Record<string, unknown>[] } = {}
|
||||
@@ -88,6 +89,14 @@ export const upsertRow = async <T extends TypeWithID>({
|
||||
})
|
||||
}
|
||||
|
||||
// If there are texts, add parent to each
|
||||
if (rowToInsert.texts.length > 0) {
|
||||
rowToInsert.texts.forEach((textRow) => {
|
||||
textRow.parent = insertedRow.id
|
||||
textsToInsert.push(textRow)
|
||||
})
|
||||
}
|
||||
|
||||
// If there are numbers, add parent to each
|
||||
if (rowToInsert.numbers.length > 0) {
|
||||
rowToInsert.numbers.forEach((numberRow) => {
|
||||
@@ -160,6 +169,29 @@ export const upsertRow = async <T extends TypeWithID>({
|
||||
await db.insert(adapter.tables[relationshipsTableName]).values(relationsToInsert)
|
||||
}
|
||||
|
||||
// //////////////////////////////////
|
||||
// INSERT hasMany TEXTS
|
||||
// //////////////////////////////////
|
||||
|
||||
const textsTableName = `${tableName}_texts`
|
||||
|
||||
if (operation === 'update') {
|
||||
await deleteExistingRowsByPath({
|
||||
adapter,
|
||||
db,
|
||||
localeColumnName: 'locale',
|
||||
parentColumnName: 'parent',
|
||||
parentID: insertedRow.id,
|
||||
pathColumnName: 'path',
|
||||
rows: textsToInsert,
|
||||
tableName: textsTableName,
|
||||
})
|
||||
}
|
||||
|
||||
if (textsToInsert.length > 0) {
|
||||
await db.insert(adapter.tables[textsTableName]).values(textsToInsert).returning()
|
||||
}
|
||||
|
||||
// //////////////////////////////////
|
||||
// INSERT hasMany NUMBERS
|
||||
// //////////////////////////////////
|
||||
|
||||
@@ -36,11 +36,11 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
}
|
||||
}
|
||||
|
||||
const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
|
||||
const parentID = parentRows[parentRowIndex].id
|
||||
|
||||
// Add any sub arrays that need to be created
|
||||
// We will call this recursively below
|
||||
arrayRows.forEach((arrayRow) => {
|
||||
arrayRows.forEach((arrayRow, i) => {
|
||||
if (Object.keys(arrayRow.arrays).length > 0) {
|
||||
rowsByTable[tableName].arrays.push(arrayRow.arrays)
|
||||
}
|
||||
@@ -53,6 +53,9 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
arrayRowLocaleData._parentID = arrayRow.row.id
|
||||
arrayRowLocaleData._locale = arrayRowLocale
|
||||
rowsByTable[tableName].locales.push(arrayRowLocaleData)
|
||||
if (!arrayRow.row.id) {
|
||||
arrayRowLocaleData._getParentID = (rows) => rows[i].id
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -61,12 +64,23 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
// Insert all corresponding arrays
|
||||
// (one insert per array table)
|
||||
for (const [tableName, row] of Object.entries(rowsByTable)) {
|
||||
// the nested arrays need the ID for the parentID foreign key
|
||||
let insertedRows: Args['parentRows']
|
||||
if (row.rows.length > 0) {
|
||||
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
|
||||
insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
|
||||
}
|
||||
|
||||
// Insert locale rows
|
||||
if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
|
||||
if (!row.locales[0]._parentID) {
|
||||
row.locales = row.locales.map((localeRow, i) => {
|
||||
if (typeof localeRow._getParentID === 'function') {
|
||||
localeRow._parentID = localeRow._getParentID(insertedRows)
|
||||
delete localeRow._getParentID
|
||||
}
|
||||
return localeRow
|
||||
})
|
||||
}
|
||||
await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
|
||||
}
|
||||
|
||||
@@ -76,7 +90,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
adapter,
|
||||
arrays: row.arrays,
|
||||
db,
|
||||
parentRows: row.rows,
|
||||
parentRows: insertedRows,
|
||||
})
|
||||
}
|
||||
}
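A standalone sketch of the deferred parent-ID pattern introduced above, with simplified types: locale rows whose parent array row has no ID yet store a resolver, which is applied once the parent insert's `returning()` rows are available:

```ts
type InsertedRow = { id: number | string }
type LocaleRow = {
  _getParentID?: (rows: InsertedRow[]) => number | string
  _parentID?: number | string
  [key: string]: unknown
}

export const resolveLocaleParents = (
  localeRows: LocaleRow[],
  insertedRows: InsertedRow[],
): LocaleRow[] =>
  localeRows.map((row) => {
    if (typeof row._getParentID === 'function') {
      // Look up the freshly inserted parent row and pin its ID on the locale row.
      row._parentID = row._getParentID(insertedRows)
      delete row._getParentID
    }
    return row
  })
```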
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
import { sql } from 'drizzle-orm'
|
||||
|
||||
import type { DrizzleDB } from '../types'
|
||||
import type { PostgresAdapter } from '../types'
|
||||
|
||||
export const createMigrationTable = async (db: DrizzleDB): Promise<void> => {
|
||||
await db.execute(sql`CREATE TABLE IF NOT EXISTS "payload_migrations" (
|
||||
export const createMigrationTable = async (adapter: PostgresAdapter): Promise<void> => {
|
||||
const prependSchema = adapter.schemaName ? `"${adapter.schemaName}".` : ''
|
||||
|
||||
await adapter.drizzle.execute(
|
||||
sql.raw(`CREATE TABLE IF NOT EXISTS ${prependSchema}"payload_migrations" (
|
||||
"id" serial PRIMARY KEY NOT NULL,
|
||||
"name" varchar,
|
||||
"batch" numeric,
|
||||
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
);`)
|
||||
);`),
|
||||
)
|
||||
}
|
||||
|
||||