feat: @payloadcms/db-vercel-postgres adapter (#7806)

Dedicated adapter for Vercel Postgres

- Uses the `@vercel/postgres` package under the hood.
- No `pg` dependency, which speeds up invocation
- Refactors all base Postgres functionality into a `BasePostgresAdapter` type,
which will ease the implementation of [other adapters supported by
drizzle-orm](https://orm.drizzle.team/docs/get-started-postgresql)
(see the type sketch below)
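
For reference, the new adapter type layers only the Vercel-specific pool fields on top of the shared base (as defined in `types.ts` later in this diff):

```ts
import type { BasePostgresAdapter } from '@payloadcms/drizzle/postgres'
import type { VercelPool, VercelPostgresPoolConfig } from '@vercel/postgres'

// Everything else (drizzle instance, schema, migrations, etc.) comes from the base type.
export type VercelPostgresAdapter = {
  pool?: VercelPool
  poolOptions?: VercelPostgresPoolConfig
} & BasePostgresAdapter
```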

## Usage

```ts
import { buildConfig } from 'payload'
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  db: vercelPostgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URI,
    },
  }),
  // ...rest of config
})
```

### Automatic Connection String Detection

Have `@vercel/postgres` detect the connection string automatically from the environment (typically `process.env.POSTGRES_URL`):

```ts
import { buildConfig } from 'payload'
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  db: vercelPostgresAdapter(),
  // ...rest of config
})
```
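
Under the hood this mirrors `connect.ts` in this diff: when no pool options are given, the adapter hands drizzle the `sql` client from `@vercel/postgres`, which resolves the connection string from the environment. A minimal sketch of that selection logic (the `createDrizzle` helper is illustrative, not part of the package):

```ts
import type { VercelPostgresPoolConfig } from '@vercel/postgres'
import { VercelPool, sql } from '@vercel/postgres'
import { drizzle } from 'drizzle-orm/node-postgres'

// Pool options take precedence; without them, `sql` reads POSTGRES_URL from the environment.
const createDrizzle = (poolOptions?: VercelPostgresPoolConfig) =>
  drizzle(poolOptions ? new VercelPool(poolOptions) : sql, { logger: false })
```
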
Author: Elliot DeNolf
Date: 2024-08-21 22:54:47 -04:00
Committed by: GitHub
Parent: 492d920133
Commit: 631431e006
33 changed files with 1674 additions and 18 deletions


@@ -16,6 +16,7 @@
"build:db-mongodb": "turbo build --filter db-mongodb",
"build:db-postgres": "turbo build --filter db-postgres",
"build:db-sqlite": "turbo build --filter db-sqlite",
"build:db-vercel-postgres": "turbo build --filter db-vercel-postgres",
"build:drizzle": "turbo build --filter drizzle",
"build:email-nodemailer": "turbo build --filter email-nodemailer",
"build:email-resend": "turbo build --filter email-resend",
@@ -58,6 +59,7 @@
"dev:generate-importmap": "pnpm runts ./test/generateImportMap.ts",
"dev:generate-types": "pnpm runts ./test/generateTypes.ts",
"dev:postgres": "cross-env PAYLOAD_DATABASE=postgres pnpm runts ./test/dev.ts",
"dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
"devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",


@@ -182,6 +182,7 @@ export function mongooseAdapter({
init,
migrateFresh,
migrationDir,
packageName: '@payloadcms/db-mongodb',
payload,
prodMigrations,
queryDrafts,


@@ -139,6 +139,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
migrateReset,
migrateStatus,
migrationDir,
packageName: '@payloadcms/db-postgres',
payload,
queryDrafts,
rejectInitializing,


@@ -140,6 +140,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
migrateReset,
migrateStatus,
migrationDir,
packageName: '@payloadcms/db-sqlite',
payload,
queryDrafts,
rejectInitializing,


@@ -0,0 +1 @@
/migrations


@@ -0,0 +1,10 @@
.tmp
**/.git
**/.hg
**/.pnp.*
**/.svn
**/.yarn/**
**/build
**/dist/**
**/node_modules
**/temp


@@ -0,0 +1,15 @@
{
"$schema": "https://json.schemastore.org/swcrc",
"sourceMaps": true,
"jsc": {
"target": "esnext",
"parser": {
"syntax": "typescript",
"tsx": true,
"dts": true
}
},
"module": {
"type": "es6"
}
}


@@ -0,0 +1,43 @@
# Payload Vercel Postgres Adapter
[Vercel Postgres](https://vercel.com/docs/storage/vercel-postgres) adapter for [Payload](https://payloadcms.com).
- [Main Repository](https://github.com/payloadcms/payload)
- [Payload Docs](https://payloadcms.com/docs)
## Installation
```bash
npm install @payloadcms/db-vercel-postgres
```
## Usage
### Explicit Connection String
```ts
import { buildConfig } from 'payload'
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  db: vercelPostgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URI,
    },
  }),
  // ...rest of config
})
```
### Automatic Connection String Detection
Have `@vercel/postgres` detect the connection string automatically from the environment (typically `process.env.POSTGRES_URL`):

```ts
import { buildConfig } from 'payload'
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  db: vercelPostgresAdapter(),
  // ...rest of config
})
```
More detailed usage can be found in the [Payload Docs](https://payloadcms.com/docs/configuration/overview).


@@ -0,0 +1,20 @@
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
/** @typedef {import('eslint').Linter.FlatConfig} */
let FlatConfig
/** @type {FlatConfig[]} */
export const index = [
...rootEslintConfig,
{
languageOptions: {
parserOptions: {
project: './tsconfig.json',
tsconfigDirName: import.meta.dirname,
...rootParserOptions,
},
},
},
]
export default index


@@ -0,0 +1,89 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.0.0-beta.84",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",
"url": "https://github.com/payloadcms/payload.git",
"directory": "packages/db-vercel-postgres"
},
"license": "MIT",
"author": "Payload <dev@payloadcms.com> (https://payloadcms.com)",
"type": "module",
"exports": {
".": {
"import": "./src/index.ts",
"types": "./src/index.ts",
"default": "./src/index.ts"
},
"./types": {
"import": "./src/types.ts",
"types": "./src/types.ts",
"default": "./src/types.ts"
},
"./migration-utils": {
"import": "./src/exports/migration-utils.ts",
"types": "./src/exports/migration-utils.ts",
"default": "./src/exports/migration-utils.ts"
}
},
"main": "./src/index.ts",
"types": "./src/types.ts",
"files": [
"dist",
"mock.js"
],
"scripts": {
"build": "rimraf .dist && rimraf tsconfig.tsbuildinfo && pnpm build:types && pnpm build:swc && pnpm build:esbuild && pnpm renamePredefinedMigrations",
"build:esbuild": "echo skipping esbuild",
"build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
"build:types": "tsc --emitDeclarationOnly --outDir dist",
"clean": "rimraf {dist,*.tsbuildinfo}",
"prepack": "pnpm clean && pnpm turbo build",
"prepublishOnly": "pnpm clean && pnpm turbo build",
"renamePredefinedMigrations": "node --no-deprecation --import @swc-node/register/esm-register ./scripts/renamePredefinedMigrations.ts"
},
"dependencies": {
"@payloadcms/drizzle": "workspace:*",
"@vercel/postgres": "^0.9.0",
"console-table-printer": "2.11.2",
"drizzle-kit": "0.23.2-df9e596",
"drizzle-orm": "0.32.1",
"prompts": "2.4.2",
"to-snake-case": "1.0.0",
"uuid": "10.0.0"
},
"devDependencies": {
"@hyrious/esbuild-plugin-commonjs": "^0.2.4",
"@payloadcms/eslint-config": "workspace:*",
"@types/pg": "8.10.2",
"@types/to-snake-case": "1.0.0",
"esbuild": "0.23.0",
"payload": "workspace:*"
},
"peerDependencies": {
"payload": "workspace:*"
},
"publishConfig": {
"exports": {
".": {
"import": "./dist/index.js",
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./types": {
"import": "./dist/types.js",
"types": "./dist/types.d.ts",
"default": "./dist/types.js"
},
"./migration-utils": {
"import": "./dist/exports/migration-utils.js",
"types": "./dist/exports/migration-utils.d.ts",
"default": "./dist/exports/migration-utils.js"
}
},
"main": "./dist/index.js",
"types": "./dist/index.d.ts"
}
}


@@ -0,0 +1,13 @@
const imports = `import { migratePostgresV2toV3 } from '@payloadcms/migratePostgresV2toV3'`
const up = ` await migratePostgresV2toV3({
// enables logging of changes that will be made to the database
debug: false,
// skips calls that modify schema or data
dryRun: false,
payload,
req,
})
`
export { imports, up }
//# sourceMappingURL=relationships-v2-v3.js.map


@@ -0,0 +1,19 @@
import fs from 'fs'
import path from 'path'
/**
* Changes built .js files to .mjs for ESM imports
*/
const rename = () => {
fs.readdirSync(path.resolve('./dist/predefinedMigrations'))
.filter((f) => {
return f.endsWith('.js')
})
.forEach((file) => {
const newPath = path.join('./dist/predefinedMigrations', file)
fs.renameSync(newPath, newPath.replace('.js', '.mjs'))
})
console.log('done')
}
rename()


@@ -0,0 +1,61 @@
import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
import type { Connect } from 'payload'
import { pushDevSchema } from '@payloadcms/drizzle'
import { VercelPool, sql } from '@vercel/postgres'
import { drizzle } from 'drizzle-orm/node-postgres'
import type { VercelPostgresAdapter } from './types.js'
export const connect: Connect = async function connect(
this: VercelPostgresAdapter,
options = {
hotReload: false,
},
) {
const { hotReload } = options
this.schema = {
pgSchema: this.pgSchema,
...this.tables,
...this.relations,
...this.enums,
}
try {
const logger = this.logger || false
// Pass the poolOptions if provided,
// otherwise have @vercel/postgres detect the connection string from the environment
this.drizzle = drizzle(this.poolOptions ? new VercelPool(this.poolOptions) : sql, {
logger,
schema: this.schema,
})
if (!hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
await this.dropDatabase({ adapter: this })
this.payload.logger.info('---- DROPPED TABLES ----')
}
}
} catch (err) {
this.payload.logger.error(`Error: cannot connect to Postgres. Details: ${err.message}`, err)
if (typeof this.rejectInitializing === 'function') this.rejectInitializing()
process.exit(1)
}
// Only push schema if not in production
if (
process.env.NODE_ENV !== 'production' &&
process.env.PAYLOAD_MIGRATING !== 'true' &&
this.push !== false
) {
await pushDevSchema(this as unknown as DrizzleAdapter)
}
if (typeof this.resolveInitializing === 'function') this.resolveInitializing()
if (process.env.NODE_ENV === 'production' && this.prodMigrations) {
await this.migrate({ migrations: this.prodMigrations })
}
}


@@ -0,0 +1 @@
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'


@@ -0,0 +1,163 @@
import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
commitTransaction,
count,
create,
createGlobal,
createGlobalVersion,
createVersion,
deleteMany,
deleteOne,
deleteVersions,
destroy,
find,
findGlobal,
findGlobalVersions,
findMigrationDir,
findOne,
findVersions,
migrate,
migrateDown,
migrateFresh,
migrateRefresh,
migrateReset,
migrateStatus,
operatorMap,
queryDrafts,
rollbackTransaction,
updateGlobal,
updateGlobalVersion,
updateOne,
updateVersion,
} from '@payloadcms/drizzle'
import {
convertPathToJSONTraversal,
countDistinct,
createJSONQuery,
createMigration,
defaultDrizzleSnapshot,
deleteWhere,
dropDatabase,
execute,
getMigrationTemplate,
init,
insert,
requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import { createDatabaseAdapter } from 'payload'
import type { Args, VercelPostgresAdapter } from './types.js'
import { connect } from './connect.js'
export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
const postgresIDType = args.idType || 'serial'
const payloadIDType = postgresIDType === 'serial' ? 'number' : 'text'
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(args.migrationDir)
let resolveInitializing
let rejectInitializing
let adapterSchema: VercelPostgresAdapter['pgSchema']
const initializing = new Promise<void>((res, rej) => {
resolveInitializing = res
rejectInitializing = rej
})
if (args.schemaName) {
adapterSchema = pgSchema(args.schemaName)
} else {
adapterSchema = { enum: pgEnum, table: pgTable }
}
return createDatabaseAdapter<VercelPostgresAdapter>({
name: 'postgres',
defaultDrizzleSnapshot,
drizzle: undefined,
enums: {},
features: {
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
initializing,
localesSuffix: args.localesSuffix || '_locales',
logger: args.logger,
operators: operatorMap,
pgSchema: adapterSchema,
pool: undefined,
poolOptions: args.pool,
prodMigrations: args.prodMigrations,
push: args.push,
relations: {},
relationshipsSuffix: args.relationshipsSuffix || '_rels',
schema: {},
schemaName: args.schemaName,
sessions: {},
tableNameMap: new Map<string, string>(),
tables: {},
transactionOptions: args.transactionOptions || undefined,
versionsSuffix: args.versionsSuffix || '_v',
// DatabaseAdapter
beginTransaction: args.transactionOptions === false ? undefined : beginTransaction,
commitTransaction,
connect,
convertPathToJSONTraversal,
count,
countDistinct,
create,
createGlobal,
createGlobalVersion,
createJSONQuery,
createMigration,
createVersion,
defaultIDType: payloadIDType,
deleteMany,
deleteOne,
deleteVersions,
deleteWhere,
destroy,
dropDatabase,
execute,
find,
findGlobal,
findGlobalVersions,
findOne,
findVersions,
init,
insert,
migrate,
migrateDown,
migrateFresh,
migrateRefresh,
migrateReset,
migrateStatus,
migrationDir,
packageName: '@payloadcms/db-vercel-postgres',
payload,
queryDrafts,
rejectInitializing,
requireDrizzleKit,
resolveInitializing,
rollbackTransaction,
updateGlobal,
updateGlobalVersion,
updateOne,
updateVersion,
})
}
return {
defaultIDType: payloadIDType,
init: adapter,
}
}
export type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/drizzle/postgres'
export { sql } from 'drizzle-orm'


@@ -0,0 +1,10 @@
const imports = `import { migratePostgresV2toV3 } from '@payloadcms/db-postgres/migration-utils'`
const upSQL = ` await migratePostgresV2toV3({
// enables logging of changes that will be made to the database
debug: false,
payload,
req,
})
`
export { imports, upSQL }


@@ -0,0 +1,237 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { Field, Payload, PayloadRequest } from 'payload'
import { upsertRow } from '@payloadcms/drizzle'
import type { VercelPostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'
import { traverseFields } from './traverseFields.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
docsToResave: DocsToResave
fields: Field[]
globalSlug?: string
isVersions: boolean
payload: Payload
req: PayloadRequest
tableName: string
}
export const fetchAndResave = async ({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req,
tableName,
}: Args) => {
for (const [id, rows] of Object.entries(docsToResave)) {
if (collectionSlug) {
const collectionConfig = payload.collections[collectionSlug].config
if (collectionConfig) {
if (isVersions) {
const doc = await payload.findVersionByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" version with ID ${id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`"${collectionConfig.slug}" version with ID ${doc.id} FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`"${collectionConfig.slug}" version with ID ${doc.id} migrated successfully!`,
)
}
} else {
const doc = await payload.findByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has migrated successfully!`,
)
}
}
}
}
if (globalSlug) {
const globalConfig = payload.config.globals?.find((global) => global.slug === globalSlug)
if (globalConfig) {
if (isVersions) {
const { docs } = await payload.findGlobalVersions({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
limit: 0,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(`${docs.length} global "${globalSlug}" versions will be migrated`)
}
for (const doc of docs) {
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`"${globalSlug}" version with ID ${doc.id} FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(
`"${globalSlug}" version with ID ${doc.id} migrated successfully!`,
)
}
}
} else {
const doc = await payload.findGlobal({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`The global "${globalSlug}" has FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(`The global "${globalSlug}" has migrated successfully!`)
}
}
}
}
}
}


@@ -0,0 +1,215 @@
import type { Field } from 'payload'
import { tabHasName } from 'payload/shared'
type Args = {
doc: Record<string, unknown>
fields: Field[]
locale?: string
path: string
rows: Record<string, unknown>[]
}
export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
fields.forEach((field) => {
switch (field.type) {
case 'group': {
const newPath = `${path ? `${path}.` : ''}${field.name}`
const newDoc = doc?.[field.name]
if (typeof newDoc === 'object' && newDoc !== null) {
if (field.localized) {
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
return traverseFields({
doc: localeDoc,
fields: field.fields,
locale,
path: newPath,
rows,
})
})
} else {
return traverseFields({
doc: newDoc as Record<string, unknown>,
fields: field.fields,
path: newPath,
rows,
})
}
}
break
}
case 'row':
case 'collapsible': {
return traverseFields({
doc,
fields: field.fields,
path,
rows,
})
}
case 'array': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.fields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.fields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
break
}
case 'blocks': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.fields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.fields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
break
}
case 'tabs': {
return field.tabs.forEach((tab) => {
if (tabHasName(tab)) {
const newDoc = doc?.[tab.name]
const newPath = `${path ? `${path}.` : ''}${tab.name}`
if (typeof newDoc === 'object' && newDoc !== null) {
if (tab.localized) {
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
return traverseFields({
doc: localeDoc,
fields: tab.fields,
locale,
path: newPath,
rows,
})
})
} else {
return traverseFields({
doc: newDoc as Record<string, unknown>,
fields: tab.fields,
path: newPath,
rows,
})
}
}
} else {
traverseFields({
doc,
fields: tab.fields,
path,
rows,
})
}
})
}
case 'relationship':
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
const relationshipPath = `${path ? `${path}.` : ''}${field.name}`
if (field.localized) {
const matchedRelationshipsWithLocales = rows.filter(
(row) => row.path === relationshipPath,
)
if (matchedRelationshipsWithLocales.length && !doc[field.name]) {
doc[field.name] = {}
}
const newDoc = doc[field.name] as Record<string, unknown>
matchedRelationshipsWithLocales.forEach((localeRow) => {
if (typeof localeRow.locale === 'string') {
const [, id] = Object.entries(localeRow).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
newDoc[localeRow.locale] = id
}
})
} else {
const matchedRelationship = rows.find((row) => {
const matchesPath = row.path === relationshipPath
if (locale) return matchesPath && locale === row.locale
return row.path === relationshipPath
})
if (matchedRelationship) {
const [, id] = Object.entries(matchedRelationship).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
doc[field.name] = id
}
}
}
}
}
}
})
}


@@ -0,0 +1,74 @@
export type Groups =
| 'addColumn'
| 'addConstraint'
| 'dropColumn'
| 'dropConstraint'
| 'dropTable'
| 'notNull'
/**
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
* @param sql
*/
function convertAddColumnToAlterColumn(sql) {
// Regular expression to match the ADD COLUMN statement with its constraints
const regex = /ALTER TABLE ("[^"]+") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
}
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
const groups = {
addColumn: 'ADD COLUMN',
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
addConstraint: 'ADD CONSTRAINT',
//example:
// DO $$ BEGIN
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
// EXCEPTION
// WHEN duplicate_object THEN null;
// END $$;
dropColumn: 'DROP COLUMN',
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
dropConstraint: 'DROP CONSTRAINT',
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
dropTable: 'DROP TABLE',
// example: DROP TABLE "pages_rels";
notNull: 'NOT NULL',
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
}
const result = Object.keys(groups).reduce((result, group: Groups) => {
result[group] = []
return result
}, {}) as Record<Groups, string[]>
for (const line of list) {
Object.entries(groups).some(([key, value]) => {
if (line.endsWith('NOT NULL;')) {
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
// example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
// becomes two separate statements:
// 1. ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;
// 2. ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
result.addColumn.push(line.replace(' NOT NULL;', ';'))
result.notNull.push(convertAddColumnToAlterColumn(line))
return true
}
if (line.includes(value)) {
result[key].push(line)
return true
}
})
}
return result
}
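
A hypothetical call showing how `groupUpSQLStatements` splits and groups generated SQL (the statements are taken from the examples in the comments above):

```ts
const grouped = groupUpSQLStatements([
  'ALTER TABLE "posts" ADD COLUMN "category_id" integer NOT NULL;',
  'ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";',
])
// grouped.addColumn  -> ['ALTER TABLE "posts" ADD COLUMN "category_id" integer;']
// grouped.notNull    -> ['ALTER TABLE "posts" ALTER COLUMN "category_id" SET NOT NULL;']
// grouped.dropColumn -> ['ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";']
```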


@@ -0,0 +1,278 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import fs from 'fs'
import { createRequire } from 'module'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
import { migrateRelationships } from './migrateRelationships.js'
import { traverseFields } from './traverseFields.js'
const require = createRequire(import.meta.url)
type Args = {
debug?: boolean
payload: Payload
req?: Partial<PayloadRequest>
}
/**
* Moves upload and relationship columns from the join table and into the tables while moving data
* This is done in the following order:
* ADD COLUMNs
* -- manipulate data to move relationships to new columns
* ADD CONSTRAINTs
* NOT NULLs
* DROP TABLEs
* DROP CONSTRAINTs
* DROP COLUMNs
* @param debug
* @param payload
* @param req
*/
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const adapter = payload.db as unknown as VercelPostgresAdapter
const db = adapter.sessions[await req.transactionID].db as TransactionPg
const dir = payload.db.migrationDir
// get the drizzle migrateUpSQL from drizzle using the last schema
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
// Get the previous migration snapshot
const previousSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))
.sort()
.reverse()?.[0]
if (!previousSnapshot) {
throw new Error(
`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`,
)
}
const drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
if (!generatedSQL.length) {
payload.logger.info(`No schema changes needed.`)
process.exit(0)
}
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
if (debug) {
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
payload.logger.info(addColumnsStatement)
}
await db.execute(sql.raw(addColumnsStatement))
for (const collection of payload.config.collections) {
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: false,
fields: collection.fields,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: collection.fields,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (collection.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionCollectionFields(collection)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: versionFields,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
for (const global of payload.config.globals) {
const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: false,
fields: global.fields,
globalSlug: global.slug,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: global.fields,
globalSlug: global.slug,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (global.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionGlobalFields(global)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
// ADD CONSTRAINT
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
if (debug) {
payload.logger.info('ADDING CONSTRAINTS')
payload.logger.info(addConstraintsStatement)
}
await db.execute(sql.raw(addConstraintsStatement))
// NOT NULL
const notNullStatements = sqlUpStatements.notNull.join('\n')
if (debug) {
payload.logger.info('NOT NULL CONSTRAINTS')
payload.logger.info(notNullStatements)
}
await db.execute(sql.raw(notNullStatements))
// DROP TABLE
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
if (debug) {
payload.logger.info('DROPPING TABLES')
payload.logger.info(dropTablesStatement)
}
await db.execute(sql.raw(dropTablesStatement))
// DROP CONSTRAINT
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
if (debug) {
payload.logger.info('DROPPING CONSTRAINTS')
payload.logger.info(dropConstraintsStatement)
}
await db.execute(sql.raw(dropConstraintsStatement))
// DROP COLUMN
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
if (debug) {
payload.logger.info('DROPPING COLUMNS')
payload.logger.info(dropColumnsStatement)
}
await db.execute(sql.raw(dropColumnsStatement))
}


@@ -0,0 +1,103 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { Field, Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import type { VercelPostgresAdapter } from '../../types.js'
import type { DocsToResave, PathsToQuery } from './types.js'
import { fetchAndResave } from './fetchAndResave/index.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
fields: Field[]
globalSlug?: string
isVersions: boolean
pathsToQuery: PathsToQuery
payload: Payload
req?: Partial<PayloadRequest>
tableName: string
}
export const migrateRelationships = async ({
adapter,
collectionSlug,
db,
debug,
fields,
globalSlug,
isVersions,
pathsToQuery,
payload,
req,
tableName,
}: Args) => {
if (pathsToQuery.size === 0) return
let offset = 0
let paginationResult
const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
return (statement += `
"${tableName}${adapter.relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
`)
}, '')
while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {
const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
`
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
if (paginationResult.rows.length === 0) return
offset += 1
const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE
(${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});
`
if (debug) {
payload.logger.info('FINDING ROWS TO MIGRATE')
payload.logger.info(statement)
}
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
const docsToResave: DocsToResave = {}
result.rows.forEach((row) => {
const parentID = row.parent_id
if (typeof parentID === 'string' || typeof parentID === 'number') {
if (!docsToResave[parentID]) docsToResave[parentID] = []
docsToResave[parentID].push(row)
}
})
await fetchAndResave({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req: req as unknown as PayloadRequest,
tableName,
})
}
const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`
if (debug) {
payload.logger.info('DELETING ROWS')
payload.logger.info(deleteStatement)
}
await db.execute(sql.raw(`${deleteStatement}`))
}


@@ -0,0 +1,117 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { Field, Payload } from 'payload'
import { tabHasName } from 'payload/shared'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
columnPrefix: string
db: TransactionPg
disableNotNull: boolean
fields: Field[]
globalSlug?: string
isVersions: boolean
newTableName: string
parentTableName: string
path: string
pathsToQuery: PathsToQuery
payload: Payload
rootTableName: string
}
export const traverseFields = (args: Args) => {
args.fields.forEach((field) => {
switch (field.type) {
case 'group': {
let newTableName = `${args.newTableName}_${toSnakeCase(field.name)}`
if (field.localized && args.payload.config.localization) {
newTableName += args.adapter.localesSuffix
}
return traverseFields({
...args,
columnPrefix: `${args.columnPrefix}${toSnakeCase(field.name)}_`,
fields: field.fields,
newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}`,
})
}
case 'row':
case 'collapsible': {
return traverseFields({
...args,
fields: field.fields,
})
}
case 'array': {
const newTableName = args.adapter.tableNameMap.get(
`${args.newTableName}_${toSnakeCase(field.name)}`,
)
return traverseFields({
...args,
columnPrefix: '',
fields: field.fields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
}
case 'blocks': {
return field.blocks.forEach((block) => {
const newTableName = args.adapter.tableNameMap.get(
`${args.rootTableName}_blocks_${toSnakeCase(block.slug)}`,
)
traverseFields({
...args,
columnPrefix: '',
fields: block.fields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
})
}
case 'tabs': {
return field.tabs.forEach((tab) => {
if (tabHasName(tab)) {
args.columnPrefix = `${args.columnPrefix}_${toSnakeCase(tab.name)}_`
args.path = `${args.path ? `${args.path}.` : ''}${tab.name}`
args.newTableName = `${args.newTableName}_${toSnakeCase(tab.name)}`
if (tab.localized && args.payload.config.localization) {
args.newTableName += args.adapter.localesSuffix
}
}
traverseFields({
...args,
fields: tab.fields,
})
})
}
case 'relationship':
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
args.pathsToQuery.add(`${args.path ? `${args.path}.` : ''}${field.name}`)
}
}
return null
}
}
})
}


@@ -0,0 +1,9 @@
/**
* Set of all paths which should be moved
* This will be built up into one WHERE query
*/
export type PathsToQuery = Set<string>
export type DocsToResave = {
[id: number | string]: Record<string, unknown>[]
}
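
A hypothetical shape of the data these types describe (the `users_id` column and the ID values are illustrative only):

```ts
// Paths collected by traverseFields, later OR-ed into a single WHERE clause.
const pathsToQuery: PathsToQuery = new Set(['author', 'hero.image'])

// Rows from the *_rels table, grouped by the parent document ID they belong to.
const docsToResave: DocsToResave = {
  42: [{ id: 1, locale: null, order: 1, parent_id: 42, path: 'author', users_id: 7 }],
}
```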


@@ -0,0 +1,78 @@
import type {
BasePostgresAdapter,
GenericEnum,
MigrateDownArgs,
MigrateUpArgs,
PostgresDB,
} from '@payloadcms/drizzle/postgres'
import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
import type { VercelPool, VercelPostgresPoolConfig } from '@vercel/postgres'
import type { DrizzleConfig } from 'drizzle-orm'
import type { PgSchema, PgTableFn, PgTransactionConfig } from 'drizzle-orm/pg-core'
export type Args = {
connectionString?: string
idType?: 'serial' | 'uuid'
localesSuffix?: string
logger?: DrizzleConfig['logger']
migrationDir?: string
/**
* Optional pool configuration for Vercel Postgres
* If not provided, vercel/postgres will attempt to use the Vercel environment variables
*/
pool?: VercelPostgresPoolConfig
prodMigrations?: {
down: (args: MigrateDownArgs) => Promise<void>
name: string
up: (args: MigrateUpArgs) => Promise<void>
}[]
push?: boolean
relationshipsSuffix?: string
/**
* The schema name to use for the database
* @experimental This only works when there are no other tables or enums of the same name in the database under a different schema. Awaiting fix from Drizzle.
*/
schemaName?: string
transactionOptions?: PgTransactionConfig | false
versionsSuffix?: string
}
export type VercelPostgresAdapter = {
pool?: VercelPool
poolOptions?: Args['pool']
} & BasePostgresAdapter
declare module 'payload' {
export interface DatabaseAdapter
extends Omit<Args, 'idType' | 'logger' | 'migrationDir' | 'pool'>,
DrizzleAdapter {
beginTransaction: (options?: PgTransactionConfig) => Promise<null | number | string>
drizzle: PostgresDB
enums: Record<string, GenericEnum>
/**
* An object keyed by table name; each entry maps a unique constraint name to the dot-notation path of the field it covers
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
idType: Args['idType']
initializing: Promise<void>
localesSuffix?: string
logger: DrizzleConfig['logger']
pgSchema?: { table: PgTableFn } | PgSchema
pool: VercelPool
poolOptions: Args['pool']
prodMigrations?: {
down: (args: MigrateDownArgs) => Promise<void>
name: string
up: (args: MigrateUpArgs) => Promise<void>
}[]
push: boolean
rejectInitializing: () => void
relationshipsSuffix?: string
resolveInitializing: () => void
schema: Record<string, unknown>
schemaName?: Args['schemaName']
tableNameMap: Map<string, string>
versionsSuffix?: string
}
}
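
Putting the `Args` options together, a fuller configuration could look like the sketch below (all keys come from `Args` above; the values are only illustrative):

```ts
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export const db = vercelPostgresAdapter({
  idType: 'uuid',
  migrationDir: './src/migrations',
  pool: { connectionString: process.env.POSTGRES_URL },
  push: false,
  schemaName: 'payload', // experimental, see the note on Args['schemaName']
})
```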


@@ -0,0 +1,38 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"noEmit": false,
"emitDeclarationOnly": true,
"outDir": "./dist",
"rootDir": "./src",
"skipLibCheck": true,
},
"exclude": [
"dist",
"build",
"tests",
"test",
"node_modules",
"eslint.config.js",
"src/**/*.spec.js",
"src/**/*.spec.jsx",
"src/**/*.spec.ts",
"src/**/*.spec.tsx"
],
"include": [
"src",
"src/**/*.ts",
],
"references": [
{
"path": "../payload"
},
{
"path": "../translations"
},
{
"path": "../drizzle"
}
]
}


@@ -1,4 +1,3 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'
import fs from 'fs'
@@ -112,6 +111,7 @@ export const createMigration: CreateMigration = async function createMigration(
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
packageName: payload.db.packageName,
upSQL: upSQL || ` // Migration code`,
}),
)


@@ -9,8 +9,9 @@ export const indent = (text: string) =>
export const getMigrationTemplate = ({
downSQL,
imports,
packageName,
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '${packageName}'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
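
With `packageName` threaded through, a migration generated for the new adapter would presumably begin like this (a sketch based on the template above; the `down` stub is assumed to follow the same pattern as `up`):

```ts
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-vercel-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // Migration code
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  // Migration code
}
```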


@@ -111,8 +111,6 @@ export type BasePostgresAdapter = {
logger: DrizzleConfig['logger']
operators: Operators
pgSchema?: Schema
// pool: Pool
// poolOptions: Args['pool']
prodMigrations?: {
down: (args: MigrateDownArgs) => Promise<void>
name: string


@@ -100,6 +100,13 @@ export interface BaseDatabaseAdapter {
* The name of the database adapter
*/
name: string
/**
* Full package name of the database adapter
*
* @example @payloadcms/db-postgres
*/
packageName: string
/**
* reference to the instance of payload
*/
@@ -434,5 +441,6 @@ export type DBIdentifierName =
export type MigrationTemplateArgs = {
downSQL?: string
imports?: string
packageName?: string
upSQL?: string
}

pnpm-lock.yaml (generated, 57 changed lines)

@@ -383,6 +383,52 @@ importers:
specifier: workspace:*
version: link:../payload
packages/db-vercel-postgres:
dependencies:
'@payloadcms/drizzle':
specifier: workspace:*
version: link:../drizzle
'@vercel/postgres':
specifier: ^0.9.0
version: 0.9.0
console-table-printer:
specifier: 2.11.2
version: 2.11.2
drizzle-kit:
specifier: 0.23.2-df9e596
version: 0.23.2-df9e596
drizzle-orm:
specifier: 0.32.1
version: 0.32.1(@libsql/client@0.6.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@neondatabase/serverless@0.9.4)(@types/pg@8.10.2)(@vercel/postgres@0.9.0)(pg@8.11.3)(react@19.0.0-rc-06d0b89e-20240801)(types-react@19.0.0-rc.0)
prompts:
specifier: 2.4.2
version: 2.4.2
to-snake-case:
specifier: 1.0.0
version: 1.0.0
uuid:
specifier: 10.0.0
version: 10.0.0
devDependencies:
'@hyrious/esbuild-plugin-commonjs':
specifier: ^0.2.4
version: 0.2.4(cjs-module-lexer@1.3.1)(esbuild@0.23.0)
'@payloadcms/eslint-config':
specifier: workspace:*
version: link:../eslint-config
'@types/pg':
specifier: 8.10.2
version: 8.10.2
'@types/to-snake-case':
specifier: 1.0.0
version: 1.0.0
esbuild:
specifier: 0.23.0
version: 0.23.0
payload:
specifier: workspace:*
version: link:../payload
packages/drizzle:
dependencies:
console-table-printer:
@@ -1600,6 +1646,9 @@ importers:
'@payloadcms/db-sqlite':
specifier: workspace:*
version: link:../packages/db-sqlite
'@payloadcms/db-vercel-postgres':
specifier: workspace:*
version: link:../packages/db-vercel-postgres
'@payloadcms/drizzle':
specifier: workspace:*
version: link:../packages/drizzle
@@ -12239,7 +12288,6 @@ snapshots:
'@neondatabase/serverless@0.9.4':
dependencies:
'@types/pg': 8.11.6
optional: true
'@next/bundle-analyzer@15.0.0-canary.104(bufferutil@4.0.8)':
dependencies:
@@ -13088,7 +13136,6 @@ snapshots:
'@types/node': 20.12.5
pg-protocol: 1.6.1
pg-types: 4.0.2
optional: true
'@types/pluralize@0.0.33': {}
@@ -13326,7 +13373,6 @@ snapshots:
bufferutil: 4.0.8
utf-8-validate: 6.0.4
ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)
optional: true
'@vue/compiler-core@3.4.37':
dependencies:
@@ -13832,7 +13878,6 @@ snapshots:
bufferutil@4.0.8:
dependencies:
node-gyp-build: 4.8.1
optional: true
bundle-name@3.0.0:
dependencies:
@@ -16940,8 +16985,7 @@ snapshots:
fetch-blob: 3.2.0
formdata-polyfill: 4.0.10
node-gyp-build@4.8.1:
optional: true
node-gyp-build@4.8.1: {}
node-int64@0.4.0: {}
@@ -18724,7 +18768,6 @@ snapshots:
utf-8-validate@6.0.4:
dependencies:
node-gyp-build: 4.8.1
optional: true
utf8-byte-length@1.0.5: {}


@@ -9,6 +9,8 @@ import { fileURLToPath } from 'node:url'
import path from 'path'
import util from 'util'
import type { PackageDetails } from './lib/getPackageDetails.js'
import { getPackageDetails } from './lib/getPackageDetails.js'
const execOpts: ExecSyncOptions = { stdio: 'inherit' }
@@ -47,6 +49,7 @@ async function main() {
'drizzle',
'db-sqlite',
'db-postgres',
'db-vercel-postgres',
'richtext-lexical',
'translations',
'plugin-cloud',
@@ -58,19 +61,17 @@ async function main() {
// Prebuild all packages
header(`\n🔨 Prebuilding all packages...`)
//await execa('pnpm', ['install'], execaOpts)
const filtered = packageDetails.filter((p): p is Exclude<typeof p, null> => p !== null)
header(`\nOutputting ${filtered.length} packages...
${chalk.white.bold(filtered.map((p) => p.name).join('\n'))}
`)
if (!noBuild) {
execSync('pnpm build:all --output-logs=errors-only', { stdio: 'inherit' })
}
header(`\n 📦 Packing all packages to ${dest}...`)
header(`\nOutputting ${filtered.length} packages...
${chalk.white.bold(listPackages(filtered))}`)
header(`\n📦 Packing all packages to ${dest}...`)
await Promise.all(
filtered.map(async (p) => {
@@ -84,3 +85,7 @@ ${chalk.white.bold(filtered.map((p) => p.name).join('\n'))}
function header(message: string, opts?: { enable?: boolean }) {
console.log(chalk.bold.green(`${message}\n`))
}
function listPackages(packages: PackageDetails[]) {
return packages.map((p) => ` - ${p.name}`).join('\n')
}


@@ -28,6 +28,7 @@
"@payloadcms/db-mongodb": "workspace:*",
"@payloadcms/db-postgres": "workspace:*",
"@payloadcms/db-sqlite": "workspace:*",
"@payloadcms/db-vercel-postgres": "workspace:*",
"@payloadcms/drizzle": "workspace:*",
"@payloadcms/email-nodemailer": "workspace:*",
"@payloadcms/email-resend": "workspace:*",


@@ -9,6 +9,7 @@ export const tgzToPkgNameMap = {
payload: 'payload-*',
'@payloadcms/db-mongodb': 'payloadcms-db-mongodb-*',
'@payloadcms/db-postgres': 'payloadcms-db-postgres-*',
'@payloadcms/db-vercel-postgres': 'payloadcms-db-vercel-postgres-*',
'@payloadcms/db-sqlite': 'payloadcms-db-sqlite-*',
'@payloadcms/drizzle': 'payloadcms-drizzle-*',
'@payloadcms/email-nodemailer': 'payloadcms-email-nodemailer-*',