Compare commits

...

20 Commits

Author SHA1 Message Date
Sasha
73148ca819 templates: add script to regenerate types in all templates 2024-12-11 21:04:25 +02:00
Alessio Gravili
00d438e91f refactor(ui): export TableColumnsProvider, documentDrawerBaseClass and SelectMany (#9899) 2024-12-11 18:34:58 +00:00
Patrik
b1d92c2bad feat: allows excluding entities from the nav sidebar / dashboard without disabling their routes (#9897)
### What?

Previously, the `admin.group` property on `collection` / `global`
configs allowed for a custom group and the `admin.hidden` property would
not only hide the entity from the nav sidebar / dashboard but also
disable its routes.

### Why?

There was not a simple way to hide an entity from the nav sidebar /
dashboard but still keep the entity's routes.

### How?

Now we've added `false` to the accepted type of the `admin.group`
property to account for this.

Passing `false` to `admin.group` will hide the entity from the sidebar
nav and dashboard but keep the routes available to navigate.

For example:

```ts
admin: {
  group: false,
},
```
2024-12-11 13:31:12 -05:00
Elliot DeNolf
5c2f72d70e templates: bump for v3.6.0 (#9900)
🤖 Automated bump of templates for v3.6.0

Triggered by user: @denolfe

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-12-11 12:27:46 -06:00
Joachim Damsgaard
e78b542ebc docs: fix typo (#9886)
Unfinished value string in documentation example
2024-12-11 13:12:48 -05:00
Elliot DeNolf
45d20643df chore(release): v3.6.0 [skip ci] 2024-12-11 13:05:47 -05:00
Alessio Gravili
91e8acc871 fix: cannot pass function to client error when defining server-only props in custom field components (#9898)
Fixes https://github.com/payloadcms/payload/issues/9895

We were still including fields' custom components in the ClientConfig,
which would throw an error if actual server-only properties were passed
to `PayloadComponent.serverProps`. This PR removes them from the
ClientConfig.
2024-12-11 18:00:09 +00:00
Alessio Gravili
b83ea8494e refactor(richtext-lexical): export useBlockComponentContext and useInlineBlockComponentContext (#9896) 2024-12-11 10:46:57 -07:00
Sasha
b73fc586b8 feat: expose session, db in migrations to use the active transaction with the database directly (#9849)
This PR adds a feature which fixes another issue with migrations in
Postgres and includes a few refactors that significantly reduce code
duplication.

Previously, if you needed to use the underlying database directly with
the active transaction (for example, to execute raw SQL) in a migration
created from `payload migrate:create`, you had to do something like
this, since `req` doesn't work there:
```ts
// Postgres
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  const db = payload.db.sessions?.[await req.transactionID!].db ?? payload.db
  const { rows: posts } = await db.execute(sql`SELECT * from posts`)
}

// MongoDB
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  const session = payload.db.sessions?.[await req.transactionID!]
  const posts = await payload.db.collections.posts.collection.find({ session }).toArray()
}
```

Which was:
1. Awkward to write
2. Not documented anywhere

Now, we expose `session` and `db` to `up` and `down` functions for you:

#### MongoDB:
```ts
import { type MigrateUpArgs } from '@payloadcms/db-mongodb'

export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
  const posts = await payload.db.collections.posts.collection.find({ session }).toArray()
}
```
#### Postgres:
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'

export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  const { rows: posts } = await db.execute(sql`SELECT * from posts`)
}
```

#### SQLite:
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'

export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  const { rows: posts } = await db.run(sql`SELECT * from posts`)
}
```
This was actually already partially the case with Postgres migrations
(we were already passing `db`), but:
1. Only for `up`, and only when running `payload migrate`, not for
example with `payload migrate:fresh`
2. It was not documented in either TypeScript or the docs.

By ensuring we use `db`, this also fixes an issue that affects all
Postgres/SQLite migrations:

Currently, if we run `payload migrate:create` with the Postgres adapter,
we get a file like this:

```ts
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`
   CREATE TABLE IF NOT EXISTS "users" (
  	"id" serial PRIMARY KEY NOT NULL,
  );
```
Looks good?
Not exactly!
`payload.db.drizzle.execute()` doesn't actually use the current
transaction, which can lead to some problems.
Instead, it should use the `db` from `payload.db.sessions?.[await
req.transactionID!].db` because that's where we store our Drizzle
instance with the transaction.

But now, if we run `payload migrate:create` we get:
```ts
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'

export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  await db.execute(sql`
   CREATE TABLE IF NOT EXISTS "users" (
  	"id" serial PRIMARY KEY NOT NULL,
  );
```

This is what we want, as the `db` is passed correctly here:

76428373e4/packages/drizzle/src/migrate.ts (L88-L90)

```ts
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  const dbWithTransaction = payload.db.sessions?.[await req.transactionID!].db
  payload.logger.info({ one: db === dbWithTransaction })
  payload.logger.info({ two: db === payload.db.drizzle })
```
<img width="336" alt="image"
src="https://github.com/user-attachments/assets/f9fab5a9-44c2-44a9-95dd-8e5cf267f027">

Additionally, this PR refactors:
* `createMigration` with Drizzle - now we have a shareable
`buildCreateMigration` in `@payloadcms/drizzle` to reduce copy-pasting
of the same logic.
* the `v2-v3` relationships migration for Postgres is now shared between
`db-postgres` and `db-vercel-postgres`, again to reduce copy-pasting.
2024-12-11 12:23:12 -05:00
Said Akhrarov
0303b78d62 fix(next): thread default ServerProps to view actions and other components that were missing (#9868)
This PR threads default `serverProps` to Edit and List view action slots, as well as various other components that were missing them.

---------

Co-authored-by: Alessio Gravili <alessio@gravili.de>
2024-12-11 17:19:37 +00:00
James Mikrut
a0f0316534 fix: ensures autosave only runs sequentially (#9892)
Previously, Autosave could trigger 2 parallel fetches where the second
could outpace the first, leading to inconsistent results.

Now, we use a simple queue-based system where we can push multiple
autosave events into a queue, and only the latest autosave will be
performed.

This also prevents multiple autosaves from ever running in parallel.
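
A rough, hypothetical sketch of such a latest-wins queue (assumed names, not the PR's actual code): new autosave requests replace any pending one, and only one save is ever in flight.

```ts
// Hypothetical sketch of a "latest wins" autosave queue.
type SaveFn = () => Promise<void>

let running = false
let pending: SaveFn | null = null

export const enqueueAutosave = async (save: SaveFn): Promise<void> => {
  // Keep only the most recent autosave request.
  pending = save

  // If a save is already in flight, it will pick up `pending` when it finishes.
  if (running) return

  running = true
  try {
    while (pending) {
      const next = pending
      pending = null
      await next() // saves run strictly one at a time
    }
  } finally {
    running = false
  }
}
```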
2024-12-11 09:33:48 -06:00
Patrik
522399095c feat(next): adds suppressHydrationWarning property to payload config admin options (#9867)
### What?

There are scenarios where the server-rendered HTML might intentionally
differ from the client-rendered DOM, causing hydration errors.

### How?

Added a new prop to the Payload config `admin` object called
`suppressHydrationWarning` that controls whether these warnings are
displayed.

If you set `suppressHydrationWarning` to `true`, React will not warn you
about mismatches in the attributes and the content of that element.

Defaults to `false` - so if there is a mismatch and this prop is not
defined in your config, the hydration errors will show.

```ts
admin: {
  suppressHydrationWarning: true // will suppress the errors if there is a mismatch
}
```
2024-12-11 10:24:56 -05:00
Dan Ribbens
306b5d2300 fix: forgotPassword set expiration time (#9871)
The logic for creating a timestamp for use in resetPassword was not
correctly returning a valid date.
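
For illustration only (hypothetical names, not the exact fix), a valid expiration can be computed as a future `Date` from a duration in seconds:

```ts
// Hypothetical sketch: build a valid expiration timestamp from a duration in seconds.
// `expirationInSeconds` is an assumed name, not a Payload config key.
const expirationInSeconds = 3600

const resetPasswordExpiration = new Date(Date.now() + expirationInSeconds * 1000)

// Persist something that parses back into a valid date, e.g. an ISO string.
const resetPasswordExpirationISO = resetPasswordExpiration.toISOString()
```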

---------

Co-authored-by: Patrik Kozak <patrik@payloadcms.com>
2024-12-11 08:43:22 -05:00
Jarrod Flesch
ca52a50dd9 feat: consolidates create and duplicate operations (#9866) 2024-12-10 21:44:47 -05:00
Alessio Gravili
5bfc92d71d fix(next): next.js 15.1.0 compatibility by not importing isRedirectError from next/dist (#9878)
Fixes https://github.com/payloadcms/payload/issues/9876

The import path has changed in Next.js 15.1.0
2024-12-11 01:00:53 +00:00
Alessio Gravili
b1ef28dd39 feat: allow where in payload.jobs.run (#9877)
Example:

```ts
await payload.jobs.queue({
  task: 'MyTask',
  input: {
    message: 'secret',
  },
})

await payload.jobs.run({ where: { 'input.message': { equals: 'secret' } } })
```
2024-12-11 00:33:53 +00:00
Alessio Gravili
09246a45e0 feat: add payload.jobs.runByID (#9875) 2024-12-10 23:37:06 +00:00
Jacob Fletcher
da6bc55b19 fix(ui): ensures admin.disableListFilter is disabled despite url search params (#9874)
Continuation of #9846 and partial fix for #9774. When setting
`admin.disableListFilter` retroactively, it remains active within the
list filter controls. Same for when the URL search query contains one of
these fields, except this will actually display the _wrong_ field,
falling back to the _first_ field from the config. The fix is to
properly disable the condition for this field if it's an active filter,
while still preventing it from ever rendering as an option within the
field selector itself.
2024-12-10 17:37:52 -05:00
Jacob Fletcher
f7172b5b2c fix(ui): refreshes column state during hmr and respects admin.disableListColumn despite preferences (#9846)
Partial fix for #9774. When `admin.disableListColumn` is set
retroactively, it continues to appear in column state, but shouldn't.
This was because the table column context was not refreshing after HMR
runs, and would instead hold onto these stale columns until the page
itself refreshes. Similarly, this was also a problem when the user had
saved any of these columns to their list preferences, where those prefs
would take precedence despite these properties being set on the
underlying fields. The fix is to filter these columns from all requests
that send them, and ensure local component state properly refreshes
itself.
2024-12-10 15:11:44 -05:00
Patrik
563694d930 fix(ui): prevents unwanted data overrides when bulk editing (#9842)
### What?

It became possible for fields to reset to a defined `defaultValue` when
bulk editing from the `edit-many` drawer.

### Why?

The form state of all fields was being considered during a bulk edit,
including their initial states. This meant any fields with default
values or nested fields (`arrays`) would be overwritten with their
initial states, i.e. empty values or default values.

### How?

Now we only send the form data of the fields specifically being edited
in the edit-many drawer and ignore all other fields, leaving them at
their current values.
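
As a rough sketch (assumed names and shapes, not the PR's code), limiting the submitted data to only the selected fields might look like this:

```ts
// Hypothetical sketch: only submit values for the fields chosen in the edit-many drawer.
type FormState = Record<string, { value: unknown }>

const buildBulkEditData = (
  formState: FormState,
  selectedFields: string[], // field paths picked by the user
): Record<string, unknown> => {
  const data: Record<string, unknown> = {}

  for (const path of selectedFields) {
    if (path in formState) {
      data[path] = formState[path].value
    }
  }

  // Unselected fields are omitted entirely, so their current values
  // (or defaults) are never sent and never overwritten.
  return data
}
```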

Fixes #9590

---------

Co-authored-by: Dan Ribbens <dan.ribbens@gmail.com>
2024-12-10 11:39:15 -05:00
169 changed files with 2160 additions and 2099 deletions

View File

@@ -25,24 +25,24 @@ export const MyCollection: CollectionConfig = {
The following options are available:
| Option | Description |
| -------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title. |
| Option | Description |
| -------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title. |
| **`description`** | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`enableRichTextRelationship`** | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`meta`** | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](./metadata). |
| **`preview`** | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](#preview). |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`enableRichTextRelationship`** | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`meta`** | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](./metadata). |
| **`preview`** | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](#preview). |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`components`** | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| **`listSearchableFields`** | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| **`pagination`** | Set pagination-specific options for this Collection. [More details](#pagination). |
| **`baseListFilter`** | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
| **`listSearchableFields`** | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| **`pagination`** | Set pagination-specific options for this Collection. [More details](#pagination). |
| **`baseListFilter`** | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
### Custom Components

View File

@@ -25,9 +25,9 @@ export const MyGlobal: GlobalConfig = {
The following options are available:
| Option | Description |
| ------------- | --------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| Option | Description |
| ----------------- | --------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Global from navigation and admin routing. |
| **`components`** | Swap in your own React components to be used within this Global. [More details](#custom-components). |
| **`preview`** | Function to generate a preview URL within the Admin Panel for this Global that can point to your app. [More details](#preview). |

View File

@@ -184,7 +184,7 @@ export const MyGlobal: GlobalConfig = {
meta: {
// highlight-end
title: 'My Global',
description: 'The best
description: 'The best admin panel in the world',
},
},
}

View File

@@ -86,20 +86,21 @@ const config = buildConfig({
The following options are available:
| Option | Description |
|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](./components). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`disable`** | If set to `true`, the entire Admin Panel will be disabled. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`.
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
| Option | Description |
|--------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](./components). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`disable`** | If set to `true`, the entire Admin Panel will be disabled. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`suppressHydrationWarning`** | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root <html> tag. Defaults to `false`. |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
<Banner type="success">
<strong>Reminder:</strong>

View File

@@ -57,6 +57,38 @@ you need to do is pass the `req` object to any [local API](/docs/local-api/overv
after your `up` or `down` function runs. If the migration errors at any point or fails to commit, it is caught and the
transaction gets aborted. This way no change is made to the database if the migration fails.
### Using the database directly with the transaction
Additionally, you can bypass Payload's layer entirely and perform operations directly on your underlying database within the active transaction:
### MongoDB:
```ts
import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
const posts = await payload.db.collections.posts.collection.find({ session }).toArray()
}
```
### Postgres:
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
const { rows: posts } = await db.execute(sql`SELECT * from posts`)
}
```
### SQLite:
In SQLite, transactions are disabled by default. [More](./transactions).
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
const { rows: posts } = await db.run(sql`SELECT * from posts`)
}
```
## Migrations Directory
Each DB adapter has an optional property `migrationDir` where you can override where you want your migrations to be

View File

@@ -16,6 +16,12 @@ By default, Payload will use transactions for all data changing operations, as l
MongoDB requires a connection to a replicaset in order to make use of transactions.
</Banner>
<Banner type="info">
<strong>Note:</strong>
<br />
Transactions in SQLite are disabled by default. You need to pass `transactionOptions: {}` to enable them.
</Banner>
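For illustration, a minimal adapter config sketch (adapter options abbreviated; the connection URL is a placeholder) that enables SQLite transactions by passing an empty options object:

```ts
import { sqliteAdapter } from '@payloadcms/db-sqlite'

// Sketch only: passing `transactionOptions: {}` turns transactions on,
// since they are disabled by default for SQLite.
export const db = sqliteAdapter({
  client: {
    url: process.env.DATABASE_URI || 'file:./payload.db',
  },
  transactionOptions: {},
})
```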
The initial request made to Payload will begin a new transaction and attach it to the `req.transactionID`. If you have a `hook` that interacts with the database, you can opt in to using the same transaction by passing the `req` in the arguments. For example:
```ts
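// Hypothetical sketch, not the exact snippet from the docs: a collection hook
// that joins the request's transaction by forwarding `req` to the Local API.
import type { CollectionAfterChangeHook } from 'payload'

const afterChange: CollectionAfterChangeHook = async ({ doc, req }) => {
  await req.payload.create({
    collection: 'audit-log', // assumed collection slug
    data: { event: 'document-updated', documentID: doc.id },
    req, // reuse the transaction attached to req.transactionID
  })

  return doc
}
```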

View File

@@ -98,11 +98,24 @@ After the project is deployed to Vercel, the Vercel Cron job will automatically
If you want to process jobs programmatically from your server-side code, you can use the Local API:
**Run all jobs:**
```ts
const results = await payload.jobs.run()
// You can customize the queue name and limit by passing them as arguments:
await payload.jobs.run({ queue: 'nightly', limit: 100 })
// You can provide a where clause to filter the jobs that should be run:
await payload.jobs.run({ where: { 'input.message': { equals: 'secret' } } })
```
**Run a single job:**
```ts
const results = await payload.jobs.runByID({
id: myJobID
})
```
#### Bin script

View File

@@ -131,6 +131,9 @@ const post = await payload.create({
// Alternatively, you can directly pass a File,
// if file is provided, filePath will be omitted
file: uploadedFile,
// If you want to create a document that is a duplicate of another document
duplicateFromID: 'document-id-to-duplicate',
})
```

View File

@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
"version": "3.5.0",
"version": "3.6.0",
"private": true,
"type": "module",
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.5.0",
"version": "3.6.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.5.0",
"version": "3.6.0",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -10,11 +10,11 @@ const migrationTemplate = ({ downSQL, imports, upSQL }: MigrationTemplateArgs):
MigrateUpArgs,
} from '@payloadcms/db-mongodb'
${imports ?? ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
export async function up({ payload, req, session }: MigrateUpArgs): Promise<void> {
${upSQL ?? ` // Migration code`}
}
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
export async function down({ payload, req, session }: MigrateDownArgs): Promise<void> {
${downSQL ?? ` // Migration code`}
}
`

View File

@@ -1,3 +1,4 @@
import type { ClientSession } from 'mongodb'
import type {
AggregatePaginateModel,
IndexDefinition,
@@ -110,5 +111,65 @@ export type FieldToSchemaMap<TSchema> = {
upload: FieldGeneratorFunction<TSchema, UploadField>
}
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }
export type MigrateUpArgs = {
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
*
* export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
/**
* The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
*
* export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
* }
* ```
*/
session?: ClientSession
}
export type MigrateDownArgs = {
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
*
* export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
/**
* The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
*
* export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
* }
* ```
*/
session?: ClientSession
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "3.5.0",
"version": "3.6.0",
"description": "The officially supported Postgres database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1 +1 @@
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'

View File

@@ -2,6 +2,7 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -39,18 +40,15 @@ import {
createDatabase,
createExtensions,
createJSONQuery,
createMigration,
defaultDrizzleSnapshot,
deleteWhere,
dropDatabase,
execute,
getMigrationTemplate,
init,
insert,
requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import path from 'path'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
@@ -59,7 +57,6 @@ import type { Args, PostgresAdapter } from './types.js'
import { connect } from './connect.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter> {
const postgresIDType = args.idType || 'serial'
@@ -93,9 +90,13 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
beforeSchemaInit: args.beforeSchemaInit ?? [],
createDatabase,
createExtensions,
createMigration(args) {
return createMigration.bind(this)({ ...args, dirname })
},
createMigration: buildCreateMigration({
executeMethod: 'execute',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return `${sqlExecute}\n ${statements.join('\n')}\`)`
},
}),
defaultDrizzleSnapshot,
disableCreateDatabase: args.disableCreateDatabase ?? false,
drizzle: undefined,
@@ -105,7 +106,6 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
initializing,
localesSuffix: args.localesSuffix || '_locales',

View File

@@ -1,282 +0,0 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import fs from 'fs'
import { createRequire } from 'module'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { PostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
import { migrateRelationships } from './migrateRelationships.js'
import { traverseFields } from './traverseFields.js'
const require = createRequire(import.meta.url)
type Args = {
debug?: boolean
payload: Payload
req?: Partial<PayloadRequest>
}
/**
* Moves upload and relationship columns from the join table and into the tables while moving data
* This is done in the following order:
* ADD COLUMNs
* -- manipulate data to move relationships to new columns
* ADD CONSTRAINTs
* NOT NULLs
* DROP TABLEs
* DROP CONSTRAINTs
* DROP COLUMNs
* @param debug
* @param payload
* @param req
*/
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const adapter = payload.db as unknown as PostgresAdapter
const db = adapter.sessions[await req.transactionID].db as TransactionPg
const dir = payload.db.migrationDir
// get the drizzle migrateUpSQL from drizzle using the last schema
const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
// Get the previous migration snapshot
const previousSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))
.sort()
.reverse()?.[0]
if (!previousSnapshot) {
throw new Error(
`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`,
)
}
let drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
if (drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore)
}
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
if (!generatedSQL.length) {
payload.logger.info(`No schema changes needed.`)
process.exit(0)
}
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
if (debug) {
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
payload.logger.info(addColumnsStatement)
}
await db.execute(sql.raw(addColumnsStatement))
for (const collection of payload.config.collections) {
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: false,
fields: collection.flattenedFields,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: collection.flattenedFields,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (collection.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionCollectionFields(payload.config, collection, true)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: versionFields,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
for (const global of payload.config.globals) {
const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: false,
fields: global.flattenedFields,
globalSlug: global.slug,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: global.flattenedFields,
globalSlug: global.slug,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (global.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionGlobalFields(payload.config, global, true)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
// ADD CONSTRAINT
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
if (debug) {
payload.logger.info('ADDING CONSTRAINTS')
payload.logger.info(addConstraintsStatement)
}
await db.execute(sql.raw(addConstraintsStatement))
// NOT NULL
const notNullStatements = sqlUpStatements.notNull.join('\n')
if (debug) {
payload.logger.info('NOT NULL CONSTRAINTS')
payload.logger.info(notNullStatements)
}
await db.execute(sql.raw(notNullStatements))
// DROP TABLE
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
if (debug) {
payload.logger.info('DROPPING TABLES')
payload.logger.info(dropTablesStatement)
}
await db.execute(sql.raw(dropTablesStatement))
// DROP CONSTRAINT
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
if (debug) {
payload.logger.info('DROPPING CONSTRAINTS')
payload.logger.info(dropConstraintsStatement)
}
await db.execute(sql.raw(dropConstraintsStatement))
// DROP COLUMN
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
if (debug) {
payload.logger.info('DROPPING COLUMNS')
payload.logger.info(dropColumnsStatement)
}
await db.execute(sql.raw(dropColumnsStatement))
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-sqlite",
"version": "3.5.0",
"version": "3.6.0",
"description": "The officially supported SQLite database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,123 +0,0 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'
import fs from 'fs'
import { createRequire } from 'module'
import path from 'path'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import { fileURLToPath } from 'url'
import type { SQLiteAdapter } from './types.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
const require = createRequire(import.meta.url)
export const createMigration: CreateMigration = async function createMigration(
this: SQLiteAdapter,
{ file, migrationName, payload, skipEmpty },
) {
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateSQLiteDrizzleJson, generateSQLiteMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = await generateSQLiteDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = defaultDrizzleSnapshot as any
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
}
const sqlStatementsUp = await generateSQLiteMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateSQLiteMigration(drizzleJsonAfter, drizzleJsonBefore)
// need to create tables as separate statements
const sqlExecute = 'await payload.db.drizzle.run(sql`'
if (sqlStatementsUp?.length) {
upSQL = sqlStatementsUp
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
}
if (sqlStatementsDown?.length) {
downSQL = sqlStatementsDown
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
}
if (!upSQL?.length && !downSQL?.length) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}

View File

@@ -1,22 +0,0 @@
import type { MigrationTemplateArgs } from 'payload'
export const indent = (text: string) =>
text
.split('\n')
.map((line) => ` ${line}`)
.join('\n')
export const getMigrationTemplate = ({
downSQL,
imports,
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`

View File

@@ -3,6 +3,7 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -37,6 +38,7 @@ import {
} from '@payloadcms/drizzle'
import { like } from 'drizzle-orm'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
import type { Args, SQLiteAdapter } from './types.js'
@@ -44,12 +46,10 @@ import { connect } from './connect.js'
import { countDistinct } from './countDistinct.js'
import { convertPathToJSONTraversal } from './createJSONQuery/convertPathToJSONTraversal.js'
import { createJSONQuery } from './createJSONQuery/index.js'
import { createMigration } from './createMigration.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { deleteWhere } from './deleteWhere.js'
import { dropDatabase } from './dropDatabase.js'
import { execute } from './execute.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
import { init } from './init.js'
import { insert } from './insert.js'
import { requireDrizzleKit } from './requireDrizzleKit.js'
@@ -58,6 +58,8 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types.js'
export { sql } from 'drizzle-orm'
const filename = fileURLToPath(import.meta.url)
export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
const postgresIDType = args.idType || 'serial'
const payloadIDType = postgresIDType === 'serial' ? 'number' : 'text'
@@ -91,7 +93,6 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
initializing,
localesSuffix: args.localesSuffix || '_locales',
@@ -122,7 +123,15 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
createGlobal,
createGlobalVersion,
createJSONQuery,
createMigration,
createMigration: buildCreateMigration({
executeMethod: 'run',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return statements
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
},
}),
createVersion,
defaultIDType: payloadIDType,
deleteMany,

View File

@@ -1,15 +1,19 @@
import type { RequireDrizzleKit } from '@payloadcms/drizzle/types'
import { createRequire } from 'module'
const require = createRequire(import.meta.url)
/**
* Dynamically requires the `drizzle-kit` package to access the `generateSQLiteDrizzleJson` and `pushSQLiteSchema` functions and exports them generically to call them from @payloadcms/drizzle.
*/
export const requireDrizzleKit: RequireDrizzleKit = () => {
const {
generateSQLiteDrizzleJson: generateDrizzleJson,
pushSQLiteSchema: pushSchema,
generateSQLiteDrizzleJson,
generateSQLiteMigration,
pushSQLiteSchema,
} = require('drizzle-kit/api')
return { generateDrizzleJson, pushSchema }
return {
generateDrizzleJson: generateSQLiteDrizzleJson,
generateMigration: generateSQLiteMigration,
pushSchema: pushSQLiteSchema,
}
}

View File

@@ -154,11 +154,65 @@ export type SQLiteAdapter = {
export type IDType = 'integer' | 'numeric' | 'text'
export type MigrateUpArgs = {
/**
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
* }
* ```
*/
db: LibSQLDatabase
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-sqlite'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}
export type MigrateDownArgs = {
/**
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
* }
* ```
*/
db: LibSQLDatabase
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-sqlite'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.5.0",
"version": "3.6.0",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1 +1 @@
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'

View File

@@ -2,6 +2,7 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -39,18 +40,15 @@ import {
createDatabase,
createExtensions,
createJSONQuery,
createMigration,
defaultDrizzleSnapshot,
deleteWhere,
dropDatabase,
execute,
getMigrationTemplate,
init,
insert,
requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import path from 'path'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
@@ -59,7 +57,6 @@ import type { Args, VercelPostgresAdapter } from './types.js'
import { connect } from './connect.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
const postgresIDType = args.idType || 'serial'
@@ -102,7 +99,6 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
indexes: new Set<string>(),
initializing,
@@ -138,9 +134,13 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
createGlobal,
createGlobalVersion,
createJSONQuery,
createMigration(args) {
return createMigration.bind(this)({ ...args, dirname })
},
createMigration: buildCreateMigration({
executeMethod: 'execute',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return `${sqlExecute}\n ${statements.join('\n')}\`)`
},
}),
createVersion,
defaultIDType: payloadIDType,
deleteMany,

View File

@@ -1,237 +0,0 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { upsertRow } from '@payloadcms/drizzle'
import type { VercelPostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'
import { traverseFields } from './traverseFields.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
docsToResave: DocsToResave
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
payload: Payload
req: PayloadRequest
tableName: string
}
export const fetchAndResave = async ({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req,
tableName,
}: Args) => {
for (const [id, rows] of Object.entries(docsToResave)) {
if (collectionSlug) {
const collectionConfig = payload.collections[collectionSlug].config
if (collectionConfig) {
if (isVersions) {
const doc = await payload.findVersionByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" version with ID ${id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`"${collectionConfig.slug}" version with ID ${doc.id} FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`"${collectionConfig.slug}" version with ID ${doc.id} migrated successfully!`,
)
}
} else {
const doc = await payload.findByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has migrated successfully!`,
)
}
}
}
}
if (globalSlug) {
const globalConfig = payload.config.globals?.find((global) => global.slug === globalSlug)
if (globalConfig) {
if (isVersions) {
const { docs } = await payload.findGlobalVersions({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
limit: 0,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(`${docs.length} global "${globalSlug}" versions will be migrated`)
}
for (const doc of docs) {
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`"${globalSlug}" version with ID ${doc.id} FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(
`"${globalSlug}" version with ID ${doc.id} migrated successfully!`,
)
}
}
} else {
const doc = await payload.findGlobal({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`The global "${globalSlug}" has FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(`The global "${globalSlug}" has migrated successfully!`)
}
}
}
}
}
}

View File

@@ -1,171 +0,0 @@
import type { FlattenedField } from 'payload'
type Args = {
doc: Record<string, unknown>
fields: FlattenedField[]
locale?: string
path: string
rows: Record<string, unknown>[]
}
export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
fields.forEach((field) => {
switch (field.type) {
case 'array': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.flattenedFields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.flattenedFields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
break
}
case 'blocks': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.flattenedFields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.flattenedFields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
break
}
case 'group':
case 'tab': {
const newPath = `${path ? `${path}.` : ''}${field.name}`
const newDoc = doc?.[field.name]
if (typeof newDoc === 'object' && newDoc !== null) {
if (field.localized) {
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
return traverseFields({
doc: localeDoc,
fields: field.flattenedFields,
locale,
path: newPath,
rows,
})
})
} else {
return traverseFields({
doc: newDoc as Record<string, unknown>,
fields: field.flattenedFields,
path: newPath,
rows,
})
}
}
break
}
case 'relationship':
// falls through
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
const relationshipPath = `${path ? `${path}.` : ''}${field.name}`
if (field.localized) {
const matchedRelationshipsWithLocales = rows.filter(
(row) => row.path === relationshipPath,
)
if (matchedRelationshipsWithLocales.length && !doc[field.name]) {
doc[field.name] = {}
}
const newDoc = doc[field.name] as Record<string, unknown>
matchedRelationshipsWithLocales.forEach((localeRow) => {
if (typeof localeRow.locale === 'string') {
const [, id] = Object.entries(localeRow).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
newDoc[localeRow.locale] = id
}
})
} else {
const matchedRelationship = rows.find((row) => {
const matchesPath = row.path === relationshipPath
if (locale) {
return matchesPath && locale === row.locale
}
return row.path === relationshipPath
})
if (matchedRelationship) {
const [, id] = Object.entries(matchedRelationship).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
doc[field.name] = id
}
}
}
}
break
}
}
})
}

View File

@@ -1,74 +0,0 @@
export type Groups =
| 'addColumn'
| 'addConstraint'
| 'dropColumn'
| 'dropConstraint'
| 'dropTable'
| 'notNull'
/**
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
* @param sql
*/
function convertAddColumnToAlterColumn(sql) {
// Regular expression to match the ADD COLUMN statement with its constraints
const regex = /ALTER TABLE ("[^"]+") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
}
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
const groups = {
addColumn: 'ADD COLUMN',
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
addConstraint: 'ADD CONSTRAINT',
//example:
// DO $$ BEGIN
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
// EXCEPTION
// WHEN duplicate_object THEN null;
// END $$;
dropColumn: 'DROP COLUMN',
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
dropConstraint: 'DROP CONSTRAINT',
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
dropTable: 'DROP TABLE',
// example: DROP TABLE "pages_rels";
notNull: 'NOT NULL',
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
}
const result = Object.keys(groups).reduce((result, group: Groups) => {
result[group] = []
return result
}, {}) as Record<Groups, string[]>
for (const line of list) {
Object.entries(groups).some(([key, value]) => {
if (line.endsWith('NOT NULL;')) {
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
// example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
// becomes two separate statements:
// 1. ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;
// 2. ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
result.addColumn.push(line.replace(' NOT NULL;', ';'))
result.notNull.push(convertAddColumnToAlterColumn(line))
return true
}
if (line.includes(value)) {
result[key].push(line)
return true
}
})
}
return result
}
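
To make the grouping concrete, here is a small, hypothetical input and the buckets it lands in — note how an `ADD COLUMN ... NOT NULL` statement is split so the column can be created (and backfilled) before the constraint is enforced:

```ts
const grouped = groupUpSQLStatements([
  'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;',
  'ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";',
])

// grouped.addColumn      => ['ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;']
// grouped.notNull        => ['ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;']
// grouped.dropConstraint => ['ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";']
```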

View File

@@ -1,109 +0,0 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import type { VercelPostgresAdapter } from '../../types.js'
import type { DocsToResave, PathsToQuery } from './types.js'
import { fetchAndResave } from './fetchAndResave/index.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
pathsToQuery: PathsToQuery
payload: Payload
req?: Partial<PayloadRequest>
tableName: string
}
export const migrateRelationships = async ({
adapter,
collectionSlug,
db,
debug,
fields,
globalSlug,
isVersions,
pathsToQuery,
payload,
req,
tableName,
}: Args) => {
if (pathsToQuery.size === 0) {
return
}
let offset = 0
let paginationResult
const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
return (statement += `
"${tableName}${adapter.relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
`)
}, '')
while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {
const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
`
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
if (paginationResult.rows.length === 0) {
return
}
offset += 1
const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE
(${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});
`
if (debug) {
payload.logger.info('FINDING ROWS TO MIGRATE')
payload.logger.info(statement)
}
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
const docsToResave: DocsToResave = {}
result.rows.forEach((row) => {
const parentID = row.parent_id
if (typeof parentID === 'string' || typeof parentID === 'number') {
if (!docsToResave[parentID]) {
docsToResave[parentID] = []
}
docsToResave[parentID].push(row)
}
})
await fetchAndResave({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req: req as unknown as PayloadRequest,
tableName,
})
}
const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`
if (debug) {
payload.logger.info('DELETING ROWS')
payload.logger.info(deleteStatement)
}
await db.execute(sql.raw(`${deleteStatement}`))
}
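
For reference, the `where` fragment assembled above is one `LIKE` clause per path, joined with `OR`. A quick sketch with hypothetical paths and table name:

```ts
const pathsToQuery = new Set(['author', 'layout.%.person']) // hypothetical paths
const relsTable = 'pages_rels' // stands in for `${tableName}${adapter.relationshipsSuffix}`

const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
  return (statement += `
 "${relsTable}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
`)
}, '')

// where (whitespace aside) =>
//   "pages_rels"."path" LIKE 'author' OR "pages_rels"."path" LIKE 'layout.%.person'
```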

View File

@@ -1,90 +0,0 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
columnPrefix: string
db: TransactionPg
disableNotNull: boolean
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
newTableName: string
parentTableName: string
path: string
pathsToQuery: PathsToQuery
payload: Payload
rootTableName: string
}
export const traverseFields = (args: Args) => {
args.fields.forEach((field) => {
switch (field.type) {
case 'array': {
const newTableName = args.adapter.tableNameMap.get(
`${args.newTableName}_${toSnakeCase(field.name)}`,
)
return traverseFields({
...args,
columnPrefix: '',
fields: field.flattenedFields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
}
case 'blocks': {
return field.blocks.forEach((block) => {
const newTableName = args.adapter.tableNameMap.get(
`${args.rootTableName}_blocks_${toSnakeCase(block.slug)}`,
)
traverseFields({
...args,
columnPrefix: '',
fields: block.flattenedFields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
})
}
case 'group':
case 'tab': {
let newTableName = `${args.newTableName}_${toSnakeCase(field.name)}`
if (field.localized && args.payload.config.localization) {
newTableName += args.adapter.localesSuffix
}
return traverseFields({
...args,
columnPrefix: `${args.columnPrefix}${toSnakeCase(field.name)}_`,
fields: field.flattenedFields,
newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}`,
})
}
case 'relationship':
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
args.pathsToQuery.add(`${args.path ? `${args.path}.` : ''}${field.name}`)
}
}
return null
}
}
})
}
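
A sketch of what this traversal collects for a hypothetical schema — a top-level `author` relationship plus a `person` relationship inside a `layout` blocks field. The `%` stands in for the array/block row index, which is why `migrateRelationships` above queries the rel table with `LIKE`:

```ts
const pathsToQuery: Set<string> = new Set()

// relationship at the collection root (path starts out empty)
pathsToQuery.add('author')

// relationship nested in a block: the blocks case appends `layout.%` first,
// so the final pattern carries a wildcard where the row index would be
pathsToQuery.add(['layout.%', 'person'].join('.'))

// pathsToQuery => Set { 'author', 'layout.%.person' }
```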

View File

@@ -1,9 +0,0 @@
/**
* Set of all paths which should be moved
* This will be built up into one WHERE query
*/
export type PathsToQuery = Set<string>
export type DocsToResave = {
[id: number | string]: Record<string, unknown>[]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/drizzle",
"version": "3.5.0",
"version": "3.6.0",
"description": "A library of shared functions used by different payload database adapters",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -2,13 +2,12 @@ export { countDistinct } from '../postgres/countDistinct.js'
export { createDatabase } from '../postgres/createDatabase.js'
export { createExtensions } from '../postgres/createExtensions.js'
export { createJSONQuery } from '../postgres/createJSONQuery/index.js'
export { createMigration } from '../postgres/createMigration.js'
export { defaultDrizzleSnapshot } from '../postgres/defaultSnapshot.js'
export { deleteWhere } from '../postgres/deleteWhere.js'
export { dropDatabase } from '../postgres/dropDatabase.js'
export { execute } from '../postgres/execute.js'
export { getMigrationTemplate } from '../postgres/getMigrationTemplate.js'
export { init } from '../postgres/init.js'
export { insert } from '../postgres/insert.js'
export { migratePostgresV2toV3 } from '../postgres/predefinedMigrations/v2-v3/index.js'
export { requireDrizzleKit } from '../postgres/requireDrizzleKit.js'
export * from '../postgres/types.js'

View File

@@ -34,6 +34,7 @@ export { updateGlobal } from './updateGlobal.js'
export { updateGlobalVersion } from './updateGlobalVersion.js'
export { updateVersion } from './updateVersion.js'
export { upsertRow } from './upsertRow/index.js'
export { buildCreateMigration } from './utilities/buildCreateMigration.js'
export { buildIndexName } from './utilities/buildIndexName.js'
export { executeSchemaHooks } from './utilities/executeSchemaHooks.js'
export { extendDrizzleTable } from './utilities/extendDrizzleTable.js'

View File

@@ -44,7 +44,8 @@ export async function migrateDown(this: DrizzleAdapter): Promise<void> {
try {
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
await initTransaction(req)
await migrationFile.down({ payload, req })
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})

View File

@@ -59,8 +59,7 @@ export async function migrateFresh(
try {
const start = Date.now()
await initTransaction(req)
const adapter = payload.db as DrizzleAdapter
const db = adapter?.sessions[await req.transactionID]?.db || adapter.drizzle
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migration.up({ db, payload, req })
await payload.create({
collection: 'payload-migrations',

View File

@@ -48,7 +48,8 @@ export async function migrateRefresh(this: DrizzleAdapter) {
payload.logger.info({ msg: `Migrating down: ${migration.name}` })
const start = Date.now()
await initTransaction(req)
await migrationFile.down({ payload, req })
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
payload.logger.info({
msg: `Migrated down: ${migration.name} (${Date.now() - start}ms)`,
})

View File

@@ -39,7 +39,8 @@ export async function migrateReset(this: DrizzleAdapter): Promise<void> {
const start = Date.now()
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
await initTransaction(req)
await migrationFile.down({ payload, req })
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})

View File

@@ -1,122 +0,0 @@
import type { CreateMigration } from 'payload'
import fs from 'fs'
import { createRequire } from 'module'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import type { BasePostgresAdapter } from './types.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
const require = createRequire(import.meta.url)
export const createMigration: CreateMigration = async function createMigration(
this: BasePostgresAdapter,
{ dirname, file, forceAcceptWarning, migrationName, payload, skipEmpty },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = defaultDrizzleSnapshot
if (this.schemaName) {
drizzleJsonBefore.schemas = {
[this.schemaName]: this.schemaName,
}
}
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
if (drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore)
}
}
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
const sqlExecute = 'await payload.db.drizzle.execute(sql`'
if (sqlStatementsUp?.length) {
upSQL = `${sqlExecute}\n ${sqlStatementsUp?.join('\n')}\`)`
}
if (sqlStatementsDown?.length) {
downSQL = `${sqlExecute}\n ${sqlStatementsDown?.join('\n')}\`)`
}
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
packageName: payload.db.packageName,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}

View File

@@ -1,15 +1,14 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { upsertRow } from '@payloadcms/drizzle'
import type { PostgresAdapter } from '../../../types.js'
import type { TransactionPg } from '../../../../types.js'
import type { BasePostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'
import { upsertRow } from '../../../../upsertRow/index.js'
import { traverseFields } from './traverseFields.js'
type Args = {
adapter: PostgresAdapter
adapter: BasePostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean

View File

@@ -1,22 +1,19 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import fs from 'fs'
import { createRequire } from 'module'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
import { migrateRelationships } from './migrateRelationships.js'
import { traverseFields } from './traverseFields.js'
const require = createRequire(import.meta.url)
type Args = {
debug?: boolean
payload: Payload
@@ -38,13 +35,13 @@ type Args = {
* @param req
*/
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const adapter = payload.db as unknown as VercelPostgresAdapter
const adapter = payload.db as unknown as BasePostgresAdapter
const db = adapter.sessions[await req.transactionID].db as TransactionPg
const dir = payload.db.migrationDir
// get the drizzle migrateUpSQL from drizzle using the last schema
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
// Get the previous migration snapshot
const previousSnapshot = fs
@@ -59,10 +56,14 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
)
}
const drizzleJsonBefore = JSON.parse(
let drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
}
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
if (!generatedSQL.length) {
@@ -118,7 +119,6 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionCollectionFields(payload.config, collection, true)
const versionPathsToQuery: PathsToQuery = new Set()

View File

@@ -1,15 +1,15 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import type { PostgresAdapter } from '../../types.js'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { DocsToResave, PathsToQuery } from './types.js'
import { fetchAndResave } from './fetchAndResave/index.js'
type Args = {
adapter: PostgresAdapter
adapter: BasePostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean

View File

@@ -1,13 +1,13 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { PostgresAdapter } from '../../types.js'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
type Args = {
adapter: PostgresAdapter
adapter: BasePostgresAdapter
collectionSlug?: string
columnPrefix: string
db: TransactionPg

View File

@@ -3,4 +3,19 @@ import { createRequire } from 'module'
import type { RequireDrizzleKit } from '../types.js'
const require = createRequire(import.meta.url)
export const requireDrizzleKit: RequireDrizzleKit = () => require('drizzle-kit/api')
export const requireDrizzleKit: RequireDrizzleKit = () => {
const {
generateDrizzleJson,
generateMigration,
pushSchema,
upPgSnapshot,
} = require('drizzle-kit/api')
return {
generateDrizzleJson,
generateMigration,
pushSchema,
upSnapshot: upPgSnapshot,
}
}
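
A brief sketch of how this normalized shape might be consumed (the helper name and import paths are assumptions): the optional `upSnapshot` lets callers upgrade an older stored snapshot before diffing, as the v2→v3 migration above now does.

```ts
import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'

// previousSnapshot would be loaded from the latest *.json snapshot on disk
export const generateUpgradedDiff = async (
  adapter: DrizzleAdapter,
  previousSnapshot: DrizzleSnapshotJSON,
): Promise<string[]> => {
  const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()

  const drizzleJsonAfter = await generateDrizzleJson(adapter.schema)
  let drizzleJsonBefore = previousSnapshot

  // upgrade the stored snapshot if drizzle-kit has bumped its snapshot format since it was written
  if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
    drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
  }

  return generateMigration(drizzleJsonBefore, drizzleJsonAfter)
}
```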

View File

@@ -191,5 +191,66 @@ export type PostgresDrizzleAdapter = Omit<
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }
export type MigrateUpArgs = {
/**
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
* }
* ```
*/
db: PostgresDB
/**
* The Payload instance that you can use to execute Local API methods.
* To use the current transaction, you must pass `req` in the arguments.
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}
export type MigrateDownArgs = {
/**
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs, sql } from '@payloadcms/db-postgres'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
* }
* ```
*/
db: PostgresDB
/**
* The Payload instance that you can use to execute Local API methods.
* To use the current transaction, you must pass `req` in the arguments.
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-postgres'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}

View File

@@ -14,19 +14,14 @@ import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-post
import type { PgColumn, PgTable, PgTransaction } from 'drizzle-orm/pg-core'
import type { SQLiteColumn, SQLiteTable, SQLiteTransaction } from 'drizzle-orm/sqlite-core'
import type { Result } from 'drizzle-orm/sqlite-core/session'
import type {
BaseDatabaseAdapter,
MigrationData,
MigrationTemplateArgs,
Payload,
PayloadRequest,
} from 'payload'
import type { BaseDatabaseAdapter, MigrationData, Payload, PayloadRequest } from 'payload'
import type { BuildQueryJoinAliases } from './queries/buildQuery.js'
export { BuildQueryJoinAliases }
import type { ResultSet } from '@libsql/client'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { SQLiteRaw } from 'drizzle-orm/sqlite-core/query-builders/raw'
import type { QueryResult } from 'pg'
@@ -117,7 +112,10 @@ export type Insert = (args: {
}) => Promise<Record<string, unknown>[]>
export type RequireDrizzleKit = () => {
generateDrizzleJson: (args: { schema: Record<string, unknown> }) => unknown
generateDrizzleJson: (
args: Record<string, unknown>,
) => DrizzleSnapshotJSON | Promise<DrizzleSnapshotJSON>
generateMigration: (prev: DrizzleSnapshotJSON, cur: DrizzleSnapshotJSON) => Promise<string[]>
pushSchema: (
schema: Record<string, unknown>,
drizzle: DrizzleAdapter['drizzle'],
@@ -125,6 +123,7 @@ export type RequireDrizzleKit = () => {
tablesFilter?: string[],
extensionsFilter?: string[],
) => Promise<{ apply; hasDataLoss; warnings }>
upSnapshot?: (snapshot: Record<string, unknown>) => DrizzleSnapshotJSON
}
export type Migration = {
@@ -177,7 +176,6 @@ export interface DrizzleAdapter extends BaseDatabaseAdapter {
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
getMigrationTemplate: (args: MigrationTemplateArgs) => string
idType: 'serial' | 'uuid'
indexes: Set<string>
initializing: Promise<void>

View File

@@ -0,0 +1,134 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'
import fs from 'fs'
import path from 'path'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import type { DrizzleAdapter } from '../types.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
export const buildCreateMigration = ({
executeMethod,
filename,
sanitizeStatements,
}: {
executeMethod: string
filename: string
sanitizeStatements: (args: { sqlExecute: string; statements: string[] }) => string
}): CreateMigration => {
const dirname = path.dirname(filename)
return async function createMigration(
this: DrizzleAdapter,
{ file, forceAcceptWarning, migrationName, payload, skipEmpty },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateDrizzleJson, generateMigration, upSnapshot } = this.requireDrizzleKit()
const drizzleJsonAfter = await generateDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = this.defaultDrizzleSnapshot as DrizzleSnapshotJSON
if (this.schemaName) {
drizzleJsonBefore.schemas = {
[this.schemaName]: this.schemaName,
}
}
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
}
}
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
const sqlExecute = `await db.${executeMethod}(` + 'sql`'
if (sqlStatementsUp?.length) {
upSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsUp })
}
if (sqlStatementsDown?.length) {
downSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsDown })
}
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
packageName: payload.db.packageName,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}
}
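
As a rough illustration (not the actual adapter code), a Postgres-flavored adapter could wire up the shared factory like this — assuming `buildCreateMigration` is imported from the package root, as the exports change above suggests — with `sanitizeStatements` deciding how the generated statements are embedded into the `sql` template:

```ts
import { fileURLToPath } from 'url'

import { buildCreateMigration } from '@payloadcms/drizzle'

export const createMigration = buildCreateMigration({
  // generated migrations will run statements via await db.execute(...)
  executeMethod: 'execute',
  filename: fileURLToPath(import.meta.url),
  sanitizeStatements: ({ sqlExecute, statements }) => `${sqlExecute}\n ${statements.join('\n')}\`)`,
})
```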

View File

@@ -13,11 +13,11 @@ export const getMigrationTemplate = ({
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '${packageName}'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-nodemailer",
"version": "3.5.0",
"version": "3.6.0",
"description": "Payload Nodemailer Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-resend",
"version": "3.5.0",
"version": "3.6.0",
"description": "Payload Resend Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/graphql",
"version": "3.5.0",
"version": "3.6.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -30,15 +30,13 @@ export function createResolver<TSlug extends CollectionSlug>(
context.req.locale = args.locale
}
const options = {
const result = await createOperation({
collection,
data: args.data,
depth: 0,
draft: args.draft,
req: isolateObjectProperty(context.req, 'transactionID'),
}
const result = await createOperation(options)
})
return result
}

View File

@@ -7,6 +7,7 @@ import type { Context } from '../types.js'
export type Resolver<TData> = (
_: unknown,
args: {
data: TData
draft: boolean
fallbackLocale?: string
id: string
@@ -28,15 +29,14 @@ export function duplicateResolver<TSlug extends CollectionSlug>(
req.fallbackLocale = args.fallbackLocale || fallbackLocale
context.req = req
const options = {
const result = await duplicateOperation({
id: args.id,
collection,
data: args.data,
depth: 0,
draft: args.draft,
req: isolateObjectProperty(req, 'transactionID'),
}
const result = await duplicateOperation(options)
})
return result
}

View File

@@ -280,6 +280,9 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
type: collection.graphQL.type,
args: {
id: { type: new GraphQLNonNull(idType) },
...(createMutationInputType
? { data: { type: collection.graphQL.mutationInputType } }
: {}),
},
resolve: duplicateResolver(collection),
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-react",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official React SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-vue",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official Vue SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official live preview JavaScript SDK for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/next",
"version": "3.5.0",
"version": "3.6.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -93,7 +93,12 @@ export const RootLayout = async ({
})
return (
<html data-theme={theme} dir={dir} lang={languageCode}>
<html
data-theme={theme}
dir={dir}
lang={languageCode}
suppressHydrationWarning={config?.admin?.suppressHydrationWarning ?? false}
>
<head>
<style>{`@layer payload-default, payload;`}</style>
</head>

View File

@@ -27,6 +27,7 @@ export const duplicate: CollectionRouteHandlerWithID = async ({
const doc = await duplicateOperation({
id,
collection,
data: req.data,
depth: isNumber(depth) ? Number(depth) : undefined,
draft,
populate: sanitizePopulateParam(req.query.populate),

View File

@@ -48,6 +48,20 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
} = {},
} = payload.config || {}
const serverProps = React.useMemo<ServerProps>(
() => ({
i18n,
locale,
params,
payload,
permissions,
searchParams,
user,
visibleEntities,
}),
[i18n, locale, params, payload, permissions, searchParams, user, visibleEntities],
)
const { Actions } = React.useMemo<{
Actions: Record<string, React.ReactNode>
}>(() => {
@@ -59,11 +73,13 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
acc[action.path] = RenderServerComponent({
Component: action,
importMap: payload.importMap,
serverProps,
})
} else {
acc[action] = RenderServerComponent({
Component: action,
importMap: payload.importMap,
serverProps,
})
}
}
@@ -72,23 +88,14 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
}, {})
: undefined,
}
}, [viewActions, payload])
}, [payload, serverProps, viewActions])
const NavComponent = RenderServerComponent({
clientProps: { clientProps: { visibleEntities } },
Component: CustomNav,
Fallback: DefaultNav,
importMap: payload.importMap,
serverProps: {
i18n,
locale,
params,
payload,
permissions,
searchParams,
user,
visibleEntities,
},
serverProps,
})
return (
@@ -99,16 +106,7 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
clientProps: { clientProps: { visibleEntities } },
Component: CustomHeader,
importMap: payload.importMap,
serverProps: {
i18n,
locale,
params,
payload,
permissions,
searchParams,
user,
visibleEntities,
},
serverProps,
})}
<div style={{ position: 'relative' }}>
<div className={`${baseClass}__nav-toggler-wrapper`} id="nav-toggler">
@@ -127,6 +125,7 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
? RenderServerComponent({
Component: avatar.Component,
importMap: payload.importMap,
serverProps,
})
: undefined
}
@@ -135,6 +134,7 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
? RenderServerComponent({
Component: components.graphics.Icon,
importMap: payload.importMap,
serverProps,
})
: undefined
}

View File

@@ -4,7 +4,6 @@ import { DocumentInfoProvider, EditDepthProvider, HydrateAuthProvider } from '@p
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
import { formatAdminURL, isEditing as getIsEditing } from '@payloadcms/ui/shared'
import { buildFormState } from '@payloadcms/ui/utilities/buildFormState'
import { isRedirectError } from 'next/dist/client/components/redirect.js'
import { notFound, redirect } from 'next/navigation.js'
import React from 'react'
@@ -382,7 +381,7 @@ export const Document: React.FC<AdminViewProps> = async (args) => {
const { Document: RenderedDocument } = await renderDocument(args)
return RenderedDocument
} catch (error) {
if (isRedirectError(error)) {
if (error?.message === 'NEXT_REDIRECT') {
throw error
}
args.initPageResult.req.payload.logger.error(error)

View File

@@ -5,12 +5,12 @@ import type {
SanitizedCollectionConfig,
SanitizedDocumentPermissions,
SanitizedGlobalConfig,
ServerProps,
StaticDescription,
} from 'payload'
import { ViewDescription } from '@payloadcms/ui'
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
import React from 'react'
import { getDocumentPermissions } from './getDocumentPermissions.js'
@@ -29,6 +29,13 @@ export const renderDocumentSlots: (args: {
const isPreviewEnabled = collectionConfig?.admin?.preview || globalConfig?.admin?.preview
const serverProps: ServerProps = {
i18n: req.i18n,
payload: req.payload,
user: req.user,
// TODO: Add remaining serverProps
}
const CustomPreviewButton =
collectionConfig?.admin?.components?.edit?.PreviewButton ||
globalConfig?.admin?.components?.elements?.PreviewButton
@@ -37,6 +44,7 @@ export const renderDocumentSlots: (args: {
components.PreviewButton = RenderServerComponent({
Component: CustomPreviewButton,
importMap: req.payload.importMap,
serverProps,
})
}
@@ -60,6 +68,7 @@ export const renderDocumentSlots: (args: {
Component: CustomDescription,
Fallback: ViewDescription,
importMap: req.payload.importMap,
serverProps,
})
}
@@ -73,6 +82,7 @@ export const renderDocumentSlots: (args: {
components.PublishButton = RenderServerComponent({
Component: CustomPublishButton,
importMap: req.payload.importMap,
serverProps,
})
}
const CustomSaveDraftButton =
@@ -87,6 +97,7 @@ export const renderDocumentSlots: (args: {
components.SaveDraftButton = RenderServerComponent({
Component: CustomSaveDraftButton,
importMap: req.payload.importMap,
serverProps,
})
}
} else {
@@ -98,6 +109,7 @@ export const renderDocumentSlots: (args: {
components.SaveButton = RenderServerComponent({
Component: CustomSaveButton,
importMap: req.payload.importMap,
serverProps,
})
}
}

View File

@@ -171,13 +171,13 @@ export const renderListView = async (
const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug)
const { columnState, Table } = renderTable({
collectionConfig: clientCollectionConfig,
clientCollectionConfig,
collectionConfig,
columnPreferences: listPreferences?.columns,
customCellProps,
docs: data.docs,
drawerSlug,
enableRowSelections,
fields,
i18n: req.i18n,
payload,
useAsTitle,

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/payload-cloud",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official Payload Cloud plugin",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "3.5.0",
"version": "3.6.0",
"description": "Node, React, Headless CMS and Application Framework built on Next.js",
"keywords": [
"admin panel",

View File

@@ -137,8 +137,8 @@ export const forgotPasswordOperation = async <TSlug extends CollectionSlug>(
user.resetPasswordToken = token
user.resetPasswordExpiration = new Date(
collectionConfig.auth?.forgotPassword?.expiration || expiration || Date.now() + 3600000,
).toISOString() // 1 hour
Date.now() + (collectionConfig.auth?.forgotPassword?.expiration ?? expiration ?? 3600000),
).toISOString()
user = await payload.update({
id: user.id,
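
Editor's note on the hunk above: `expiration` is now treated as a duration in milliseconds added to the current time (falling back to one hour), rather than being handed to `new Date()` as though it were an absolute value. A minimal sketch, assuming a two-hour window is configured:

```ts
const configuredExpiration: number | undefined = 1000 * 60 * 60 * 2 // e.g. auth.forgotPassword.expiration

const resetPasswordExpiration = new Date(
  Date.now() + (configuredExpiration ?? 3600000), // default: one hour
).toISOString()
```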

View File

@@ -335,9 +335,12 @@ export type CollectionAdminOptions = {
enableRichTextLink?: boolean
enableRichTextRelationship?: boolean
/**
* Place collections into a navigational group
* */
group?: Record<string, string> | string
* Specify a navigational group for collections in the admin sidebar.
* - Provide a string to place the entity in a custom group.
* - Provide a record to define localized group names.
* - Set to `false` to exclude the entity from the sidebar / dashboard without disabling its routes.
*/
group?: false | Record<string, string> | string
/**
* Exclude the collection from the admin nav and routes
*/

View File

@@ -13,6 +13,7 @@ import type {
BeforeOperationHook,
BeforeValidateHook,
Collection,
DataFromCollectionSlug,
RequiredDataFromCollectionSlug,
SelectFromCollectionSlug,
} from '../config/types.js'
@@ -21,6 +22,7 @@ import { ensureUsernameOrEmail } from '../../auth/ensureUsernameOrEmail.js'
import executeAccess from '../../auth/executeAccess.js'
import { sendVerificationEmail } from '../../auth/sendVerificationEmail.js'
import { registerLocalStrategy } from '../../auth/strategies/local/register.js'
import { getDuplicateDocumentData } from '../../duplicateDocument/index.js'
import { afterChange } from '../../fields/hooks/afterChange/index.js'
import { afterRead } from '../../fields/hooks/afterRead/index.js'
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
@@ -43,6 +45,7 @@ export type Arguments<TSlug extends CollectionSlug> = {
disableTransaction?: boolean
disableVerificationEmail?: boolean
draft?: boolean
duplicateFromID?: DataFromCollectionSlug<TSlug>['id']
overrideAccess?: boolean
overwriteExistingFiles?: boolean
populate?: PopulateType
@@ -97,6 +100,7 @@ export const createOperation = async <
depth,
disableVerificationEmail,
draft = false,
duplicateFromID,
overrideAccess,
overwriteExistingFiles = false,
populate,
@@ -115,6 +119,23 @@ export const createOperation = async <
const shouldSaveDraft = Boolean(draft && collectionConfig.versions.drafts)
let duplicatedFromDocWithLocales: JsonObject = {}
let duplicatedFromDoc: JsonObject = {}
if (duplicateFromID) {
const duplicateResult = await getDuplicateDocumentData({
id: duplicateFromID,
collectionConfig,
draftArg: shouldSaveDraft,
overrideAccess,
req,
shouldSaveDraft,
})
duplicatedFromDoc = duplicateResult.duplicatedFromDoc
duplicatedFromDocWithLocales = duplicateResult.duplicatedFromDocWithLocales
}
// /////////////////////////////////////
// Access
// /////////////////////////////////////
@@ -131,7 +152,9 @@ export const createOperation = async <
collection,
config,
data,
isDuplicating: Boolean(duplicateFromID),
operation: 'create',
originalDoc: duplicatedFromDoc,
overwriteExistingFiles,
req,
throwOnMissingFile:
@@ -148,7 +171,7 @@ export const createOperation = async <
collection: collectionConfig,
context: req.context,
data,
doc: {},
doc: duplicatedFromDoc,
global: null,
operation: 'create',
overrideAccess,
@@ -169,6 +192,7 @@ export const createOperation = async <
context: req.context,
data,
operation: 'create',
originalDoc: duplicatedFromDoc,
req,
})) || data
},
@@ -188,6 +212,7 @@ export const createOperation = async <
context: req.context,
data,
operation: 'create',
originalDoc: duplicatedFromDoc,
req,
})) || data
}, Promise.resolve())
@@ -200,8 +225,8 @@ export const createOperation = async <
collection: collectionConfig,
context: req.context,
data,
doc: {},
docWithLocales: {},
doc: duplicatedFromDoc,
docWithLocales: duplicatedFromDocWithLocales,
global: null,
operation: 'create',
req,

View File

@@ -1,391 +1,26 @@
import type { DeepPartial } from 'ts-essentials'
import httpStatus from 'http-status'
import type { FindOneArgs } from '../../database/types.js'
import type { CollectionSlug } from '../../index.js'
import type {
PayloadRequest,
PopulateType,
SelectType,
TransformCollectionWithSelect,
} from '../../types/index.js'
import type {
Collection,
DataFromCollectionSlug,
SelectFromCollectionSlug,
} from '../config/types.js'
import type { TransformCollectionWithSelect } from '../../types/index.js'
import type { RequiredDataFromCollectionSlug, SelectFromCollectionSlug } from '../config/types.js'
import executeAccess from '../../auth/executeAccess.js'
import { hasWhereAccessResult } from '../../auth/types.js'
import { combineQueries } from '../../database/combineQueries.js'
import { APIError, Forbidden, NotFound } from '../../errors/index.js'
import { afterChange } from '../../fields/hooks/afterChange/index.js'
import { afterRead } from '../../fields/hooks/afterRead/index.js'
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
import { beforeDuplicate } from '../../fields/hooks/beforeDuplicate/index.js'
import { beforeValidate } from '../../fields/hooks/beforeValidate/index.js'
import { generateFileData } from '../../uploads/generateFileData.js'
import { uploadFiles } from '../../uploads/uploadFiles.js'
import { commitTransaction } from '../../utilities/commitTransaction.js'
import { initTransaction } from '../../utilities/initTransaction.js'
import { killTransaction } from '../../utilities/killTransaction.js'
import sanitizeInternalFields from '../../utilities/sanitizeInternalFields.js'
import { getLatestCollectionVersion } from '../../versions/getLatestCollectionVersion.js'
import { saveVersion } from '../../versions/saveVersion.js'
import { buildAfterOperation } from './utils.js'
import { type Arguments as CreateArguments, createOperation } from './create.js'
export type Arguments = {
collection: Collection
depth?: number
disableTransaction?: boolean
draft?: boolean
export type Arguments<TSlug extends CollectionSlug> = {
data?: DeepPartial<RequiredDataFromCollectionSlug<TSlug>>
id: number | string
overrideAccess?: boolean
populate?: PopulateType
req: PayloadRequest
select?: SelectType
showHiddenFields?: boolean
}
} & Omit<CreateArguments<TSlug>, 'data' | 'duplicateFromID'>
export const duplicateOperation = async <
TSlug extends CollectionSlug,
TSelect extends SelectFromCollectionSlug<TSlug>,
>(
incomingArgs: Arguments,
incomingArgs: Arguments<TSlug>,
): Promise<TransformCollectionWithSelect<TSlug, TSelect>> => {
let args = incomingArgs
const operation = 'create'
try {
const shouldCommit = !args.disableTransaction && (await initTransaction(args.req))
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
await priorHook
args =
(await hook({
args,
collection: args.collection.config,
context: args.req.context,
operation,
req: args.req,
})) || args
}, Promise.resolve())
const {
id,
collection: { config: collectionConfig },
depth,
draft: draftArg = true,
overrideAccess,
populate,
req: { fallbackLocale, locale: localeArg, payload },
req,
select,
showHiddenFields,
} = args
if (!id) {
throw new APIError('Missing ID of document to duplicate.', httpStatus.BAD_REQUEST)
}
const shouldSaveDraft = Boolean(draftArg && collectionConfig.versions.drafts)
// /////////////////////////////////////
// Read Access
// /////////////////////////////////////
const accessResults = !overrideAccess
? await executeAccess({ id, req }, collectionConfig.access.read)
: true
const hasWherePolicy = hasWhereAccessResult(accessResults)
// /////////////////////////////////////
// Retrieve document
// /////////////////////////////////////
const findOneArgs: FindOneArgs = {
collection: collectionConfig.slug,
locale: req.locale,
req,
where: combineQueries({ id: { equals: id } }, accessResults),
}
let docWithLocales = await getLatestCollectionVersion({
id,
config: collectionConfig,
payload,
query: findOneArgs,
req,
})
if (!docWithLocales && !hasWherePolicy) {
throw new NotFound(req.t)
}
if (!docWithLocales && hasWherePolicy) {
throw new Forbidden(req.t)
}
// remove the createdAt timestamp and id to rely on the db to set the defaults
delete docWithLocales.createdAt
delete docWithLocales.id
docWithLocales = await beforeDuplicate({
id,
collection: collectionConfig,
context: req.context,
doc: docWithLocales,
overrideAccess,
req,
})
// for version enabled collections, override the current status with draft, unless draft is explicitly set to false
if (shouldSaveDraft) {
docWithLocales._status = 'draft'
}
let result
let originalDoc = await afterRead({
collection: collectionConfig,
context: req.context,
depth: 0,
doc: docWithLocales,
draft: draftArg,
fallbackLocale: null,
global: null,
locale: req.locale,
overrideAccess: true,
req,
showHiddenFields: true,
})
const { data: newFileData, files: filesToUpload } = await generateFileData({
collection: args.collection,
config: req.payload.config,
data: originalDoc,
operation: 'create',
overwriteExistingFiles: 'forceDisable',
req,
throwOnMissingFile: true,
})
originalDoc = newFileData
// /////////////////////////////////////
// Create Access
// /////////////////////////////////////
if (!overrideAccess) {
await executeAccess({ data: originalDoc, req }, collectionConfig.access.create)
}
// /////////////////////////////////////
// beforeValidate - Fields
// /////////////////////////////////////
let data = await beforeValidate<DeepPartial<DataFromCollectionSlug<TSlug>>>({
id,
collection: collectionConfig,
context: req.context,
data: originalDoc,
doc: originalDoc,
duplicate: true,
global: null,
operation,
overrideAccess,
req,
})
// /////////////////////////////////////
// beforeValidate - Collection
// /////////////////////////////////////
await collectionConfig.hooks.beforeValidate.reduce(async (priorHook, hook) => {
await priorHook
data =
(await hook({
collection: collectionConfig,
context: req.context,
data,
operation,
originalDoc,
req,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// beforeChange - Collection
// /////////////////////////////////////
await collectionConfig.hooks.beforeChange.reduce(async (priorHook, hook) => {
await priorHook
data =
(await hook({
collection: collectionConfig,
context: req.context,
data,
operation,
originalDoc: result,
req,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// beforeChange - Fields
// /////////////////////////////////////
result = await beforeChange({
id,
collection: collectionConfig,
context: req.context,
data,
doc: originalDoc,
docWithLocales,
global: null,
operation,
req,
skipValidation:
shouldSaveDraft &&
collectionConfig.versions.drafts &&
!collectionConfig.versions.drafts.validate,
})
// set req.locale back to the original locale
req.locale = localeArg
// /////////////////////////////////////
// Create / Update
// /////////////////////////////////////
// /////////////////////////////////////
// Write files to local storage
// /////////////////////////////////////
if (!collectionConfig.upload.disableLocalStorage) {
await uploadFiles(payload, filesToUpload, req)
}
let versionDoc = await payload.db.create({
collection: collectionConfig.slug,
data: result,
req,
select,
})
versionDoc = sanitizeInternalFields(versionDoc)
// /////////////////////////////////////
// Create version
// /////////////////////////////////////
if (collectionConfig.versions) {
result = await saveVersion({
id: versionDoc.id,
collection: collectionConfig,
docWithLocales: versionDoc,
draft: shouldSaveDraft,
payload,
req,
})
}
// /////////////////////////////////////
// afterRead - Fields
// /////////////////////////////////////
result = await afterRead({
collection: collectionConfig,
context: req.context,
depth,
doc: versionDoc,
draft: draftArg,
fallbackLocale,
global: null,
locale: localeArg,
overrideAccess,
populate,
req,
select,
showHiddenFields,
})
// /////////////////////////////////////
// afterRead - Collection
// /////////////////////////////////////
await collectionConfig.hooks.afterRead.reduce(async (priorHook, hook) => {
await priorHook
result =
(await hook({
collection: collectionConfig,
context: req.context,
doc: result,
req,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// afterChange - Fields
// /////////////////////////////////////
result = await afterChange({
collection: collectionConfig,
context: req.context,
data: versionDoc,
doc: result,
global: null,
operation,
previousDoc: {},
req,
})
// /////////////////////////////////////
// afterChange - Collection
// /////////////////////////////////////
await collectionConfig.hooks.afterChange.reduce(async (priorHook, hook) => {
await priorHook
result =
(await hook({
collection: collectionConfig,
context: req.context,
doc: result,
operation,
previousDoc: {},
req,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// afterOperation - Collection
// /////////////////////////////////////
result = await buildAfterOperation({
args,
collection: collectionConfig,
operation,
result,
})
// /////////////////////////////////////
// Return results
// /////////////////////////////////////
if (shouldCommit) {
await commitTransaction(req)
}
return result
} catch (error: unknown) {
await killTransaction(args.req)
throw error
}
const { id, ...args } = incomingArgs
return createOperation({
...args,
data: incomingArgs?.data || {},
duplicateFromID: id,
})
}

View File

@@ -8,6 +8,7 @@ import type {
} from '../../../types/index.js'
import type { File } from '../../../uploads/types.js'
import type {
DataFromCollectionSlug,
RequiredDataFromCollectionSlug,
SelectFromCollectionSlug,
} from '../../config/types.js'
@@ -28,6 +29,7 @@ export type Options<TSlug extends CollectionSlug, TSelect extends SelectType> =
disableTransaction?: boolean
disableVerificationEmail?: boolean
draft?: boolean
duplicateFromID?: DataFromCollectionSlug<TSlug>['id']
fallbackLocale?: false | TypedLocale
file?: File
filePath?: string
@@ -56,6 +58,7 @@ export default async function createLocal<
disableTransaction,
disableVerificationEmail,
draft,
duplicateFromID,
file,
filePath,
overrideAccess = true,
@@ -82,6 +85,7 @@ export default async function createLocal<
disableTransaction,
disableVerificationEmail,
draft,
duplicateFromID,
overrideAccess,
overwriteExistingFiles,
populate,
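
A short, hypothetical Local API call using the new option — `duplicateFromID` seeds the create operation with the source document, while `data` supplies the fields to change on the copy (the collection slug, `payload` instance, and `existingPostID` are assumed to be in scope):

```ts
const copy = await payload.create({
  collection: 'posts',
  data: { title: 'Copy of my post' },
  duplicateFromID: existingPostID,
})
```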

View File

@@ -1,3 +1,5 @@
import type { DeepPartial } from 'ts-essentials'
import type { CollectionSlug, TypedLocale } from '../../..//index.js'
import type { Payload, RequestContext } from '../../../index.js'
import type {
@@ -7,7 +9,10 @@ import type {
SelectType,
TransformCollectionWithSelect,
} from '../../../types/index.js'
import type { SelectFromCollectionSlug } from '../../config/types.js'
import type {
RequiredDataFromCollectionSlug,
SelectFromCollectionSlug,
} from '../../config/types.js'
import { APIError } from '../../../errors/index.js'
import { createLocalReq } from '../../../utilities/createLocalReq.js'
@@ -19,6 +24,7 @@ export type Options<TSlug extends CollectionSlug, TSelect extends SelectType> =
* context, which will then be passed to req.context, which can be read by hooks
*/
context?: RequestContext
data?: DeepPartial<RequiredDataFromCollectionSlug<TSlug>>
depth?: number
disableTransaction?: boolean
draft?: boolean
@@ -43,6 +49,7 @@ export async function duplicate<
const {
id,
collection: collectionSlug,
data,
depth,
disableTransaction,
draft,
@@ -71,6 +78,7 @@ export async function duplicate<
return duplicateOperation<TSlug, TSelect>({
id,
collection,
data,
depth,
disableTransaction,
draft,
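
And the corresponding `duplicate` call, which now forwards an optional `data` override to the create operation shown earlier (again with a hypothetical slug and ID):

```ts
const duplicated = await payload.duplicate({
  id: existingPostID,
  collection: 'posts',
  data: { title: 'Copy of my post' },
})
```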

View File

@@ -704,7 +704,7 @@ export type Config = {
| 'default'
| 'gravatar'
| {
Component: PayloadComponent<never>
Component: PayloadComponent
}
/**
* Add extra and/or replace built-in components with custom components
@@ -841,6 +841,12 @@ export type Config = {
/** The route for the unauthorized page. */
unauthorized?: string
}
/**
* Suppresses React hydration mismatch warnings during the hydration of the root <html> tag.
* Useful in scenarios where the server-rendered HTML might intentionally differ from the client-rendered DOM.
* @default false
*/
suppressHydrationWarning?: boolean
/**
* Restrict the Admin Panel theme to use only one of your choice
*
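
A minimal config sketch for the new option, showing only the `admin` portion:

```ts
import type { Config } from 'payload'

const admin: Config['admin'] = {
  // opt in when the server-rendered <html> attributes may legitimately differ from the client
  suppressHydrationWarning: true,
}
```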

View File

@@ -35,7 +35,8 @@ export const migrate: BaseDatabaseAdapter['migrate'] = async function migrate(
try {
await initTransaction(req)
await migration.up({ payload, req })
const session = payload.db.sessions?.[await req.transactionID]
await migration.up({ payload, req, session })
payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })
await payload.create({
collection: 'payload-migrations',

View File

@@ -38,7 +38,8 @@ export async function migrateDown(this: BaseDatabaseAdapter): Promise<void> {
try {
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
await initTransaction(req)
await migrationFile.down({ payload, req })
const session = payload.db.sessions?.[await req.transactionID]
await migrationFile.down({ payload, req, session })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})

View File

@@ -37,7 +37,8 @@ export async function migrateRefresh(this: BaseDatabaseAdapter) {
payload.logger.info({ msg: `Migrating down: ${migration.name}` })
const start = Date.now()
await initTransaction(req)
await migrationFile.down({ payload, req })
const session = payload.db.sessions?.[await req.transactionID]
await migrationFile.down({ payload, req, session })
payload.logger.info({
msg: `Migrated down: ${migration.name} (${Date.now() - start}ms)`,
})

View File

@@ -31,7 +31,8 @@ export async function migrateReset(this: BaseDatabaseAdapter): Promise<void> {
try {
const start = Date.now()
await initTransaction(req)
await migration.down({ payload, req })
const session = payload.db.sessions?.[await req.transactionID]
await migration.down({ payload, req, session })
await payload.delete({
collection: 'payload-migrations',
req,

View File

@@ -161,8 +161,6 @@ export type Connect = (args?: ConnectArgs) => Promise<void>
export type Destroy = () => Promise<void>
export type CreateMigration = (args: {
/** dirname of the package, required in drizzle */
dirname?: string
file?: string
forceAcceptWarning?: boolean
migrationName?: string

View File

@@ -0,0 +1,106 @@
import type { SanitizedCollectionConfig } from '../collections/config/types.js'
import type { FindOneArgs } from '../database/types.js'
import type { JsonObject, PayloadRequest } from '../types/index.js'
import executeAccess from '../auth/executeAccess.js'
import { hasWhereAccessResult } from '../auth/types.js'
import { combineQueries } from '../database/combineQueries.js'
import { Forbidden } from '../errors/Forbidden.js'
import { NotFound } from '../errors/NotFound.js'
import { afterRead } from '../fields/hooks/afterRead/index.js'
import { beforeDuplicate } from '../fields/hooks/beforeDuplicate/index.js'
import { getLatestCollectionVersion } from '../versions/getLatestCollectionVersion.js'
type GetDuplicateDocumentArgs = {
collectionConfig: SanitizedCollectionConfig
draftArg?: boolean
id: number | string
overrideAccess?: boolean
req: PayloadRequest
shouldSaveDraft?: boolean
}
export const getDuplicateDocumentData = async ({
id,
collectionConfig,
draftArg,
overrideAccess,
req,
shouldSaveDraft,
}: GetDuplicateDocumentArgs): Promise<{
duplicatedFromDoc: JsonObject
duplicatedFromDocWithLocales: JsonObject
}> => {
const { payload } = req
// /////////////////////////////////////
// Read Access
// /////////////////////////////////////
const accessResults = !overrideAccess
? await executeAccess({ id, req }, collectionConfig.access.read)
: true
const hasWherePolicy = hasWhereAccessResult(accessResults)
// /////////////////////////////////////
// Retrieve document
// /////////////////////////////////////
const findOneArgs: FindOneArgs = {
collection: collectionConfig.slug,
locale: req.locale,
req,
where: combineQueries({ id: { equals: id } }, accessResults),
}
let duplicatedFromDocWithLocales = await getLatestCollectionVersion({
id,
config: collectionConfig,
payload,
query: findOneArgs,
req,
})
if (!duplicatedFromDocWithLocales && !hasWherePolicy) {
throw new NotFound(req.t)
}
if (!duplicatedFromDocWithLocales && hasWherePolicy) {
throw new Forbidden(req.t)
}
// remove the createdAt timestamp and rely on the db to set it
if ('createdAt' in duplicatedFromDocWithLocales) {
delete duplicatedFromDocWithLocales.createdAt
}
// remove the id and rely on the db to set it
if ('id' in duplicatedFromDocWithLocales) {
delete duplicatedFromDocWithLocales.id
}
duplicatedFromDocWithLocales = await beforeDuplicate({
id,
collection: collectionConfig,
context: req.context,
doc: duplicatedFromDocWithLocales,
overrideAccess,
req,
})
// for version enabled collections, override the current status with draft, unless draft is explicitly set to false
if (shouldSaveDraft) {
duplicatedFromDocWithLocales._status = 'draft'
}
const duplicatedFromDoc = await afterRead({
collection: collectionConfig,
context: req.context,
depth: 0,
doc: duplicatedFromDocWithLocales,
draft: draftArg,
fallbackLocale: null,
global: null,
locale: req.locale,
overrideAccess: true,
req,
showHiddenFields: true,
})
return { duplicatedFromDoc, duplicatedFromDocWithLocales }
}

View File

@@ -57,6 +57,8 @@ export {
} from '../utilities/deepMerge.js'
export { fieldSchemaToJSON } from '../utilities/fieldSchemaToJSON.js'
export { flattenAllFields } from '../utilities/flattenAllFields.js'
export { default as flattenTopLevelFields } from '../utilities/flattenTopLevelFields.js'
export { getDataByPath } from '../utilities/getDataByPath.js'
export { getSelectMode } from '../utilities/getSelectMode.js'

View File

@@ -37,7 +37,10 @@ export type ServerOnlyFieldProperties =
| 'validate'
| keyof Pick<FieldBase, 'access' | 'custom' | 'defaultValue' | 'hooks'>
export type ServerOnlyFieldAdminProperties = keyof Pick<FieldBase['admin'], 'condition'>
export type ServerOnlyFieldAdminProperties = keyof Pick<
FieldBase['admin'],
'components' | 'condition'
>
const serverOnlyFieldProperties: Partial<ServerOnlyFieldProperties>[] = [
'hooks',
@@ -57,7 +60,10 @@ const serverOnlyFieldProperties: Partial<ServerOnlyFieldProperties>[] = [
// `tabs`
// `admin`
]
- const serverOnlyFieldAdminProperties: Partial<ServerOnlyFieldAdminProperties>[] = ['condition']
+ const serverOnlyFieldAdminProperties: Partial<ServerOnlyFieldAdminProperties>[] = [
+ 'condition',
+ 'components',
+ ]
type FieldWithDescription = {
admin: AdminClient
} & ClientField

View File

@@ -286,7 +286,6 @@ export const promise = async ({
}
case 'collapsible':
case 'row': {
await traverseFields({
id,

View File

@@ -117,9 +117,12 @@ export type GlobalAdminOptions = {
*/
description?: EntityDescription
/**
- * Place globals into a navigational group
- * */
- group?: Record<string, string> | string
+ * Specify a navigational group for globals in the admin sidebar.
+ * - Provide a string to place the entity in a custom group.
+ * - Provide a record to define localized group names.
+ * - Set to `false` to exclude the entity from the sidebar / dashboard without disabling its routes.
+ */
+ group?: false | Record<string, string> | string
/**
* Exclude the global from the admin nav and routes
*/
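
As a quick illustration of the documented behavior, a sketch of a global config that opts out of the nav while keeping its routes (the slug and field are made up for the example):

```ts
import type { GlobalConfig } from 'payload'

// Hypothetical global used only to illustrate `admin.group: false`
export const SiteSettings: GlobalConfig = {
  slug: 'site-settings',
  admin: {
    // Hidden from the sidebar nav and dashboard,
    // but its admin edit route stays reachable by URL
    group: false,
  },
  fields: [
    {
      name: 'maintenanceMode',
      type: 'checkbox',
    },
  ],
}
```
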

View File

@@ -6,6 +6,7 @@ import {
type PayloadRequest,
type RunningJob,
type TypedJobs,
type Where,
} from '../index.js'
import { runJobs } from './operations/runJobs/index.js'
@@ -70,6 +71,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({
overrideAccess?: boolean
queue?: string
req?: PayloadRequest
where?: Where
}): Promise<ReturnType<typeof runJobs>> => {
const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload))
const result = await runJobs({
@@ -77,6 +79,21 @@ export const getJobsLocalAPI = (payload: Payload) => ({
overrideAccess: args?.overrideAccess !== false,
queue: args?.queue,
req: newReq,
where: args?.where,
})
return result
},
runByID: async (args: {
id: number | string
overrideAccess?: boolean
req?: PayloadRequest
}): Promise<ReturnType<typeof runJobs>> => {
const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload))
const result = await runJobs({
id: args.id,
overrideAccess: args?.overrideAccess !== false,
req: newReq,
})
return result
},
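
To make the new surface concrete, a sketch of running queued jobs with the extra `where` constraint, or a single job by ID. This assumes the object above is mounted at `payload.jobs`, and the `taskSlug` filter is an assumption about the `payload-jobs` collection's fields:

```ts
import type { Payload } from 'payload'

// Hypothetical helper showing the new `where` and `runByID` options
export const runPendingEmailJobs = async (payload: Payload) => {
  // Only claim jobs that match the additional `where` constraint
  const result = await payload.jobs.run({
    queue: 'nightly', // assumed queue name
    where: {
      taskSlug: { equals: 'sendEmail' }, // assumed field/value
    },
  })

  return result
}

// Or target one specific job directly:
// await payload.jobs.runByID({ id: jobId })
```
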

View File

@@ -17,10 +17,15 @@ import { runJob } from './runJob/index.js'
import { runJSONJob } from './runJSONJob/index.js'
export type RunJobsArgs = {
/**
* ID of the job to run
*/
id?: number | string
limit?: number
overrideAccess?: boolean
queue?: string
req: PayloadRequest
where?: Where
}
export type RunJobsResult = {
@@ -36,10 +41,12 @@ export type RunJobsResult = {
}
export const runJobs = async ({
id,
limit = 10,
overrideAccess,
queue,
req,
where: whereFromProps,
}: RunJobsArgs): Promise<RunJobsResult> => {
if (!overrideAccess) {
const hasAccess = await req.payload.config.jobs.access.run({ req })
@@ -89,20 +96,42 @@ export const runJobs = async ({
})
}
if (whereFromProps) {
where.and.push(whereFromProps)
}
// Find all jobs and ensure we set job to processing: true as early as possible to reduce the chance of
// the same job being picked up by another worker
- const jobsQuery = (await req.payload.update({
- collection: 'payload-jobs',
- data: {
- processing: true,
- seenByWorker: true,
- },
- depth: req.payload.config.jobs.depth,
- disableTransaction: true,
- limit,
- showHiddenFields: true,
- where,
- })) as unknown as PaginatedDocs<BaseJob>
+ const jobsQuery: {
+ docs: BaseJob[]
+ } = id
+ ? {
+ docs: [
+ (await req.payload.update({
+ id,
+ collection: 'payload-jobs',
+ data: {
+ processing: true,
+ seenByWorker: true,
+ },
+ depth: req.payload.config.jobs.depth,
+ disableTransaction: true,
+ showHiddenFields: true,
+ })) as BaseJob,
+ ],
+ }
+ : ((await req.payload.update({
+ collection: 'payload-jobs',
+ data: {
+ processing: true,
+ seenByWorker: true,
+ },
+ depth: req.payload.config.jobs.depth,
+ disableTransaction: true,
+ limit,
+ showHiddenFields: true,
+ where,
+ })) as unknown as PaginatedDocs<BaseJob>)
/**
* Just for logging purposes, we want to know how many jobs are new and how many are existing (= already been tried).

View File

@@ -25,10 +25,10 @@ type Args<T> = {
collection: Collection
config: SanitizedConfig
data: T
isDuplicating?: boolean
operation: 'create' | 'update'
originalDoc?: T
- /** pass forceDisable to not overwrite existing files even if they already exist in `data` */
- overwriteExistingFiles?: 'forceDisable' | boolean
+ overwriteExistingFiles?: boolean
req: PayloadRequest
throwOnMissingFile?: boolean
}
@@ -41,6 +41,7 @@ type Result<T> = Promise<{
export const generateFileData = async <T>({
collection: { config: collectionConfig },
data,
isDuplicating,
operation,
originalDoc,
overwriteExistingFiles,
@@ -60,6 +61,7 @@ export const generateFileData = async <T>({
const uploadEdits = parseUploadEditsFromReqOrIncomingData({
data,
isDuplicating,
operation,
originalDoc,
req,
@@ -78,33 +80,31 @@ export const generateFileData = async <T>({
const staticPath = staticDir
- if (!file && uploadEdits && data) {
- const { filename, url } = data as FileData
+ const incomingFileData = isDuplicating ? originalDoc : data
+ if (!file && uploadEdits && incomingFileData) {
+ const { filename, url } = incomingFileData as FileData
try {
if (url && url.startsWith('/') && !disableLocalStorage) {
const filePath = `${staticPath}/${filename}`
const response = await getFileByPath(filePath)
file = response
- if (overwriteExistingFiles !== 'forceDisable') {
- overwriteExistingFiles = true
- }
+ overwriteExistingFiles = true
} else if (filename && url) {
file = await getExternalFile({
- data: data as FileData,
+ data: incomingFileData as FileData,
req,
uploadConfig: collectionConfig.upload,
})
- if (overwriteExistingFiles !== 'forceDisable') {
- overwriteExistingFiles = true
- }
+ overwriteExistingFiles = true
}
} catch (err: unknown) {
throw new FileRetrievalError(req.t, err instanceof Error ? err.message : undefined)
}
}
- if (overwriteExistingFiles === 'forceDisable') {
+ if (isDuplicating) {
overwriteExistingFiles = false
}
@@ -362,11 +362,12 @@ export const generateFileData = async <T>({
*/
function parseUploadEditsFromReqOrIncomingData(args: {
data: unknown
isDuplicating?: boolean
operation: 'create' | 'update'
originalDoc: unknown
req: PayloadRequest
}): UploadEdits {
- const { data, operation, originalDoc, req } = args
+ const { data, isDuplicating, operation, originalDoc, req } = args
// Get intended focal point change from query string or incoming data
const uploadEdits =
@@ -381,10 +382,19 @@ function parseUploadEditsFromReqOrIncomingData(args: {
const incomingData = data as FileData
const origDoc = originalDoc as FileData
- // If no change in focal point, return undefined.
- // This prevents a refocal operation triggered from admin, because it always sends the focal point.
- if (origDoc && incomingData.focalX === origDoc.focalX && incomingData.focalY === origDoc.focalY) {
- return undefined
+ if (origDoc && 'focalX' in origDoc && 'focalY' in origDoc) {
+ // If no change in focal point, return undefined.
+ // This prevents a refocal operation triggered from admin, because it always sends the focal point.
+ if (incomingData.focalX === origDoc.focalX && incomingData.focalY === origDoc.focalY) {
+ return undefined
+ }
+ if (isDuplicating) {
+ // Preserve the original focal point when duplicating
+ uploadEdits.focalPoint = {
+ x: incomingData?.focalX || origDoc.focalX,
+ y: incomingData?.focalY || origDoc.focalY,
+ }
+ }
+ }
}
if (incomingData?.focalX && incomingData?.focalY) {
@@ -402,5 +412,6 @@ function parseUploadEditsFromReqOrIncomingData(args: {
y: 50,
}
}
return uploadEdits
}

View File

@@ -33,18 +33,14 @@ function flattenFields<TField extends ClientField | Field>(
fields: TField[],
keepPresentationalFields?: boolean,
): FlattenedField<TField>[] {
- return fields.reduce<FlattenedField<TField>[]>((fieldsToUse, field) => {
+ return fields.reduce<FlattenedField<TField>[]>((acc, field) => {
if (fieldAffectsData(field) || (keepPresentationalFields && fieldIsPresentationalOnly(field))) {
- return [...fieldsToUse, field as FlattenedField<TField>]
- }
- if (fieldHasSubFields(field)) {
- return [...fieldsToUse, ...flattenFields(field.fields as TField[], keepPresentationalFields)]
- }
- if (field.type === 'tabs' && 'tabs' in field) {
+ acc.push(field as FlattenedField<TField>)
+ } else if (fieldHasSubFields(field)) {
+ acc.push(...flattenFields(field.fields as TField[], keepPresentationalFields))
+ } else if (field.type === 'tabs' && 'tabs' in field) {
return [
- ...fieldsToUse,
+ ...acc,
...field.tabs.reduce<FlattenedField<TField>[]>((tabFields, tab: TabType<TField>) => {
if (tabHasName(tab)) {
return [...tabFields, { ...tab, type: 'tab' } as unknown as FlattenedField<TField>]
@@ -58,7 +54,7 @@ function flattenFields<TField extends ClientField | Field>(
]
}
- return fieldsToUse
+ return acc
}, [])
}
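
For a sense of what this flattening produces, a small sketch under the assumption that this helper backs the exported `flattenTopLevelFields` utility (field names are made up):

```ts
import type { Field } from 'payload'

// Input: a mix of data fields and a presentational `row` wrapper
const fields: Field[] = [
  { name: 'title', type: 'text' },
  {
    type: 'row',
    fields: [
      { name: 'slug', type: 'text' },
      { name: 'publishedAt', type: 'date' },
    ],
  },
]

// Flattening lifts the row's data-affecting sub-fields to the top level, roughly:
// [ { name: 'title', ... }, { name: 'slug', ... }, { name: 'publishedAt', ... } ]
```
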

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-cloud-storage",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official cloud storage plugin for Payload CMS",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-form-builder",
"version": "3.5.0",
"version": "3.6.0",
"description": "Form builder plugin for Payload CMS",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-nested-docs",
"version": "3.5.0",
"version": "3.6.0",
"description": "The official Nested Docs plugin for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-redirects",
"version": "3.5.0",
"version": "3.6.0",
"description": "Redirects plugin for Payload",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-search",
"version": "3.5.0",
"version": "3.6.0",
"description": "Search plugin for Payload",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-sentry",
"version": "3.5.0",
"version": "3.6.0",
"description": "Sentry plugin for Payload",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-seo",
"version": "3.5.0",
"version": "3.6.0",
"description": "SEO plugin for Payload",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-stripe",
"version": "3.5.0",
"version": "3.6.0",
"description": "Stripe plugin for Payload",
"keywords": [
"payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/richtext-lexical",
"version": "3.5.0",
"version": "3.6.0",
"description": "The officially supported Lexical richtext adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

Some files were not shown because too many files have changed in this diff.