Compare commits: 1 commit (7aaf3c24e2) on feat/suspe

.github/workflows/main.yml (vendored)
@@ -492,12 +492,9 @@ jobs:
    needs:
      - lint
      - build
      - build-templates
      - tests-unit
      - tests-int
      - tests-e2e
      - tests-types
      - tests-type-generation

    steps:
      - if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}
@@ -25,24 +25,24 @@ export const MyCollection: CollectionConfig = {
The following options are available:

| Option | Description |
| --- | --- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title. |
| **`description`** | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`enableRichTextRelationship`** | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`meta`** | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](./metadata). |
| **`preview`** | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](#preview). |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`components`** | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| **`listSearchableFields`** | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| **`pagination`** | Set pagination-specific options for this Collection. [More details](#pagination). |
| **`baseListFilter`** | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
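For reference, a minimal sketch of how a few of the options above sit on a Collection's `admin` key (the `posts` slug, field names, and group label are illustrative assumptions, not taken from this diff):

```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    group: 'Content', // label used to group nav links
    useAsTitle: 'title', // top-level field shown as the document title
    defaultColumns: ['title', 'updatedAt'], // columns shown by default in the List View
    hideAPIURL: true, // hide the "API URL" meta field in the Edit View
  },
  fields: [{ name: 'title', type: 'text' }],
}
```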
### Custom Components
@@ -108,20 +108,14 @@ export const Posts: CollectionConfig = {
}
```

The `preview` property resolves to a string that points to your front-end application with additional URL parameters. This can be an absolute URL or a relative path.
The `preview` property resolves to a string that points to your front-end application with additional URL parameters. This can be an absolute URL or a relative path. If you are using a relative path, Payload will prepend the application's origin onto it, creating a fully qualified URL.

The preview function receives two arguments:

| Argument | Description |
| --- | --- |
| **`doc`** | The Document being edited. |
| **`ctx`** | An object containing `locale`, `token`, and `req` properties. The `token` is the currently logged-in user's JWT. |

If your application requires a fully qualified URL, such as when deploying to Vercel Preview Deployments, you can use the `req` property to build this URL:

```ts
preview: (doc, { req }) => `${req.protocol}//${req.host}/${doc.slug}` // highlight-line
```

| **`ctx`** | An object containing `locale` and `token` properties. The `token` is the currently logged-in user's JWT. |

<Banner type="success">
  <strong>Note:</strong>
@@ -25,9 +25,9 @@ export const MyGlobal: GlobalConfig = {
The following options are available:

| Option | Description |
| --- | --- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Global from navigation and admin routing. |
| **`components`** | Swap in your own React components to be used within this Global. [More details](#custom-components). |
| **`preview`** | Function to generate a preview URL within the Admin Panel for this Global that can point to your app. [More details](#preview). |
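As a quick illustration of the table above, a minimal sketch of a Global using the `group` option (the `site` slug and the `Settings` label are assumptions for this example):

```ts
import type { GlobalConfig } from 'payload'

export const Site: GlobalConfig = {
  slug: 'site',
  admin: {
    group: 'Settings', // groups this Global's nav link under "Settings"
  },
  fields: [],
}
```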
@@ -184,7 +184,7 @@ export const MyGlobal: GlobalConfig = {
    meta: {
      // highlight-end
      title: 'My Global',
      description: 'The best admin panel in the world',
      description: 'The best
    },
  },
}
@@ -86,21 +86,20 @@ const config = buildConfig({
The following options are available:

| Option | Description |
| --- | --- |
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](./components). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`disable`** | If set to `true`, the entire Admin Panel will be disabled. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`suppressHydrationWarning`** | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
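To make the shape concrete, a small sketch of a few of these options on the root `admin` key (the specific values, including the `dark` theme, are illustrative assumptions):

```ts
import { buildConfig } from 'payload'

const config = buildConfig({
  // ...
  admin: {
    dateFormat: 'MMMM do yyyy, h:mm a', // any valid date-fns pattern
    theme: 'dark', // restrict the panel to a single theme (default is 'all')
    suppressHydrationWarning: true,
  },
})

export default config
```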
<Banner type="success">
  <strong>Reminder:</strong>
@@ -98,7 +98,7 @@ From there, you are ready to make updates to your project. When you are ready to
Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add it to your project. To do so, add the plugin to your Payload Config:

`pnpm add @payloadcms/payload-cloud`
`yarn add @payloadcms/payload-cloud`

```js
import { payloadCloudPlugin } from '@payloadcms/payload-cloud'
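
// A sketch of registering the plugin (shape assumed, not shown in this diff):
// `buildConfig` comes from 'payload'; add the plugin to the `plugins` array.
export default buildConfig({
  // ...
  plugins: [payloadCloudPlugin()],
})
```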
@@ -51,44 +51,12 @@ export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
## Using Transactions

When migrations are run, each migration is performed in a new [transaction](/docs/database/transactions) for you. All
When migrations are run, each migration is performed in a new [transactions](/docs/database/transactions) for you. All
you need to do is pass the `req` object to any [local API](/docs/local-api/overview) or direct database calls, such as
`payload.db.updateMany()`, to make database changes inside the transaction. Assuming no errors were thrown, the transaction is committed
after your `up` or `down` function runs. If the migration errors at any point or fails to commit, it is caught and the
transaction gets aborted. This way no change is made to the database if the migration fails.
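For instance, a minimal sketch of a migration that joins the transaction by forwarding `req` to a Local API call (the `posts` collection and its `legacy`/`migrated` fields are assumptions for illustration):

```ts
import { type MigrateUpArgs } from '@payloadcms/db-mongodb'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // Passing `req` keeps this write inside the migration's transaction,
  // so it is rolled back automatically if the migration fails.
  await payload.update({
    collection: 'posts',
    where: { legacy: { equals: true } },
    data: { migrated: true },
    req,
  })
}
```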
### Using database directly with the transaction

Additionally, you can bypass Payload's layer entirely and perform operations directly on your underlying database within the active transaction:

### MongoDB:

```ts
import { type MigrateUpArgs } from '@payloadcms/db-mongodb'

export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
  const posts = await payload.db.collections.posts.collection.find({ session }).toArray()
}
```

### Postgres:

```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'

export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  const { rows: posts } = await db.execute(sql`SELECT * from posts`)
}
```

### SQLite:

In SQLite, transactions are disabled by default. [More](./transactions).

```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'

export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
  const { rows: posts } = await db.run(sql`SELECT * from posts`)
}
```

## Migrations Directory

Each DB adapter has an optional property `migrationDir` where you can override where you want your migrations to be
@@ -16,12 +16,6 @@ By default, Payload will use transactions for all data changing operations, as l
MongoDB requires a connection to a replica set in order to make use of transactions.
</Banner>

<Banner type="info">
  <strong>Note:</strong>
  <br />
  Transactions in SQLite are disabled by default. You need to pass `transactionOptions: {}` to enable them.
</Banner>

The initial request made to Payload will begin a new transaction and attach it to the request as `req.transactionID`. If you have a `hook` that interacts with the database, you can opt in to using the same transaction by passing the `req` in the arguments. For example:

```ts
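// A sketch only (the original example is not shown in this diff): an afterChange
// hook that re-uses the request's transaction by forwarding `req`. The
// `audit-log` collection name is an assumption.
const afterChange = async ({ doc, req }) => {
  await req.payload.create({
    collection: 'audit-log',
    data: { changedDocID: doc.id },
    req, // opt in to the transaction started for the original request
  })
}
```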
@@ -10,9 +10,9 @@ keywords: documentation, getting started, guide, Content Management System, cms,
Payload requires the following software:

- Any JavaScript package manager (pnpm, npm, or yarn - pnpm is preferred)
- Any JavaScript package manager (Yarn, NPM, or pnpm - pnpm is preferred)
- Node.js version 20.9.0+
- Any [compatible database](/docs/database/overview) (MongoDB, Postgres or SQLite)
- Any [compatible database](/docs/database/overview) (MongoDB, Postgres or Sqlite)

<Banner type="warning">
  <strong>Important:</strong>
@@ -49,7 +49,7 @@ pnpm i payload @payloadcms/next @payloadcms/richtext-lexical sharp graphql
<Banner type="warning">
  <strong>Note:</strong>
  Swap out `pnpm` for your package manager. If you are using npm, you might need to install using legacy peer deps: `npm i --legacy-peer-deps`.
  Swap out `pnpm` for your package manager. If you are using NPM, you might need to install using legacy peer deps: `npm i --legacy-peer-deps`.
</Banner>

Next, install a [Database Adapter](/docs/database/overview). Payload requires a Database Adapter to establish a database connection. Payload works with all types of databases, but the most common are MongoDB and Postgres.
@@ -181,6 +181,6 @@ Once you have a Payload Config, update your `tsconfig` to include a `path` that
#### 5. Fire it up!

After you've reached this point, it's time to boot up Payload. Start your project in your application's folder to get going. By default, the Next.js dev script is `pnpm dev` (or `npm run dev` if using npm).
After you've reached this point, it's time to boot up Payload. Start your project in your application's folder to get going. By default, the Next.js dev script is `pnpm dev` (or `npm run dev` if using NPM).

After it starts, you can go to `http://localhost:3000/admin` to create your first Payload user!
@@ -62,7 +62,7 @@ type Collection1 {
The above example outputs all your definitions to a file relative to your Payload Config as `./graphql/schema.graphql`. By default, the file will be output to your current working directory as `schema.graphql`.

### Adding an npm script
### Adding an NPM script

<Banner type="warning">
  <strong>Important</strong>
@@ -72,7 +72,7 @@ The above example outputs all your definitions to a file relative from your payl
Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable.

If this applies to you, create an npm script to make generating types easier:
If this applies to you, create an NPM script to make generating types easier:

```json
// package.json
@@ -28,8 +28,6 @@ To pass data between hooks, you can assign values to context in an earlier hook
For example:

```ts
import type { CollectionConfig } from 'payload'

const Customer: CollectionConfig = {
  slug: 'customers',
  hooks: {
@@ -45,6 +43,7 @@ const Customer: CollectionConfig = {
      },
    ],
    afterChange: [
      async ({ context, doc, req }) => {
        // use context.customerData without needing to fetch it again
        if (context.customerData.contacted === false) {
@@ -66,8 +65,6 @@ Let's say you have an `afterChange` hook, and you want to do a calculation insid
Bad example:

```ts
import type { CollectionConfig } from 'payload'

const Customer: CollectionConfig = {
  slug: 'customers',
  hooks: {
@@ -95,8 +92,6 @@ Instead of the above, we need to tell the `afterChange` hook to not run again if
Fixed example:

```ts
import type { CollectionConfig } from 'payload'

const MyCollection: CollectionConfig = {
  slug: 'slug',
  hooks: {
@@ -130,7 +125,7 @@ const MyCollection: CollectionConfig = {
The default TypeScript interface for `context` is `{ [key: string]: unknown }`. If you prefer stricter typing in your project or when authoring plugins for others, you can override this using the `declare` syntax.

This is known as "type augmentation", a TypeScript feature which allows us to add types to existing types. Simply put this in any `.ts` or `.d.ts` file:
This is known as "type augmentation", a TypeScript feature which allows us to add types to existing objects. Simply put this in any `.ts` or `.d.ts` file:

```ts
import { RequestContext as OriginalRequestContext } from 'payload'
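
// A sketch of the usual module-augmentation shape; the `myCustomValue` field is
// a placeholder, and the rest of the original example is not shown in this diff.
declare module 'payload' {
  // Merge your additional fields into the original RequestContext
  export interface RequestContext extends OriginalRequestContext {
    myCustomValue?: string
  }
}
```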
@@ -37,7 +37,7 @@ Root Hooks are not associated with any specific Collection, Global, or Field. Th
To add Root Hooks, use the `hooks` property in your [Payload Config](/docs/configuration/config):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
@@ -60,7 +60,7 @@ The following options are available:
The `afterError` Hook is triggered when an error occurs in the Payload application. This can be useful for logging errors to a third-party service, sending an email to the development team, logging the error to Sentry or DataDog, etc. The output can be used to transform the result object / status code.

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
@@ -98,24 +98,11 @@ After the project is deployed to Vercel, the Vercel Cron job will automatically
If you want to process jobs programmatically from your server-side code, you can use the Local API:

**Run all jobs:**

```ts
const results = await payload.jobs.run()

// You can customize the queue name and limit by passing them as arguments:
await payload.jobs.run({ queue: 'nightly', limit: 100 })

// You can provide a where clause to filter the jobs that should be run:
await payload.jobs.run({ where: { 'input.message': { equals: 'secret' } } })
```

**Run a single job:**

```ts
const results = await payload.jobs.runByID({
  id: myJobID
})
```

#### Bin script
@@ -54,6 +54,8 @@ _\* An asterisk denotes that a property is required._
The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.

This can be an absolute URL or a relative path. If you are using a relative path, Payload will prepend the application's origin onto it, creating a fully qualified URL. This is useful for Vercel preview deployments, for example, where URLs are not known ahead of time.

To set the URL, use the `admin.livePreview.url` property in your [Payload Config](../configuration/overview):

```ts
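// A minimal sketch (the full example is not shown in this diff); the URL value
// is an assumption pointing at a local front-end preview route.
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  admin: {
    livePreview: {
      url: 'http://localhost:3000/preview',
    },
  },
})
```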
@@ -105,16 +107,8 @@ The following arguments are provided to the `url` function:
| Argument | Description |
| --- | --- |
| **`data`** | The data of the Document being edited. This includes changes that have not yet been saved. |
| **`documentInfo`** | Information about the Document being edited, like the collection slug. [More details](../admin/hooks#usedocumentinfo). |
| **`locale`** | The locale currently being edited (if applicable). [More details](../configuration/localization). |
| **`collectionConfig`** | The Collection Admin Config of the Document being edited. [More details](../admin/collections). |
| **`globalConfig`** | The Global Admin Config of the Document being edited. [More details](../admin/globals). |
| **`req`** | The Payload Request object. |

If your application requires a fully qualified URL, such as when deploying to Vercel Preview Deployments, you can use the `req` property to build this URL:

```ts
url: (doc, { req }) => `${req.protocol}//${req.host}/${doc.slug}` // highlight-line
```

### Breakpoints
@@ -131,9 +131,6 @@ const post = await payload.create({
  // Alternatively, you can directly pass a File,
  // if file is provided, filePath will be omitted
  file: uploadedFile,

  // If you want to create a document that is a duplicate of another document
  duplicateFromID: 'document-id-to-duplicate',
})
```
@@ -310,13 +307,13 @@ available:
### Auth

```js
// If you're using Next.js, you'll have to import headers from next/headers, like so:
// If you're using nextjs, you'll have to import headers from next/headers, like so:
// import { headers as nextHeaders } from 'next/headers'

// you'll also have to await headers inside your function, or component, like so:
// const headers = await nextHeaders()

// If you're using payload outside of Next.js, you'll have to provide headers accordingly.
// If you're using payload outside of NextJS, you'll have to provide headers accordingly.

// result will be formatted as follows:
// {
@@ -40,7 +40,7 @@ Payload 3.0 requires a set of auto-generated files that you will need to bring i
For more details, see the [Documentation](https://payloadcms.com/docs/getting-started/installation).

1. **Install new dependencies of Payload, Next.js and React**:
1. **Install new dependencies of payload, next.js and react**:

   Refer to the package.json file generated by create-payload-app, including peerDependencies, devDependencies, and dependencies. The core package and plugins require all versions to be synced. Previously, on 2.x, it was possible to run the latest version of Payload 2.x with an older version of db-mongodb, for example. This is no longer the case.
@@ -412,7 +412,7 @@ For more details, see the [Documentation](https://payloadcms.com/docs/getting-st
  }
})
```

1. The `./src/public` directory is now located directly at root level `./public` [see Next.js docs for details](https://nextjs.org/docs/pages/building-your-application/optimizing/static-assets)
1. The `./src/public` directory is now located directly at root level `./public` [see nextJS docs for details](https://nextjs.org/docs/pages/building-your-application/optimizing/static-assets)

## Custom Components
@@ -84,7 +84,7 @@ cd dev
npx create-payload-app@latest
```

If you're using the plugin template, the dev folder is built out for you and the `samplePlugin` has already been installed in `dev/payload.config.ts`.
If you're using the plugin template, the dev folder is built out for you and the `samplePlugin` has already been installed in `dev/payload.config()`.

```
plugins: [
@@ -95,11 +95,11 @@ If you're using the plugin template, the dev folder is built out for you an
]
```

You can add to the `dev/payload.config.ts` and build out the dev project as needed to test your plugin.
You can add to the `dev/payload.config` and build out the dev project as needed to test your plugin.

When you're ready to start development, navigate into this folder with `cd dev`

And then start the project with `pnpm dev` and pull up `http://localhost:3000` in your browser.
And then start the project with `yarn dev` and pull up `http://localhost:3000` in your browser.

## Testing
@@ -112,7 +112,7 @@ Jest organizes tests into test suites and cases. We recommend creating tests bas
The plugin template provides a stubbed out test suite at `dev/plugin.spec.ts` which is ready to go - just add in your own test conditions and you're all set!

```
let payload: Payload
import payload from 'payload'

describe('Plugin tests', () => {
  // Example test to check for seeded data
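  // A sketch of a test case (assumed, not from the template itself); the
  // 'examples' collection slug is a placeholder.
  it('finds seeded documents', async () => {
    const result = await payload.find({ collection: 'examples' })
    expect(result.docs.length).toBeGreaterThan(0)
  })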
@@ -245,7 +245,7 @@ config.hooks = {
```

### Extending functions

Function properties cannot use spread syntax. The way to extend them is to execute the existing function if it exists and then run your additional functionality.

Here is an example extending the `onInit` property:
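A minimal sketch of that pattern (the `incomingConfig` name is an assumption; the example referenced above is not shown in this diff):

```ts
config.onInit = async (payload) => {
  // Run the existing onInit first, if the incoming config defined one
  if (incomingConfig.onInit) await incomingConfig.onInit(payload)

  // Then add your plugin's own initialization logic here
  payload.logger.info('Plugin initialized')
}
```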
@@ -285,11 +285,11 @@ For a better user experience, provide a way to disable the plugin without uninst
### Include tests in your GitHub CI workflow

If you've configured tests for your package, integrate them into your workflow to run the tests each time you commit to the plugin repository. Learn more about [how to configure tests into your GitHub CI workflow.](https://docs.github.com/en/actions/use-cases-and-examples/building-and-testing/building-and-testing-nodejs)
If you've configured tests for your package, integrate them into your workflow to run the tests each time you commit to the plugin repository. Learn more about [how to configure tests into your GitHub CI workflow.](https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs)

### Publish your finished plugin to npm
### Publish your finished plugin to NPM

The best way to share and allow others to use your plugin once it is complete is to publish an npm package. This process is straightforward and well documented, find out more about [creating and publishing a npm package here](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages/).
The best way to share and allow others to use your plugin once it is complete is to publish an NPM package. This process is straightforward and well documented, find out more about [creating and publishing a NPM package here](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages/).

### Add payload-plugin topic tag
@@ -6,7 +6,7 @@ desc: Easily build and manage forms from the Admin Panel. Send dynamic, personal
keywords: plugins, plugin, form, forms, form builder
---

[](https://www.npmjs.com/package/@payloadcms/plugin-form-builder)

This plugin allows you to build and manage custom forms directly within the [Admin Panel](../admin/overview). Instead of hard-coding a new form into your website or application every time you need one, admins can simply define the schema for each form they need on-the-fly, and your front-end can map over this schema, render its own UI components, and match your brand's design system.
@@ -33,7 +33,7 @@ Forms can be as simple or complex as you need, from a basic contact form, to a m
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-form-builder
@@ -6,7 +6,7 @@ desc: Nested documents in a parent, child, and sibling relationship.
keywords: plugins, nested, documents, parent, child, sibling, relationship
---

[](https://www.npmjs.com/package/@payloadcms/plugin-nested-docs)

This plugin allows you to easily nest the documents of your application inside of one another. It does so by adding a new `parent` field onto each of your documents that, when selected, attaches itself to the parent's tree. When you edit
@@ -44,7 +44,8 @@ but different parents.
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-nested-docs
@@ -6,7 +6,7 @@ desc: Automatically create redirects for your Payload application
keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine
---

[](https://www.npmjs.com/package/@payloadcms/plugin-redirects)

This plugin allows you to easily manage redirects for your application from within your [Admin Panel](../admin/overview). It does so by adding a `redirects` collection to your config that allows you to specify a redirect from one URL to another. Your front-end application can use this data to automatically redirect users to the correct page using proper HTTP status codes. This is useful for SEO, indexing, and search engine ranking when re-platforming or when changing your URL structure.
@@ -29,7 +29,7 @@ For example, if you have a page at `/about` and you want to change it to `/about
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-redirects
@@ -6,7 +6,7 @@ desc: Generates records of your documents that are extremely fast to search on.
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input
---

[](https://www.npmjs.com/package/@payloadcms/plugin-search)

This plugin generates records of your documents that are extremely fast to search on. It does so by creating a new `search` collection that is indexed in the database and then saving a static copy of each of your documents using only search-critical data. Search records are automatically created, synced, and deleted behind-the-scenes as you manage your application's documents.
@@ -37,7 +37,7 @@ This plugin is a great way to implement a fast, immersive search experience such
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-search
@@ -6,7 +6,7 @@ desc: Integrate Sentry error tracking into your Payload application
keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance
---

[](https://www.npmjs.com/package/@payloadcms/plugin-sentry)

This plugin allows you to integrate [Sentry](https://sentry.io/) seamlessly with your [Payload](https://github.com/payloadcms/payload) application.
@@ -36,7 +36,7 @@ This multi-faceted software offers a range of features that will help you manage
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-sentry
@@ -6,7 +6,7 @@ desc: Manage SEO metadata from your Payload admin
keywords: plugins, seo, meta, search, engine, ranking, google
---

[](https://www.npmjs.com/package/@payloadcms/plugin-seo)

This plugin allows you to easily manage SEO metadata for your application from within your [Admin Panel](../admin/overview). When enabled on your [Collections](../configuration/collections) and [Globals](../configuration/globals), it adds a new `meta` field group containing `title`, `description`, and `image` by default. Your front-end application can then use this data to render meta tags however your application requires. For example, you would inject a `title` tag into the `<head>` of your page using `meta.title` as its content.
@@ -34,7 +34,7 @@ To help you visualize what your page might look like in a search engine, a previ
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-seo
@@ -277,7 +277,7 @@ Tip: You can override the length rules by changing the minLength and maxLength p
All types can be directly imported:

```ts
import type {
import {
  PluginConfig,
  GenerateTitle,
  GenerateDescription
@@ -288,9 +288,9 @@ import type {
You can then pass the collections from your generated Payload types into the generation types, for example:

```ts
import type { Page } from './payload-types.ts';
import { Page } from './payload-types.ts';

import type { GenerateTitle } from '@payloadcms/plugin-seo/types';
import { GenerateTitle } from '@payloadcms/plugin-seo/types';

const generateTitle: GenerateTitle<Page> = async ({ doc, locale }) => {
  return `Website.com — ${doc?.title}`
@@ -6,7 +6,7 @@ desc: Easily accept payments with Stripe
keywords: plugins, stripe, payments, ecommerce
---

[](https://www.npmjs.com/package/@payloadcms/plugin-stripe)

With this plugin you can easily integrate [Stripe](https://stripe.com) into Payload. Simply provide your Stripe credentials and this plugin will open up a two-way communication channel between the two platforms. This enables you to easily sync data back and forth, as well as proxy the Stripe REST API through Payload's [Access Control](../access-control/overview). Use this plugin to completely offload billing to Stripe and retain full control over your application's data.
@@ -36,7 +36,7 @@ The beauty of this plugin is the entirety of your application's content and busi
## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):

```bash
pnpm add @payloadcms/plugin-stripe
@@ -43,9 +43,7 @@ But with a `depth` of `1`, the response might look like this:
To specify depth in the [Local API](../local-api/overview), you can use the `depth` option in your query:

```ts
import type { Payload } from 'payload'

const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    depth: 2, // highlight-line
@@ -19,9 +19,7 @@ Each of these APIs share the same underlying querying language, and fully suppor
To query your Documents, you can send any number of [Operators](#operators) through your request:

```ts
import type { Where } from 'payload'

const query: Where = {
const query = {
  color: {
    equals: 'blue',
  },
@@ -69,9 +67,7 @@ In addition to defining simple queries, you can join multiple queries together u
To join queries, use the `and` or `or` keys in your query object:

```ts
import type { Where } from 'payload'

const query: Where = {
const query = {
  or: [ // highlight-line
    {
      color: {
@@ -103,9 +99,7 @@ Written in plain English, if the above query were passed to a `find` operation,
When working with nested properties, which can happen when using relational fields, it is possible to use dot notation to access the nested property. For example, take a `Song` collection that has an `artists` field (`name: 'artists'`) related to an `Artists` collection. You can access a property within the `Artists` collection like so:

```js
import type { Where } from 'payload'

const query: Where = {
const query = {
  'artists.featured': {
    // nested property name to filter on
    exists: true, // operator to use and boolean value that needs to be true
@@ -122,9 +116,7 @@ Writing queries in Payload is simple and consistent across all APIs, with only m
The [Local API](../local-api/overview) supports the `find` operation that accepts a raw query object:

```ts
import type { Payload } from 'payload'

const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    where: {
@@ -165,20 +157,19 @@ For this reason, we recommend to use the extremely helpful and ubiquitous [`qs-e
```ts
import { stringify } from 'qs-esm'
import type { Where } from 'payload'

const query: Where = {
const query = {
  color: {
    equals: 'mint',
  },
  // This query could be much more complex
  // and qs-esm would handle it beautifully
  // and QS would handle it beautifully
}

const getPosts = async () => {
  const stringifiedQuery = stringify(
    {
      where: query, // ensure that `qs-esm` adds the `where` property, too!
      where: query, // ensure that `qs` adds the `where` property, too!
    },
    { addQueryPrefix: true },
  )
@@ -15,10 +15,8 @@ This is where Payload's `select` feature comes in. Here, you can define exactly
To specify `select` in the [Local API](../local-api/overview), you can use the `select` option in your query:

```ts
import type { Payload } from 'payload'

// Include mode
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    select: {
@@ -36,7 +34,7 @@ const getPosts = async (payload: Payload) => {
}

// Exclude mode
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    // Select everything except for array and group.number
@@ -75,9 +73,8 @@ For this reason, we recommend to use the extremely helpful and ubiquitous [`qs-e
```ts
import { stringify } from 'qs-esm'
import type { Where } from 'payload'

const select: Where = {
const select = {
  text: true,
  group: {
    number: true
@@ -119,6 +116,9 @@ Loading all of the page content, its related links, and everything else is going
```ts
import type { CollectionConfig } from 'payload'

import { lexicalEditor, LinkFeature } from '@payloadcms/richtext-lexical'
import { slateEditor } from '@payloadcms/richtext-slate'

// The TSlug generic can be passed to have type safety for `defaultPopulate`.
// If avoided, the `defaultPopulate` type resolves to `SelectType`.
export const Pages: CollectionConfig<'pages'> = {
@@ -144,9 +144,7 @@ Setting `defaultPopulate` will enforce that each time Payload performs a "popula
**Local API:**

```ts
import type { Payload } from 'payload'

const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    populate: {
@@ -20,9 +20,7 @@ Because sorting is handled by the database, the field cannot be a [Virtual Field
To sort Documents in the [Local API](../local-api/overview), you can use the `sort` option in your query:

```ts
import type { Payload } from 'payload'

const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    sort: '-createdAt', // highlight-line
@@ -35,9 +33,7 @@ const getPosts = async (payload: Payload) => {
To sort by multiple fields, you can use the `sort` option with fields in an array:

```ts
import type { Payload } from 'payload'

const getPosts = async (payload: Payload) => {
const getPosts = async () => {
  const posts = await payload.find({
    collection: 'posts',
    sort: ['priority', '-createdAt'], // highlight-line
@@ -213,7 +213,7 @@ export interface Collection1 {
Now that your types have been generated, Payload's Local API will be typed. It is common for users to want to use these types in their frontend code; we recommend generating them with Payload and then copying the file over to your frontend codebase. This is the simplest way to get your types into your frontend codebase.

### Adding an npm script
### Adding an NPM script

<Banner type="warning">
  <strong>Important</strong>
@@ -221,9 +221,9 @@ Now that your types have been generated, payloads local API will now be typed. I
Payload needs to be able to find your config to generate your types.
</Banner>

Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable. If this applies to you, you can create an npm script to make generating your types easier.
Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable. If this applies to you, you can create an NPM script to make generating your types easier.

To add an npm script to generate your types and show Payload where to find your config, open your `package.json` and update the `scripts` property to the following:
To add an NPM script to generate your types and show Payload where to find your config, open your `package.json` and update the `scripts` property to the following:

```
{
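  // A sketch of the elided script (the config path is an assumption; adjust it
  // to wherever your Payload Config lives):
  "scripts": {
    "generate:types": "PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types"
  }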
@@ -233,4 +233,4 @@ To add an npm script to generate your types and show Payload where to find your
}
```

Now you can run `pnpm generate:types` to easily generate your types.
Now you can run `yarn generate:types` to easily generate your types.
@@ -1,6 +1,6 @@
{
  "name": "payload-monorepo",
  "version": "3.6.0",
  "version": "3.5.0",
  "private": true,
  "type": "module",
  "scripts": {
@@ -1,6 +1,6 @@
{
  "name": "create-payload-app",
  "version": "3.6.0",
  "version": "3.5.0",
  "homepage": "https://payloadcms.com",
  "repository": {
    "type": "git",
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-mongodb",
  "version": "3.6.0",
  "version": "3.5.0",
  "description": "The officially supported MongoDB database adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -10,11 +10,11 @@ const migrationTemplate = ({ downSQL, imports, upSQL }: MigrationTemplateArgs):
  MigrateUpArgs,
} from '@payloadcms/db-mongodb'
${imports ?? ''}
export async function up({ payload, req, session }: MigrateUpArgs): Promise<void> {
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${upSQL ?? ` // Migration code`}
}

export async function down({ payload, req, session }: MigrateDownArgs): Promise<void> {
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${downSQL ?? ` // Migration code`}
}
`
@@ -1,4 +1,3 @@
import type { ClientSession } from 'mongodb'
import type {
  AggregatePaginateModel,
  IndexDefinition,
@@ -111,65 +110,5 @@ export type FieldToSchemaMap<TSchema> = {
  upload: FieldGeneratorFunction<TSchema, UploadField>
}

export type MigrateUpArgs = {
  /**
   * The Payload instance that you can use to execute Local API methods
   * To use the current transaction you must pass `req` to arguments
   * @example
   * ```ts
   * import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
   *
   * export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
   *   const posts = await payload.find({ collection: 'posts', req })
   * }
   * ```
   */
  payload: Payload
  /**
   * The `PayloadRequest` object that contains the current transaction
   */
  req: PayloadRequest
  /**
   * The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
   * @example
   * ```ts
   * import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
   *
   * export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
   *   const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
   * }
   * ```
   */
  session?: ClientSession
}
export type MigrateDownArgs = {
  /**
   * The Payload instance that you can use to execute Local API methods
   * To use the current transaction you must pass `req` to arguments
   * @example
   * ```ts
   * import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
   *
   * export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
   *   const posts = await payload.find({ collection: 'posts', req })
   * }
   * ```
   */
  payload: Payload
  /**
   * The `PayloadRequest` object that contains the current transaction
   */
  req: PayloadRequest
  /**
   * The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
   * @example
   * ```ts
   * import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
   *
   * export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
   *   const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
   * }
   * ```
   */
  session?: ClientSession
}
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-postgres",
  "version": "3.6.0",
  "version": "3.5.0",
  "description": "The officially supported Postgres database adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -1 +1 @@
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'
@@ -2,7 +2,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
  beginTransaction,
  buildCreateMigration,
  commitTransaction,
  count,
  countGlobalVersions,
@@ -40,15 +39,18 @@ import {
  createDatabase,
  createExtensions,
  createJSONQuery,
  createMigration,
  defaultDrizzleSnapshot,
  deleteWhere,
  dropDatabase,
  execute,
  getMigrationTemplate,
  init,
  insert,
  requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import path from 'path'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
@@ -57,6 +59,7 @@ import type { Args, PostgresAdapter } from './types.js'
import { connect } from './connect.js'

const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)

export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter> {
  const postgresIDType = args.idType || 'serial'
@@ -90,13 +93,9 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
    beforeSchemaInit: args.beforeSchemaInit ?? [],
    createDatabase,
    createExtensions,
    createMigration: buildCreateMigration({
      executeMethod: 'execute',
      filename,
      sanitizeStatements({ sqlExecute, statements }) {
        return `${sqlExecute}\n ${statements.join('\n')}\`)`
      },
    }),
    createMigration(args) {
      return createMigration.bind(this)({ ...args, dirname })
    },
    defaultDrizzleSnapshot,
    disableCreateDatabase: args.disableCreateDatabase ?? false,
    drizzle: undefined,
@@ -106,6 +105,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
      json: true,
    },
    fieldConstraints: {},
    getMigrationTemplate,
    idType: postgresIDType,
    initializing,
    localesSuffix: args.localesSuffix || '_locales',
@@ -1,14 +1,15 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'

import type { TransactionPg } from '../../../../types.js'
import type { BasePostgresAdapter } from '../../../types.js'
import { upsertRow } from '@payloadcms/drizzle'

import type { PostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'

import { upsertRow } from '../../../../upsertRow/index.js'
import { traverseFields } from './traverseFields.js'

type Args = {
  adapter: BasePostgresAdapter
  adapter: PostgresAdapter
  collectionSlug?: string
  db: TransactionPg
  debug: boolean
@@ -1,19 +1,22 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
import fs from 'fs'
|
||||
import { createRequire } from 'module'
|
||||
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { TransactionPg } from '../../../types.js'
|
||||
import type { BasePostgresAdapter } from '../../types.js'
|
||||
import type { PostgresAdapter } from '../../types.js'
|
||||
import type { PathsToQuery } from './types.js'
|
||||
|
||||
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
|
||||
import { migrateRelationships } from './migrateRelationships.js'
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
type Args = {
|
||||
debug?: boolean
|
||||
payload: Payload
|
||||
@@ -35,13 +38,13 @@ type Args = {
|
||||
* @param req
|
||||
*/
|
||||
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
const adapter = payload.db as unknown as BasePostgresAdapter
|
||||
const adapter = payload.db as unknown as PostgresAdapter
|
||||
const db = adapter.sessions[await req.transactionID].db as TransactionPg
|
||||
const dir = payload.db.migrationDir
|
||||
|
||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
|
||||
|
||||
// Get the previous migration snapshot
|
||||
const previousSnapshot = fs
|
||||
@@ -56,14 +59,10 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
)
|
||||
}
|
||||
|
||||
let drizzleJsonBefore = JSON.parse(
|
||||
const drizzleJsonBefore = JSON.parse(
|
||||
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
|
||||
) as DrizzleSnapshotJSON
|
||||
|
||||
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
|
||||
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
|
||||
}
|
||||
|
||||
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
|
||||
if (!generatedSQL.length) {
|
||||
@@ -1,15 +1,15 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
|
||||
import type { TransactionPg } from '../../../types.js'
|
||||
import type { BasePostgresAdapter } from '../../types.js'
|
||||
import type { PostgresAdapter } from '../../types.js'
|
||||
import type { DocsToResave, PathsToQuery } from './types.js'
|
||||
|
||||
import { fetchAndResave } from './fetchAndResave/index.js'
|
||||
|
||||
type Args = {
|
||||
adapter: BasePostgresAdapter
|
||||
adapter: PostgresAdapter
|
||||
collectionSlug?: string
|
||||
db: TransactionPg
|
||||
debug: boolean
|
||||
@@ -1,13 +1,13 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { FlattenedField, Payload } from 'payload'
|
||||
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { TransactionPg } from '../../../types.js'
|
||||
import type { BasePostgresAdapter } from '../../types.js'
|
||||
import type { PostgresAdapter } from '../../types.js'
|
||||
import type { PathsToQuery } from './types.js'
|
||||
|
||||
type Args = {
|
||||
adapter: BasePostgresAdapter
|
||||
adapter: PostgresAdapter
|
||||
collectionSlug?: string
|
||||
columnPrefix: string
|
||||
db: TransactionPg
|
||||
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-sqlite",
"version": "3.6.0",
"version": "3.5.0",
"description": "The officially supported SQLite database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

123
packages/db-sqlite/src/createMigration.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { CreateMigration } from 'payload'
|
||||
|
||||
import fs from 'fs'
|
||||
import { createRequire } from 'module'
|
||||
import path from 'path'
|
||||
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
|
||||
import prompts from 'prompts'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
import type { SQLiteAdapter } from './types.js'
|
||||
|
||||
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
|
||||
import { getMigrationTemplate } from './getMigrationTemplate.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
export const createMigration: CreateMigration = async function createMigration(
|
||||
this: SQLiteAdapter,
|
||||
{ file, migrationName, payload, skipEmpty },
|
||||
) {
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
const dirname = path.dirname(filename)
|
||||
const dir = payload.db.migrationDir
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir)
|
||||
}
|
||||
const { generateSQLiteDrizzleJson, generateSQLiteMigration } = require('drizzle-kit/api')
|
||||
const drizzleJsonAfter = await generateSQLiteDrizzleJson(this.schema)
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '')
|
||||
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
|
||||
let imports: string = ''
|
||||
let downSQL: string
|
||||
let upSQL: string
|
||||
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
|
||||
dirname,
|
||||
file,
|
||||
migrationName,
|
||||
payload,
|
||||
}))
|
||||
|
||||
const timestamp = `${formattedDate}_${formattedTime}`
|
||||
|
||||
const name = migrationName || file?.split('/').slice(2).join('/')
|
||||
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
|
||||
|
||||
const filePath = `${dir}/${fileName}`
|
||||
|
||||
let drizzleJsonBefore = defaultDrizzleSnapshot as any
|
||||
|
||||
if (!upSQL) {
|
||||
// Get latest migration snapshot
|
||||
const latestSnapshot = fs
|
||||
.readdirSync(dir)
|
||||
.filter((file) => file.endsWith('.json'))
|
||||
.sort()
|
||||
.reverse()?.[0]
|
||||
|
||||
if (latestSnapshot) {
|
||||
drizzleJsonBefore = JSON.parse(
|
||||
fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),
|
||||
) as DrizzleSnapshotJSON
|
||||
}
|
||||
|
||||
const sqlStatementsUp = await generateSQLiteMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
const sqlStatementsDown = await generateSQLiteMigration(drizzleJsonAfter, drizzleJsonBefore)
|
||||
// need to create tables as separate statements
|
||||
const sqlExecute = 'await payload.db.drizzle.run(sql`'
|
||||
|
||||
if (sqlStatementsUp?.length) {
|
||||
upSQL = sqlStatementsUp
|
||||
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
|
||||
.join('\n')
|
||||
}
|
||||
if (sqlStatementsDown?.length) {
|
||||
downSQL = sqlStatementsDown
|
||||
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
|
||||
.join('\n')
|
||||
}
|
||||
|
||||
if (!upSQL?.length && !downSQL?.length) {
|
||||
if (skipEmpty) {
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const { confirm: shouldCreateBlankMigration } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: 'No schema changes detected. Would you like to create a blank migration file?',
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!shouldCreateBlankMigration) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
// write schema
|
||||
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
|
||||
}
|
||||
|
||||
// write migration
|
||||
fs.writeFileSync(
|
||||
`${filePath}.ts`,
|
||||
getMigrationTemplate({
|
||||
downSQL: downSQL || ` // Migration code`,
|
||||
imports,
|
||||
upSQL: upSQL || ` // Migration code`,
|
||||
}),
|
||||
)
|
||||
|
||||
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
|
||||
|
||||
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
|
||||
}
|
||||
22
packages/db-sqlite/src/getMigrationTemplate.ts
Normal file
@@ -0,0 +1,22 @@
import type { MigrationTemplateArgs } from 'payload'

export const indent = (text: string) =>
text
.split('\n')
.map((line) => `  ${line}`)
.join('\n')

export const getMigrationTemplate = ({
downSQL,
imports,
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`
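For context, not part of the diff: combined with the createMigration logic above, a migration file produced by this template would look roughly like the sketch below. Only the import line and the up/down signatures come from the template; the SQL statement is invented.

import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-sqlite'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // One wrapped statement per generated SQLite statement
  await payload.db.drizzle.run(sql`ALTER TABLE posts ADD COLUMN category_id integer;`)
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  await payload.db.drizzle.run(sql`ALTER TABLE posts DROP COLUMN category_id;`)
}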
@@ -3,7 +3,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
|
||||
|
||||
import {
|
||||
beginTransaction,
|
||||
buildCreateMigration,
|
||||
commitTransaction,
|
||||
count,
|
||||
countGlobalVersions,
|
||||
@@ -38,7 +37,6 @@ import {
|
||||
} from '@payloadcms/drizzle'
|
||||
import { like } from 'drizzle-orm'
|
||||
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
import type { Args, SQLiteAdapter } from './types.js'
|
||||
|
||||
@@ -46,10 +44,12 @@ import { connect } from './connect.js'
|
||||
import { countDistinct } from './countDistinct.js'
|
||||
import { convertPathToJSONTraversal } from './createJSONQuery/convertPathToJSONTraversal.js'
|
||||
import { createJSONQuery } from './createJSONQuery/index.js'
|
||||
import { createMigration } from './createMigration.js'
|
||||
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
|
||||
import { deleteWhere } from './deleteWhere.js'
|
||||
import { dropDatabase } from './dropDatabase.js'
|
||||
import { execute } from './execute.js'
|
||||
import { getMigrationTemplate } from './getMigrationTemplate.js'
|
||||
import { init } from './init.js'
|
||||
import { insert } from './insert.js'
|
||||
import { requireDrizzleKit } from './requireDrizzleKit.js'
|
||||
@@ -58,8 +58,6 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types.js'
|
||||
|
||||
export { sql } from 'drizzle-orm'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
|
||||
export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
|
||||
const postgresIDType = args.idType || 'serial'
|
||||
const payloadIDType = postgresIDType === 'serial' ? 'number' : 'text'
|
||||
@@ -93,6 +91,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
|
||||
json: true,
|
||||
},
|
||||
fieldConstraints: {},
|
||||
getMigrationTemplate,
|
||||
idType: postgresIDType,
|
||||
initializing,
|
||||
localesSuffix: args.localesSuffix || '_locales',
|
||||
@@ -123,15 +122,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
|
||||
createGlobal,
|
||||
createGlobalVersion,
|
||||
createJSONQuery,
|
||||
createMigration: buildCreateMigration({
|
||||
executeMethod: 'run',
|
||||
filename,
|
||||
sanitizeStatements({ sqlExecute, statements }) {
|
||||
return statements
|
||||
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
|
||||
.join('\n')
|
||||
},
|
||||
}),
|
||||
createMigration,
|
||||
createVersion,
|
||||
defaultIDType: payloadIDType,
|
||||
deleteMany,
|
||||
|
||||
@@ -1,19 +1,15 @@
|
||||
import type { RequireDrizzleKit } from '@payloadcms/drizzle/types'
|
||||
|
||||
import { createRequire } from 'module'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
/**
|
||||
* Dynamically requires the `drizzle-kit` package to access the `generateSQLiteDrizzleJson` and `pushSQLiteSchema` functions and exports them generically to call them from @payloadcms/drizzle.
|
||||
*/
|
||||
export const requireDrizzleKit: RequireDrizzleKit = () => {
|
||||
const {
|
||||
generateSQLiteDrizzleJson,
|
||||
generateSQLiteMigration,
|
||||
pushSQLiteSchema,
|
||||
generateSQLiteDrizzleJson: generateDrizzleJson,
|
||||
pushSQLiteSchema: pushSchema,
|
||||
} = require('drizzle-kit/api')
|
||||
|
||||
return {
|
||||
generateDrizzleJson: generateSQLiteDrizzleJson,
|
||||
generateMigration: generateSQLiteMigration,
|
||||
pushSchema: pushSQLiteSchema,
|
||||
}
|
||||
return { generateDrizzleJson, pushSchema }
|
||||
}
|
||||
|
||||
@@ -154,65 +154,11 @@ export type SQLiteAdapter = {
|
||||
export type IDType = 'integer' | 'numeric' | 'text'
|
||||
|
||||
export type MigrateUpArgs = {
|
||||
/**
|
||||
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'
|
||||
*
|
||||
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
db: LibSQLDatabase
|
||||
/**
|
||||
* The Payload instance that you can use to execute Local API methods
|
||||
* To use the current transaction you must pass `req` to arguments
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateUpArgs } from '@payloadcms/db-sqlite'
|
||||
*
|
||||
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
* const posts = await payload.find({ collection: 'posts', req })
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
payload: Payload
|
||||
/**
|
||||
* The `PayloadRequest` object that contains the current transaction
|
||||
*/
|
||||
req: PayloadRequest
|
||||
}
|
||||
export type MigrateDownArgs = {
|
||||
/**
|
||||
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
|
||||
*
|
||||
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
|
||||
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
db: LibSQLDatabase
|
||||
/**
|
||||
* The Payload instance that you can use to execute Local API methods
|
||||
* To use the current transaction you must pass `req` to arguments
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateDownArgs } from '@payloadcms/db-sqlite'
|
||||
*
|
||||
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
|
||||
* const posts = await payload.find({ collection: 'posts', req })
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
payload: Payload
|
||||
/**
|
||||
* The `PayloadRequest` object that contains the current transaction
|
||||
*/
|
||||
req: PayloadRequest
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.6.0",
"version": "3.5.0",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

@@ -1 +1 @@
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'
@@ -2,7 +2,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
|
||||
|
||||
import {
|
||||
beginTransaction,
|
||||
buildCreateMigration,
|
||||
commitTransaction,
|
||||
count,
|
||||
countGlobalVersions,
|
||||
@@ -40,15 +39,18 @@ import {
|
||||
createDatabase,
|
||||
createExtensions,
|
||||
createJSONQuery,
|
||||
createMigration,
|
||||
defaultDrizzleSnapshot,
|
||||
deleteWhere,
|
||||
dropDatabase,
|
||||
execute,
|
||||
getMigrationTemplate,
|
||||
init,
|
||||
insert,
|
||||
requireDrizzleKit,
|
||||
} from '@payloadcms/drizzle/postgres'
|
||||
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
|
||||
import path from 'path'
|
||||
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
@@ -57,6 +59,7 @@ import type { Args, VercelPostgresAdapter } from './types.js'
|
||||
import { connect } from './connect.js'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
const dirname = path.dirname(filename)
|
||||
|
||||
export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
|
||||
const postgresIDType = args.idType || 'serial'
|
||||
@@ -99,6 +102,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
|
||||
json: true,
|
||||
},
|
||||
fieldConstraints: {},
|
||||
getMigrationTemplate,
|
||||
idType: postgresIDType,
|
||||
indexes: new Set<string>(),
|
||||
initializing,
|
||||
@@ -134,13 +138,9 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
|
||||
createGlobal,
|
||||
createGlobalVersion,
|
||||
createJSONQuery,
|
||||
createMigration: buildCreateMigration({
|
||||
executeMethod: 'execute',
|
||||
filename,
|
||||
sanitizeStatements({ sqlExecute, statements }) {
|
||||
return `${sqlExecute}\n ${statements.join('\n')}\`)`
|
||||
},
|
||||
}),
|
||||
createMigration(args) {
|
||||
return createMigration.bind(this)({ ...args, dirname })
|
||||
},
|
||||
createVersion,
|
||||
defaultIDType: payloadIDType,
|
||||
deleteMany,
|
||||
|
||||
@@ -0,0 +1,237 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { upsertRow } from '@payloadcms/drizzle'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../../types.js'
|
||||
import type { DocsToResave } from '../types.js'
|
||||
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
type Args = {
|
||||
adapter: VercelPostgresAdapter
|
||||
collectionSlug?: string
|
||||
db: TransactionPg
|
||||
debug: boolean
|
||||
docsToResave: DocsToResave
|
||||
fields: FlattenedField[]
|
||||
globalSlug?: string
|
||||
isVersions: boolean
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
tableName: string
|
||||
}
|
||||
|
||||
export const fetchAndResave = async ({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
docsToResave,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
}: Args) => {
|
||||
for (const [id, rows] of Object.entries(docsToResave)) {
|
||||
if (collectionSlug) {
|
||||
const collectionConfig = payload.collections[collectionSlug].config
|
||||
|
||||
if (collectionConfig) {
|
||||
if (isVersions) {
|
||||
const doc = await payload.findVersionByID({
|
||||
id,
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" version with ID ${id} will be migrated`,
|
||||
)
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(
|
||||
`"${collectionConfig.slug}" version with ID ${doc.id} FAILED TO MIGRATE`,
|
||||
)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`"${collectionConfig.slug}" version with ID ${doc.id} migrated successfully!`,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
const doc = await payload.findByID({
|
||||
id,
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} will be migrated`,
|
||||
)
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} has FAILED TO MIGRATE`,
|
||||
)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`The collection "${collectionConfig.slug}" with ID ${doc.id} has migrated successfully!`,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (globalSlug) {
|
||||
const globalConfig = payload.config.globals?.find((global) => global.slug === globalSlug)
|
||||
|
||||
if (globalConfig) {
|
||||
if (isVersions) {
|
||||
const { docs } = await payload.findGlobalVersions({
|
||||
slug: globalSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
limit: 0,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(`${docs.length} global "${globalSlug}" versions will be migrated`)
|
||||
}
|
||||
|
||||
for (const doc of docs) {
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
id: doc.id,
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(`"${globalSlug}" version with ID ${doc.id} FAILED TO MIGRATE`)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(
|
||||
`"${globalSlug}" version with ID ${doc.id} migrated successfully!`,
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const doc = await payload.findGlobal({
|
||||
slug: globalSlug,
|
||||
depth: 0,
|
||||
fallbackLocale: null,
|
||||
locale: 'all',
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
traverseFields({
|
||||
doc,
|
||||
fields,
|
||||
path: '',
|
||||
rows,
|
||||
})
|
||||
|
||||
try {
|
||||
await upsertRow({
|
||||
adapter,
|
||||
data: doc,
|
||||
db,
|
||||
fields,
|
||||
ignoreResult: true,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
} catch (err) {
|
||||
payload.logger.error(`The global "${globalSlug}" has FAILED TO MIGRATE`)
|
||||
|
||||
throw err
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info(`The global "${globalSlug}" has migrated successfully!`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,171 @@
|
||||
import type { FlattenedField } from 'payload'
|
||||
|
||||
type Args = {
|
||||
doc: Record<string, unknown>
|
||||
fields: FlattenedField[]
|
||||
locale?: string
|
||||
path: string
|
||||
rows: Record<string, unknown>[]
|
||||
}
|
||||
|
||||
export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
fields.forEach((field) => {
|
||||
switch (field.type) {
|
||||
case 'array': {
|
||||
const rowData = doc?.[field.name]
|
||||
|
||||
if (field.localized && typeof rowData === 'object' && rowData !== null) {
|
||||
Object.entries(rowData).forEach(([locale, localeRows]) => {
|
||||
if (Array.isArray(localeRows)) {
|
||||
localeRows.forEach((row, i) => {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: field.flattenedFields,
|
||||
locale,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (Array.isArray(rowData)) {
|
||||
rowData.forEach((row, i) => {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: field.flattenedFields,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
const rowData = doc?.[field.name]
|
||||
|
||||
if (field.localized && typeof rowData === 'object' && rowData !== null) {
|
||||
Object.entries(rowData).forEach(([locale, localeRows]) => {
|
||||
if (Array.isArray(localeRows)) {
|
||||
localeRows.forEach((row, i) => {
|
||||
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: matchedBlock.flattenedFields,
|
||||
locale,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (Array.isArray(rowData)) {
|
||||
rowData.forEach((row, i) => {
|
||||
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
doc: row as Record<string, unknown>,
|
||||
fields: matchedBlock.flattenedFields,
|
||||
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'group':
|
||||
case 'tab': {
|
||||
const newPath = `${path ? `${path}.` : ''}${field.name}`
|
||||
const newDoc = doc?.[field.name]
|
||||
|
||||
if (typeof newDoc === 'object' && newDoc !== null) {
|
||||
if (field.localized) {
|
||||
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
|
||||
return traverseFields({
|
||||
doc: localeDoc,
|
||||
fields: field.flattenedFields,
|
||||
locale,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
return traverseFields({
|
||||
doc: newDoc as Record<string, unknown>,
|
||||
fields: field.flattenedFields,
|
||||
path: newPath,
|
||||
rows,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'relationship':
|
||||
// falls through
|
||||
case 'upload': {
|
||||
if (typeof field.relationTo === 'string') {
|
||||
if (field.type === 'upload' || !field.hasMany) {
|
||||
const relationshipPath = `${path ? `${path}.` : ''}${field.name}`
|
||||
|
||||
if (field.localized) {
|
||||
const matchedRelationshipsWithLocales = rows.filter(
|
||||
(row) => row.path === relationshipPath,
|
||||
)
|
||||
|
||||
if (matchedRelationshipsWithLocales.length && !doc[field.name]) {
|
||||
doc[field.name] = {}
|
||||
}
|
||||
|
||||
const newDoc = doc[field.name] as Record<string, unknown>
|
||||
|
||||
matchedRelationshipsWithLocales.forEach((localeRow) => {
|
||||
if (typeof localeRow.locale === 'string') {
|
||||
const [, id] = Object.entries(localeRow).find(
|
||||
([key, val]) =>
|
||||
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
|
||||
)
|
||||
|
||||
newDoc[localeRow.locale] = id
|
||||
}
|
||||
})
|
||||
} else {
|
||||
const matchedRelationship = rows.find((row) => {
|
||||
const matchesPath = row.path === relationshipPath
|
||||
|
||||
if (locale) {
|
||||
return matchesPath && locale === row.locale
|
||||
}
|
||||
|
||||
return row.path === relationshipPath
|
||||
})
|
||||
|
||||
if (matchedRelationship) {
|
||||
const [, id] = Object.entries(matchedRelationship).find(
|
||||
([key, val]) =>
|
||||
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
|
||||
)
|
||||
|
||||
doc[field.name] = id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
export type Groups =
|
||||
| 'addColumn'
|
||||
| 'addConstraint'
|
||||
| 'dropColumn'
|
||||
| 'dropConstraint'
|
||||
| 'dropTable'
|
||||
| 'notNull'
|
||||
|
||||
/**
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
* @param sql
|
||||
*/
|
||||
function convertAddColumnToAlterColumn(sql) {
|
||||
// Regular expression to match the ADD COLUMN statement with its constraints
|
||||
const regex = /ALTER TABLE ("[^"]+") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
|
||||
|
||||
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
||||
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
|
||||
}
|
||||
|
||||
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
||||
const groups = {
|
||||
addColumn: 'ADD COLUMN',
|
||||
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
|
||||
addConstraint: 'ADD CONSTRAINT',
|
||||
//example:
|
||||
// DO $$ BEGIN
|
||||
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
// EXCEPTION
|
||||
// WHEN duplicate_object THEN null;
|
||||
// END $$;
|
||||
|
||||
dropColumn: 'DROP COLUMN',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
|
||||
dropConstraint: 'DROP CONSTRAINT',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
|
||||
dropTable: 'DROP TABLE',
|
||||
// example: DROP TABLE "pages_rels";
|
||||
|
||||
notNull: 'NOT NULL',
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
}
|
||||
|
||||
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
||||
result[group] = []
|
||||
return result
|
||||
}, {}) as Record<Groups, string[]>
|
||||
|
||||
for (const line of list) {
|
||||
Object.entries(groups).some(([key, value]) => {
|
||||
if (line.endsWith('NOT NULL;')) {
|
||||
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
// becomes two separate statements:
|
||||
// 1. ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;
|
||||
// 2. ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
result.addColumn.push(line.replace(' NOT NULL;', ';'))
|
||||
result.notNull.push(convertAddColumnToAlterColumn(line))
|
||||
return true
|
||||
}
|
||||
if (line.includes(value)) {
|
||||
result[key].push(line)
|
||||
return true
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
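A short usage sketch, not part of the diff, showing how groupUpSQLStatements buckets statements. The input statements are the same examples used in the comments above; the resulting arrays are traced from the code as written.

const grouped = groupUpSQLStatements([
  'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;',
  'ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";',
  'DROP TABLE "pages_rels";',
])

// grouped.addColumn  -> ['ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;']
// grouped.notNull    -> ['ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;']
// grouped.dropColumn -> ['ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";']
// grouped.dropTable  -> ['DROP TABLE "pages_rels";']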
@@ -0,0 +1,279 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
import fs from 'fs'
|
||||
import { createRequire } from 'module'
|
||||
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../types.js'
|
||||
import type { PathsToQuery } from './types.js'
|
||||
|
||||
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
|
||||
import { migrateRelationships } from './migrateRelationships.js'
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
type Args = {
|
||||
debug?: boolean
|
||||
payload: Payload
|
||||
req?: Partial<PayloadRequest>
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves upload and relationship columns from the join table and into the tables while moving data
|
||||
* This is done in the following order:
|
||||
* ADD COLUMNs
|
||||
* -- manipulate data to move relationships to new columns
|
||||
* ADD CONSTRAINTs
|
||||
* NOT NULLs
|
||||
* DROP TABLEs
|
||||
* DROP CONSTRAINTs
|
||||
* DROP COLUMNs
|
||||
* @param debug
|
||||
* @param payload
|
||||
* @param req
|
||||
*/
|
||||
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
const adapter = payload.db as unknown as VercelPostgresAdapter
|
||||
const db = adapter.sessions[await req.transactionID].db as TransactionPg
|
||||
const dir = payload.db.migrationDir
|
||||
|
||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
|
||||
|
||||
// Get the previous migration snapshot
|
||||
const previousSnapshot = fs
|
||||
.readdirSync(dir)
|
||||
.filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))
|
||||
.sort()
|
||||
.reverse()?.[0]
|
||||
|
||||
if (!previousSnapshot) {
|
||||
throw new Error(
|
||||
`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`,
|
||||
)
|
||||
}
|
||||
|
||||
const drizzleJsonBefore = JSON.parse(
|
||||
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
|
||||
) as DrizzleSnapshotJSON
|
||||
|
||||
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
|
||||
if (!generatedSQL.length) {
|
||||
payload.logger.info(`No schema changes needed.`)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
||||
|
||||
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
|
||||
payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
|
||||
for (const collection of payload.config.collections) {
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: false,
|
||||
fields: collection.flattenedFields,
|
||||
isVersions: false,
|
||||
newTableName: tableName,
|
||||
parentTableName: tableName,
|
||||
path: '',
|
||||
pathsToQuery,
|
||||
payload,
|
||||
rootTableName: tableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
db,
|
||||
debug,
|
||||
fields: collection.flattenedFields,
|
||||
isVersions: false,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
if (collection.versions) {
|
||||
const versionsTableName = adapter.tableNameMap.get(
|
||||
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
|
||||
)
|
||||
|
||||
const versionFields = buildVersionCollectionFields(payload.config, collection, true)
|
||||
const versionPathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: true,
|
||||
fields: versionFields,
|
||||
isVersions: true,
|
||||
newTableName: versionsTableName,
|
||||
parentTableName: versionsTableName,
|
||||
path: '',
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
rootTableName: versionsTableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
collectionSlug: collection.slug,
|
||||
db,
|
||||
debug,
|
||||
fields: versionFields,
|
||||
isVersions: true,
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName: versionsTableName,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for (const global of payload.config.globals) {
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))
|
||||
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: false,
|
||||
fields: global.flattenedFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: false,
|
||||
newTableName: tableName,
|
||||
parentTableName: tableName,
|
||||
path: '',
|
||||
pathsToQuery,
|
||||
payload,
|
||||
rootTableName: tableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
db,
|
||||
debug,
|
||||
fields: global.flattenedFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: false,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
if (global.versions) {
|
||||
const versionsTableName = adapter.tableNameMap.get(
|
||||
`_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,
|
||||
)
|
||||
|
||||
const versionFields = buildVersionGlobalFields(payload.config, global, true)
|
||||
|
||||
const versionPathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
traverseFields({
|
||||
adapter,
|
||||
columnPrefix: '',
|
||||
db,
|
||||
disableNotNull: true,
|
||||
fields: versionFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: true,
|
||||
newTableName: versionsTableName,
|
||||
parentTableName: versionsTableName,
|
||||
path: '',
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
rootTableName: versionsTableName,
|
||||
})
|
||||
|
||||
await migrateRelationships({
|
||||
adapter,
|
||||
db,
|
||||
debug,
|
||||
fields: versionFields,
|
||||
globalSlug: global.slug,
|
||||
isVersions: true,
|
||||
pathsToQuery: versionPathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName: versionsTableName,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ADD CONSTRAINT
|
||||
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('ADDING CONSTRAINTS')
|
||||
payload.logger.info(addConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addConstraintsStatement))
|
||||
|
||||
// NOT NULL
|
||||
const notNullStatements = sqlUpStatements.notNull.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('NOT NULL CONSTRAINTS')
|
||||
payload.logger.info(notNullStatements)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(notNullStatements))
|
||||
|
||||
// DROP TABLE
|
||||
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING TABLES')
|
||||
payload.logger.info(dropTablesStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropTablesStatement))
|
||||
|
||||
// DROP CONSTRAINT
|
||||
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING CONSTRAINTS')
|
||||
payload.logger.info(dropConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropConstraintsStatement))
|
||||
|
||||
// DROP COLUMN
|
||||
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING COLUMNS')
|
||||
payload.logger.info(dropColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropColumnsStatement))
|
||||
}
|
||||
@@ -0,0 +1,109 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
|
||||
|
||||
import { sql } from 'drizzle-orm'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../types.js'
|
||||
import type { DocsToResave, PathsToQuery } from './types.js'
|
||||
|
||||
import { fetchAndResave } from './fetchAndResave/index.js'
|
||||
|
||||
type Args = {
|
||||
adapter: VercelPostgresAdapter
|
||||
collectionSlug?: string
|
||||
db: TransactionPg
|
||||
debug: boolean
|
||||
fields: FlattenedField[]
|
||||
globalSlug?: string
|
||||
isVersions: boolean
|
||||
pathsToQuery: PathsToQuery
|
||||
payload: Payload
|
||||
req?: Partial<PayloadRequest>
|
||||
tableName: string
|
||||
}
|
||||
|
||||
export const migrateRelationships = async ({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
pathsToQuery,
|
||||
payload,
|
||||
req,
|
||||
tableName,
|
||||
}: Args) => {
|
||||
if (pathsToQuery.size === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
let offset = 0
|
||||
|
||||
let paginationResult
|
||||
|
||||
const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
|
||||
return (statement += `
|
||||
"${tableName}${adapter.relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
|
||||
`)
|
||||
}, '')
|
||||
|
||||
while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {
|
||||
const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE
|
||||
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
|
||||
`
|
||||
|
||||
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
|
||||
|
||||
if (paginationResult.rows.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
offset += 1
|
||||
|
||||
const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE
|
||||
(${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});
|
||||
`
|
||||
if (debug) {
|
||||
payload.logger.info('FINDING ROWS TO MIGRATE')
|
||||
payload.logger.info(statement)
|
||||
}
|
||||
|
||||
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
|
||||
|
||||
const docsToResave: DocsToResave = {}
|
||||
|
||||
result.rows.forEach((row) => {
|
||||
const parentID = row.parent_id
|
||||
|
||||
if (typeof parentID === 'string' || typeof parentID === 'number') {
|
||||
if (!docsToResave[parentID]) {
|
||||
docsToResave[parentID] = []
|
||||
}
|
||||
docsToResave[parentID].push(row)
|
||||
}
|
||||
})
|
||||
|
||||
await fetchAndResave({
|
||||
adapter,
|
||||
collectionSlug,
|
||||
db,
|
||||
debug,
|
||||
docsToResave,
|
||||
fields,
|
||||
globalSlug,
|
||||
isVersions,
|
||||
payload,
|
||||
req: req as unknown as PayloadRequest,
|
||||
tableName,
|
||||
})
|
||||
}
|
||||
|
||||
const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`
|
||||
if (debug) {
|
||||
payload.logger.info('DELETING ROWS')
|
||||
payload.logger.info(deleteStatement)
|
||||
}
|
||||
await db.execute(sql.raw(`${deleteStatement}`))
|
||||
}
|
||||
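To make the pagination query above concrete, here is a rough sketch, not part of the diff, of the WHERE fragment that the reduce over pathsToQuery builds. The paths and table name are invented, and the local constants stand in for the adapter's tableNameMap entry and relationshipsSuffix.

// Invented example values
const pathsToQuery = new Set(['hero.image', 'layout.%.person'])
const tableName = 'pages'
const relationshipsSuffix = '_rels'

const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
  return (statement += `
  "${tableName}${relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
`)
}, '')

// where now reads roughly:
//   "pages_rels"."path" LIKE 'hero.image' OR
//   "pages_rels"."path" LIKE 'layout.%.person'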
@@ -0,0 +1,90 @@
|
||||
import type { TransactionPg } from '@payloadcms/drizzle/types'
|
||||
import type { FlattenedField, Payload } from 'payload'
|
||||
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { VercelPostgresAdapter } from '../../types.js'
|
||||
import type { PathsToQuery } from './types.js'
|
||||
|
||||
type Args = {
|
||||
adapter: VercelPostgresAdapter
|
||||
collectionSlug?: string
|
||||
columnPrefix: string
|
||||
db: TransactionPg
|
||||
disableNotNull: boolean
|
||||
fields: FlattenedField[]
|
||||
globalSlug?: string
|
||||
isVersions: boolean
|
||||
newTableName: string
|
||||
parentTableName: string
|
||||
path: string
|
||||
pathsToQuery: PathsToQuery
|
||||
payload: Payload
|
||||
rootTableName: string
|
||||
}
|
||||
|
||||
export const traverseFields = (args: Args) => {
|
||||
args.fields.forEach((field) => {
|
||||
switch (field.type) {
|
||||
case 'array': {
|
||||
const newTableName = args.adapter.tableNameMap.get(
|
||||
`${args.newTableName}_${toSnakeCase(field.name)}`,
|
||||
)
|
||||
|
||||
return traverseFields({
|
||||
...args,
|
||||
columnPrefix: '',
|
||||
fields: field.flattenedFields,
|
||||
newTableName,
|
||||
parentTableName: newTableName,
|
||||
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
|
||||
})
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
return field.blocks.forEach((block) => {
|
||||
const newTableName = args.adapter.tableNameMap.get(
|
||||
`${args.rootTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
traverseFields({
|
||||
...args,
|
||||
columnPrefix: '',
|
||||
fields: block.flattenedFields,
|
||||
newTableName,
|
||||
parentTableName: newTableName,
|
||||
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
case 'group':
|
||||
case 'tab': {
|
||||
let newTableName = `${args.newTableName}_${toSnakeCase(field.name)}`
|
||||
|
||||
if (field.localized && args.payload.config.localization) {
|
||||
newTableName += args.adapter.localesSuffix
|
||||
}
|
||||
|
||||
return traverseFields({
|
||||
...args,
|
||||
columnPrefix: `${args.columnPrefix}${toSnakeCase(field.name)}_`,
|
||||
fields: field.flattenedFields,
|
||||
newTableName,
|
||||
path: `${args.path ? `${args.path}.` : ''}${field.name}`,
|
||||
})
|
||||
}
|
||||
|
||||
case 'relationship':
|
||||
case 'upload': {
|
||||
if (typeof field.relationTo === 'string') {
|
||||
if (field.type === 'upload' || !field.hasMany) {
|
||||
args.pathsToQuery.add(`${args.path ? `${args.path}.` : ''}${field.name}`)
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* Set of all paths which should be moved
|
||||
* This will be built up into one WHERE query
|
||||
*/
|
||||
export type PathsToQuery = Set<string>
|
||||
|
||||
export type DocsToResave = {
|
||||
[id: number | string]: Record<string, unknown>[]
|
||||
}
|
||||
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/drizzle",
"version": "3.6.0",
"version": "3.5.0",
"description": "A library of shared functions used by different payload database adapters",
"homepage": "https://payloadcms.com",
"repository": {

@@ -2,12 +2,13 @@ export { countDistinct } from '../postgres/countDistinct.js'
export { createDatabase } from '../postgres/createDatabase.js'
export { createExtensions } from '../postgres/createExtensions.js'
export { createJSONQuery } from '../postgres/createJSONQuery/index.js'
export { createMigration } from '../postgres/createMigration.js'
export { defaultDrizzleSnapshot } from '../postgres/defaultSnapshot.js'
export { deleteWhere } from '../postgres/deleteWhere.js'
export { dropDatabase } from '../postgres/dropDatabase.js'
export { execute } from '../postgres/execute.js'
export { getMigrationTemplate } from '../postgres/getMigrationTemplate.js'
export { init } from '../postgres/init.js'
export { insert } from '../postgres/insert.js'
export { migratePostgresV2toV3 } from '../postgres/predefinedMigrations/v2-v3/index.js'
export { requireDrizzleKit } from '../postgres/requireDrizzleKit.js'
export * from '../postgres/types.js'

@@ -34,7 +34,6 @@ export { updateGlobal } from './updateGlobal.js'
|
||||
export { updateGlobalVersion } from './updateGlobalVersion.js'
|
||||
export { updateVersion } from './updateVersion.js'
|
||||
export { upsertRow } from './upsertRow/index.js'
|
||||
export { buildCreateMigration } from './utilities/buildCreateMigration.js'
|
||||
export { buildIndexName } from './utilities/buildIndexName.js'
|
||||
export { executeSchemaHooks } from './utilities/executeSchemaHooks.js'
|
||||
export { extendDrizzleTable } from './utilities/extendDrizzleTable.js'
|
||||
|
||||
@@ -44,8 +44,7 @@ export async function migrateDown(this: DrizzleAdapter): Promise<void> {
|
||||
try {
|
||||
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
|
||||
await initTransaction(req)
|
||||
const db = this.sessions[await req.transactionID]?.db || this.drizzle
|
||||
await migrationFile.down({ db, payload, req })
|
||||
await migrationFile.down({ payload, req })
|
||||
payload.logger.info({
|
||||
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
|
||||
})
|
||||
|
||||
@@ -59,7 +59,8 @@ export async function migrateFresh(
|
||||
try {
|
||||
const start = Date.now()
|
||||
await initTransaction(req)
|
||||
const db = this.sessions[await req.transactionID]?.db || this.drizzle
|
||||
const adapter = payload.db as DrizzleAdapter
|
||||
const db = adapter?.sessions[await req.transactionID]?.db || adapter.drizzle
|
||||
await migration.up({ db, payload, req })
|
||||
await payload.create({
|
||||
collection: 'payload-migrations',
|
||||
|
||||
@@ -48,8 +48,7 @@ export async function migrateRefresh(this: DrizzleAdapter) {
|
||||
payload.logger.info({ msg: `Migrating down: ${migration.name}` })
|
||||
const start = Date.now()
|
||||
await initTransaction(req)
|
||||
const db = this.sessions[await req.transactionID]?.db || this.drizzle
|
||||
await migrationFile.down({ db, payload, req })
|
||||
await migrationFile.down({ payload, req })
|
||||
payload.logger.info({
|
||||
msg: `Migrated down: ${migration.name} (${Date.now() - start}ms)`,
|
||||
})
|
||||
|
||||
@@ -39,8 +39,7 @@ export async function migrateReset(this: DrizzleAdapter): Promise<void> {
|
||||
const start = Date.now()
|
||||
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
|
||||
await initTransaction(req)
|
||||
const db = this.sessions[await req.transactionID]?.db || this.drizzle
|
||||
await migrationFile.down({ db, payload, req })
|
||||
await migrationFile.down({ payload, req })
|
||||
payload.logger.info({
|
||||
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
|
||||
})
|
||||
|
||||
122
packages/drizzle/src/postgres/createMigration.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import type { CreateMigration } from 'payload'
|
||||
|
||||
import fs from 'fs'
|
||||
import { createRequire } from 'module'
|
||||
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
|
||||
import prompts from 'prompts'
|
||||
|
||||
import type { BasePostgresAdapter } from './types.js'
|
||||
|
||||
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
|
||||
import { getMigrationTemplate } from './getMigrationTemplate.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
export const createMigration: CreateMigration = async function createMigration(
|
||||
this: BasePostgresAdapter,
|
||||
{ dirname, file, forceAcceptWarning, migrationName, payload, skipEmpty },
|
||||
) {
|
||||
const dir = payload.db.migrationDir
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir)
|
||||
}
|
||||
const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api')
|
||||
const drizzleJsonAfter = generateDrizzleJson(this.schema)
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '')
|
||||
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
|
||||
let imports: string = ''
|
||||
let downSQL: string
|
||||
let upSQL: string
|
||||
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
|
||||
dirname,
|
||||
file,
|
||||
migrationName,
|
||||
payload,
|
||||
}))
|
||||
|
||||
const timestamp = `${formattedDate}_${formattedTime}`
|
||||
|
||||
const name = migrationName || file?.split('/').slice(2).join('/')
|
||||
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
|
||||
|
||||
const filePath = `${dir}/${fileName}`
|
||||
|
||||
let drizzleJsonBefore = defaultDrizzleSnapshot
|
||||
|
||||
if (this.schemaName) {
|
||||
drizzleJsonBefore.schemas = {
|
||||
[this.schemaName]: this.schemaName,
|
||||
}
|
||||
}
|
||||
|
||||
if (!upSQL) {
|
||||
// Get latest migration snapshot
|
||||
const latestSnapshot = fs
|
||||
.readdirSync(dir)
|
||||
.filter((file) => file.endsWith('.json'))
|
||||
.sort()
|
||||
.reverse()?.[0]
|
||||
|
||||
if (latestSnapshot) {
|
||||
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
|
||||
|
||||
if (drizzleJsonBefore.version < drizzleJsonAfter.version) {
|
||||
drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore)
|
||||
}
|
||||
}
|
||||
|
||||
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
|
||||
const sqlExecute = 'await payload.db.drizzle.execute(sql`'
|
||||
|
||||
if (sqlStatementsUp?.length) {
|
||||
upSQL = `${sqlExecute}\n ${sqlStatementsUp?.join('\n')}\`)`
|
||||
}
|
||||
if (sqlStatementsDown?.length) {
|
||||
downSQL = `${sqlExecute}\n ${sqlStatementsDown?.join('\n')}\`)`
|
||||
}
|
||||
|
||||
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
|
||||
if (skipEmpty) {
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const { confirm: shouldCreateBlankMigration } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: 'No schema changes detected. Would you like to create a blank migration file?',
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!shouldCreateBlankMigration) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
// write schema
|
||||
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
|
||||
}
|
||||
|
||||
// write migration
|
||||
fs.writeFileSync(
|
||||
`${filePath}.ts`,
|
||||
getMigrationTemplate({
|
||||
downSQL: downSQL || ` // Migration code`,
|
||||
imports,
|
||||
packageName: payload.db.packageName,
|
||||
upSQL: upSQL || ` // Migration code`,
|
||||
}),
|
||||
)
|
||||
|
||||
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
|
||||
|
||||
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
|
||||
}
|
||||
@@ -13,11 +13,11 @@ export const getMigrationTemplate = ({
|
||||
upSQL,
|
||||
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '${packageName}'
|
||||
${imports ? `${imports}\n` : ''}
|
||||
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
|
||||
${indent(upSQL)}
|
||||
}
|
||||
|
||||
export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
|
||||
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
|
||||
${indent(downSQL)}
|
||||
}
|
||||
`
|
||||
@@ -3,19 +3,4 @@ import { createRequire } from 'module'
|
||||
import type { RequireDrizzleKit } from '../types.js'
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
|
||||
export const requireDrizzleKit: RequireDrizzleKit = () => {
|
||||
const {
|
||||
generateDrizzleJson,
|
||||
generateMigration,
|
||||
pushSchema,
|
||||
upPgSnapshot,
|
||||
} = require('drizzle-kit/api')
|
||||
|
||||
return {
|
||||
generateDrizzleJson,
|
||||
generateMigration,
|
||||
pushSchema,
|
||||
upSnapshot: upPgSnapshot,
|
||||
}
|
||||
}
|
||||
export const requireDrizzleKit: RequireDrizzleKit = () => require('drizzle-kit/api')
|
||||
|
||||
@@ -191,66 +191,5 @@ export type PostgresDrizzleAdapter = Omit<
|
||||
|
||||
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
|
||||
|
||||
export type MigrateUpArgs = {
|
||||
/**
|
||||
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
|
||||
*
|
||||
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
db: PostgresDB
|
||||
/**
|
||||
* The Payload instance that you can use to execute Local API methods
|
||||
* To use the current transaction you must pass `req` to arguments
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
|
||||
*
|
||||
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
|
||||
* const posts = await payload.find({ collection: 'posts', req })
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
payload: Payload
|
||||
/**
|
||||
* The `PayloadRequest` object that contains the current transaction
|
||||
*/
|
||||
req: PayloadRequest
|
||||
}
|
||||
|
||||
export type MigrateDownArgs = {
|
||||
/**
|
||||
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateDownArgs, sql } from '@payloadcms/db-postgres'
|
||||
*
|
||||
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
|
||||
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
db: PostgresDB
|
||||
/**
|
||||
* The Payload instance that you can use to execute Local API methods
|
||||
* To use the current transaction you must pass `req` to arguments
|
||||
* @example
|
||||
* ```ts
|
||||
* import { type MigrateDownArgs } from '@payloadcms/db-postgres'
|
||||
*
|
||||
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
|
||||
* const posts = await payload.find({ collection: 'posts', req })
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
payload: Payload
|
||||
/**
|
||||
* The `PayloadRequest` object that contains the current transaction
|
||||
*/
|
||||
req: PayloadRequest
|
||||
}
|
||||
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
|
||||
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }
|
||||
|
||||
@@ -14,14 +14,19 @@ import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-post
|
||||
import type { PgColumn, PgTable, PgTransaction } from 'drizzle-orm/pg-core'
|
||||
import type { SQLiteColumn, SQLiteTable, SQLiteTransaction } from 'drizzle-orm/sqlite-core'
|
||||
import type { Result } from 'drizzle-orm/sqlite-core/session'
|
||||
import type { BaseDatabaseAdapter, MigrationData, Payload, PayloadRequest } from 'payload'
|
||||
import type {
|
||||
BaseDatabaseAdapter,
|
||||
MigrationData,
|
||||
MigrationTemplateArgs,
|
||||
Payload,
|
||||
PayloadRequest,
|
||||
} from 'payload'
|
||||
|
||||
import type { BuildQueryJoinAliases } from './queries/buildQuery.js'
|
||||
|
||||
export { BuildQueryJoinAliases }
|
||||
|
||||
import type { ResultSet } from '@libsql/client'
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { SQLiteRaw } from 'drizzle-orm/sqlite-core/query-builders/raw'
|
||||
import type { QueryResult } from 'pg'
|
||||
|
||||
@@ -112,10 +117,7 @@ export type Insert = (args: {
|
||||
}) => Promise<Record<string, unknown>[]>
|
||||
|
||||
export type RequireDrizzleKit = () => {
|
||||
generateDrizzleJson: (
|
||||
args: Record<string, unknown>,
|
||||
) => DrizzleSnapshotJSON | Promise<DrizzleSnapshotJSON>
|
||||
generateMigration: (prev: DrizzleSnapshotJSON, cur: DrizzleSnapshotJSON) => Promise<string[]>
|
||||
generateDrizzleJson: (args: { schema: Record<string, unknown> }) => unknown
|
||||
pushSchema: (
|
||||
schema: Record<string, unknown>,
|
||||
drizzle: DrizzleAdapter['drizzle'],
|
||||
@@ -123,7 +125,6 @@ export type RequireDrizzleKit = () => {
|
||||
tablesFilter?: string[],
|
||||
extensionsFilter?: string[],
|
||||
) => Promise<{ apply; hasDataLoss; warnings }>
|
||||
upSnapshot?: (snapshot: Record<string, unknown>) => DrizzleSnapshotJSON
|
||||
}
|
||||
|
||||
export type Migration = {
|
||||
@@ -176,6 +177,7 @@ export interface DrizzleAdapter extends BaseDatabaseAdapter {
|
||||
* Used for returning properly formed errors from unique fields
|
||||
*/
|
||||
fieldConstraints: Record<string, Record<string, string>>
|
||||
getMigrationTemplate: (args: MigrationTemplateArgs) => string
|
||||
idType: 'serial' | 'uuid'
|
||||
indexes: Set<string>
|
||||
initializing: Promise<void>
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
|
||||
import type { CreateMigration } from 'payload'
|
||||
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
|
||||
import prompts from 'prompts'
|
||||
|
||||
import type { DrizzleAdapter } from '../types.js'
|
||||
|
||||
import { getMigrationTemplate } from './getMigrationTemplate.js'
|
||||
|
||||
export const buildCreateMigration = ({
|
||||
executeMethod,
|
||||
filename,
|
||||
sanitizeStatements,
|
||||
}: {
|
||||
executeMethod: string
|
||||
filename: string
|
||||
sanitizeStatements: (args: { sqlExecute: string; statements: string[] }) => string
|
||||
}): CreateMigration => {
|
||||
const dirname = path.dirname(filename)
|
||||
return async function createMigration(
|
||||
this: DrizzleAdapter,
|
||||
{ file, forceAcceptWarning, migrationName, payload, skipEmpty },
|
||||
) {
|
||||
const dir = payload.db.migrationDir
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir)
|
||||
}
|
||||
|
||||
const { generateDrizzleJson, generateMigration, upSnapshot } = this.requireDrizzleKit()
|
||||
|
||||
const drizzleJsonAfter = await generateDrizzleJson(this.schema)
|
||||
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '')
|
||||
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
|
||||
let imports: string = ''
|
||||
let downSQL: string
|
||||
let upSQL: string
|
||||
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
|
||||
dirname,
|
||||
file,
|
||||
migrationName,
|
||||
payload,
|
||||
}))
|
||||
|
||||
const timestamp = `${formattedDate}_${formattedTime}`
|
||||
|
||||
const name = migrationName || file?.split('/').slice(2).join('/')
|
||||
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
|
||||
|
||||
const filePath = `${dir}/${fileName}`
|
||||
|
||||
let drizzleJsonBefore = this.defaultDrizzleSnapshot as DrizzleSnapshotJSON
|
||||
|
||||
if (this.schemaName) {
|
||||
drizzleJsonBefore.schemas = {
|
||||
[this.schemaName]: this.schemaName,
|
||||
}
|
||||
}
|
||||
|
||||
if (!upSQL) {
|
||||
// Get latest migration snapshot
|
||||
const latestSnapshot = fs
|
||||
.readdirSync(dir)
|
||||
.filter((file) => file.endsWith('.json'))
|
||||
.sort()
|
||||
.reverse()?.[0]
|
||||
|
||||
if (latestSnapshot) {
|
||||
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
|
||||
|
||||
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
|
||||
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
|
||||
}
|
||||
}
|
||||
|
||||
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
|
||||
const sqlExecute = `await db.${executeMethod}(` + 'sql`'
|
||||
|
||||
if (sqlStatementsUp?.length) {
|
||||
upSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsUp })
|
||||
}
|
||||
if (sqlStatementsDown?.length) {
|
||||
downSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsDown })
|
||||
}
|
||||
|
||||
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
|
||||
if (skipEmpty) {
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const { confirm: shouldCreateBlankMigration } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: 'No schema changes detected. Would you like to create a blank migration file?',
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!shouldCreateBlankMigration) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
// write schema
|
||||
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
|
||||
}
|
||||
|
||||
// write migration
|
||||
fs.writeFileSync(
|
||||
`${filePath}.ts`,
|
||||
getMigrationTemplate({
|
||||
downSQL: downSQL || ` // Migration code`,
|
||||
imports,
|
||||
packageName: payload.db.packageName,
|
||||
upSQL: upSQL || ` // Migration code`,
|
||||
}),
|
||||
)
|
||||
|
||||
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
|
||||
|
||||
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-nodemailer",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "Payload Nodemailer Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-resend",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "Payload Resend Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/graphql",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -30,13 +30,15 @@ export function createResolver<TSlug extends CollectionSlug>(
|
||||
context.req.locale = args.locale
|
||||
}
|
||||
|
||||
const result = await createOperation({
|
||||
const options = {
|
||||
collection,
|
||||
data: args.data,
|
||||
depth: 0,
|
||||
draft: args.draft,
|
||||
req: isolateObjectProperty(context.req, 'transactionID'),
|
||||
})
|
||||
}
|
||||
|
||||
const result = await createOperation(options)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ import type { Context } from '../types.js'
|
||||
export type Resolver<TData> = (
|
||||
_: unknown,
|
||||
args: {
|
||||
data: TData
|
||||
draft: boolean
|
||||
fallbackLocale?: string
|
||||
id: string
|
||||
@@ -29,14 +28,15 @@ export function duplicateResolver<TSlug extends CollectionSlug>(
|
||||
req.fallbackLocale = args.fallbackLocale || fallbackLocale
|
||||
context.req = req
|
||||
|
||||
const result = await duplicateOperation({
|
||||
const options = {
|
||||
id: args.id,
|
||||
collection,
|
||||
data: args.data,
|
||||
depth: 0,
|
||||
draft: args.draft,
|
||||
req: isolateObjectProperty(req, 'transactionID'),
|
||||
})
|
||||
}
|
||||
|
||||
const result = await duplicateOperation(options)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -280,9 +280,6 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
  type: collection.graphQL.type,
  args: {
    id: { type: new GraphQLNonNull(idType) },
    ...(createMutationInputType
      ? { data: { type: collection.graphQL.mutationInputType } }
      : {}),
  },
  resolve: duplicateResolver(collection),
}
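The resolver registered above is exposed per collection, taking a required `id` and an optional `data` input. Assuming a collection with slug `posts` and Payload's usual per-collection mutation naming (the mutation name and `/api/graphql` path are assumptions, not confirmed by this diff), a client call could look like:

```ts
// Hypothetical client request against the duplicate mutation wired up above.
const res = await fetch('/api/graphql', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: /* GraphQL */ `mutation { duplicatePost(id: 1) { id title } }`,
  }),
})
const { data } = await res.json()
```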
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-react",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "The official React SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-vue",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "The official Vue SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "The official live preview JavaScript SDK for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/next",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -93,12 +93,7 @@ export const RootLayout = async ({
})

return (
  <html
    data-theme={theme}
    dir={dir}
    lang={languageCode}
    suppressHydrationWarning={config?.admin?.suppressHydrationWarning ?? false}
  >
  <html data-theme={theme} dir={dir} lang={languageCode}>
    <head>
      <style>{`@layer payload-default, payload;`}</style>
    </head>
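The longer variant of the root `<html>` element reads its value from `config?.admin?.suppressHydrationWarning`. Assuming the side of this diff that exposes that option, opting in would be a one-line config change (only the relevant key is shown; the rest of the config is omitted):

```ts
import type { Config } from 'payload'

// Sketch: forwarded to the admin <html> element as suppressHydrationWarning.
export const adminConfig: Partial<Config> = {
  admin: {
    suppressHydrationWarning: true,
  },
}
```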
@@ -27,7 +27,6 @@ export const duplicate: CollectionRouteHandlerWithID = async ({
const doc = await duplicateOperation({
  id,
  collection,
  data: req.data,
  depth: isNumber(depth) ? Number(depth) : undefined,
  draft,
  populate: sanitizePopulateParam(req.query.populate),

@@ -11,7 +11,7 @@ export const preview: CollectionRouteHandlerWithID = async ({ id, collection, re
const { searchParams } = req
const depth = searchParams.get('depth')

const doc = await findByIDOperation({
const result = await findByIDOperation({
  id,
  collection,
  depth: isNumber(depth) ? Number(depth) : undefined,

@@ -29,11 +29,16 @@ export const preview: CollectionRouteHandlerWithID = async ({ id, collection, re

if (typeof generatePreviewURL === 'function') {
  try {
    previewURL = await generatePreviewURL(doc, {
    previewURL = await generatePreviewURL(result, {
      locale: req.locale,
      req,
      token,
    })

    // Support relative URLs by prepending the origin, if necessary
    if (previewURL && previewURL.startsWith('/')) {
      previewURL = `${req.protocol}//${req.host}${previewURL}`
    }
  } catch (err) {
    return routeError({
      collection,

@@ -11,7 +11,7 @@ export const preview: GlobalRouteHandler = async ({ globalConfig, req }) => {
const { searchParams } = req
const depth = searchParams.get('depth')

const doc = await findOneOperation({
const result = await findOneOperation({
  slug: globalConfig.slug,
  depth: isNumber(depth) ? Number(depth) : undefined,
  draft: searchParams.get('draft') === 'true',

@@ -29,7 +29,7 @@ export const preview: GlobalRouteHandler = async ({ globalConfig, req }) => {

if (typeof generatePreviewURL === 'function') {
  try {
    previewURL = await generatePreviewURL(doc, {
    previewURL = await generatePreviewURL(result, {
      locale: req.locale,
      req,
      token,
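With the collection preview route now resolving relative URLs against the request origin, a `preview` function can simply return a path. A hedged config sketch (the `Posts` collection, its `slug` field, and the URL shape are illustrative assumptions):

```ts
import type { CollectionConfig } from 'payload'

// Sketch: a relative return value like this is prefixed with
// `${req.protocol}//${req.host}` by the preview route handler shown above.
export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    preview: (doc) => `/posts/${doc?.slug ?? doc?.id}/preview`,
  },
  fields: [{ name: 'slug', type: 'text' }],
}
```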
@@ -48,20 +48,6 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
} = {},
|
||||
} = payload.config || {}
|
||||
|
||||
const serverProps = React.useMemo<ServerProps>(
|
||||
() => ({
|
||||
i18n,
|
||||
locale,
|
||||
params,
|
||||
payload,
|
||||
permissions,
|
||||
searchParams,
|
||||
user,
|
||||
visibleEntities,
|
||||
}),
|
||||
[i18n, locale, params, payload, permissions, searchParams, user, visibleEntities],
|
||||
)
|
||||
|
||||
const { Actions } = React.useMemo<{
|
||||
Actions: Record<string, React.ReactNode>
|
||||
}>(() => {
|
||||
@@ -73,13 +59,11 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
acc[action.path] = RenderServerComponent({
|
||||
Component: action,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
} else {
|
||||
acc[action] = RenderServerComponent({
|
||||
Component: action,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -88,14 +72,23 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
}, {})
|
||||
: undefined,
|
||||
}
|
||||
}, [payload, serverProps, viewActions])
|
||||
}, [viewActions, payload])
|
||||
|
||||
const NavComponent = RenderServerComponent({
|
||||
clientProps: { clientProps: { visibleEntities } },
|
||||
Component: CustomNav,
|
||||
Fallback: DefaultNav,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
serverProps: {
|
||||
i18n,
|
||||
locale,
|
||||
params,
|
||||
payload,
|
||||
permissions,
|
||||
searchParams,
|
||||
user,
|
||||
visibleEntities,
|
||||
},
|
||||
})
|
||||
|
||||
return (
|
||||
@@ -106,7 +99,16 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
clientProps: { clientProps: { visibleEntities } },
|
||||
Component: CustomHeader,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
serverProps: {
|
||||
i18n,
|
||||
locale,
|
||||
params,
|
||||
payload,
|
||||
permissions,
|
||||
searchParams,
|
||||
user,
|
||||
visibleEntities,
|
||||
},
|
||||
})}
|
||||
<div style={{ position: 'relative' }}>
|
||||
<div className={`${baseClass}__nav-toggler-wrapper`} id="nav-toggler">
|
||||
@@ -125,7 +127,6 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
? RenderServerComponent({
|
||||
Component: avatar.Component,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
: undefined
|
||||
}
|
||||
@@ -134,7 +135,6 @@ export const DefaultTemplate: React.FC<DefaultTemplateProps> = ({
|
||||
? RenderServerComponent({
|
||||
Component: components.graphics.Icon,
|
||||
importMap: payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
: undefined
|
||||
}
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
import type { AdminViewProps, Data, PayloadComponent, ServerSideEditViewProps } from 'payload'
|
||||
|
||||
import { DocumentInfoProvider, EditDepthProvider, HydrateAuthProvider } from '@payloadcms/ui'
|
||||
import {
|
||||
DocumentInfoProvider,
|
||||
EditDepthProvider,
|
||||
HydrateAuthProvider,
|
||||
ShimmerEffect,
|
||||
} from '@payloadcms/ui'
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
import { formatAdminURL, isEditing as getIsEditing } from '@payloadcms/ui/shared'
|
||||
import { buildFormState } from '@payloadcms/ui/utilities/buildFormState'
|
||||
import { isRedirectError } from 'next/dist/client/components/redirect.js'
|
||||
import { notFound, redirect } from 'next/navigation.js'
|
||||
import React from 'react'
|
||||
import React, { Suspense } from 'react'
|
||||
|
||||
import type { GenerateEditViewMetadata } from './getMetaBySegment.js'
|
||||
import type { ViewFromConfig } from './getViewsFromConfig.js'
|
||||
@@ -376,12 +382,23 @@ export const renderDocument = async ({
  }
}

export const Document: React.FC<AdminViewProps> = async (args) => {
const DocumentWithData: React.FC<AdminViewProps> = async (args) => {
  const { Document: RenderedDocument } = await renderDocument(args)
  return RenderedDocument
}

export const Document: React.FC<AdminViewProps> = (args) => {
  try {
    const { Document: RenderedDocument } = await renderDocument(args)
    return RenderedDocument
    return (
      <Suspense
        fallback={<ShimmerEffect height="100%" />}
        key={`document-view-${args?.initPageResult?.collectionConfig?.slug ?? args?.initPageResult?.globalConfig?.slug}`}
      >
        <DocumentWithData {...args} />
      </Suspense>
    )
  } catch (error) {
    if (error?.message === 'NEXT_REDIRECT') {
    if (isRedirectError(error)) {
      throw error
    }
    args.initPageResult.req.payload.logger.error(error)
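The same split, an async data component wrapped by a synchronous component that owns the `<Suspense>` boundary and a `ShimmerEffect` fallback, is applied to the list view later in this diff. A stripped-down sketch of the pattern, with all names as placeholders (it assumes React Server Components, where async components are valid children):

```tsx
// Generic sketch of the Suspense-split pattern used above; names are placeholders.
import React, { Suspense } from 'react'

// Stand-ins for renderDocument / renderListView and ShimmerEffect.
const loadContent = async (id: string) => <p>{`content for ${id}`}</p>
const Shimmer = () => <div aria-busy="true">Loading…</div>

// Async server component that performs the data work.
const WithData = async ({ id }: { id: string }) => loadContent(id)

// Synchronous wrapper that owns the boundary; keying it by `id` swaps in the
// fallback again when navigating between entities, like the views above.
export const View = ({ id }: { id: string }) => (
  <Suspense fallback={<Shimmer />} key={`view-${id}`}>
    <WithData id={id} />
  </Suspense>
)
```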
@@ -5,12 +5,12 @@ import type {
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedDocumentPermissions,
|
||||
SanitizedGlobalConfig,
|
||||
ServerProps,
|
||||
StaticDescription,
|
||||
} from 'payload'
|
||||
|
||||
import { ViewDescription } from '@payloadcms/ui'
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
import React from 'react'
|
||||
|
||||
import { getDocumentPermissions } from './getDocumentPermissions.js'
|
||||
|
||||
@@ -29,13 +29,6 @@ export const renderDocumentSlots: (args: {
|
||||
|
||||
const isPreviewEnabled = collectionConfig?.admin?.preview || globalConfig?.admin?.preview
|
||||
|
||||
const serverProps: ServerProps = {
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
user: req.user,
|
||||
// TODO: Add remaining serverProps
|
||||
}
|
||||
|
||||
const CustomPreviewButton =
|
||||
collectionConfig?.admin?.components?.edit?.PreviewButton ||
|
||||
globalConfig?.admin?.components?.elements?.PreviewButton
|
||||
@@ -44,7 +37,6 @@ export const renderDocumentSlots: (args: {
|
||||
components.PreviewButton = RenderServerComponent({
|
||||
Component: CustomPreviewButton,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -68,7 +60,6 @@ export const renderDocumentSlots: (args: {
|
||||
Component: CustomDescription,
|
||||
Fallback: ViewDescription,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -82,7 +73,6 @@ export const renderDocumentSlots: (args: {
|
||||
components.PublishButton = RenderServerComponent({
|
||||
Component: CustomPublishButton,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
const CustomSaveDraftButton =
|
||||
@@ -97,7 +87,6 @@ export const renderDocumentSlots: (args: {
|
||||
components.SaveDraftButton = RenderServerComponent({
|
||||
Component: CustomSaveDraftButton,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
@@ -109,7 +98,6 @@ export const renderDocumentSlots: (args: {
|
||||
components.SaveButton = RenderServerComponent({
|
||||
Component: CustomSaveButton,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,13 +6,18 @@ import type {
|
||||
} from '@payloadcms/ui'
|
||||
import type { AdminViewProps, ListQuery, Where } from 'payload'
|
||||
|
||||
import { DefaultListView, HydrateAuthProvider, ListQueryProvider } from '@payloadcms/ui'
|
||||
import {
|
||||
DefaultListView,
|
||||
HydrateAuthProvider,
|
||||
ListQueryProvider,
|
||||
ShimmerEffect,
|
||||
} from '@payloadcms/ui'
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
import { renderFilters, renderTable } from '@payloadcms/ui/rsc'
|
||||
import { formatAdminURL, mergeListSearchAndWhere } from '@payloadcms/ui/shared'
|
||||
import { notFound } from 'next/navigation.js'
|
||||
import { isNumber } from 'payload/shared'
|
||||
import React, { Fragment } from 'react'
|
||||
import React, { Fragment, Suspense } from 'react'
|
||||
|
||||
import { renderListViewSlots } from './renderListViewSlots.js'
|
||||
|
||||
@@ -171,13 +176,13 @@ export const renderListView = async (
|
||||
const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug)
|
||||
|
||||
const { columnState, Table } = renderTable({
|
||||
clientCollectionConfig,
|
||||
collectionConfig,
|
||||
collectionConfig: clientCollectionConfig,
|
||||
columnPreferences: listPreferences?.columns,
|
||||
customCellProps,
|
||||
docs: data.docs,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
fields,
|
||||
i18n: req.i18n,
|
||||
payload,
|
||||
useAsTitle,
|
||||
@@ -266,10 +271,21 @@ export const renderListView = async (
|
||||
throw new Error('not-found')
|
||||
}
|
||||
|
||||
export const ListView: React.FC<ListViewArgs> = async (args) => {
|
||||
const ListViewWithData: React.FC<ListViewArgs> = async (args) => {
|
||||
const { List: RenderedList } = await renderListView({ ...args, enableRowSelections: true })
|
||||
return RenderedList
|
||||
}
|
||||
|
||||
export const ListView: React.FC<ListViewArgs> = (args) => {
|
||||
try {
|
||||
const { List: RenderedList } = await renderListView({ ...args, enableRowSelections: true })
|
||||
return RenderedList
|
||||
return (
|
||||
<Suspense
|
||||
fallback={<ShimmerEffect height="100%" />}
|
||||
key={`list-view-${args?.initPageResult?.collectionConfig?.slug ?? args?.initPageResult?.globalConfig?.slug}`}
|
||||
>
|
||||
<ListViewWithData {...args} />
|
||||
</Suspense>
|
||||
)
|
||||
} catch (error) {
|
||||
if (error.message === 'not-found') {
|
||||
notFound()
|
||||
|
||||
@@ -36,21 +36,21 @@ export const LivePreviewView: PayloadServerReactComponent<EditViewComponent> = a
  },
]

const url =
let url =
  typeof livePreviewConfig?.url === 'function'
    ? await livePreviewConfig.url({
        collectionConfig,
        data: doc,
        globalConfig,
        locale,
        req,
        /**
         * @deprecated
         * Use `req.payload` instead. This will be removed in the next major version.
         */
        payload: initPageResult.req.payload,
      })
    : livePreviewConfig?.url

// Support relative URLs by prepending the origin, if necessary
if (url && url.startsWith('/')) {
  url = `${initPageResult.req.protocol}//${initPageResult.req.host}${url}`
}

return <LivePreviewClient breakpoints={breakpoints} initialData={doc} url={url} />
}
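Mirroring the preview routes, the live preview URL may now be returned as a relative path and is resolved against `req.protocol` and `req.host`. A hedged collection-level sketch (the `Pages` collection, its `slug` field, and the path are illustrative assumptions):

```ts
import type { CollectionConfig } from 'payload'

// Sketch: a relative URL returned here is resolved against the request origin
// by the LivePreviewView change above.
export const Pages: CollectionConfig = {
  slug: 'pages',
  admin: {
    livePreview: {
      url: ({ data }) => `/live-preview/pages/${data?.slug ?? data?.id}`,
    },
  },
  fields: [{ name: 'slug', type: 'text' }],
}
```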
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/payload-cloud",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "The official Payload Cloud plugin",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -94,8 +94,6 @@ describe('plugin', () => {
|
||||
|
||||
const existingTransport = nodemailer.createTransport({
|
||||
name: 'existing-transport',
|
||||
// eslint-disable-next-line @typescript-eslint/require-await
|
||||
verify: async (): Promise<true> => true,
|
||||
// eslint-disable-next-line @typescript-eslint/require-await, @typescript-eslint/no-misused-promises
|
||||
send: async (mail) => {
|
||||
// eslint-disable-next-line no-console
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload",
|
||||
"version": "3.6.0",
|
||||
"version": "3.5.0",
|
||||
"description": "Node, React, Headless CMS and Application Framework built on Next.js",
|
||||
"keywords": [
|
||||
"admin panel",
|
||||
@@ -93,7 +93,7 @@
|
||||
"ci-info": "^4.0.0",
|
||||
"console-table-printer": "2.12.1",
|
||||
"croner": "9.0.0",
|
||||
"dataloader": "2.2.3",
|
||||
"dataloader": "2.2.2",
|
||||
"deepmerge": "4.3.1",
|
||||
"file-type": "19.3.0",
|
||||
"get-tsconfig": "4.8.1",
|
||||
|
||||
@@ -137,8 +137,8 @@ export const forgotPasswordOperation = async <TSlug extends CollectionSlug>(

user.resetPasswordToken = token
user.resetPasswordExpiration = new Date(
  Date.now() + (collectionConfig.auth?.forgotPassword?.expiration ?? expiration ?? 3600000),
).toISOString()
  collectionConfig.auth?.forgotPassword?.expiration || expiration || Date.now() + 3600000,
).toISOString() // 1 hour

user = await payload.update({
  id: user.id,
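The newer expression reads a per-collection override first, then the operation-level `expiration` argument, and finally falls back to one hour, always offset from `Date.now()`. Assuming the `auth.forgotPassword.expiration` path referenced above (value in milliseconds), a per-collection override would look like this sketch:

```ts
import type { CollectionConfig } from 'payload'

// Sketch: reset tokens for this collection expire after 2 hours
// instead of the 3600000 ms (1 hour) fallback.
export const Users: CollectionConfig = {
  slug: 'users',
  auth: {
    forgotPassword: {
      expiration: 1000 * 60 * 60 * 2,
    },
  },
  fields: [],
}
```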
@@ -195,10 +195,6 @@ export const sanitizeCollection = async (
|
||||
sanitized.auth.loginWithUsername = false
|
||||
}
|
||||
|
||||
if (!collection?.admin?.useAsTitle) {
|
||||
sanitized.admin.useAsTitle = sanitized.auth.loginWithUsername ? 'username' : 'email'
|
||||
}
|
||||
|
||||
sanitized.fields = mergeBaseFields(sanitized.fields, getBaseAuthFields(sanitized.auth))
|
||||
}
|
||||
|
||||
|
||||
@@ -335,12 +335,9 @@ export type CollectionAdminOptions = {
enableRichTextLink?: boolean
enableRichTextRelationship?: boolean
/**
 * Specify a navigational group for collections in the admin sidebar.
 * - Provide a string to place the entity in a custom group.
 * - Provide a record to define localized group names.
 * - Set to `false` to exclude the entity from the sidebar / dashboard without disabling its routes.
 */
group?: false | Record<string, string> | string
 * Place collections into a navigational group
 * */
group?: Record<string, string> | string
/**
 * Exclude the collection from the admin nav and routes
 */
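Per the expanded doc comment, `group` also accepts `false` to drop a collection from the sidebar and dashboard without disabling its routes. A short sketch (the `internal-logs` collection is a made-up example):

```ts
import type { CollectionConfig } from 'payload'

// Sketch: hidden from the admin nav grouping, but its admin and API routes
// remain reachable, per the doc comment above.
export const InternalLogs: CollectionConfig = {
  slug: 'internal-logs',
  admin: {
    group: false,
  },
  fields: [{ name: 'message', type: 'text' }],
}
```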
@@ -13,7 +13,6 @@ import type {
|
||||
BeforeOperationHook,
|
||||
BeforeValidateHook,
|
||||
Collection,
|
||||
DataFromCollectionSlug,
|
||||
RequiredDataFromCollectionSlug,
|
||||
SelectFromCollectionSlug,
|
||||
} from '../config/types.js'
|
||||
@@ -22,7 +21,6 @@ import { ensureUsernameOrEmail } from '../../auth/ensureUsernameOrEmail.js'
|
||||
import executeAccess from '../../auth/executeAccess.js'
|
||||
import { sendVerificationEmail } from '../../auth/sendVerificationEmail.js'
|
||||
import { registerLocalStrategy } from '../../auth/strategies/local/register.js'
|
||||
import { getDuplicateDocumentData } from '../../duplicateDocument/index.js'
|
||||
import { afterChange } from '../../fields/hooks/afterChange/index.js'
|
||||
import { afterRead } from '../../fields/hooks/afterRead/index.js'
|
||||
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
|
||||
@@ -45,7 +43,6 @@ export type Arguments<TSlug extends CollectionSlug> = {
|
||||
disableTransaction?: boolean
|
||||
disableVerificationEmail?: boolean
|
||||
draft?: boolean
|
||||
duplicateFromID?: DataFromCollectionSlug<TSlug>['id']
|
||||
overrideAccess?: boolean
|
||||
overwriteExistingFiles?: boolean
|
||||
populate?: PopulateType
|
||||
@@ -100,7 +97,6 @@ export const createOperation = async <
|
||||
depth,
|
||||
disableVerificationEmail,
|
||||
draft = false,
|
||||
duplicateFromID,
|
||||
overrideAccess,
|
||||
overwriteExistingFiles = false,
|
||||
populate,
|
||||
@@ -119,23 +115,6 @@ export const createOperation = async <
|
||||
|
||||
const shouldSaveDraft = Boolean(draft && collectionConfig.versions.drafts)
|
||||
|
||||
let duplicatedFromDocWithLocales: JsonObject = {}
|
||||
let duplicatedFromDoc: JsonObject = {}
|
||||
|
||||
if (duplicateFromID) {
|
||||
const duplicateResult = await getDuplicateDocumentData({
|
||||
id: duplicateFromID,
|
||||
collectionConfig,
|
||||
draftArg: shouldSaveDraft,
|
||||
overrideAccess,
|
||||
req,
|
||||
shouldSaveDraft,
|
||||
})
|
||||
|
||||
duplicatedFromDoc = duplicateResult.duplicatedFromDoc
|
||||
duplicatedFromDocWithLocales = duplicateResult.duplicatedFromDocWithLocales
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Access
|
||||
// /////////////////////////////////////
|
||||
@@ -152,9 +131,7 @@ export const createOperation = async <
|
||||
collection,
|
||||
config,
|
||||
data,
|
||||
isDuplicating: Boolean(duplicateFromID),
|
||||
operation: 'create',
|
||||
originalDoc: duplicatedFromDoc,
|
||||
overwriteExistingFiles,
|
||||
req,
|
||||
throwOnMissingFile:
|
||||
@@ -171,7 +148,7 @@ export const createOperation = async <
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
doc: duplicatedFromDoc,
|
||||
doc: {},
|
||||
global: null,
|
||||
operation: 'create',
|
||||
overrideAccess,
|
||||
@@ -192,7 +169,6 @@ export const createOperation = async <
|
||||
context: req.context,
|
||||
data,
|
||||
operation: 'create',
|
||||
originalDoc: duplicatedFromDoc,
|
||||
req,
|
||||
})) || data
|
||||
},
|
||||
@@ -212,7 +188,6 @@ export const createOperation = async <
|
||||
context: req.context,
|
||||
data,
|
||||
operation: 'create',
|
||||
originalDoc: duplicatedFromDoc,
|
||||
req,
|
||||
})) || data
|
||||
}, Promise.resolve())
|
||||
@@ -225,8 +200,8 @@ export const createOperation = async <
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
doc: duplicatedFromDoc,
|
||||
docWithLocales: duplicatedFromDocWithLocales,
|
||||
doc: {},
|
||||
docWithLocales: {},
|
||||
global: null,
|
||||
operation: 'create',
|
||||
req,
|
||||
|
||||
@@ -1,26 +1,391 @@
|
||||
import type { DeepPartial } from 'ts-essentials'
|
||||
|
||||
import httpStatus from 'http-status'
|
||||
|
||||
import type { FindOneArgs } from '../../database/types.js'
|
||||
import type { CollectionSlug } from '../../index.js'
|
||||
import type { TransformCollectionWithSelect } from '../../types/index.js'
|
||||
import type { RequiredDataFromCollectionSlug, SelectFromCollectionSlug } from '../config/types.js'
|
||||
import type {
|
||||
PayloadRequest,
|
||||
PopulateType,
|
||||
SelectType,
|
||||
TransformCollectionWithSelect,
|
||||
} from '../../types/index.js'
|
||||
import type {
|
||||
Collection,
|
||||
DataFromCollectionSlug,
|
||||
SelectFromCollectionSlug,
|
||||
} from '../config/types.js'
|
||||
|
||||
import { type Arguments as CreateArguments, createOperation } from './create.js'
|
||||
import executeAccess from '../../auth/executeAccess.js'
|
||||
import { hasWhereAccessResult } from '../../auth/types.js'
|
||||
import { combineQueries } from '../../database/combineQueries.js'
|
||||
import { APIError, Forbidden, NotFound } from '../../errors/index.js'
|
||||
import { afterChange } from '../../fields/hooks/afterChange/index.js'
|
||||
import { afterRead } from '../../fields/hooks/afterRead/index.js'
|
||||
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
|
||||
import { beforeDuplicate } from '../../fields/hooks/beforeDuplicate/index.js'
|
||||
import { beforeValidate } from '../../fields/hooks/beforeValidate/index.js'
|
||||
import { generateFileData } from '../../uploads/generateFileData.js'
|
||||
import { uploadFiles } from '../../uploads/uploadFiles.js'
|
||||
import { commitTransaction } from '../../utilities/commitTransaction.js'
|
||||
import { initTransaction } from '../../utilities/initTransaction.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import sanitizeInternalFields from '../../utilities/sanitizeInternalFields.js'
|
||||
import { getLatestCollectionVersion } from '../../versions/getLatestCollectionVersion.js'
|
||||
import { saveVersion } from '../../versions/saveVersion.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
|
||||
export type Arguments<TSlug extends CollectionSlug> = {
|
||||
data?: DeepPartial<RequiredDataFromCollectionSlug<TSlug>>
|
||||
export type Arguments = {
|
||||
collection: Collection
|
||||
depth?: number
|
||||
disableTransaction?: boolean
|
||||
draft?: boolean
|
||||
id: number | string
|
||||
} & Omit<CreateArguments<TSlug>, 'data' | 'duplicateFromID'>
|
||||
overrideAccess?: boolean
|
||||
populate?: PopulateType
|
||||
req: PayloadRequest
|
||||
select?: SelectType
|
||||
showHiddenFields?: boolean
|
||||
}
|
||||
|
||||
export const duplicateOperation = async <
|
||||
TSlug extends CollectionSlug,
|
||||
TSelect extends SelectFromCollectionSlug<TSlug>,
|
||||
>(
|
||||
incomingArgs: Arguments<TSlug>,
|
||||
incomingArgs: Arguments,
|
||||
): Promise<TransformCollectionWithSelect<TSlug, TSelect>> => {
|
||||
const { id, ...args } = incomingArgs
|
||||
return createOperation({
|
||||
...args,
|
||||
data: incomingArgs?.data || {},
|
||||
duplicateFromID: id,
|
||||
})
|
||||
let args = incomingArgs
|
||||
const operation = 'create'
|
||||
|
||||
try {
|
||||
const shouldCommit = !args.disableTransaction && (await initTransaction(args.req))
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
args =
|
||||
(await hook({
|
||||
args,
|
||||
collection: args.collection.config,
|
||||
context: args.req.context,
|
||||
operation,
|
||||
req: args.req,
|
||||
})) || args
|
||||
}, Promise.resolve())
|
||||
|
||||
const {
|
||||
id,
|
||||
collection: { config: collectionConfig },
|
||||
depth,
|
||||
draft: draftArg = true,
|
||||
overrideAccess,
|
||||
populate,
|
||||
req: { fallbackLocale, locale: localeArg, payload },
|
||||
req,
|
||||
select,
|
||||
showHiddenFields,
|
||||
} = args
|
||||
|
||||
if (!id) {
|
||||
throw new APIError('Missing ID of document to duplicate.', httpStatus.BAD_REQUEST)
|
||||
}
|
||||
const shouldSaveDraft = Boolean(draftArg && collectionConfig.versions.drafts)
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Read Access
|
||||
// /////////////////////////////////////
|
||||
|
||||
const accessResults = !overrideAccess
|
||||
? await executeAccess({ id, req }, collectionConfig.access.read)
|
||||
: true
|
||||
const hasWherePolicy = hasWhereAccessResult(accessResults)
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Retrieve document
|
||||
// /////////////////////////////////////
|
||||
const findOneArgs: FindOneArgs = {
|
||||
collection: collectionConfig.slug,
|
||||
locale: req.locale,
|
||||
req,
|
||||
where: combineQueries({ id: { equals: id } }, accessResults),
|
||||
}
|
||||
|
||||
let docWithLocales = await getLatestCollectionVersion({
|
||||
id,
|
||||
config: collectionConfig,
|
||||
payload,
|
||||
query: findOneArgs,
|
||||
req,
|
||||
})
|
||||
|
||||
if (!docWithLocales && !hasWherePolicy) {
|
||||
throw new NotFound(req.t)
|
||||
}
|
||||
if (!docWithLocales && hasWherePolicy) {
|
||||
throw new Forbidden(req.t)
|
||||
}
|
||||
|
||||
// remove the createdAt timestamp and id to rely on the db to set the default it
|
||||
delete docWithLocales.createdAt
|
||||
delete docWithLocales.id
|
||||
|
||||
docWithLocales = await beforeDuplicate({
|
||||
id,
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
doc: docWithLocales,
|
||||
overrideAccess,
|
||||
req,
|
||||
})
|
||||
|
||||
// for version enabled collections, override the current status with draft, unless draft is explicitly set to false
|
||||
if (shouldSaveDraft) {
|
||||
docWithLocales._status = 'draft'
|
||||
}
|
||||
|
||||
let result
|
||||
|
||||
let originalDoc = await afterRead({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
depth: 0,
|
||||
doc: docWithLocales,
|
||||
draft: draftArg,
|
||||
fallbackLocale: null,
|
||||
global: null,
|
||||
locale: req.locale,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
const { data: newFileData, files: filesToUpload } = await generateFileData({
|
||||
collection: args.collection,
|
||||
config: req.payload.config,
|
||||
data: originalDoc,
|
||||
operation: 'create',
|
||||
overwriteExistingFiles: 'forceDisable',
|
||||
req,
|
||||
throwOnMissingFile: true,
|
||||
})
|
||||
|
||||
originalDoc = newFileData
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Create Access
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (!overrideAccess) {
|
||||
await executeAccess({ data: originalDoc, req }, collectionConfig.access.create)
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeValidate - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
let data = await beforeValidate<DeepPartial<DataFromCollectionSlug<TSlug>>>({
|
||||
id,
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data: originalDoc,
|
||||
doc: originalDoc,
|
||||
duplicate: true,
|
||||
global: null,
|
||||
operation,
|
||||
overrideAccess,
|
||||
req,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeValidate - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await collectionConfig.hooks.beforeValidate.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
data =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
operation,
|
||||
originalDoc,
|
||||
req,
|
||||
})) || result
|
||||
}, Promise.resolve())
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeChange - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await collectionConfig.hooks.beforeChange.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
data =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
operation,
|
||||
originalDoc: result,
|
||||
req,
|
||||
})) || result
|
||||
}, Promise.resolve())
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeChange - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
result = await beforeChange({
|
||||
id,
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
doc: originalDoc,
|
||||
docWithLocales,
|
||||
global: null,
|
||||
operation,
|
||||
req,
|
||||
skipValidation:
|
||||
shouldSaveDraft &&
|
||||
collectionConfig.versions.drafts &&
|
||||
!collectionConfig.versions.drafts.validate,
|
||||
})
|
||||
|
||||
// set req.locale back to the original locale
|
||||
req.locale = localeArg
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Create / Update
|
||||
// /////////////////////////////////////
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Write files to local storage
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (!collectionConfig.upload.disableLocalStorage) {
|
||||
await uploadFiles(payload, filesToUpload, req)
|
||||
}
|
||||
|
||||
let versionDoc = await payload.db.create({
|
||||
collection: collectionConfig.slug,
|
||||
data: result,
|
||||
req,
|
||||
select,
|
||||
})
|
||||
|
||||
versionDoc = sanitizeInternalFields(versionDoc)
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Create version
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (collectionConfig.versions) {
|
||||
result = await saveVersion({
|
||||
id: versionDoc.id,
|
||||
collection: collectionConfig,
|
||||
docWithLocales: versionDoc,
|
||||
draft: shouldSaveDraft,
|
||||
payload,
|
||||
req,
|
||||
})
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterRead - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
result = await afterRead({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: versionDoc,
|
||||
draft: draftArg,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale: localeArg,
|
||||
overrideAccess,
|
||||
populate,
|
||||
req,
|
||||
select,
|
||||
showHiddenFields,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterRead - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await collectionConfig.hooks.afterRead.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
result =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
doc: result,
|
||||
req,
|
||||
})) || result
|
||||
}, Promise.resolve())
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterChange - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
result = await afterChange({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data: versionDoc,
|
||||
doc: result,
|
||||
global: null,
|
||||
operation,
|
||||
previousDoc: {},
|
||||
req,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterChange - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await collectionConfig.hooks.afterChange.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
result =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
doc: result,
|
||||
operation,
|
||||
previousDoc: {},
|
||||
req,
|
||||
})) || result
|
||||
}, Promise.resolve())
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
result = await buildAfterOperation({
|
||||
args,
|
||||
collection: collectionConfig,
|
||||
operation,
|
||||
result,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Return results
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (shouldCommit) {
|
||||
await commitTransaction(req)
|
||||
}
|
||||
|
||||
return result
|
||||
} catch (error: unknown) {
|
||||
await killTransaction(args.req)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.