Compare commits

..

1 Commit

Author SHA1 Message Date
Alessio Gravili
7aaf3c24e2 feat(next): return document and list views early and stream in the rest 2024-12-08 13:47:17 -07:00
407 changed files with 13574 additions and 8913 deletions

View File

@@ -62,12 +62,8 @@ jobs:
echo "templates: ${{ steps.filter.outputs.templates }}"
lint:
# Follows same github's ci skip: [skip lint], [lint skip], [no lint]
if: >
github.event_name == 'pull_request' &&
!contains(github.event.pull_request.title, '[skip lint]') &&
!contains(github.event.pull_request.title, '[lint skip]') &&
!contains(github.event.pull_request.title, '[no lint]')
github.event_name == 'pull_request' && !contains(github.event.pull_request.title, 'no-lint') && !contains(github.event.pull_request.title, 'skip-lint')
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
@@ -285,28 +281,18 @@ jobs:
- auth-basic
- field-error-states
- fields-relationship
- fields__collections__Array
- fields
- fields__collections__Blocks
- fields__collections__Collapsible
- fields__collections__ConditionalLogic
- fields__collections__CustomID
- fields__collections__Date
- fields__collections__Email
- fields__collections__Indexed
- fields__collections__JSON
- fields__collections__Lexical__e2e__main
- fields__collections__Lexical__e2e__blocks
- fields__collections__Number
- fields__collections__Point
- fields__collections__Radio
- fields__collections__Array
- fields__collections__Relationship
- fields__collections__RichText
- fields__collections__Row
- fields__collections__Select
- fields__collections__Lexical__e2e__main
- fields__collections__Lexical__e2e__blocks
- fields__collections__Date
- fields__collections__Number
- fields__collections__Point
- fields__collections__Tabs
- fields__collections__Tabs2
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- live-preview
- localization
@@ -506,12 +492,9 @@ jobs:
needs:
- lint
- build
- build-templates
- tests-unit
- tests-int
- tests-e2e
- tests-types
- tests-type-generation
steps:
- if: ${{ always() && (contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')) }}

View File

@@ -25,24 +25,24 @@ export const MyCollection: CollectionConfig = {
The following options are available:
| Option | Description |
| -------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title. |
| Option | Description |
| -------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| **`hooks`** | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| **`useAsTitle`** | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title. |
| **`description`** | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`enableRichTextRelationship`** | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`meta`** | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](./metadata). |
| **`preview`** | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](#preview). |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`defaultColumns`** | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| **`hideAPIURL`** | Hides the "API URL" meta field while editing documents within this Collection. |
| **`enableRichTextLink`** | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`enableRichTextRelationship`** | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| **`meta`** | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](./metadata). |
| **`preview`** | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](#preview). |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`components`** | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| **`listSearchableFields`** | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| **`pagination`** | Set pagination-specific options for this Collection. [More details](#pagination). |
| **`baseListFilter`** | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
| **`listSearchableFields`** | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| **`pagination`** | Set pagination-specific options for this Collection. [More details](#pagination). |
| **`baseListFilter`** | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
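To illustrate, here is a minimal sketch that combines several of these admin options on a hypothetical `posts` Collection (the field names are placeholders):
```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    group: 'Content', // groups the nav link under "Content"
    useAsTitle: 'title', // use the title field as the document title
    defaultColumns: ['title', 'publishedAt'], // default List View columns
    description: 'Articles shown on the public blog.',
    pagination: {
      defaultLimit: 25,
    },
  },
  fields: [
    { name: 'title', type: 'text' },
    { name: 'publishedAt', type: 'date' },
  ],
}
```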
### Custom Components
@@ -108,20 +108,14 @@ export const Posts: CollectionConfig = {
}
```
The `preview` property resolves to a string that points to your front-end application with additional URL parameters. This can be an absolute URL or a relative path.
The `preview` property resolves to a string that points to your front-end application with additional URL parameters. This can be an absolute URL or a relative path. If you are using a relative path, Payload will prepend the application's origin onto it, creating a fully qualified URL.
The preview function receives two arguments:
| Argument | Description |
| --- | --- |
| **`doc`** | The Document being edited. |
| **`ctx`** | An object containing `locale`, `token`, and `req` properties. The `token` is the currently logged-in user's JWT. |
If your application requires a fully qualified URL, such as within deploying to Vercel Preview Deployments, you can use the `req` property to build this URL:
```ts
preview: (doc, { req }) => `${req.protocol}//${req.host}/${doc.slug}` // highlight-line
```
| **`ctx`** | An object containing `locale` and `token` properties. The `token` is the currently logged-in user's JWT. |
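For example, a relative-path preview function could be as simple as the following sketch (the `slug` field is hypothetical):
```ts
preview: (doc) => `/posts/${doc.slug}` // relative path; Payload prepends the app's origin
```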
<Banner type="success">
<strong>Note:</strong>

View File

@@ -25,9 +25,9 @@ export const MyGlobal: GlobalConfig = {
The following options are available:
| Option | Description |
| ----------------- | --------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| Option | Description |
| ------------- | --------------------------------------------------------------------------------------------------------------------------------- |
| **`group`** | Text used as a label for grouping Collection and Global links together in the navigation. |
| **`hidden`** | Set to true or a function, called with the current user, returning true to exclude this Global from navigation and admin routing. |
| **`components`** | Swap in your own React components to be used within this Global. [More details](#custom-components). |
| **`preview`** | Function to generate a preview URL within the Admin Panel for this Global that can point to your app. [More details](#preview). |
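As a rough sketch, a Global might combine a few of these options like so (the slug, field, and `role` check are hypothetical):
```ts
import type { GlobalConfig } from 'payload'

export const SiteSettings: GlobalConfig = {
  slug: 'site-settings',
  admin: {
    group: 'Settings', // group this Global with related nav links
    // hide from non-admins; `role` is a hypothetical field on the user
    hidden: ({ user }) => user?.role !== 'admin',
  },
  fields: [{ name: 'siteName', type: 'text' }],
}
```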

View File

@@ -184,7 +184,7 @@ export const MyGlobal: GlobalConfig = {
meta: {
// highlight-end
title: 'My Global',
description: 'The best admin panel in the world',
description: 'The best
},
},
}

View File

@@ -86,21 +86,20 @@ const config = buildConfig({
The following options are available:
| Option | Description |
|--------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](./components). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`disable`** | If set to `true`, the entire Admin Panel will be disabled. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`suppressHydrationWarning`** | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
| Option | Description |
|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](./components). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`disable`** | If set to `true`, the entire Admin Panel will be disabled. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
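A condensed sketch of how several of these root-level options fit together in a config (the `users` slug and metadata values are placeholders, and required options such as `db` and `secret` are omitted):
```ts
import { buildConfig } from 'payload'

export default buildConfig({
  admin: {
    user: 'users', // slug of the auth-enabled Collection allowed to log in
    avatar: 'gravatar',
    dateFormat: 'MMMM do yyyy, h:mm a', // any valid date-fns pattern
    theme: 'all',
    meta: {
      titleSuffix: '- My App', // hypothetical metadata override
    },
  },
  // collections, editor, db, secret, etc. omitted for brevity
})
```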
<Banner type="success">
<strong>Reminder:</strong>

View File

@@ -98,7 +98,7 @@ From there, you are ready to make updates to your project. When you are ready to
Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add this into your project. To do so, add the plugin to your Payload Config:
`pnpm add @payloadcms/payload-cloud`
`yarn add @payloadcms/payload-cloud`
```js
import { payloadCloudPlugin } from '@payloadcms/payload-cloud'

View File

@@ -51,44 +51,12 @@ export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
## Using Transactions
When migrations are run, each migration is performed in a new [transaction](/docs/database/transactions) for you. All
When migrations are run, each migration is performed in a new [transactions](/docs/database/transactions) for you. All
you need to do is pass the `req` object to any [local API](/docs/local-api/overview) or direct database calls, such as
`payload.db.updateMany()`, to make database changes inside the transaction. Assuming no errors were thrown, the transaction is committed
after your `up` or `down` function runs. If the migration errors at any point or fails to commit, it is caught and the
transaction gets aborted. This way no change is made to the database if the migration fails.
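For example, a minimal sketch of an `up` function that keeps a Local API write inside the migration's transaction by forwarding `req` (the `posts` collection and `status` field are hypothetical):
```ts
import type { MigrateUpArgs } from '@payloadcms/db-mongodb'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // Forwarding `req` keeps this write inside the migration's transaction
  await payload.update({
    collection: 'posts',
    where: { status: { exists: false } },
    data: { status: 'draft' },
    req,
  })
}
```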
### Using database directly with the transaction
Additionally, you can bypass Payload's layer entirely and perform operations directly on your underlying database within the active transaction:
### MongoDB:
```ts
import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
const posts = await payload.db.collections.posts.collection.find({ session }).toArray()
}
```
### Postgres:
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
const { rows: posts } = await db.execute(sql`SELECT * from posts`)
}
```
### SQLite:
In SQLite, transactions are disabled by default. [More](./transactions).
```ts
import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
const { rows: posts } = await db.run(sql`SELECT * from posts`)
}
```
## Migrations Directory
Each DB adapter has an optional property `migrationDir` where you can override where you want your migrations to be

View File

@@ -16,12 +16,6 @@ By default, Payload will use transactions for all data changing operations, as l
MongoDB requires a connection to a replicaset in order to make use of transactions.
</Banner>
<Banner type="info">
<strong>Note:</strong>
<br />
Transactions in SQLite are disabled by default. You need to pass `transactionOptions: {}` to enable them.
</Banner>
The initial request made to Payload will begin a new transaction and attach it to the `req.transactionID`. If you have a `hook` that interacts with the database, you can opt in to using the same transaction by passing the `req` in the arguments. For example:
```ts

View File

@@ -6,13 +6,7 @@ desc: The Rich Text field allows dynamic content to be written through the Admin
keywords: rich text, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, nextjs
---
The Rich Text Field lets editors write and format dynamic content in a familiar interface.
The content is saved as JSON in the database and can be converted to HTML or any other format needed.
Consistent with Payload's goal of making you learn as little of Payload as possible, customizing
and using the Rich Text Editor does not involve learning how to develop for a Payload rich text editor.
Instead, you can invest your time and effort into learning the underlying open-source tools that will allow
you to apply your learnings elsewhere as well.
The Rich Text Field is a powerful way to allow editors to write dynamic content. The content is saved as JSON in the database and can be converted into any format, including HTML, that you need.
<LightDarkImage
srcLight="https://payloadcms.com/images/docs/fields/richtext.png"
@@ -21,6 +15,23 @@ you to apply your learnings elsewhere as well.
caption="Admin Panel screenshot of a Rich Text field"
/>
Payload's rich text field is built on an "adapter pattern" which lets you specify which rich text editor you'd like to use.
Right now, Payload is officially supporting two rich text editors:
1. [SlateJS](/docs/rich-text/slate) - legacy, backwards-compatible with 1.0
2. [Lexical](/docs/lexical/overview) - recommended
<Banner type="success">
<strong>
Consistent with Payload's goal of making you learn as little of Payload as possible, customizing
and using the Rich Text Editor does not involve learning how to develop for a{' '}<em>Payload</em>{' '}rich text editor.
</strong>
Instead, you can invest your time and effort into learning the underlying open-source tools that
will allow you to apply your learnings elsewhere as well.
</Banner>
## Config Options
| Option | Description |
@@ -36,7 +47,7 @@ you to apply your learnings elsewhere as well.
| **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. |
| **`required`** | Require this field to have a value. |
| **`admin`** | Admin-specific configuration. [More details](#admin-options). |
| **`editor`** | Customize or override the rich text editor. [More details](/docs/rich-text/overview). |
| **`editor`** | Override the rich text editor specified in your base configuration for this field. |
| **`custom`** | Extension point for adding custom data (e.g. for plugins) |
| **`typescriptSchema`** | Override field type generation with providing a JSON schema |
| **`virtual`** | Provide `true` to disable field in the database. See [Virtual Fields](https://payloadcms.com/blog/learn-how-virtual-fields-can-help-solve-common-cms-challenges) |
@@ -68,5 +79,4 @@ The Rich Text Field inherits all of the default options from the base [Field Adm
## Editor-specific Options
For a ton more editor-specific options, including how to build custom rich text elements directly into your editor,
take a look at the [rich text editor documentation](/docs/rich-text/overview).
For a ton more editor-specific options, including how to build custom rich text elements directly into your editor, take a look at either the [Slate docs](/docs/rich-text/slate) or the [Lexical docs](/docs/lexical/overview) depending on which editor you're using.

View File

@@ -10,9 +10,9 @@ keywords: documentation, getting started, guide, Content Management System, cms,
Payload requires the following software:
- Any JavaScript package manager (pnpm, npm, or yarn - pnpm is preferred)
- Any JavaScript package manager (Yarn, NPM, or pnpm - pnpm is preferred)
- Node.js version 20.9.0+
- Any [compatible database](/docs/database/overview) (MongoDB, Postgres or SQLite)
- Any [compatible database](/docs/database/overview) (MongoDB, Postgres or Sqlite)
<Banner type="warning">
<strong>Important:</strong>
@@ -49,7 +49,7 @@ pnpm i payload @payloadcms/next @payloadcms/richtext-lexical sharp graphql
<Banner type="warning">
<strong>Note:</strong>
Swap out `pnpm` for your package manager. If you are using npm, you might need to install using legacy peer deps: `npm i --legacy-peer-deps`.
Swap out `pnpm` for your package manager. If you are using NPM, you might need to install using legacy peer deps: `npm i --legacy-peer-deps`.
</Banner>
Next, install a [Database Adapter](/docs/database/overview). Payload requires a Database Adapter to establish a database connection. Payload works with all types of databases, but the most common are MongoDB and Postgres.
@@ -181,6 +181,6 @@ Once you have a Payload Config, update your `tsconfig` to include a `path` that
#### 5. Fire it up!
After you've reached this point, it's time to boot up Payload. Start your project in your application's folder to get going. By default, the Next.js dev script is `pnpm dev` (or `npm run dev` if using npm).
After you've reached this point, it's time to boot up Payload. Start your project in your application's folder to get going. By default, the Next.js dev script is `pnpm dev` (or `npm run dev` if using NPM).
After it starts, you can go to `http://localhost:3000/admin` to create your first Payload user!

View File

@@ -7,8 +7,8 @@ keywords: documentation, getting started, guide, Content Management System, cms,
---
<YouTube
id="ftohATkHBi0"
title="Introduction to Payload — The open-source Next.js backend"
id="In_lFhzmbME"
title="Payload Introduction - Closing the Gap Between Headless CMS and Application Frameworks"
/>
**Payload is the Next.js fullstack framework.** Write a Payload Config and instantly get:

View File

@@ -62,7 +62,7 @@ type Collection1 {
The above example outputs all your definitions to a file relative from your payload config as `./graphql/schema.graphql`. By default, the file will be output to your current working directory as `schema.graphql`.
### Adding an npm script
### Adding an NPM script
<Banner type="warning">
<strong>Important</strong>
@@ -72,7 +72,7 @@ The above example outputs all your definitions to a file relative from your payl
Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable.
If this applies to you, create an npm script to make generating types easier:
If this applies to you, create an NPM script to make generating types easier:
```json
// package.json

View File

@@ -28,8 +28,6 @@ To pass data between hooks, you can assign values to context in an earlier hook
For example:
```ts
import type { CollectionConfig } from 'payload'
const Customer: CollectionConfig = {
slug: 'customers',
hooks: {
@@ -45,6 +43,7 @@ const Customer: CollectionConfig = {
},
],
afterChange: [
async ({ context, doc, req }) => {
// use context.customerData without needing to fetch it again
if (context.customerData.contacted === false) {
@@ -66,8 +65,6 @@ Let's say you have an `afterChange` hook, and you want to do a calculation insid
Bad example:
```ts
import type { CollectionConfig } from 'payload'
const Customer: CollectionConfig = {
slug: 'customers',
hooks: {
@@ -95,8 +92,6 @@ Instead of the above, we need to tell the `afterChange` hook to not run again if
Fixed example:
```ts
import type { CollectionConfig } from 'payload'
const MyCollection: CollectionConfig = {
slug: 'slug',
hooks: {
@@ -130,7 +125,7 @@ const MyCollection: CollectionConfig = {
The default TypeScript interface for `context` is `{ [key: string]: unknown }`. If you prefer a more strict typing in your project or when authoring plugins for others, you can override this using the `declare` syntax.
This is known as "type augmentation", a TypeScript feature which allows us to add types to existing types. Simply put this in any `.ts` or `.d.ts` file:
This is known as "type augmentation", a TypeScript feature which allows us to add types to existing objects. Simply put this in any `.ts` or `.d.ts` file:
```ts
import { RequestContext as OriginalRequestContext } from 'payload'

View File

@@ -37,7 +37,7 @@ Root Hooks are not associated with any specific Collection, Global, or Field. Th
To add Root Hooks, use the `hooks` property in your [Payload Config](/docs/configuration/config):
```ts
import { buildConfig } from 'payload'
import { buildConfig } from 'payload'
export default buildConfig({
// ...
@@ -60,7 +60,7 @@ The following options are available:
The `afterError` Hook is triggered when an error occurs in the Payload application. This can be useful for logging errors to a third-party service, sending an email to the development team, logging the error to Sentry or DataDog, etc. The output can be used to transform the result object / status code.
```ts
import { buildConfig } from 'payload'
import { buildConfig } from 'payload'
export default buildConfig({
// ...

View File

@@ -98,24 +98,11 @@ After the project is deployed to Vercel, the Vercel Cron job will automatically
If you want to process jobs programmatically from your server-side code, you can use the Local API:
**Run all jobs:**
```ts
const results = await payload.jobs.run()
// You can customize the queue name and limit by passing them as arguments:
await payload.jobs.run({ queue: 'nightly', limit: 100 })
// You can provide a where clause to filter the jobs that should be run:
await payload.jobs.run({ where: { 'input.message': { equals: 'secret' } } })
```
**Run a single job:**
```ts
const results = await payload.jobs.runByID({
id: myJobID
})
```
#### Bin script

View File

@@ -10,24 +10,20 @@ Lexical saves data in JSON - this is great for storage and flexibility and allow
## Lexical => JSX
If your frontend uses React, converting Lexical to JSX is the recommended way to render rich text content. Import the `RichText` component from `@payloadcms/richtext-lexical/react` and pass the Lexical content to it:
If you have a React-based frontend, converting lexical to JSX is the recommended way to render rich text content in your frontend. To do that, import the `RichText` component from `@payloadcms/richtext-lexical/react` and pass the lexical content to it:
```tsx
import React from 'react'
import { RichText } from '@payloadcms/richtext-lexical/react'
import { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
export const MyComponent = ({ lexicalData }) => {
return (
<RichText data={data} />
<RichText data={lexicalData} />
)
}
```
The `RichText` component includes built-in serializers for common Lexical nodes but allows customization through the `converters` prop.
In our website template [you have an example](https://github.com/payloadcms/payload/blob/main/templates/website/src/components/RichText/index.tsx) of how to use `converters` to render custom blocks.
The `RichText` component will come with the most common serializers built-in, though you can also pass in your own serializers if you need to.
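As a rough sketch of that pattern, the converters can be extended like below. The `JSXConvertersFunction` type and the `banner` block slug are assumptions here; see the linked template for a complete, current version:
```tsx
import React from 'react'
import { RichText, type JSXConvertersFunction } from '@payloadcms/richtext-lexical/react'

// Spread the default converters, then add a renderer for a custom block slug
const converters: JSXConvertersFunction = ({ defaultConverters }) => ({
  ...defaultConverters,
  blocks: {
    banner: ({ node }) => <div className="banner">{node.fields.title}</div>,
  },
})

export const MyComponent = ({ data }) => (
  <RichText data={data} converters={converters} />
)
```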
<Banner type="default">
The JSX converter expects the input data to be fully populated. When fetching data, make sure the `depth` setting is high enough so that lexical nodes such as uploads are populated.

View File

@@ -1,7 +1,7 @@
---
title: Lexical Migration
label: Migration
order: 90
order: 30
desc: Migration from slate and payload-plugin-lexical to lexical
keywords: lexical, rich text, editor, headless cms, migrate, migration
---

292
docs/lexical/overview.mdx Normal file
View File

@@ -0,0 +1,292 @@
---
title: Lexical Overview
label: Overview
order: 10
desc: Built by Meta, Lexical is an incredibly powerful rich text editor, and it works beautifully within Payload.
keywords: lexical, rich text, editor, headless cms
---
One of Payload's goals is to build the best rich text editor experience that we possibly can. We want to combine the beauty and polish of the Medium editing experience with the strength and features of the Notion editor - all in one place.
Classically, we've used SlateJS to work toward this goal, but building custom elements into Slate has proven to be more difficult than we'd like, and we've been keeping our options open.
Lexical is extremely impressive and trivializes a lot of the hard parts of building new elements into a rich text editor. It has a few distinct advantages over Slate, including the following:
1. A "/" menu, which allows editors to easily add new elements while never leaving their keyboard
1. A "hover" toolbar that pops up if you select text
1. It supports Payload blocks natively, directly within your rich text editor
1. Custom elements, called "features", are much easier to build in Lexical vs. Slate
To use the Lexical editor, first you need to install it:
```
npm install @payloadcms/richtext-lexical
```
Once you have it installed, you can pass it to your top-level Payload Config as follows:
```ts
import { buildConfig } from 'payload'
import { lexicalEditor } from '@payloadcms/richtext-lexical'
export default buildConfig({
collections: [
// your collections here
],
// Pass the Lexical editor to the root config
editor: lexicalEditor({}),
})
```
You can also override Lexical settings on a field-by-field basis as follows:
```ts
import type { CollectionConfig } from 'payload'
import { lexicalEditor } from '@payloadcms/richtext-lexical'
export const Pages: CollectionConfig = {
slug: 'pages',
fields: [
{
name: 'content',
type: 'richText',
// Pass the Lexical editor here and override base settings as necessary
editor: lexicalEditor({}),
},
],
}
```
## Extending the lexical editor with Features
Lexical has been designed with extensibility in mind. Whether you're aiming to introduce new functionalities or tweak the existing ones, Lexical makes it seamless for you to bring those changes to life.
### Features: The Building Blocks
At the heart of Lexical's customization potential are "features". While Lexical ships with a set of default features we believe are essential for most use cases, the true power lies in your ability to redefine, expand, or prune these as needed.
If you remove all the default features, you're left with a blank editor. You can then add in only the features you need, or you can build your own custom features from scratch.
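For example, a minimal sketch that replaces the defaults with an explicit handful of features (feature names follow the overview table further below):
```ts
import {
  BoldTextFeature,
  LinkFeature,
  ParagraphFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

{
  // Passing an explicit array replaces the default feature set entirely
  editor: lexicalEditor({
    features: [ParagraphFeature(), BoldTextFeature(), LinkFeature()],
  })
}
```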
### Integrating New Features
To weave in your custom features, utilize the `features` prop when initializing the Lexical Editor. Here's a basic example of how this is done:
```ts
import {
BlocksFeature,
LinkFeature,
UploadFeature,
lexicalEditor,
} from '@payloadcms/richtext-lexical'
import { Banner } from '../blocks/Banner'
import { CallToAction } from '../blocks/CallToAction'
{
editor: lexicalEditor({
features: ({ defaultFeatures, rootFeatures }) => [
...defaultFeatures,
LinkFeature({
// Example showing how to customize the built-in fields
// of the Link feature
fields: ({ defaultFields }) => [
...defaultFields,
{
name: 'rel',
label: 'Rel Attribute',
type: 'select',
hasMany: true,
options: ['noopener', 'noreferrer', 'nofollow'],
admin: {
description:
'The rel attribute defines the relationship between a linked resource and the current document. This is a custom link field.',
},
},
],
}),
UploadFeature({
collections: {
uploads: {
// Example showing how to customize the built-in fields
// of the Upload feature
fields: [
{
name: 'caption',
type: 'richText',
editor: lexicalEditor(),
},
],
},
},
}),
// This is incredibly powerful. You can re-use your Payload blocks
// directly in the Lexical editor as follows:
BlocksFeature({
blocks: [Banner, CallToAction],
}),
],
})
}
```
`features` can be either an array of features or a function returning an array of features. The function provides the following props:
| Prop | Description |
|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`defaultFeatures`** | This opinionated array contains all "recommended" default features. You can see which features are included in the default features in the table below. |
| **`rootFeatures`** | This array contains all features that are enabled in the root richText editor (the one defined in the payload.config.ts). If this field is the root richText editor, or if the root richText editor is not a lexical editor, this array will be empty. |
## Features overview
Here's an overview of all the included features:
| Feature Name | Included by default | Description |
|---------------------------------|---------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`BoldTextFeature`** | Yes | Handles the bold text format |
| **`ItalicTextFeature`** | Yes | Handles the italic text format |
| **`UnderlineTextFeature`** | Yes | Handles the underline text format |
| **`StrikethroughTextFeature`** | Yes | Handles the strikethrough text format |
| **`SubscriptTextFeature`** | Yes | Handles the subscript text format |
| **`SuperscriptTextFeature`** | Yes | Handles the superscript text format |
| **`InlineCodeTextFeature`** | Yes | Handles the inline-code text format |
| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs |
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
| **`AlignFeature`** | Yes | Allows you to align text left, centered and right |
| **`IndentFeature`** | Yes | Allows you to indent text with the tab key |
| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) |
| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) |
| **`CheckListFeature`** | Yes | Adds checklists |
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
| **`BlockQuoteFeature`** | Yes | Allows you to create block-level quotes |
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `<hr>` element |
| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](/docs/fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
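For instance, here is a sketch of keeping the defaults while also enabling the fixed toolbar on a field:
```ts
import type { CollectionConfig } from 'payload'
import { FixedToolbarFeature, lexicalEditor } from '@payloadcms/richtext-lexical'

export const Pages: CollectionConfig = {
  slug: 'pages',
  fields: [
    {
      name: 'content',
      type: 'richText',
      // Keep the default features and pin the classic toolbar to the top as well
      editor: lexicalEditor({
        features: ({ defaultFeatures }) => [...defaultFeatures, FixedToolbarFeature()],
      }),
    },
  ],
}
```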
## Creating your own, custom Feature
You can find more information about creating your own feature in our [building custom feature docs](/docs/lexical/building-custom-features).
## TypeScript
Every single piece of saved data is 100% fully-typed within lexical. It provides a type for every single node, which can be imported from `@payloadcms/richtext-lexical` - each type is prefixed with `Serialized`, e.g. `SerializedUploadNode`.
In order to fully type the entire editor JSON, you can use our `TypedEditorState` helper type, which accepts a union of all possible node types as a generic. The reason we do not provide a type which already contains all possible node types is because the possible node types depend on which features you have enabled in your editor. Here is an example:
```ts
import type {
SerializedAutoLinkNode,
SerializedBlockNode,
SerializedHorizontalRuleNode,
SerializedLinkNode,
SerializedListItemNode,
SerializedListNode,
SerializedParagraphNode,
SerializedQuoteNode,
SerializedRelationshipNode,
SerializedTextNode,
SerializedUploadNode,
TypedEditorState,
SerializedHeadingNode,
} from '@payloadcms/richtext-lexical'
const editorState: TypedEditorState<
| SerializedAutoLinkNode
| SerializedBlockNode
| SerializedHorizontalRuleNode
| SerializedLinkNode
| SerializedListItemNode
| SerializedListNode
| SerializedParagraphNode
| SerializedQuoteNode
| SerializedRelationshipNode
| SerializedTextNode
| SerializedUploadNode
| SerializedHeadingNode
> = {
root: {
type: 'root',
direction: 'ltr',
format: '',
indent: 0,
version: 1,
children: [
{
children: [
{
detail: 0,
format: 0,
mode: 'normal',
style: '',
text: 'Some text. Every property here is fully-typed',
type: 'text',
version: 1,
},
],
direction: 'ltr',
format: '',
indent: 0,
type: 'paragraph',
textFormat: 0,
version: 1,
},
],
},
}
```
Alternatively, you can use the `DefaultTypedEditorState` type, which includes all types for all nodes included in the `defaultFeatures`:
```ts
import type {
DefaultTypedEditorState
} from '@payloadcms/richtext-lexical'
const editorState: DefaultTypedEditorState = {
root: {
type: 'root',
direction: 'ltr',
format: '',
indent: 0,
version: 1,
children: [
{
children: [
{
detail: 0,
format: 0,
mode: 'normal',
style: '',
text: 'Some text. Every property here is fully-typed',
type: 'text',
version: 1,
},
],
direction: 'ltr',
format: '',
indent: 0,
type: 'paragraph',
textFormat: 0,
version: 1,
},
],
},
}
```
Just like `TypedEditorState`, the `DefaultTypedEditorState` also accepts an optional node type union as a generic. Here, this would **add** the specified node types to the default ones. Example: `DefaultTypedEditorState<SerializedBlockNode | YourCustomSerializedNode>`.
This is a type-safe representation of the editor state. Looking at the auto-suggestions of `type` it will show you all the possible node types you can use.
Make sure to only use types exported from `@payloadcms/richtext-lexical`, not from the lexical core packages. We only have control over types we export and can guarantee that those are correct, even though lexical core may export types with identical names.
### Automatic type generation
Lexical does not yet generate accurate type definitions for your richText fields - this will be improved in the future. Currently, it only outputs the rough shape of the editor JSON, which you can enhance using type assertions.
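A hedged sketch of that workaround, asserting the loosely generated field type to the stricter editor-state type (the `Page` type and `content` field are hypothetical):
```ts
import type { DefaultTypedEditorState } from '@payloadcms/richtext-lexical'
import type { Page } from './payload-types' // hypothetical generated types

const countTopLevelNodes = (page: Page): number => {
  // The generated type is loose for now, so narrow it with an assertion
  const content = page.content as unknown as DefaultTypedEditorState
  return content.root.children.length
}
```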

View File

@@ -54,6 +54,8 @@ _\* An asterisk denotes that a property is required._
The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.
This can be an absolute URL or a relative path. If you are using a relative path, Payload will prepend the application's origin onto it, creating a fully qualified URL. This is useful for Vercel preview deployments, for example, where URLs are not known ahead of time.
To set the URL, use the `admin.livePreview.url` property in your [Payload Config](../configuration/overview):
```ts
@@ -105,16 +107,8 @@ The following arguments are provided to the `url` function:
| Path | Description |
| ------------------ | ----------------------------------------------------------------------------------------------------------------- |
| **`data`** | The data of the Document being edited. This includes changes that have not yet been saved. |
| **`documentInfo`** | Information about the Document being edited like collection slug. [More details](../admin/hooks#usedocumentinfo). |
| **`locale`** | The locale currently being edited (if applicable). [More details](../configuration/localization). |
| **`collectionConfig`** | The Collection Admin Config of the Document being edited. [More details](../admin/collections). |
| **`globalConfig`** | The Global Admin Config of the Document being edited. [More details](../admin/globals). |
| **`req`** | The Payload Request object. |
If your application requires a fully qualified URL, such as within deploying to Vercel Preview Deployments, you can use the `req` property to build this URL:
```ts
url: (doc, { req }) => `${req.protocol}//${req.host}/${doc.slug}` // highlight-line
```
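For a relative path, a minimal sketch could look like this (the `slug` field is hypothetical):
```ts
url: ({ data }) => `/${data.slug}` // relative path; Payload prepends the app's origin
```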
### Breakpoints

View File

@@ -131,9 +131,6 @@ const post = await payload.create({
// Alternatively, you can directly pass a File,
// if file is provided, filePath will be omitted
file: uploadedFile,
// If you want to create a document that is a duplicate of another document
duplicateFromID: 'document-id-to-duplicate',
})
```
@@ -310,13 +307,13 @@ available:
### Auth
```js
// If you're using Next.js, you'll have to import headers from next/headers, like so:
// If you're using nextjs, you'll have to import headers from next/headers, like so:
// import { headers as nextHeaders } from 'next/headers'
// you'll also have to await headers inside your function, or component, like so:
// const headers = await nextHeaders()
// If you're using payload outside of Next.js, you'll have to provide headers accordingly.
// If you're using payload outside of NextJS, you'll have to provide headers accordingly.
// result will be formatted as follows:
// {

View File

@@ -40,7 +40,7 @@ Payload 3.0 requires a set of auto-generated files that you will need to bring i
For more details, see the [Documentation](https://payloadcms.com/docs/getting-started/installation).
1. **Install new dependencies of Payload, Next.js and React**:
1. **Install new dependencies of payload, next.js and react**:
Refer to the package.json file made in the create-payload-app, including peerDependencies, devDependencies, and dependencies. The core package and plugins require all versions to be synced. Previously, on 2.x it was possible to be running the latest version of payload 2.x with an older version of db-mongodb for example. This is no longer the case.
@@ -412,7 +412,7 @@ For more details, see the [Documentation](https://payloadcms.com/docs/getting-st
}
})
```
1. The `./src/public` directory is now located directly at root level `./public` [see Next.js docs for details](https://nextjs.org/docs/pages/building-your-application/optimizing/static-assets)
1. The `./src/public` directory is now located directly at root level `./public` [see nextJS docs for details](https://nextjs.org/docs/pages/building-your-application/optimizing/static-assets)
## Custom Components

View File

@@ -84,7 +84,7 @@ cd dev
npx create-payload-app@latest
```
If you&apos;re using the plugin template, the dev folder is built out for you and the `samplePlugin` has already been installed in `dev/payload.config.ts`.
If you&apos;re using the plugin template, the dev folder is built out for you and the `samplePlugin` has already been installed in `dev/payload.config()`.
```
plugins: [
@@ -95,11 +95,11 @@ If you&apos;re using the plugin template, the dev folder is built out for you an
]
```
You can add to the `dev/payload.config.ts` and build out the dev project as needed to test your plugin.
You can add to the `dev/payload.config` and build out the dev project as needed to test your plugin.
When you&apos;re ready to start development, navigate into this folder with `cd dev`
And then start the project with `pnpm dev` and pull up `http://localhost:3000` in your browser.
And then start the project with `yarn dev` and pull up `http://localhost:3000` in your browser.
## Testing
@@ -112,7 +112,7 @@ Jest organizes tests into test suites and cases. We recommend creating tests bas
The plugin template provides a stubbed out test suite at `dev/plugin.spec.ts` which is ready to go - just add in your own test conditions and you&apos;re all set!
```
let payload: Payload
import payload from 'payload'
describe('Plugin tests', () => {
// Example test to check for seeded data
@@ -245,7 +245,7 @@ config.hooks = {
```
### Extending functions
Function properties cannot use spread syntax. The way to extend them is to execute the existing function if it exists and then run your additional functionality.
Function properties cannot use spread syntax. The way to extend them is to execute the existing function if it exists and then run your additional functionality.
Here is an example extending the `onInit` property:
@@ -285,11 +285,11 @@ For a better user experience, provide a way to disable the plugin without uninst
### Include tests in your GitHub CI workflow
If you&apos;ve configured tests for your package, integrate them into your workflow to run the tests each time you commit to the plugin repository. Learn more about [how to configure tests into your GitHub CI workflow.](https://docs.github.com/en/actions/use-cases-and-examples/building-and-testing/building-and-testing-nodejs)
If you&apos;ve configured tests for your package, integrate them into your workflow to run the tests each time you commit to the plugin repository. Learn more about [how to configure tests into your GitHub CI workflow.](https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs)
### Publish your finished plugin to npm
### Publish your finished plugin to NPM
The best way to share and allow others to use your plugin once it is complete is to publish an npm package. This process is straightforward and well documented, find out more about [creating and publishing a npm package here](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages/).
The best way to share and allow others to use your plugin once it is complete is to publish an NPM package. This process is straightforward and well documented, find out more about [creating and publishing a NPM package here](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages/).
### Add payload-plugin topic tag

View File

@@ -6,7 +6,7 @@ desc: Easily build and manage forms from the Admin Panel. Send dynamic, personal
keywords: plugins, plugin, form, forms, form builder
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-form-builder)](https://www.npmjs.com/package/@payloadcms/plugin-form-builder)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-form-builder)](https://www.npmjs.com/package/@payloadcms/plugin-form-builder)
This plugin allows you to build and manage custom forms directly within the [Admin Panel](../admin/overview). Instead of hard-coding a new form into your website or application every time you need one, admins can simply define the schema for each form they need on-the-fly, and your front-end can map over this schema, render its own UI components, and match your brand's design system.
@@ -33,7 +33,7 @@ Forms can be as simple or complex as you need, from a basic contact form, to a m
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-form-builder

View File

@@ -6,7 +6,7 @@ desc: Nested documents in a parent, child, and sibling relationship.
keywords: plugins, nested, documents, parent, child, sibling, relationship
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-nested-docs)](https://www.npmjs.com/package/@payloadcms/plugin-nested-docs)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-nested-docs)](https://www.npmjs.com/package/@payloadcms/plugin-nested-docs)
This plugin allows you to easily nest the documents of your application inside of one another. It does so by adding a
new `parent` field onto each of your documents that, when selected, attaches itself to the parent's tree. When you edit
@@ -44,7 +44,8 @@ but different parents.
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com),
or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-nested-docs

View File

@@ -6,7 +6,7 @@ desc: Automatically create redirects for your Payload application
keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-redirects)](https://www.npmjs.com/package/@payloadcms/plugin-redirects)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-redirects)](https://www.npmjs.com/package/@payloadcms/plugin-redirects)
This plugin allows you to easily manage redirects for your application from within your [Admin Panel](../admin/overview). It does so by adding a `redirects` collection to your config that allows you specify a redirect from one URL to another. Your front-end application can use this data to automatically redirect users to the correct page using proper HTTP status codes. This is useful for SEO, indexing, and search engine ranking when re-platforming or when changing your URL structure.
@@ -29,7 +29,7 @@ For example, if you have a page at `/about` and you want to change it to `/about
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-redirects

View File

@@ -6,7 +6,7 @@ desc: Generates records of your documents that are extremely fast to search on.
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-search)](https://www.npmjs.com/package/@payloadcms/plugin-search)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-search)](https://www.npmjs.com/package/@payloadcms/plugin-search)
This plugin generates records of your documents that are extremely fast to search on. It does so by creating a new `search` collection that is indexed in the database then saving a static copy of each of your documents using only search-critical data. Search records are automatically created, synced, and deleted behind-the-scenes as you manage your application's documents.
@@ -37,7 +37,7 @@ This plugin is a great way to implement a fast, immersive search experience such
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-search

View File

@@ -6,7 +6,7 @@ desc: Integrate Sentry error tracking into your Payload application
keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-sentry)](https://www.npmjs.com/package/@payloadcms/plugin-sentry)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-sentry)](https://www.npmjs.com/package/@payloadcms/plugin-sentry)
This plugin allows you to integrate [Sentry](https://sentry.io/) seamlessly with your [Payload](https://github.com/payloadcms/payload) application.
@@ -36,7 +36,7 @@ This multi-faceted software offers a range of features that will help you manage
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-sentry

View File

@@ -6,7 +6,7 @@ desc: Manage SEO metadata from your Payload admin
keywords: plugins, seo, meta, search, engine, ranking, google
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-seo)](https://www.npmjs.com/package/@payloadcms/plugin-seo)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-seo)](https://www.npmjs.com/package/@payloadcms/plugin-seo)
This plugin allows you to easily manage SEO metadata for your application from within your [Admin Panel](../admin/overview). When enabled on your [Collections](../configuration/collections) and [Globals](../configuration/globals), it adds a new `meta` field group containing `title`, `description`, and `image` by default. Your front-end application can then use this data to render meta tags however your application requires. For example, you would inject a `title` tag into the `<head>` of your page using `meta.title` as its content.
@@ -34,7 +34,7 @@ To help you visualize what your page might look like in a search engine, a previ
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-seo
@@ -277,7 +277,7 @@ Tip: You can override the length rules by changing the minLength and maxLength p
All types can be directly imported:
```ts
import type {
import {
PluginConfig,
GenerateTitle,
GenerateDescription
@@ -288,9 +288,9 @@ import type {
You can then pass the collections from your generated Payload types into the generation types, for example:
```ts
import type { Page } from './payload-types.ts';
import { Page } from './payload-types.ts';
import type { GenerateTitle } from '@payloadcms/plugin-seo/types';
import { GenerateTitle } from '@payloadcms/plugin-seo/types';
const generateTitle: GenerateTitle<Page> = async ({ doc, locale }) => {
return `Website.com — ${doc?.title}`

View File

@@ -6,7 +6,7 @@ desc: Easily accept payments with Stripe
keywords: plugins, stripe, payments, ecommerce
---
[![npm](https://img.shields.io/npm/v/@payloadcms/plugin-stripe)](https://www.npmjs.com/package/@payloadcms/plugin-stripe)
[![NPM](https://img.shields.io/npm/v/@payloadcms/plugin-stripe)](https://www.npmjs.com/package/@payloadcms/plugin-stripe)
With this plugin you can easily integrate [Stripe](https://stripe.com) into Payload. Simply provide your Stripe credentials and this plugin will open up a two-way communication channel between the two platforms. This enables you to easily sync data back and forth, as well as proxy the Stripe REST API through Payload's [Access Control](../access-control/overview). Use this plugin to completely offload billing to Stripe and retain full control over your application's data.
@@ -36,7 +36,7 @@ The beauty of this plugin is the entirety of your application's content and busi
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
Install the plugin using any JavaScript package manager like [Yarn](https://yarnpkg.com), [NPM](https://npmjs.com), or [PNPM](https://pnpm.io):
```bash
pnpm add @payloadcms/plugin-stripe

View File

@@ -43,9 +43,7 @@ But with a `depth` of `1`, the response might look like this:
To specify depth in the [Local API](../local-api/overview), you can use the `depth` option in your query:
```ts
import type { Payload } from 'payload'
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
depth: 2, // highlight-line


@@ -19,9 +19,7 @@ Each of these APIs share the same underlying querying language, and fully suppor
To query your Documents, you can send any number of [Operators](#operators) through your request:
```ts
import type { Where } from 'payload'
const query: Where = {
const query = {
color: {
equals: 'blue',
},
@@ -69,9 +67,7 @@ In addition to defining simple queries, you can join multiple queries together u
To join queries, use the `and` or `or` keys in your query object:
```ts
import type { Where } from 'payload'
const query: Where = {
const query = {
or: [ // highlight-line
{
color: {
@@ -103,9 +99,7 @@ Written in plain English, if the above query were passed to a `find` operation,
When working with nested properties, which can happen when using relational fields, you can use dot notation to access the nested property. For example, take a `Song` collection that has an `artists` field related to an `Artists` collection via `name: 'artists'`. You can access a property within the `Artists` collection like so:
```js
import type { Where } from 'payload'
const query: Where = {
const query = {
'artists.featured': {
// nested property name to filter on
exists: true, // operator to use and boolean value that needs to be true
@@ -122,9 +116,7 @@ Writing queries in Payload is simple and consistent across all APIs, with only m
The [Local API](../local-api/overview) supports the `find` operation that accepts a raw query object:
```ts
import type { Payload } from 'payload'
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
where: {
@@ -165,20 +157,19 @@ For this reason, we recommend to use the extremely helpful and ubiquitous [`qs-e
```ts
import { stringify } from 'qs-esm'
import type { Where } from 'payload'
const query: Where = {
const query = {
color: {
equals: 'mint',
},
// This query could be much more complex
// and qs-esm would handle it beautifully
// and QS would handle it beautifully
}
const getPosts = async () => {
const stringifiedQuery = stringify(
{
where: query, // ensure that `qs-esm` adds the `where` property, too!
where: query, // ensure that `qs` adds the `where` property, too!
},
{ addQueryPrefix: true },
)


@@ -15,10 +15,8 @@ This is where Payload's `select` feature comes in. Here, you can define exactly
To specify `select` in the [Local API](../local-api/overview), you can use the `select` option in your query:
```ts
import type { Payload } from 'payload'
// Include mode
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
select: {
@@ -36,7 +34,7 @@ const getPosts = async (payload: Payload) => {
}
// Exclude mode
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
// Select everything except for array and group.number
@@ -75,9 +73,8 @@ For this reason, we recommend to use the extremely helpful and ubiquitous [`qs-e
```ts
import { stringify } from 'qs-esm'
import type { Where } from 'payload'
const select: Where = {
const select = {
text: true,
group: {
number: true
@@ -119,6 +116,9 @@ Loading all of the page content, its related links, and everything else is going
```ts
import type { CollectionConfig } from 'payload'
import { lexicalEditor, LinkFeature } from '@payloadcms/richtext-lexical'
import { slateEditor } from '@payloadcms/richtext-slate'
// The TSlug generic can be passed to have type safety for `defaultPopulate`.
// If avoided, the `defaultPopulate` type resolves to `SelectType`.
export const Pages: CollectionConfig<'pages'> = {
@@ -144,9 +144,7 @@ Setting `defaultPopulate` will enforce that each time Payload performs a "popula
**Local API:**
```ts
import type { Payload } from 'payload'
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
populate: {


@@ -20,9 +20,7 @@ Because sorting is handled by the database, the field cannot be a [Virtual Field
To sort Documents in the [Local API](../local-api/overview), you can use the `sort` option in your query:
```ts
import type { Payload } from 'payload'
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
sort: '-createdAt', // highlight-line
@@ -35,9 +33,7 @@ const getPosts = async (payload: Payload) => {
To sort by multiple fields, you can use the `sort` option with fields in an array:
```ts
import type { Payload } from 'payload'
const getPosts = async (payload: Payload) => {
const getPosts = async () => {
const posts = await payload.find({
collection: 'posts',
sort: ['priority', '-createdAt'], // highlight-line


@@ -0,0 +1,9 @@
---
title: Lexical Rich Text
label: Lexical
order: 30
desc: Built by Meta, Lexical is an incredibly powerful rich text editor, and it works beautifully within Payload.
keywords: lexical, rich text, editor, headless cms
---
The new lexical docs can be found at [Lexical](/docs/lexical/overview).


@@ -1,300 +1,18 @@
---
title: Rich Text Editor
title: Overview
label: Overview
order: 10
desc: The Payload editor, based on Lexical, allows for great customization with unparalleled ease.
keywords: lexical, rich text, editor, headless cms
desc: Rich Text within Payload is extremely powerful. We've combined the beauty of the Medium editor with the power of the Notion editor all in one place.
keywords: slatejs, lexical, rich text, json, custom editor, javascript, typescript
---
<Banner type="warning">
Payload currently supports two official rich text editors and you can choose either one depending on your needs.
The Payload editor is based on Lexical, Meta's rich text editor. The previous default editor was
based on Slate and is still supported. You can read [its documentation](/docs/rich-text/slate),
or the optional [migration guide](/docs/rich-text/migration) to migrate from Slate to Lexical (recommended).
1. [SlateJS](/docs/rich-text/slate) - stable, backwards-compatible with 1.0
2. [Lexical](/docs/lexical/overview) - recommended
</Banner>
These editors are built on an "adapter pattern", which means you will need to install the editor you'd like to use. Take a look at that editor's docs for installation instructions.
One of Payload's goals is to build the best rich text editor experience that we possibly can. We want to combine the beauty and polish of the Medium editing experience with the strength and features of the Notion editor - all in one place.
The big TL;DR here is that Slate is what we have used in the past, and we still support it for existing projects, but if you're building something new and you're feeling adventurous, you should give Lexical a shot. Slate has a lot of good stuff, but Lexical has lots more.
Classically, we've used SlateJS to work toward this goal, but building custom elements into Slate has proven to be more difficult than we'd like, and we've been keeping our options open.
Lexical is extremely impressive and trivializes a lot of the hard parts of building new elements into a rich text editor. It has a few distinct advantages over Slate, including the following:
1. A "/" menu, which allows editors to easily add new elements while never leaving their keyboard
1. A "hover" toolbar that pops up if you select text
1. It supports Payload blocks natively, directly within your rich text editor
1. Custom elements, called "features", are much easier to build in Lexical vs. Slate
To use the Lexical editor, first you need to install it:
```
npm install @payloadcms/richtext-lexical
```
Once you have it installed, you can pass it to your top-level Payload Config as follows:
```ts
import { buildConfig } from 'payload'
import { lexicalEditor } from '@payloadcms/richtext-lexical'
export default buildConfig({
collections: [
// your collections here
],
// Pass the Lexical editor to the root config
editor: lexicalEditor({}),
})
```
You can also override Lexical settings on a field-by-field basis as follows:
```ts
import type { CollectionConfig } from 'payload'
import { lexicalEditor } from '@payloadcms/richtext-lexical'
export const Pages: CollectionConfig = {
slug: 'pages',
fields: [
{
name: 'content',
type: 'richText',
// Pass the Lexical editor here and override base settings as necessary
editor: lexicalEditor({}),
},
],
}
```
## Extending the lexical editor with Features
Lexical has been designed with extensibility in mind. Whether you're aiming to introduce new functionalities or tweak the existing ones, Lexical makes it seamless for you to bring those changes to life.
### Features: The Building Blocks
At the heart of Lexical's customization potential are "features". While Lexical ships with a set of default features we believe are essential for most use cases, the true power lies in your ability to redefine, expand, or prune these as needed.
If you remove all the default features, you're left with a blank editor. You can then add in only the features you need, or you can build your own custom features from scratch.
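For example, a minimal sketch of an editor built from scratch with only a handful of features (the exact export names are assumptions - verify them against your installed version of `@payloadcms/richtext-lexical`):
```ts
import {
  HeadingFeature,
  LinkFeature,
  ParagraphFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

// A deliberately minimal editor: no defaults, only the features listed here.
const minimalEditor = lexicalEditor({
  features: [ParagraphFeature(), HeadingFeature(), LinkFeature()],
})
```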
### Integrating New Features
To weave in your custom features, utilize the `features` prop when initializing the Lexical Editor. Here's a basic example of how this is done:
```ts
import {
BlocksFeature,
LinkFeature,
UploadFeature,
lexicalEditor,
} from '@payloadcms/richtext-lexical'
import { Banner } from '../blocks/Banner'
import { CallToAction } from '../blocks/CallToAction'
{
editor: lexicalEditor({
features: ({ defaultFeatures, rootFeatures }) => [
...defaultFeatures,
LinkFeature({
// Example showing how to customize the built-in fields
// of the Link feature
fields: ({ defaultFields }) => [
...defaultFields,
{
name: 'rel',
label: 'Rel Attribute',
type: 'select',
hasMany: true,
options: ['noopener', 'noreferrer', 'nofollow'],
admin: {
description:
'The rel attribute defines the relationship between a linked resource and the current document. This is a custom link field.',
},
},
],
}),
UploadFeature({
collections: {
uploads: {
// Example showing how to customize the built-in fields
// of the Upload feature
fields: [
{
name: 'caption',
type: 'richText',
editor: lexicalEditor(),
},
],
},
},
}),
// This is incredibly powerful. You can re-use your Payload blocks
// directly in the Lexical editor as follows:
BlocksFeature({
blocks: [Banner, CallToAction],
}),
],
})
}
```
`features` can be either an array of features or a function returning an array of features. The function receives the following props (a small sketch of the function form follows the table below):
| Prop | Description |
|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`defaultFeatures`** | This opinionated array contains all "recommended" default features. You can see which features are included in the default features in the table below. |
| **`rootFeatures`** | This array contains all features that are enabled in the root richText editor (the one defined in the payload.config.ts). If this field is the root richText editor, or if the root richText editor is not a lexical editor, this array will be empty. |
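For instance, a field-level editor can simply inherit whatever features the root editor has enabled via `rootFeatures` - a minimal sketch:
```ts
import { lexicalEditor } from '@payloadcms/richtext-lexical'

// Field-level editor that re-uses the features enabled on the root editor.
const inheritedEditor = lexicalEditor({
  features: ({ rootFeatures }) => rootFeatures,
})
```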
## Features overview
Here's an overview of all the included features:
| Feature Name | Included by default | Description |
|---------------------------------|---------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **`BoldTextFeature`** | Yes | Handles the bold text format |
| **`ItalicTextFeature`** | Yes | Handles the italic text format |
| **`UnderlineTextFeature`** | Yes | Handles the underline text format |
| **`StrikethroughTextFeature`** | Yes | Handles the strikethrough text format |
| **`SubscriptTextFeature`** | Yes | Handles the subscript text format |
| **`SuperscriptTextFeature`** | Yes | Handles the superscript text format |
| **`InlineCodeTextFeature`** | Yes | Handles the inline-code text format |
| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs |
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
| **`AlignFeature`** | Yes | Allows you to align text left, centered and right |
| **`IndentFeature`** | Yes | Allows you to indent text with the tab key |
| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) |
| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) |
| **`CheckListFeature`** | Yes | Adds checklists |
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
| **`BlockQuoteFeature`** | Yes | Allows you to create block-level quotes |
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `<hr>` element |
| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](/docs/fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
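For example, a minimal sketch of enabling the fixed toolbar alongside the defaults (feature name taken from the table above):
```ts
import { FixedToolbarFeature, lexicalEditor } from '@payloadcms/richtext-lexical'

// The inline toolbar ships with the defaults; this pins a classic toolbar to the top as well.
const editor = lexicalEditor({
  features: ({ defaultFeatures }) => [...defaultFeatures, FixedToolbarFeature()],
})
```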
## Creating your own, custom Feature
You can find more information about creating your own feature in our [building custom feature docs](/docs/lexical/building-custom-features).
## TypeScript
Every single piece of saved data is 100% fully-typed within lexical. It provides a type for every single node, which can be imported from `@payloadcms/richtext-lexical` - each type is prefixed with `Serialized`, e.g. `SerializedUploadNode`.
To fully type the entire editor JSON, you can use our `TypedEditorState` helper type, which accepts a union of all possible node types as a generic. We do not provide a type that already contains all possible node types because the possible node types depend on which features you have enabled in your editor. Here is an example:
```ts
import type {
SerializedAutoLinkNode,
SerializedBlockNode,
SerializedHorizontalRuleNode,
SerializedLinkNode,
SerializedListItemNode,
SerializedListNode,
SerializedParagraphNode,
SerializedQuoteNode,
SerializedRelationshipNode,
SerializedTextNode,
SerializedUploadNode,
TypedEditorState,
SerializedHeadingNode,
} from '@payloadcms/richtext-lexical'
const editorState: TypedEditorState<
| SerializedAutoLinkNode
| SerializedBlockNode
| SerializedHorizontalRuleNode
| SerializedLinkNode
| SerializedListItemNode
| SerializedListNode
| SerializedParagraphNode
| SerializedQuoteNode
| SerializedRelationshipNode
| SerializedTextNode
| SerializedUploadNode
| SerializedHeadingNode
> = {
root: {
type: 'root',
direction: 'ltr',
format: '',
indent: 0,
version: 1,
children: [
{
children: [
{
detail: 0,
format: 0,
mode: 'normal',
style: '',
text: 'Some text. Every property here is fully-typed',
type: 'text',
version: 1,
},
],
direction: 'ltr',
format: '',
indent: 0,
type: 'paragraph',
textFormat: 0,
version: 1,
},
],
},
}
```
Alternatively, you can use the `DefaultTypedEditorState` type, which includes all types for all nodes included in the `defaultFeatures`:
```ts
import type {
DefaultTypedEditorState
} from '@payloadcms/richtext-lexical'
const editorState: DefaultTypedEditorState = {
root: {
type: 'root',
direction: 'ltr',
format: '',
indent: 0,
version: 1,
children: [
{
children: [
{
detail: 0,
format: 0,
mode: 'normal',
style: '',
text: 'Some text. Every property here is fully-typed',
type: 'text',
version: 1,
},
],
direction: 'ltr',
format: '',
indent: 0,
type: 'paragraph',
textFormat: 0,
version: 1,
},
],
},
}
```
Just like `TypedEditorState`, the `DefaultTypedEditorState` also accepts an optional node type union as a generic. Here, this would **add** the specified node types to the default ones. Example: `DefaultTypedEditorState<SerializedBlockNode | YourCustomSerializedNode>`.
This is a type-safe representation of the editor state. The auto-suggestions for `type` will show you all the possible node types you can use.
Make sure to only use types exported from `@payloadcms/richtext-lexical`, not from the lexical core packages. We only have control over types we export and can guarantee that those are correct, even though lexical core may export types with identical names.
### Automatic type generation
Lexical does not yet generate accurate type definitions for your richText fields - this will be improved in the future. Currently, it only outputs the rough shape of the editor JSON, which you can enhance using type assertions.
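A minimal sketch of such an assertion - the `content` field name and the generated `Page` type are assumptions for illustration:
```ts
import type { DefaultTypedEditorState } from '@payloadcms/richtext-lexical'
import type { Page } from './payload-types'

// Narrow the loosely-typed generated field to the fully-typed editor state.
export const getContentState = (page: Page): DefaultTypedEditorState =>
  page.content as unknown as DefaultTypedEditorState
```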
No matter which editor you use, you have to install it and set it on the top-level `config.editor` property, which will then cascade throughout all of your rich text fields and be used accordingly. You also have the option to override the editor on a field-by-field basis if you'd like.


@@ -1,7 +1,7 @@
---
title: Slate Editor
label: Slate (legacy)
order: 100
title: Slate Rich Text
label: Slate
order: 20
desc: The Slate editor has been supported by Payload since beta. It's very powerful and stores content as JSON, which unlocks a ton of power.
keywords: slatejs, slate, rich text, editor, headless cms
---


@@ -213,7 +213,7 @@ export interface Collection1 {
Now that your types have been generated, Payload's Local API will be typed. It is common to want to use these types in your frontend code; we recommend generating them with Payload and then copying the file over to your frontend codebase. This is the simplest way to get your types into your frontend.
### Adding an npm script
### Adding an NPM script
<Banner type="warning">
<strong>Important</strong>
@@ -221,9 +221,9 @@ Now that your types have been generated, payloads local API will now be typed. I
Payload needs to be able to find your config to generate your types.
</Banner>
Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable. If this applies to you, you can create an npm script to make generating your types easier.
Payload will automatically try and locate your config, but might not always be able to find it. For example, if you are working in a `/src` directory or similar, you need to tell Payload where to find your config manually by using an environment variable. If this applies to you, you can create an NPM script to make generating your types easier.
To add an npm script to generate your types and show Payload where to find your config, open your `package.json` and update the `scripts` property to the following:
To add an NPM script to generate your types and show Payload where to find your config, open your `package.json` and update the `scripts` property to the following:
```
{
@@ -233,4 +233,4 @@ To add an npm script to generate your types and show Payload where to find your
}
```
Now you can run `pnpm generate:types` to easily generate your types.
Now you can run `yarn generate:types` to easily generate your types.


@@ -32,7 +32,7 @@ const config = withBundleAnalyzer(
return [
{
destination: '/admin',
permanent: false,
permanent: true,
source: '/',
},
]


@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
"version": "3.7.0",
"version": "3.5.0",
"private": true,
"type": "module",
"scripts": {


@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.7.0",
"version": "3.5.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.7.0",
"version": "3.5.0",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {


@@ -10,11 +10,11 @@ const migrationTemplate = ({ downSQL, imports, upSQL }: MigrationTemplateArgs):
MigrateUpArgs,
} from '@payloadcms/db-mongodb'
${imports ?? ''}
export async function up({ payload, req, session }: MigrateUpArgs): Promise<void> {
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${upSQL ?? ` // Migration code`}
}
export async function down({ payload, req, session }: MigrateDownArgs): Promise<void> {
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${downSQL ?? ` // Migration code`}
}
`


@@ -1,4 +1,3 @@
import type { ClientSession } from 'mongodb'
import type {
AggregatePaginateModel,
IndexDefinition,
@@ -111,65 +110,5 @@ export type FieldToSchemaMap<TSchema> = {
upload: FieldGeneratorFunction<TSchema, UploadField>
}
export type MigrateUpArgs = {
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
*
* export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
/**
* The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-mongodb'
*
* export async function up({ session, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
* }
* ```
*/
session?: ClientSession
}
export type MigrateDownArgs = {
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
*
* export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
/**
* The MongoDB client session that you can use to execute MongoDB methods directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-mongodb'
*
* export async function down({ session, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await payload.db.collections.posts.collection.find({ session }).toArray()
* }
* ```
*/
session?: ClientSession
}
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "3.7.0",
"version": "3.5.0",
"description": "The officially supported Postgres database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {


@@ -1 +1 @@
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'


@@ -2,7 +2,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -40,15 +39,18 @@ import {
createDatabase,
createExtensions,
createJSONQuery,
createMigration,
defaultDrizzleSnapshot,
deleteWhere,
dropDatabase,
execute,
getMigrationTemplate,
init,
insert,
requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import path from 'path'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
@@ -57,6 +59,7 @@ import type { Args, PostgresAdapter } from './types.js'
import { connect } from './connect.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter> {
const postgresIDType = args.idType || 'serial'
@@ -90,13 +93,9 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
beforeSchemaInit: args.beforeSchemaInit ?? [],
createDatabase,
createExtensions,
createMigration: buildCreateMigration({
executeMethod: 'execute',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return `${sqlExecute}\n ${statements.join('\n')}\`)`
},
}),
createMigration(args) {
return createMigration.bind(this)({ ...args, dirname })
},
defaultDrizzleSnapshot,
disableCreateDatabase: args.disableCreateDatabase ?? false,
drizzle: undefined,
@@ -106,6 +105,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
initializing,
localesSuffix: args.localesSuffix || '_locales',


@@ -1,14 +1,15 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import type { TransactionPg } from '../../../../types.js'
import type { BasePostgresAdapter } from '../../../types.js'
import { upsertRow } from '@payloadcms/drizzle'
import type { PostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'
import { upsertRow } from '../../../../upsertRow/index.js'
import { traverseFields } from './traverseFields.js'
type Args = {
adapter: BasePostgresAdapter
adapter: PostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean


@@ -1,19 +1,22 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import fs from 'fs'
import { createRequire } from 'module'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { PostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
import { migrateRelationships } from './migrateRelationships.js'
import { traverseFields } from './traverseFields.js'
const require = createRequire(import.meta.url)
type Args = {
debug?: boolean
payload: Payload
@@ -35,13 +38,13 @@ type Args = {
* @param req
*/
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const adapter = payload.db as unknown as BasePostgresAdapter
const adapter = payload.db as unknown as PostgresAdapter
const db = adapter.sessions[await req.transactionID].db as TransactionPg
const dir = payload.db.migrationDir
// get the drizzle migrateUpSQL from drizzle using the last schema
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
// Get the previous migration snapshot
const previousSnapshot = fs
@@ -56,14 +59,10 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
)
}
let drizzleJsonBefore = JSON.parse(
const drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
}
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
if (!generatedSQL.length) {


@@ -1,15 +1,15 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { PostgresAdapter } from '../../types.js'
import type { DocsToResave, PathsToQuery } from './types.js'
import { fetchAndResave } from './fetchAndResave/index.js'
type Args = {
adapter: BasePostgresAdapter
adapter: PostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean


@@ -1,13 +1,13 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { TransactionPg } from '../../../types.js'
import type { BasePostgresAdapter } from '../../types.js'
import type { PostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
type Args = {
adapter: BasePostgresAdapter
adapter: PostgresAdapter
collectionSlug?: string
columnPrefix: string
db: TransactionPg


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-sqlite",
"version": "3.7.0",
"version": "3.5.0",
"description": "The officially supported SQLite database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {


@@ -0,0 +1,123 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'
import fs from 'fs'
import { createRequire } from 'module'
import path from 'path'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import { fileURLToPath } from 'url'
import type { SQLiteAdapter } from './types.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
const require = createRequire(import.meta.url)
export const createMigration: CreateMigration = async function createMigration(
this: SQLiteAdapter,
{ file, migrationName, payload, skipEmpty },
) {
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateSQLiteDrizzleJson, generateSQLiteMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = await generateSQLiteDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = defaultDrizzleSnapshot as any
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
}
const sqlStatementsUp = await generateSQLiteMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateSQLiteMigration(drizzleJsonAfter, drizzleJsonBefore)
// need to create tables as separate statements
const sqlExecute = 'await payload.db.drizzle.run(sql`'
if (sqlStatementsUp?.length) {
upSQL = sqlStatementsUp
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
}
if (sqlStatementsDown?.length) {
downSQL = sqlStatementsDown
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
}
if (!upSQL?.length && !downSQL?.length) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}


@@ -0,0 +1,22 @@
import type { MigrationTemplateArgs } from 'payload'
export const indent = (text: string) =>
text
.split('\n')
.map((line) => ` ${line}`)
.join('\n')
export const getMigrationTemplate = ({
downSQL,
imports,
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`


@@ -3,7 +3,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -38,7 +37,6 @@ import {
} from '@payloadcms/drizzle'
import { like } from 'drizzle-orm'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
import type { Args, SQLiteAdapter } from './types.js'
@@ -46,10 +44,12 @@ import { connect } from './connect.js'
import { countDistinct } from './countDistinct.js'
import { convertPathToJSONTraversal } from './createJSONQuery/convertPathToJSONTraversal.js'
import { createJSONQuery } from './createJSONQuery/index.js'
import { createMigration } from './createMigration.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { deleteWhere } from './deleteWhere.js'
import { dropDatabase } from './dropDatabase.js'
import { execute } from './execute.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
import { init } from './init.js'
import { insert } from './insert.js'
import { requireDrizzleKit } from './requireDrizzleKit.js'
@@ -58,8 +58,6 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types.js'
export { sql } from 'drizzle-orm'
const filename = fileURLToPath(import.meta.url)
export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
const postgresIDType = args.idType || 'serial'
const payloadIDType = postgresIDType === 'serial' ? 'number' : 'text'
@@ -93,6 +91,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
initializing,
localesSuffix: args.localesSuffix || '_locales',
@@ -123,15 +122,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
createGlobal,
createGlobalVersion,
createJSONQuery,
createMigration: buildCreateMigration({
executeMethod: 'run',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return statements
.map((statement) => `${sqlExecute}${statement?.replaceAll('`', '\\`')}\`)`)
.join('\n')
},
}),
createMigration,
createVersion,
defaultIDType: payloadIDType,
deleteMany,


@@ -1,19 +1,15 @@
import type { RequireDrizzleKit } from '@payloadcms/drizzle/types'
import { createRequire } from 'module'
const require = createRequire(import.meta.url)
/**
* Dynamically requires the `drizzle-kit` package to access the `generateSQLiteDrizzleJson` and `pushSQLiteSchema` functions and exports them generically to call them from @payloadcms/drizzle.
*/
export const requireDrizzleKit: RequireDrizzleKit = () => {
const {
generateSQLiteDrizzleJson,
generateSQLiteMigration,
pushSQLiteSchema,
generateSQLiteDrizzleJson: generateDrizzleJson,
pushSQLiteSchema: pushSchema,
} = require('drizzle-kit/api')
return {
generateDrizzleJson: generateSQLiteDrizzleJson,
generateMigration: generateSQLiteMigration,
pushSchema: pushSQLiteSchema,
}
return { generateDrizzleJson, pushSchema }
}


@@ -154,65 +154,11 @@ export type SQLiteAdapter = {
export type IDType = 'integer' | 'numeric' | 'text'
export type MigrateUpArgs = {
/**
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-sqlite'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
* }
* ```
*/
db: LibSQLDatabase
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateUpArgs } from '@payloadcms/db-sqlite'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}
export type MigrateDownArgs = {
/**
* The SQLite Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs, sql } from '@payloadcms/db-sqlite'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await db.run(sql`SELECT * FROM posts`)
* }
* ```
*/
db: LibSQLDatabase
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-sqlite'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}


@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.7.0",
"version": "3.5.0",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {


@@ -1 +1 @@
export { migratePostgresV2toV3 } from '@payloadcms/drizzle/postgres'
export { migratePostgresV2toV3 } from '../predefinedMigrations/v2-v3/index.js'


@@ -2,7 +2,6 @@ import type { DatabaseAdapterObj, Payload } from 'payload'
import {
beginTransaction,
buildCreateMigration,
commitTransaction,
count,
countGlobalVersions,
@@ -40,15 +39,18 @@ import {
createDatabase,
createExtensions,
createJSONQuery,
createMigration,
defaultDrizzleSnapshot,
deleteWhere,
dropDatabase,
execute,
getMigrationTemplate,
init,
insert,
requireDrizzleKit,
} from '@payloadcms/drizzle/postgres'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import path from 'path'
import { createDatabaseAdapter, defaultBeginTransaction } from 'payload'
import { fileURLToPath } from 'url'
@@ -57,6 +59,7 @@ import type { Args, VercelPostgresAdapter } from './types.js'
import { connect } from './connect.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<VercelPostgresAdapter> {
const postgresIDType = args.idType || 'serial'
@@ -99,6 +102,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
json: true,
},
fieldConstraints: {},
getMigrationTemplate,
idType: postgresIDType,
indexes: new Set<string>(),
initializing,
@@ -134,13 +138,9 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
createGlobal,
createGlobalVersion,
createJSONQuery,
createMigration: buildCreateMigration({
executeMethod: 'execute',
filename,
sanitizeStatements({ sqlExecute, statements }) {
return `${sqlExecute}\n ${statements.join('\n')}\`)`
},
}),
createMigration(args) {
return createMigration.bind(this)({ ...args, dirname })
},
createVersion,
defaultIDType: payloadIDType,
deleteMany,


@@ -0,0 +1,237 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { upsertRow } from '@payloadcms/drizzle'
import type { VercelPostgresAdapter } from '../../../types.js'
import type { DocsToResave } from '../types.js'
import { traverseFields } from './traverseFields.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
docsToResave: DocsToResave
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
payload: Payload
req: PayloadRequest
tableName: string
}
export const fetchAndResave = async ({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req,
tableName,
}: Args) => {
for (const [id, rows] of Object.entries(docsToResave)) {
if (collectionSlug) {
const collectionConfig = payload.collections[collectionSlug].config
if (collectionConfig) {
if (isVersions) {
const doc = await payload.findVersionByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" version with ID ${id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`"${collectionConfig.slug}" version with ID ${doc.id} FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`"${collectionConfig.slug}" version with ID ${doc.id} migrated successfully!`,
)
}
} else {
const doc = await payload.findByID({
id,
collection: collectionSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} will be migrated`,
)
}
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has FAILED TO MIGRATE`,
)
throw err
}
if (debug) {
payload.logger.info(
`The collection "${collectionConfig.slug}" with ID ${doc.id} has migrated successfully!`,
)
}
}
}
}
if (globalSlug) {
const globalConfig = payload.config.globals?.find((global) => global.slug === globalSlug)
if (globalConfig) {
if (isVersions) {
const { docs } = await payload.findGlobalVersions({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
limit: 0,
locale: 'all',
req,
showHiddenFields: true,
})
if (debug) {
payload.logger.info(`${docs.length} global "${globalSlug}" versions will be migrated`)
}
for (const doc of docs) {
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
id: doc.id,
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`"${globalSlug}" version with ID ${doc.id} FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(
`"${globalSlug}" version with ID ${doc.id} migrated successfully!`,
)
}
}
} else {
const doc = await payload.findGlobal({
slug: globalSlug,
depth: 0,
fallbackLocale: null,
locale: 'all',
req,
showHiddenFields: true,
})
traverseFields({
doc,
fields,
path: '',
rows,
})
try {
await upsertRow({
adapter,
data: doc,
db,
fields,
ignoreResult: true,
operation: 'update',
req,
tableName,
})
} catch (err) {
payload.logger.error(`The global "${globalSlug}" has FAILED TO MIGRATE`)
throw err
}
if (debug) {
payload.logger.info(`The global "${globalSlug}" has migrated successfully!`)
}
}
}
}
}
}


@@ -0,0 +1,171 @@
import type { FlattenedField } from 'payload'
type Args = {
doc: Record<string, unknown>
fields: FlattenedField[]
locale?: string
path: string
rows: Record<string, unknown>[]
}
export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
fields.forEach((field) => {
switch (field.type) {
case 'array': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.flattenedFields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
return traverseFields({
doc: row as Record<string, unknown>,
fields: field.flattenedFields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
})
}
break
}
case 'blocks': {
const rowData = doc?.[field.name]
if (field.localized && typeof rowData === 'object' && rowData !== null) {
Object.entries(rowData).forEach(([locale, localeRows]) => {
if (Array.isArray(localeRows)) {
localeRows.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.flattenedFields,
locale,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
})
}
if (Array.isArray(rowData)) {
rowData.forEach((row, i) => {
const matchedBlock = field.blocks.find((block) => block.slug === row.blockType)
if (matchedBlock) {
return traverseFields({
doc: row as Record<string, unknown>,
fields: matchedBlock.flattenedFields,
path: `${path ? `${path}.` : ''}${field.name}.${i}`,
rows,
})
}
})
}
break
}
case 'group':
case 'tab': {
const newPath = `${path ? `${path}.` : ''}${field.name}`
const newDoc = doc?.[field.name]
if (typeof newDoc === 'object' && newDoc !== null) {
if (field.localized) {
Object.entries(newDoc).forEach(([locale, localeDoc]) => {
return traverseFields({
doc: localeDoc,
fields: field.flattenedFields,
locale,
path: newPath,
rows,
})
})
} else {
return traverseFields({
doc: newDoc as Record<string, unknown>,
fields: field.flattenedFields,
path: newPath,
rows,
})
}
}
break
}
case 'relationship':
// falls through
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
const relationshipPath = `${path ? `${path}.` : ''}${field.name}`
if (field.localized) {
const matchedRelationshipsWithLocales = rows.filter(
(row) => row.path === relationshipPath,
)
if (matchedRelationshipsWithLocales.length && !doc[field.name]) {
doc[field.name] = {}
}
const newDoc = doc[field.name] as Record<string, unknown>
matchedRelationshipsWithLocales.forEach((localeRow) => {
if (typeof localeRow.locale === 'string') {
const [, id] = Object.entries(localeRow).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
newDoc[localeRow.locale] = id
}
})
} else {
const matchedRelationship = rows.find((row) => {
const matchesPath = row.path === relationshipPath
if (locale) {
return matchesPath && locale === row.locale
}
return row.path === relationshipPath
})
if (matchedRelationship) {
const [, id] = Object.entries(matchedRelationship).find(
([key, val]) =>
val !== null && !['id', 'locale', 'order', 'parent_id', 'path'].includes(key),
)
doc[field.name] = id
}
}
}
}
break
}
}
})
}


@@ -0,0 +1,74 @@
export type Groups =
| 'addColumn'
| 'addConstraint'
| 'dropColumn'
| 'dropConstraint'
| 'dropTable'
| 'notNull'
/**
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
* @param sql
*/
function convertAddColumnToAlterColumn(sql) {
// Regular expression to match the ADD COLUMN statement with its constraints
const regex = /ALTER TABLE ("[^"]+") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
}
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
const groups = {
addColumn: 'ADD COLUMN',
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
addConstraint: 'ADD CONSTRAINT',
//example:
// DO $$ BEGIN
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
// EXCEPTION
// WHEN duplicate_object THEN null;
// END $$;
dropColumn: 'DROP COLUMN',
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
dropConstraint: 'DROP CONSTRAINT',
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
dropTable: 'DROP TABLE',
// example: DROP TABLE "pages_rels";
notNull: 'NOT NULL',
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
}
const result = Object.keys(groups).reduce((result, group: Groups) => {
result[group] = []
return result
}, {}) as Record<Groups, string[]>
for (const line of list) {
Object.entries(groups).some(([key, value]) => {
if (line.endsWith('NOT NULL;')) {
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
// example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
// becomes two separate statements:
// 1. ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer;
// 2. ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
result.addColumn.push(line.replace(' NOT NULL;', ';'))
result.notNull.push(convertAddColumnToAlterColumn(line))
return true
}
if (line.includes(value)) {
result[key].push(line)
return true
}
})
}
return result
}


@@ -0,0 +1,279 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import fs from 'fs'
import { createRequire } from 'module'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
import { groupUpSQLStatements } from './groupUpSQLStatements.js'
import { migrateRelationships } from './migrateRelationships.js'
import { traverseFields } from './traverseFields.js'
const require = createRequire(import.meta.url)
type Args = {
debug?: boolean
payload: Payload
req?: Partial<PayloadRequest>
}
/**
* Moves upload and relationship columns from the join table and into the tables while moving data
* This is done in the following order:
* ADD COLUMNs
* -- manipulate data to move relationships to new columns
* ADD CONSTRAINTs
* NOT NULLs
* DROP TABLEs
* DROP CONSTRAINTs
* DROP COLUMNs
* @param debug
* @param payload
* @param req
*/
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
const adapter = payload.db as unknown as VercelPostgresAdapter
const db = adapter.sessions[await req.transactionID].db as TransactionPg
const dir = payload.db.migrationDir
// get the drizzle migrateUpSQL from drizzle using the last schema
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(adapter.schema)
// Get the previous migration snapshot
const previousSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json') && !file.endsWith('relationships_v2_v3.json'))
.sort()
.reverse()?.[0]
if (!previousSnapshot) {
throw new Error(
`No previous migration schema file found! A prior migration from v2 is required to migrate to v3.`,
)
}
const drizzleJsonBefore = JSON.parse(
fs.readFileSync(`${dir}/${previousSnapshot}`, 'utf8'),
) as DrizzleSnapshotJSON
const generatedSQL = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
if (!generatedSQL.length) {
payload.logger.info(`No schema changes needed.`)
process.exit(0)
}
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
if (debug) {
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
payload.logger.info(addColumnsStatement)
}
await db.execute(sql.raw(addColumnsStatement))
for (const collection of payload.config.collections) {
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: false,
fields: collection.flattenedFields,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: collection.flattenedFields,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (collection.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(collection.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionCollectionFields(payload.config, collection, true)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
collectionSlug: collection.slug,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
collectionSlug: collection.slug,
db,
debug,
fields: versionFields,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
for (const global of payload.config.globals) {
const tableName = adapter.tableNameMap.get(toSnakeCase(global.slug))
const pathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: false,
fields: global.flattenedFields,
globalSlug: global.slug,
isVersions: false,
newTableName: tableName,
parentTableName: tableName,
path: '',
pathsToQuery,
payload,
rootTableName: tableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: global.flattenedFields,
globalSlug: global.slug,
isVersions: false,
pathsToQuery,
payload,
req,
tableName,
})
if (global.versions) {
const versionsTableName = adapter.tableNameMap.get(
`_${toSnakeCase(global.slug)}${adapter.versionsSuffix}`,
)
const versionFields = buildVersionGlobalFields(payload.config, global, true)
const versionPathsToQuery: PathsToQuery = new Set()
traverseFields({
adapter,
columnPrefix: '',
db,
disableNotNull: true,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
newTableName: versionsTableName,
parentTableName: versionsTableName,
path: '',
pathsToQuery: versionPathsToQuery,
payload,
rootTableName: versionsTableName,
})
await migrateRelationships({
adapter,
db,
debug,
fields: versionFields,
globalSlug: global.slug,
isVersions: true,
pathsToQuery: versionPathsToQuery,
payload,
req,
tableName: versionsTableName,
})
}
}
// ADD CONSTRAINT
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
if (debug) {
payload.logger.info('ADDING CONSTRAINTS')
payload.logger.info(addConstraintsStatement)
}
await db.execute(sql.raw(addConstraintsStatement))
// NOT NULL
const notNullStatements = sqlUpStatements.notNull.join('\n')
if (debug) {
payload.logger.info('NOT NULL CONSTRAINTS')
payload.logger.info(notNullStatements)
}
await db.execute(sql.raw(notNullStatements))
// DROP TABLE
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
if (debug) {
payload.logger.info('DROPPING TABLES')
payload.logger.info(dropTablesStatement)
}
await db.execute(sql.raw(dropTablesStatement))
// DROP CONSTRAINT
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
if (debug) {
payload.logger.info('DROPPING CONSTRAINTS')
payload.logger.info(dropConstraintsStatement)
}
await db.execute(sql.raw(dropConstraintsStatement))
// DROP COLUMN
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
if (debug) {
payload.logger.info('DROPPING COLUMNS')
payload.logger.info(dropColumnsStatement)
}
await db.execute(sql.raw(dropColumnsStatement))
}
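For readers following the statement ordering above: `groupUpSQLStatements` is assumed to bucket the generated SQL by operation, so new columns exist before data is copied and old tables/columns are only dropped at the very end. A minimal sketch of that grouping (hypothetical — the real helper is not shown in this diff):

type GroupedSQLStatements = {
  addColumn: string[]
  addConstraint: string[]
  dropColumn: string[]
  dropConstraint: string[]
  dropTable: string[]
  notNull: string[]
}

// Sketch only: route each generated statement into the bucket that the
// migration above executes at the matching step.
const groupUpSQLStatementsSketch = (statements: string[]): GroupedSQLStatements => {
  const groups: GroupedSQLStatements = {
    addColumn: [],
    addConstraint: [],
    dropColumn: [],
    dropConstraint: [],
    dropTable: [],
    notNull: [],
  }
  for (const statement of statements) {
    if (statement.includes('ADD COLUMN')) groups.addColumn.push(statement)
    else if (statement.includes('ADD CONSTRAINT')) groups.addConstraint.push(statement)
    else if (statement.includes('SET NOT NULL')) groups.notNull.push(statement)
    else if (statement.includes('DROP TABLE')) groups.dropTable.push(statement)
    else if (statement.includes('DROP CONSTRAINT')) groups.dropConstraint.push(statement)
    else if (statement.includes('DROP COLUMN')) groups.dropColumn.push(statement)
  }
  return groups
}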

View File

@@ -0,0 +1,109 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload, PayloadRequest } from 'payload'
import { sql } from 'drizzle-orm'
import type { VercelPostgresAdapter } from '../../types.js'
import type { DocsToResave, PathsToQuery } from './types.js'
import { fetchAndResave } from './fetchAndResave/index.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
db: TransactionPg
debug: boolean
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
pathsToQuery: PathsToQuery
payload: Payload
req?: Partial<PayloadRequest>
tableName: string
}
export const migrateRelationships = async ({
adapter,
collectionSlug,
db,
debug,
fields,
globalSlug,
isVersions,
pathsToQuery,
payload,
req,
tableName,
}: Args) => {
if (pathsToQuery.size === 0) {
return
}
let offset = 0
let paginationResult
const where = Array.from(pathsToQuery).reduce((statement, path, i) => {
return (statement += `
"${tableName}${adapter.relationshipsSuffix}"."path" LIKE '${path}'${pathsToQuery.size !== i + 1 ? ' OR' : ''}
`)
}, '')
while (typeof paginationResult === 'undefined' || paginationResult.rows.length > 0) {
const paginationStatement = `SELECT DISTINCT parent_id FROM ${tableName}${adapter.relationshipsSuffix} WHERE
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
`
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
if (paginationResult.rows.length === 0) {
return
}
offset += 1
const statement = `SELECT * FROM ${tableName}${adapter.relationshipsSuffix} WHERE
(${where}) AND parent_id IN (${paginationResult.rows.map((row) => row.parent_id).join(', ')});
`
if (debug) {
payload.logger.info('FINDING ROWS TO MIGRATE')
payload.logger.info(statement)
}
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
const docsToResave: DocsToResave = {}
result.rows.forEach((row) => {
const parentID = row.parent_id
if (typeof parentID === 'string' || typeof parentID === 'number') {
if (!docsToResave[parentID]) {
docsToResave[parentID] = []
}
docsToResave[parentID].push(row)
}
})
await fetchAndResave({
adapter,
collectionSlug,
db,
debug,
docsToResave,
fields,
globalSlug,
isVersions,
payload,
req: req as unknown as PayloadRequest,
tableName,
})
}
const deleteStatement = `DELETE FROM ${tableName}${adapter.relationshipsSuffix} WHERE ${where}`
if (debug) {
payload.logger.info('DELETING ROWS')
payload.logger.info(deleteStatement)
}
await db.execute(sql.raw(`${deleteStatement}`))
}
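To make the string-building above concrete, here is what the reduce produces for two hypothetical paths against a hypothetical "pages_rels" table (the names are invented for illustration only):

// Illustration: the OR chain of LIKE patterns built by the reduce above.
const examplePaths = new Set<string>(['meta.image', 'sections.%.link'])
const exampleTable = 'pages_rels' // stands in for `${tableName}${adapter.relationshipsSuffix}`

const exampleWhere = Array.from(examplePaths).reduce((statement, path, i) => {
  return (statement += `
  "${exampleTable}"."path" LIKE '${path}'${examplePaths.size !== i + 1 ? ' OR' : ''}
`)
}, '')

// exampleWhere is roughly:
//   "pages_rels"."path" LIKE 'meta.image' OR
//   "pages_rels"."path" LIKE 'sections.%.link'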

View File

@@ -0,0 +1,90 @@
import type { TransactionPg } from '@payloadcms/drizzle/types'
import type { FlattenedField, Payload } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { VercelPostgresAdapter } from '../../types.js'
import type { PathsToQuery } from './types.js'
type Args = {
adapter: VercelPostgresAdapter
collectionSlug?: string
columnPrefix: string
db: TransactionPg
disableNotNull: boolean
fields: FlattenedField[]
globalSlug?: string
isVersions: boolean
newTableName: string
parentTableName: string
path: string
pathsToQuery: PathsToQuery
payload: Payload
rootTableName: string
}
export const traverseFields = (args: Args) => {
args.fields.forEach((field) => {
switch (field.type) {
case 'array': {
const newTableName = args.adapter.tableNameMap.get(
`${args.newTableName}_${toSnakeCase(field.name)}`,
)
return traverseFields({
...args,
columnPrefix: '',
fields: field.flattenedFields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
}
case 'blocks': {
return field.blocks.forEach((block) => {
const newTableName = args.adapter.tableNameMap.get(
`${args.rootTableName}_blocks_${toSnakeCase(block.slug)}`,
)
traverseFields({
...args,
columnPrefix: '',
fields: block.flattenedFields,
newTableName,
parentTableName: newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}.%`,
})
})
}
case 'group':
case 'tab': {
let newTableName = `${args.newTableName}_${toSnakeCase(field.name)}`
if (field.localized && args.payload.config.localization) {
newTableName += args.adapter.localesSuffix
}
return traverseFields({
...args,
columnPrefix: `${args.columnPrefix}${toSnakeCase(field.name)}_`,
fields: field.flattenedFields,
newTableName,
path: `${args.path ? `${args.path}.` : ''}${field.name}`,
})
}
case 'relationship':
case 'upload': {
if (typeof field.relationTo === 'string') {
if (field.type === 'upload' || !field.hasMany) {
args.pathsToQuery.add(`${args.path ? `${args.path}.` : ''}${field.name}`)
}
}
return null
}
}
})
}
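The net effect of the traversal: only relationship and upload fields with a single `relationTo` are recorded (and relationships only when `hasMany` is false), with `%` standing in for array/block row indexes. For a hypothetical collection the resulting `pathsToQuery` might look like this:

// Hypothetical field shapes, to show how the recursion composes paths:
//   group 'meta' containing upload 'image'                 -> 'meta.image'
//   array 'sections' containing relationship 'link'        -> 'sections.%.link'
//   blocks 'layout', block 'cta' with relationship 'page'  -> 'layout.%.page'
// Polymorphic (array) relationTo and hasMany relationships are skipped.
const examplePathsToQuery: Set<string> = new Set([
  'meta.image',
  'sections.%.link',
  'layout.%.page',
])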

View File

@@ -0,0 +1,9 @@
/**
* Set of all paths which should be moved
* This will be built up into one WHERE query
*/
export type PathsToQuery = Set<string>
export type DocsToResave = {
[id: number | string]: Record<string, unknown>[]
}
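A hedged example of the shapes these two types describe, importing them the same way the sibling files above do (the column names follow the usual `*_rels` layout and are assumptions here):

import type { DocsToResave, PathsToQuery } from './types.js'

// Keys of DocsToResave are parent document IDs; values are the raw
// relationship rows that still need to be folded back into those documents.
const exampleDocsToResave: DocsToResave = {
  42: [
    { id: 1, order: 1, parent_id: 42, path: 'sections.0.link', pages_id: 7 },
    { id: 2, order: 2, parent_id: 42, path: 'meta.image', media_id: 3 },
  ],
}

const examplePathsToMove: PathsToQuery = new Set(['sections.%.link', 'meta.image'])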

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/drizzle",
"version": "3.7.0",
"version": "3.5.0",
"description": "A library of shared functions used by different payload database adapters",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -2,12 +2,13 @@ export { countDistinct } from '../postgres/countDistinct.js'
export { createDatabase } from '../postgres/createDatabase.js'
export { createExtensions } from '../postgres/createExtensions.js'
export { createJSONQuery } from '../postgres/createJSONQuery/index.js'
export { createMigration } from '../postgres/createMigration.js'
export { defaultDrizzleSnapshot } from '../postgres/defaultSnapshot.js'
export { deleteWhere } from '../postgres/deleteWhere.js'
export { dropDatabase } from '../postgres/dropDatabase.js'
export { execute } from '../postgres/execute.js'
export { getMigrationTemplate } from '../postgres/getMigrationTemplate.js'
export { init } from '../postgres/init.js'
export { insert } from '../postgres/insert.js'
export { migratePostgresV2toV3 } from '../postgres/predefinedMigrations/v2-v3/index.js'
export { requireDrizzleKit } from '../postgres/requireDrizzleKit.js'
export * from '../postgres/types.js'

View File

@@ -34,7 +34,6 @@ export { updateGlobal } from './updateGlobal.js'
export { updateGlobalVersion } from './updateGlobalVersion.js'
export { updateVersion } from './updateVersion.js'
export { upsertRow } from './upsertRow/index.js'
export { buildCreateMigration } from './utilities/buildCreateMigration.js'
export { buildIndexName } from './utilities/buildIndexName.js'
export { executeSchemaHooks } from './utilities/executeSchemaHooks.js'
export { extendDrizzleTable } from './utilities/extendDrizzleTable.js'

View File

@@ -44,8 +44,7 @@ export async function migrateDown(this: DrizzleAdapter): Promise<void> {
try {
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
await initTransaction(req)
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})

View File

@@ -59,7 +59,8 @@ export async function migrateFresh(
try {
const start = Date.now()
await initTransaction(req)
const db = this.sessions[await req.transactionID]?.db || this.drizzle
const adapter = payload.db as DrizzleAdapter
const db = adapter?.sessions[await req.transactionID]?.db || adapter.drizzle
await migration.up({ db, payload, req })
await payload.create({
collection: 'payload-migrations',

View File

@@ -48,8 +48,7 @@ export async function migrateRefresh(this: DrizzleAdapter) {
payload.logger.info({ msg: `Migrating down: ${migration.name}` })
const start = Date.now()
await initTransaction(req)
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migration.name} (${Date.now() - start}ms)`,
})

View File

@@ -39,8 +39,7 @@ export async function migrateReset(this: DrizzleAdapter): Promise<void> {
const start = Date.now()
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
await initTransaction(req)
const db = this.sessions[await req.transactionID]?.db || this.drizzle
await migrationFile.down({ db, payload, req })
await migrationFile.down({ payload, req })
payload.logger.info({
msg: `Migrated down: ${migrationFile.name} (${Date.now() - start}ms)`,
})
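The session lookup shown in the migrateFresh hunk above — resolve the drizzle instance bound to the current transaction, fall back to the adapter's root instance — can be read as a small helper. This is a sketch under the assumption that `DrizzleAdapter` is exported from `@payloadcms/drizzle/types`:

import type { DrizzleAdapter } from '@payloadcms/drizzle/types'
import type { Payload, PayloadRequest } from 'payload'

// Sketch: pick the drizzle instance tied to the request's transaction, if any.
export const getTransactionDB = async (payload: Payload, req: PayloadRequest) => {
  const adapter = payload.db as unknown as DrizzleAdapter
  return adapter.sessions[await req.transactionID]?.db || adapter.drizzle
}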

View File

@@ -0,0 +1,122 @@
import type { CreateMigration } from 'payload'
import fs from 'fs'
import { createRequire } from 'module'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import type { BasePostgresAdapter } from './types.js'
import { defaultDrizzleSnapshot } from './defaultSnapshot.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
const require = createRequire(import.meta.url)
export const createMigration: CreateMigration = async function createMigration(
this: BasePostgresAdapter,
{ dirname, file, forceAcceptWarning, migrationName, payload, skipEmpty },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api')
const drizzleJsonAfter = generateDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = defaultDrizzleSnapshot
if (this.schemaName) {
drizzleJsonBefore.schemas = {
[this.schemaName]: this.schemaName,
}
}
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
if (drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore)
}
}
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
const sqlExecute = 'await payload.db.drizzle.execute(sql`'
if (sqlStatementsUp?.length) {
upSQL = `${sqlExecute}\n ${sqlStatementsUp?.join('\n')}\`)`
}
if (sqlStatementsDown?.length) {
downSQL = `${sqlExecute}\n ${sqlStatementsDown?.join('\n')}\`)`
}
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
packageName: payload.db.packageName,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}
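The essence of the snapshot-diff step above, reduced to a sketch with the file-system and prompt handling stripped out (same `drizzle-kit/api` calls as in the function):

import { createRequire } from 'module'

const require = createRequire(import.meta.url)

// Sketch: diff the previous snapshot against the current schema and return the
// raw SQL statements exactly as generateMigration() hands them back.
const diffSchema = async (
  drizzleJsonBefore: unknown,
  schema: Record<string, unknown>,
): Promise<string[]> => {
  const { generateDrizzleJson, generateMigration } = require('drizzle-kit/api')
  const drizzleJsonAfter = generateDrizzleJson(schema)
  // An empty array means no schema changes; the code above then offers a blank migration.
  return generateMigration(drizzleJsonBefore, drizzleJsonAfter)
}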

View File

@@ -13,11 +13,11 @@ export const getMigrationTemplate = ({
upSQL,
}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '${packageName}'
${imports ? `${imports}\n` : ''}
export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}
export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`
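Combined with the `sqlExecute` prefix from `createMigration` above, a generated migration file under the `{ payload, req }` signature would look roughly like the following; the package name and SQL statements are placeholders:

import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`
   ALTER TABLE "posts" ADD COLUMN "subtitle" varchar;`)
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`
   ALTER TABLE "posts" DROP COLUMN IF EXISTS "subtitle";`)
}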

View File

@@ -3,19 +3,4 @@ import { createRequire } from 'module'
import type { RequireDrizzleKit } from '../types.js'
const require = createRequire(import.meta.url)
export const requireDrizzleKit: RequireDrizzleKit = () => {
const {
generateDrizzleJson,
generateMigration,
pushSchema,
upPgSnapshot,
} = require('drizzle-kit/api')
return {
generateDrizzleJson,
generateMigration,
pushSchema,
upSnapshot: upPgSnapshot,
}
}
export const requireDrizzleKit: RequireDrizzleKit = () => require('drizzle-kit/api')

View File

@@ -191,66 +191,5 @@ export type PostgresDrizzleAdapter = Omit<
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
export type MigrateUpArgs = {
/**
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
* }
* ```
*/
db: PostgresDB
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateUpArgs, sql } from '@payloadcms/db-postgres'
*
* export async function up({ db, payload, req }: MigrateUpArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}
export type MigrateDownArgs = {
/**
* The Postgres Drizzle instance that you can use to execute SQL directly within the current transaction.
* @example
* ```ts
* import { type MigrateDownArgs, sql } from '@payloadcms/db-postgres'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const { rows: posts } = await db.execute(sql`SELECT * FROM posts`)
* }
* ```
*/
db: PostgresDB
/**
* The Payload instance that you can use to execute Local API methods
* To use the current transaction you must pass `req` to arguments
* @example
* ```ts
* import { type MigrateDownArgs } from '@payloadcms/db-postgres'
*
* export async function down({ db, payload, req }: MigrateDownArgs): Promise<void> {
* const posts = await payload.find({ collection: 'posts', req })
* }
* ```
*/
payload: Payload
/**
* The `PayloadRequest` object that contains the current transaction
*/
req: PayloadRequest
}
export type MigrateUpArgs = { payload: Payload; req: PayloadRequest }
export type MigrateDownArgs = { payload: Payload; req: PayloadRequest }
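With `db` gone from the argument types on this side of the diff, data migrations lean on the Local API; as the removed doc comments note, forwarding `req` keeps those calls inside the migration's transaction. A sketch with a placeholder collection slug:

import type { MigrateUpArgs } from '@payloadcms/db-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  // Passing `req` keeps these Local API calls in the migration transaction.
  const { docs } = await payload.find({ collection: 'posts', req })
  for (const doc of docs) {
    await payload.update({ collection: 'posts', id: doc.id, data: { /* ... */ }, req })
  }
}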

View File

@@ -14,14 +14,19 @@ import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-post
import type { PgColumn, PgTable, PgTransaction } from 'drizzle-orm/pg-core'
import type { SQLiteColumn, SQLiteTable, SQLiteTransaction } from 'drizzle-orm/sqlite-core'
import type { Result } from 'drizzle-orm/sqlite-core/session'
import type { BaseDatabaseAdapter, MigrationData, Payload, PayloadRequest } from 'payload'
import type {
BaseDatabaseAdapter,
MigrationData,
MigrationTemplateArgs,
Payload,
PayloadRequest,
} from 'payload'
import type { BuildQueryJoinAliases } from './queries/buildQuery.js'
export { BuildQueryJoinAliases }
import type { ResultSet } from '@libsql/client'
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { SQLiteRaw } from 'drizzle-orm/sqlite-core/query-builders/raw'
import type { QueryResult } from 'pg'
@@ -112,10 +117,7 @@ export type Insert = (args: {
}) => Promise<Record<string, unknown>[]>
export type RequireDrizzleKit = () => {
generateDrizzleJson: (
args: Record<string, unknown>,
) => DrizzleSnapshotJSON | Promise<DrizzleSnapshotJSON>
generateMigration: (prev: DrizzleSnapshotJSON, cur: DrizzleSnapshotJSON) => Promise<string[]>
generateDrizzleJson: (args: { schema: Record<string, unknown> }) => unknown
pushSchema: (
schema: Record<string, unknown>,
drizzle: DrizzleAdapter['drizzle'],
@@ -123,7 +125,6 @@ export type RequireDrizzleKit = () => {
tablesFilter?: string[],
extensionsFilter?: string[],
) => Promise<{ apply; hasDataLoss; warnings }>
upSnapshot?: (snapshot: Record<string, unknown>) => DrizzleSnapshotJSON
}
export type Migration = {
@@ -176,6 +177,7 @@ export interface DrizzleAdapter extends BaseDatabaseAdapter {
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
getMigrationTemplate: (args: MigrationTemplateArgs) => string
idType: 'serial' | 'uuid'
indexes: Set<string>
initializing: Promise<void>

View File

@@ -4,7 +4,6 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
import buildQuery from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { upsertRow } from './upsertRow/index.js'
@@ -39,22 +38,6 @@ export const updateOne: UpdateOne = async function updateOne(
if (selectDistinctResult?.[0]?.id) {
idToUpdate = selectDistinctResult?.[0]?.id
// If id wasn't passed but `where` without any joins, retrieve it with findFirst
} else if (whereArg && !joins.length) {
const findManyArgs = buildFindManyArgs({
adapter: this,
depth: 0,
fields: collection.flattenedFields,
joinQuery: false,
select: {},
tableName,
})
findManyArgs.where = where
const docToUpdate = await db.query[tableName].findFirst(findManyArgs)
idToUpdate = docToUpdate?.id
}
const result = await upsertRow({

View File

@@ -1,134 +0,0 @@
import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'
import type { CreateMigration } from 'payload'
import fs from 'fs'
import path from 'path'
import { getPredefinedMigration, writeMigrationIndex } from 'payload'
import prompts from 'prompts'
import type { DrizzleAdapter } from '../types.js'
import { getMigrationTemplate } from './getMigrationTemplate.js'
export const buildCreateMigration = ({
executeMethod,
filename,
sanitizeStatements,
}: {
executeMethod: string
filename: string
sanitizeStatements: (args: { sqlExecute: string; statements: string[] }) => string
}): CreateMigration => {
const dirname = path.dirname(filename)
return async function createMigration(
this: DrizzleAdapter,
{ file, forceAcceptWarning, migrationName, payload, skipEmpty },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir)
}
const { generateDrizzleJson, generateMigration, upSnapshot } = this.requireDrizzleKit()
const drizzleJsonAfter = await generateDrizzleJson(this.schema)
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
const formattedDate = yyymmdd.replace(/\D/g, '')
const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
let imports: string = ''
let downSQL: string
let upSQL: string
;({ downSQL, imports, upSQL } = await getPredefinedMigration({
dirname,
file,
migrationName,
payload,
}))
const timestamp = `${formattedDate}_${formattedTime}`
const name = migrationName || file?.split('/').slice(2).join('/')
const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`
const filePath = `${dir}/${fileName}`
let drizzleJsonBefore = this.defaultDrizzleSnapshot as DrizzleSnapshotJSON
if (this.schemaName) {
drizzleJsonBefore.schemas = {
[this.schemaName]: this.schemaName,
}
}
if (!upSQL) {
// Get latest migration snapshot
const latestSnapshot = fs
.readdirSync(dir)
.filter((file) => file.endsWith('.json'))
.sort()
.reverse()?.[0]
if (latestSnapshot) {
drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))
if (upSnapshot && drizzleJsonBefore.version < drizzleJsonAfter.version) {
drizzleJsonBefore = upSnapshot(drizzleJsonBefore)
}
}
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
const sqlExecute = `await db.${executeMethod}(` + 'sql`'
if (sqlStatementsUp?.length) {
upSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsUp })
}
if (sqlStatementsDown?.length) {
downSQL = sanitizeStatements({ sqlExecute, statements: sqlStatementsDown })
}
if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
if (skipEmpty) {
process.exit(0)
}
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: 'No schema changes detected. Would you like to create a blank migration file?',
},
{
onCancel: () => {
process.exit(0)
},
},
)
if (!shouldCreateBlankMigration) {
process.exit(0)
}
}
// write schema
fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))
}
// write migration
fs.writeFileSync(
`${filePath}.ts`,
getMigrationTemplate({
downSQL: downSQL || ` // Migration code`,
imports,
packageName: payload.db.packageName,
upSQL: upSQL || ` // Migration code`,
}),
)
writeMigrationIndex({ migrationsDir: payload.db.migrationDir })
payload.logger.info({ msg: `Migration created at ${filePath}.ts` })
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-nodemailer",
"version": "3.7.0",
"version": "3.5.0",
"description": "Payload Nodemailer Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/email-resend",
"version": "3.7.0",
"version": "3.5.0",
"description": "Payload Resend Email Adapter",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/graphql",
"version": "3.7.0",
"version": "3.5.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -30,13 +30,15 @@ export function createResolver<TSlug extends CollectionSlug>(
context.req.locale = args.locale
}
const result = await createOperation({
const options = {
collection,
data: args.data,
depth: 0,
draft: args.draft,
req: isolateObjectProperty(context.req, 'transactionID'),
})
}
const result = await createOperation(options)
return result
}

View File

@@ -7,7 +7,6 @@ import type { Context } from '../types.js'
export type Resolver<TData> = (
_: unknown,
args: {
data: TData
draft: boolean
fallbackLocale?: string
id: string
@@ -29,14 +28,15 @@ export function duplicateResolver<TSlug extends CollectionSlug>(
req.fallbackLocale = args.fallbackLocale || fallbackLocale
context.req = req
const result = await duplicateOperation({
const options = {
id: args.id,
collection,
data: args.data,
depth: 0,
draft: args.draft,
req: isolateObjectProperty(req, 'transactionID'),
})
}
const result = await duplicateOperation(options)
return result
}

View File

@@ -13,7 +13,6 @@ export type Resolver = (
limit?: number
locale?: string
page?: number
pagination?: boolean
sort?: string
where?: Where
},
@@ -54,7 +53,6 @@ export function findResolver(collection: Collection): Resolver {
draft: args.draft,
limit: args.limit,
page: args.page,
pagination: args.pagination,
req,
sort: args.sort,
where: args.where,

View File

@@ -12,7 +12,6 @@ export type Resolver = (
limit?: number
locale?: string
page?: number
pagination?: boolean
sort?: string
where: Where
},
@@ -51,7 +50,6 @@ export function findVersionsResolver(collection: Collection): Resolver {
depth: 0,
limit: args.limit,
page: args.page,
pagination: args.pagination,
req: isolateObjectProperty(req, 'transactionID'),
sort: args.sort,
where: args.where,

View File

@@ -11,7 +11,6 @@ export type Resolver = (
limit?: number
locale?: string
page?: number
pagination?: boolean
sort?: string
where: Where
},
@@ -27,7 +26,6 @@ export function findVersions(globalConfig: SanitizedGlobalConfig): Resolver {
globalConfig,
limit: args.limit,
page: args.page,
pagination: args.pagination,
req: isolateObjectProperty(context.req, 'transactionID'),
sort: args.sort,
where: args.where,

View File

@@ -255,17 +255,18 @@ export function buildObjectType({
[field.on]: { equals: parent._id ?? parent.id },
})
return await req.payload.find({
const results = await req.payload.find({
collection,
depth: 0,
fallbackLocale: req.fallbackLocale,
limit,
locale: req.locale,
overrideAccess: false,
req,
sort,
where: fullWhere,
})
return results
},
}

View File

@@ -196,7 +196,6 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
: {}),
limit: { type: GraphQLInt },
page: { type: GraphQLInt },
pagination: { type: GraphQLBoolean },
sort: { type: GraphQLString },
},
resolve: findResolver(collection),
@@ -281,9 +280,6 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
type: collection.graphQL.type,
args: {
id: { type: new GraphQLNonNull(idType) },
...(createMutationInputType
? { data: { type: collection.graphQL.mutationInputType } }
: {}),
},
resolve: duplicateResolver(collection),
}
@@ -352,7 +348,6 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
: {}),
limit: { type: GraphQLInt },
page: { type: GraphQLInt },
pagination: { type: GraphQLBoolean },
sort: { type: GraphQLString },
},
resolve: findVersionsResolver(collection),

View File

@@ -166,7 +166,6 @@ export function initGlobals({ config, graphqlResult }: InitGlobalsGraphQLArgs):
: {}),
limit: { type: GraphQLInt },
page: { type: GraphQLInt },
pagination: { type: GraphQLBoolean },
sort: { type: GraphQLString },
},
resolve: findVersions(global),

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-react",
"version": "3.7.0",
"version": "3.5.0",
"description": "The official React SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview-vue",
"version": "3.7.0",
"version": "3.5.0",
"description": "The official Vue SDK for Payload Live Preview",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/live-preview",
"version": "3.7.0",
"version": "3.5.0",
"description": "The official live preview JavaScript SDK for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/next",
"version": "3.7.0",
"version": "3.5.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -93,12 +93,7 @@ export const RootLayout = async ({
})
return (
<html
data-theme={theme}
dir={dir}
lang={languageCode}
suppressHydrationWarning={config?.admin?.suppressHydrationWarning ?? false}
>
<html data-theme={theme} dir={dir} lang={languageCode}>
<head>
<style>{`@layer payload-default, payload;`}</style>
</head>

Some files were not shown because too many files have changed in this diff.