Compare commits


2 Commits

Author SHA1 Message Date
German Jablonski
0333e2cd1c tests: spin postgres docker automatically in jest (pnpm test:int:postgres) 2025-07-15 19:55:12 +01:00
German Jablonski
bbf0c2474d tests: spin postgres docker automatically in pnpm dev 2025-07-15 19:41:10 +01:00
1037 changed files with 9233 additions and 40900 deletions

View File

@@ -1,20 +1,18 @@
#!/bin/bash
severity=${1:-"high"}
severity=${1:-"critical"}
audit_json=$(pnpm audit --prod --json)
output_file="audit_output.json"
echo "Auditing for ${severity} vulnerabilities..."
audit_json=$(pnpm audit --prod --json)
echo "${audit_json}" | jq --arg severity "${severity}" '
.advisories | to_entries |
map(select(.value.patched_versions != "<0.0.0" and (.value.severity == $severity or ($severity == "high" and .value.severity == "critical"))) |
map(select(.value.patched_versions != "<0.0.0" and .value.severity == $severity) |
{
package: .value.module_name,
vulnerable: .value.vulnerable_versions,
fixed_in: .value.patched_versions,
findings: .value.findings
fixed_in: .value.patched_versions
}
)
' >$output_file
@@ -24,11 +22,7 @@ audit_length=$(jq 'length' $output_file)
if [[ "${audit_length}" -gt "0" ]]; then
echo "Actionable vulnerabilities found in the following packages:"
jq -r '.[] | "\u001b[1m\(.package)\u001b[0m vulnerable in \u001b[31m\(.vulnerable)\u001b[0m fixed in \u001b[32m\(.fixed_in)\u001b[0m"' $output_file | while read -r line; do echo -e "$line"; done
echo ""
echo "Output written to ${output_file}"
cat $output_file
echo ""
echo "This script can be rerun with: './.github/workflows/audit-dependencies.sh $severity'"
exit 1
else
echo "No actionable vulnerabilities"

View File

@@ -9,7 +9,7 @@ on:
audit-level:
description: The level of audit to run (low, moderate, high, critical)
required: false
default: high
default: critical
debug:
description: Enable debug logging
required: false
@@ -46,7 +46,7 @@ jobs:
"type": "section",
"text": {
"type": "mrkdwn",
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Script Run Details>"
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Details>"
}
},
]

View File

@@ -6,6 +6,7 @@ on:
- opened
- reopened
- synchronize
- labeled
push:
branches:
- main
@@ -152,7 +153,6 @@ jobs:
matrix:
database:
- mongodb
- firestore
- postgres
- postgres-custom-schema
- postgres-uuid
@@ -283,8 +283,6 @@ jobs:
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- group-by
- folders
- hooks
- lexical__collections__Lexical__e2e__main
- lexical__collections__Lexical__e2e__blocks
@@ -303,7 +301,6 @@ jobs:
- plugin-nested-docs
- plugin-seo
- sort
- trash
- versions
- uploads
env:
@@ -370,7 +367,6 @@ jobs:
# report-tag: ${{ matrix.suite }}
# job-summary: true
# This is unused, keeping it here for reference and possibly enabling in the future
tests-e2e-turbo:
runs-on: ubuntu-24.04
needs: [changes, build]
@@ -421,8 +417,6 @@ jobs:
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- group-by
- folders
- hooks
- lexical__collections__Lexical__e2e__main
- lexical__collections__Lexical__e2e__blocks
@@ -441,7 +435,6 @@ jobs:
- plugin-nested-docs
- plugin-seo
- sort
- trash
- versions
- uploads
env:
@@ -725,8 +718,6 @@ jobs:
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
- name: Analyze esbuild bundle size
# Temporarily disable this for community PRs until this can be implemented in a separate workflow
if: github.event.pull_request.head.repo.fork == false
uses: exoego/esbuild-bundle-analyzer@v1
with:
metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json'

View File

@@ -17,9 +17,6 @@ env:
jobs:
post_release:
permissions:
issues: write
pull-requests: write
runs-on: ubuntu-24.04
if: ${{ github.event_name != 'workflow_dispatch' }}
steps:

2
.gitignore vendored
View File

@@ -331,7 +331,5 @@ test/databaseAdapter.js
test/.localstack
test/google-cloud-storage
test/azurestoragedata/
/media-without-delete-access
licenses.csv

7
.vscode/launch.json vendored
View File

@@ -139,13 +139,6 @@
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts trash",
"cwd": "${workspaceFolder}",
"name": "Run Dev Trash",
"request": "launch",
"type": "node-terminal"
},
{
"command": "pnpm tsx --no-deprecation test/dev.ts uploads",
"cwd": "${workspaceFolder}",

View File

@@ -77,9 +77,13 @@ If you wish to use your own MongoDB database for the `test` directory instead of
### Using Postgres
If you have postgres installed on your system, you can also run the test suites using postgres. By default, mongodb is used.
Our test suite supports automatic PostgreSQL + PostGIS setup using Docker, so no local PostgreSQL installation is required. By default, MongoDB is used.
To do that, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
To use postgres, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
```bash
PAYLOAD_DATABASE=postgres pnpm dev {suite}
```
### Running the e2e and int tests

View File

@@ -77,7 +77,7 @@ All auto-generated files will contain the following comments at the top of each
## Admin Options
All root-level options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property:
All options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property:
```ts
import { buildConfig } from 'payload'
@@ -107,7 +107,6 @@ The following options are available:
| `suppressHydrationWarning` | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| `theme` | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| `timezones` | Configure the timezone settings for the admin panel. [More details](#timezones) |
| `toast` | Customize the handling of toast messages within the Admin Panel. [More details](#toasts) |
| `user` | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
<Banner type="success">
@@ -299,20 +298,3 @@ We validate the supported timezones array by checking the value against the list
`timezone: true`. See [Date Fields](../fields/overview#date) for more
information.
</Banner>
## Toast
The `admin.toast` configuration allows you to customize the handling of toast messages within the Admin Panel, such as increasing the duration they are displayed and limiting the number of visible toasts at once.
<Banner type="info">
**Note:** The Admin Panel currently uses the
[Sonner](https://sonner.emilkowal.ski) library for toast notifications.
</Banner>
The following options are available for the `admin.toast` configuration:
| Option | Description | Default |
| ---------- | ---------------------------------------------------------------------------------------------------------------- | ------- |
| `duration` | The length of time (in milliseconds) that a toast message is displayed. | `4000` |
| `expand` | If `true`, will expand the message stack so that all messages are shown simultaneously without user interaction. | `false` |
| `limit` | The maximum number of toasts that can be visible on the screen at once. | `5` |
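A minimal sketch of what this might look like in practice, using only the options from the table above (values are illustrative):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...the rest of your config (collections, db, secret, etc.)
  admin: {
    toast: {
      duration: 6000, // display each toast for 6 seconds instead of the default 4000ms
      expand: true, // expand the stack so all toasts are visible at once
      limit: 3, // show at most 3 toasts on screen
    },
  },
})
```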

View File

@@ -739,7 +739,7 @@ The `useDocumentInfo` hook provides information about the current document being
| **`lastUpdateTime`** | Timestamp of the last update to the document. |
| **`mostRecentVersionIsAutosaved`** | Whether the most recent version is an autosaved version. |
| **`preferencesKey`** | The `preferences` key to use when interacting with document-level user preferences. [More details](./preferences). |
| **`data`** | The saved data of the document. |
| **`savedDocumentData`** | The saved data of the document. |
| **`setDocFieldPreferences`** | Method to set preferences for a specific field. [More details](./preferences). |
| **`setDocumentTitle`** | Method to set the document title. |
| **`setHasPublishedDoc`** | Method to update whether the document has been published. |
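As a hedged sketch, a custom client component might read a couple of these properties like so (the component name and rendering are illustrative, and it assumes the renamed `savedDocumentData` property):

```tsx
'use client'
import React from 'react'
import { useDocumentInfo } from '@payloadcms/ui'

export const LastUpdated: React.FC = () => {
  const { savedDocumentData, lastUpdateTime } = useDocumentInfo()

  // Illustrative rendering only: report save state and last update time
  return (
    <span>
      {savedDocumentData ? 'Saved' : 'Unsaved'} document, last updated:{' '}
      {lastUpdateTime ? new Date(lastUpdateTime).toLocaleString() : 'n/a'}
    </span>
  )
}
```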

View File

@@ -33,7 +33,7 @@ export const Users: CollectionConfig = {
}
```
![Authentication Admin Panel functionality](https://payloadcms.com/images/docs/auth-overview.jpg)
![Authentication Admin Panel functionality](https://payloadcms.com/images/docs/auth-admin.jpg)
_Admin Panel screenshot depicting an Admins Collection with Auth enabled_
## Config Options

View File

@@ -0,0 +1,62 @@
---
title: Project Configuration
label: Configuration
order: 20
desc: Quickly configure and deploy your Payload Cloud project in a few simple steps.
keywords: configuration, config, settings, project, cloud, payload cloud, deploy, deployment
---
## Select your plan
Once you have created a project, you will need to select your plan. This will determine the resources that are allocated to your project and the features that are available to you.
<Banner type="success">
Note: All Payload Cloud teams that deploy a project require a card on file.
This helps us prevent fraud and abuse on our platform. If you select a plan
with a free trial, you will not be charged until your trial period is over.
We'll remind you 7 days before your trial ends, and you can cancel anytime.
</Banner>
## Project Details
| Option | Description |
| ---------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **Region** | Select the region closest to your audience. This will ensure the fastest communication between your data and your client. |
| **Project Name** | A name for your project. You can change this at any time. |
| **Project Slug** | Choose a unique slug to identify your project. This needs to be unique for your team and you can change it any time. |
| **Team** | Select the team you want to create the project under. If this is your first project, a personal team will be created for you automatically. You can modify your team settings and invite new members at any time from the Team Settings page. |
## Build Settings
If you are deploying a new project from a template, the following settings will be automatically configured for you. If you are using your own repository, you need to make sure your build settings are accurate for your project to deploy correctly.
| Option | Description |
| -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **Root Directory** | The folder where your `package.json` file lives. |
| **Install Command** | The command used to install your modules, for example: `yarn install` or `npm install` |
| **Build Command** | The command used to build your application, for example: `yarn build` or `npm run build` |
| **Serve Command** | The command used to serve your application, for example: `yarn serve` or `npm run serve` |
| **Branch to Deploy** | Select the branch of your repository that you want to deploy from. This is the branch that will be used to build your project when you commit new changes. |
| **Default Domain** | Set a default domain for your project. This must be unique and you will not be able to change it. You can always add a custom domain later in your project settings. |
## Environment Variables
Any of the features in Payload Cloud that require environment variables will automatically be provided to your application. If your app requires any custom environment variables, you can set them here.
<Banner type="warning">
Note: For security reasons, any variables you wish to provide to the [Admin
Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. Learn more
[here](../configuration/environment-vars).
</Banner>
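For example (a sketch; `NEXT_PUBLIC_SITE_URL` is a hypothetical variable name), Admin Panel code could then read such a variable directly:

```ts
// NEXT_PUBLIC_SITE_URL is hypothetical; any variable with the NEXT_PUBLIC_
// prefix is inlined by Next.js and available to client-side Admin Panel code
const siteURL = process.env.NEXT_PUBLIC_SITE_URL
```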
## Payment
Payment methods can be set per project and updated at any time. You can use your team's default payment method or add a new one. Modify your payment methods in your Project settings or Team settings.
<Banner type="success">
**Note:** All Payload Cloud teams that deploy a project require a card on
file. This helps us prevent fraud and abuse on our platform. If you select a
plan with a free trial, you will not be charged until your trial period is
over. We'll remind you 7 days before your trial ends, and you can cancel
anytime.
</Banner>

View File

@@ -0,0 +1,53 @@
---
title: Getting Started
label: Getting Started
order: 10
desc: Get started with Payload Cloud, a deployment solution specifically designed for Node + MongoDB applications.
keywords: cloud, hosted, database, storage, email, deployment, serverless, node, mongodb, s3, aws, cloudflare, atlas, resend, payload, cms
---
A deployment solution specifically designed for Node.js + MongoDB applications, offering seamless deployment of your entire stack in one place. You can get started in minutes with a one-click template or bring your own codebase with you.
Payload Cloud offers various plans tailored to meet your specific needs, including a MongoDB Atlas database, S3 file storage, and email delivery powered by [Resend](https://resend.com). To see a full breakdown of features and plans, see our [Cloud Pricing page](https://payloadcms.com/cloud-pricing).
To get started, you first need to create an account. Head over to [the login screen](https://payloadcms.com/login) and **Register for Free**.
<Banner type="success">
To create your first project, you can either select [a
template](#starting-from-a-template) or [import an existing
project](#importing-from-an-existing-codebase) from GitHub.
</Banner>
## Starting from a Template
Templates come preconfigured and provide a one-click solution to quickly deploy a new application.
![Screen for creating a new project from a template](https://payloadcms.com/images/docs/cloud/create-from-template.jpg)
_Creating a new project from a template._
After creating an account, select your desired template from the Projects page. At this point, you need to authorize the Payload Cloud application with your GitHub account. Click **Continue with GitHub** and follow the prompts to authorize the app.
Next, select your `GitHub Scope`. If you belong to multiple organizations, they will show up here. If you do not see the organization you are looking for, you may need to adjust your GitHub app permissions.
After selecting your scope, create a unique `repository name` and select whether you want your repository to be public or private on GitHub.
<Banner type="warning">
**Note:** Public repositories can be accessed by anyone online, while private
repositories grant access only to you and anyone you explicitly authorize.
</Banner>
Once you are ready, click **Create Project**. This will clone the selected template to a new repository in your GitHub account, and take you to the configuration page to set up your project for deployment.
## Importing from an Existing Codebase
Payload Cloud works for any Node.js + MongoDB app. From the New Project page, select **import an existing Git codebase**. Choose the organization and select the repository you want to import. From here, you will be taken to the configuration page to set up your project for deployment.
![Screen for creating a new project from an existing repository](https://payloadcms.com/images/docs/cloud/create-from-existing.jpg)
_Creating a new project from an existing repository._
<Banner type="warning">
**Note:** In order to make use of the features of Payload Cloud in your own
codebase, you will need to add the [Cloud
Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud)
to your Payload app.
</Banner>

137
docs/cloud/projects.mdx Normal file
View File

@@ -0,0 +1,137 @@
---
title: Cloud Projects
label: Projects
order: 40
desc: Manage your Payload Cloud projects.
keywords: cloud, payload cloud, projects, project, overview, database, file storage, build settings, environment variables, custom domains, email, developing locally
---
## Overview
<Banner>
The overview tab shows your most recent deployment, along with build and
deployment logs. From here, you can see your live URL, deployment details like
timestamps and commit hash, as well as the status of your deployment. You can
also trigger a redeployment manually, which will rebuild your project using
the current configuration.
</Banner>
![Payload Cloud Overview Page](https://payloadcms.com/images/docs/cloud/overview-page.jpg)
_A screenshot of the Overview page for a Cloud project._
## Database
Your Payload Cloud project comes with a MongoDB serverless Atlas DB instance or a Dedicated Atlas cluster, depending on your plan. To interact with your cloud database, you will be provided with a MongoDB connection string. This can be found under the **Database** tab of your project.
`mongodb+srv://your_connection_string`
## File Storage
Payload Cloud gives you S3 file storage backed by Cloudflare as a CDN, and this plugin extends Payload so that all of your media will be stored in S3 rather than locally.
AWS Cognito is used for authentication to your S3 bucket. The [Payload Cloud Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) will automatically pick up these values. You only need these values if you'd like to access your files directly, outside of Payload Cloud.
### Accessing Files Outside of Payload Cloud
If you'd like to access your files outside of Payload Cloud, you'll need to retrieve some values from your project's settings and put them into your environment variables. In Payload Cloud, navigate to the File Storage tab and copy the values using the copy button, then put them in your `.env` file. Also copy the Cognito Password value separately and add it to your `.env` file as well.
When you are done, you should have the following values in your .env file:
```env
PAYLOAD_CLOUD=true
PAYLOAD_CLOUD_ENVIRONMENT=prod
PAYLOAD_CLOUD_COGNITO_USER_POOL_CLIENT_ID=
PAYLOAD_CLOUD_COGNITO_USER_POOL_ID=
PAYLOAD_CLOUD_COGNITO_IDENTITY_POOL_ID=
PAYLOAD_CLOUD_PROJECT_ID=
PAYLOAD_CLOUD_BUCKET=
PAYLOAD_CLOUD_BUCKET_REGION=
PAYLOAD_CLOUD_COGNITO_PASSWORD=
```
The plugin will pick up these values and use them to access your files.
## Build Settings
You can update build settings from your project's Settings tab. Changes to your build settings will trigger a redeployment of your project.
## Environment Variables
From the Environment Variables page of the Settings tab, you can add, update and delete variables for use in your project. Like build settings, these changes will trigger a redeployment of your project.
<Banner>
Note: For security reasons, any variables you wish to provide to the [Admin
Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. [More
details](../configuration/environment-vars).
</Banner>
## Custom Domains
With Payload Cloud, you can add custom domain names to your project. To do so, first go to the Domains page of the Settings tab of your project. Here you can see your default domain. To add a new domain, type in the domain name you wish to use.
<Banner>
Note: do not include the protocol (`http://` or `https://`) or any paths (`/page`). Only include the domain name and extension, and optionally a subdomain, e.g. `your-domain.com` or `backend.your-domain.com`.
</Banner>
Once you click save, a DNS record will be generated for your domain name to point to your live project. Add this record to your DNS provider's records, and once the records are resolving properly (this can take from 1 hour up to 48 hours in some cases), your domain will point to your live project.
You will also need to configure your Payload project to use your specified domain. In your `payload.config.ts` file, specify your `serverURL` with your domain:
```ts
export default buildConfig({
serverURL: 'https://example.com',
// the rest of your config,
})
```
## Email
Powered by [Resend](https://resend.com), Payload Cloud comes with integrated email support out of the box. No configuration is needed, and you can use `payload.sendEmail()` to send email right from your Payload app. To learn more about sending email with Payload, check out the [Email Configuration](../email/overview) overview.
If you are on the Pro or Enterprise plan, you can add your own custom Email domain name. From the Email page of your project's Settings, add the domain you wish to use for email delivery. This will generate a set of DNS records. Add these records to your DNS provider and click verify to check that your records are resolving properly. Once verified, your emails will be sent from your custom domain name.
## Developing Locally
To make changes to your project, you will need to clone the repository defined in your project settings to your local machine. In order to run your project locally, you will need to configure your local environment first. Refer to your repository's `README.md` file to see the steps needed for your specific template.
From there, you are ready to make updates to your project. When you are ready to make your changes live, commit your changes to the branch you specified in your Project settings, and your application will automatically trigger a redeploy and build from your latest commit.
## Cloud Plugin
Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add this into your project. To do so, add the plugin to your Payload Config:
`pnpm add @payloadcms/payload-cloud`
```js
import { payloadCloudPlugin } from '@payloadcms/payload-cloud'
import { buildConfig } from 'payload'
export default buildConfig({
plugins: [payloadCloudPlugin()],
// rest of config
})
```
<Banner type="warning">
**Note:** If your Payload Config already has an email with transport, this
will take precedence over Payload Cloud's email service.
</Banner>
<Banner type="info">
Good to know: the Payload Cloud Plugin was previously named
`@payloadcms/plugin-cloud`. If you are using this plugin, you should update to
the new package name.
</Banner>
#### **Optional configuration**
If you wish to opt out of any Payload Cloud features, the plugin also accepts options to do so.
```js
payloadCloud({
storage: false, // Disable file storage
email: false, // Disable email delivery
})
```

35
docs/cloud/teams.mdx Normal file
View File

@@ -0,0 +1,35 @@
---
title: Cloud Teams
label: Teams
order: 30
desc: Manage your Payload Cloud team and billing settings.
keywords: team, teams, billing, subscription, payment, plan, plans, cloud, payload cloud
---
<Banner>
Within Payload Cloud, the team management feature offers you the ability to
manage your organization, team members, billing, and subscription settings.
</Banner>
![Payload Cloud Team Settings](https://payloadcms.com/images/docs/cloud/team-settings.jpg)
_A screenshot of the Team Settings page._
## Members
Each team has members that can interact with your projects. You can invite multiple people to your team, and each individual can belong to more than one team. You can assign them either `owner` or `user` permissions. Owners are able to make admin-only changes, such as deleting projects and editing billing information.
## Adding Members
To add a new member to your team, visit your Team Settings page and click “Invite Teammate”. You can then add their email address and assign their role. Press “Save” to send the invitation, which emails the invited team member a link to create a new account.
## Billing
Users can update billing settings and subscriptions for any teams where they are designated as an `owner`. To make updates to the team's payment methods, visit the Billing page under the Team Settings tab. You can add new cards, delete cards, and set a payment method as a default. The default payment method will be used in the event that another payment method fails.
## Subscriptions
From the Subscriptions page, a team owner can see all current plans for their team. From here, you can see the price of each plan, if there is an active trial, and when you will be billed next.
## Invoices
The Invoices page will show you the invoices for your account, as well as their payment status.

View File

@@ -61,7 +61,7 @@ export const Posts: CollectionConfig = {
The following options are available:
| Option | Description |
| -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `admin` | The configuration options for the Admin Panel. [More details](#admin-options). |
| `access` | Provide Access Control functions to define exactly who should be able to do what with Documents in this Collection. [More details](../access-control/collections). |
| `auth` | Specify options if you would like this Collection to feature authentication. [More details](../authentication/overview). |
@@ -79,12 +79,11 @@ The following options are available:
| `lockDocuments` | Enables or disables document locking. By default, document locking is enabled. Set to an object to configure, or set to `false` to disable locking. [More details](../admin/locked-documents). |
| `slug` \* | Unique, URL-friendly string that will act as an identifier for this Collection. |
| `timestamps` | Set to false to disable documents' automatically generated `createdAt` and `updatedAt` timestamps. |
| `trash` | A boolean to enable soft deletes for this collection. Defaults to `false`. [More details](../trash/overview). |
| `typescript` | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. |
| `upload` | Specify options if you would like this Collection to support file uploads. For more, consult the [Uploads](../upload/overview) documentation. |
| `versions` | Set to true to enable default options, or configure with object properties. [More details](../versions/overview#collection-config). |
| `defaultPopulate` | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). |
| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. |
| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. [More details](../database/indexes#compound-indexes). |
| `forceSelect` | Specify which fields should always be selected, regardless of the `select` query. This can be useful to ensure a field exists for access control / hooks. |
| `disableBulkEdit` | Disable the bulk edit operation for the collection in the admin panel and the REST API |
@@ -131,7 +130,6 @@ The following options are available:
| `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. |
| `groupBy` | Beta. Enable grouping by a field in the list view. |
| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
@@ -141,8 +139,8 @@ The following options are available:
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| `pagination` | Set pagination-specific options for this Collection in the List View. [More details](#pagination). |
| `baseFilter` | Defines a default base filter which will be applied to the List View (along with any other filters applied by the user) and to internal links in the Lexical Editor. |
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
<Banner type="warning">
**Note:** If you set `useAsTitle` to a relationship or join field, it will use

View File

@@ -158,7 +158,7 @@ export function MyCustomView(props: AdminViewServerProps) {
<Banner type="success">
**Tip:** For consistent layout and navigation, you may want to wrap your
Custom View with one of the built-in [Templates](./overview#templates).
</Banner>
### View Templates

View File

@@ -293,6 +293,7 @@ Here's an example of a custom `editMenuItems` component:
```tsx
import React from 'react'
import { PopupList } from '@payloadcms/ui'
import type { EditMenuItemsServerProps } from 'payload'
@@ -300,12 +301,12 @@ export const EditMenuItems = async (props: EditMenuItemsServerProps) => {
const href = `/custom-action?id=${props.id}`
return (
<>
<a href={href}>Custom Edit Menu Item</a>
<a href={href}>
<PopupList.ButtonGroup>
<PopupList.Button href={href}>Custom Edit Menu Item</PopupList.Button>
<PopupList.Button href={href}>
Another Custom Edit Menu Item - add as many as you need!
</a>
</>
</PopupList.Button>
</PopupList.ButtonGroup>
)
}
```

View File

@@ -63,22 +63,3 @@ export const MyCollection: CollectionConfig = {
],
}
```
## Localized fields and MongoDB indexes
When you set `index: true` or `unique: true` on a localized field, MongoDB creates one index **per locale path** (e.g., `slug.en`, `slug.da-dk`, etc.). With many locales and indexed fields, this can quickly approach MongoDB's per-collection index limit.
If you know you'll query specifically by a locale, index only those locale paths using the collection-level `indexes` option instead of setting `index: true` on the localized field. This approach gives you more control and helps avoid unnecessary indexes.
```ts
import type { CollectionConfig } from 'payload'
export const Pages: CollectionConfig = {
fields: [{ name: 'slug', type: 'text', localized: true }],
indexes: [
// Index English slug only (rather than all locales)
{ fields: ['slug.en'] },
// You could also make it unique:
// { fields: ['slug.en'], unique: true },
],
}
```

View File

@@ -31,7 +31,7 @@ export default buildConfig({
## Options
| Option | Description |
| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. |
| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. |
| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. |
@@ -42,10 +42,6 @@ export default buildConfig({
| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. |
| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. |
| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. |
| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. |
| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. |
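As a hedged sketch, a handful of these options might be applied like so (values are illustrative, not recommendations):

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  url: process.env.DATABASE_URI ?? '',
  autoPluralization: false, // keep collection names exactly as the slugs define them
  disableFallbackSort: true, // skip the fallback sort on non-unique fields
})
```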
## Access to Mongoose models
@@ -60,21 +56,9 @@ You can access Mongoose models as follows:
## Using other MongoDB implementations
You can import the `compatibilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by Payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):
Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db):
```ts
import { mongooseAdapter, compatibilityOptions } from '@payloadcms/db-mongodb'
export default buildConfig({
db: mongooseAdapter({
url: process.env.DATABASE_URI,
// For example, if you're using firestore:
...compatibilityOptions.firestore,
}),
})
```
We export compatibility options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:
- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
- For Azure Cosmos DB, the root config property `indexSortableFields` must be set to `true`.
- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future.
- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB.

View File

@@ -296,16 +296,11 @@ query {
sort: "createdAt"
limit: 5
where: { author: { equals: "66e3431a3f23e684075aaeb9" } }
"""
Optionally pass count: true if you want to retrieve totalDocs
"""
count: true
) {
docs {
title
}
hasNextPage
totalDocs
}
}
}

View File

@@ -157,7 +157,6 @@ The following field names are forbidden and cannot be used:
- `salt`
- `hash`
- `file`
- `status` - with Postgres Adapter and when drafts are enabled
### Field-level Hooks

View File

@@ -81,7 +81,7 @@ To install a Database Adapter, you can run **one** of the following commands:
#### 2. Copy Payload files into your Next.js app folder
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/%28payload%29) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](<https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)>) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
```plaintext
app/

View File

@@ -34,20 +34,20 @@ npm i @payloadcms/plugin-csm
Then in the `plugins` array of your Payload Config, call the plugin and enable any collections that require Content Source Maps.
```ts
import { buildConfig } from 'payload/config'
import contentSourceMaps from '@payloadcms/plugin-csm'
import { buildConfig } from "payload/config"
import contentSourceMaps from "@payloadcms/plugin-csm"
const config = buildConfig({
collections: [
{
slug: 'pages',
slug: "pages",
fields: [
{
name: 'slug',
type: 'text',
},
{
name: 'title',
type: 'text',
},
],
@@ -55,7 +55,7 @@ const config = buildConfig({
],
plugins: [
contentSourceMaps({
collections: ['pages'],
collections: ["pages"],
}),
],
})

View File

@@ -51,7 +51,7 @@ export default buildConfig({
// add as many cron jobs as you want
],
shouldAutoRun: async (payload) => {
// Tell Payload if it should run jobs or not. This function is optional and will return true by default.
// Tell Payload if it should run jobs or not.
// This function will be invoked each time Payload goes to pick up and run jobs.
// If this function ever returns false, the cron schedule will be stopped.
return true

View File

@@ -1,155 +0,0 @@
---
title: Job Schedules
label: Schedules
order: 60
desc: Payload allows you to schedule jobs to run periodically
keywords: jobs queue, application framework, typescript, node, react, nextjs, scheduling, cron, schedule
---
Payload's `schedule` property lets you enqueue Jobs regularly according to a cron schedule - daily, weekly, hourly, or any custom interval. This is ideal for tasks or workflows that must repeat automatically and without manual intervention.
Scheduling Jobs differs significantly from running them:
- **Queueing**: Scheduling only creates (enqueues) the Job according to your cron expression. It does not immediately execute any business logic.
- **Running**: Execution happens separately through your Jobs runner - such as autorun, or manual invocation using `payload.jobs.run()` or the `payload-jobs/run` endpoint.
Use the `schedule` property specifically when you have recurring tasks or workflows. To enqueue a single Job to run once in the future, use the `waitUntil` property instead.
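For example, a one-off Job might be enqueued roughly like this (a sketch; the task slug and input are assumptions):

```ts
// Enqueue a single Job to run once, one hour from now
await payload.jobs.queue({
  task: 'SendDigestEmail', // assumes a task with this slug is configured
  input: {},
  waitUntil: new Date(Date.now() + 60 * 60 * 1000),
})
```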
## Example use cases
**Regular emails or notifications**
Send nightly digests, weekly newsletters, or hourly updates.
**Batch processing during off-hours**
Process analytics data or rebuild static sites during low-traffic times.
**Periodic data synchronization**
Regularly push or pull updates to or from external APIs.
## Handling schedules
Something needs to actually trigger the scheduling of Jobs (that is, execute the scheduling lifecycle seen below). By default, the `jobs.autorun` configuration, as well as the `/api/payload-jobs/run` endpoint, will also handle scheduling for the queue specified in the `autorun` configuration.
You can disable this behavior by setting `disableScheduling: true` in your `autorun` configuration, or by passing `disableScheduling=true` to the `/api/payload-jobs/run` endpoint. This is useful if you want to handle scheduling manually, for example, by using a cron job or a serverless function that calls the `/api/payload-jobs/handle-schedules` endpoint or the `payload.jobs.handleSchedules()` local API method.
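A manual trigger might look roughly like this (a sketch, assuming you call it from your own cron handler or serverless function):

```ts
// Evaluate cron schedules and enqueue any Jobs that are due
await payload.jobs.handleSchedules()

// If you are not using autorun, execute whatever is now runnable
await payload.jobs.run()
```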
## Defining schedules on Tasks or Workflows
Schedules are defined using the `schedule` property:
```ts
export type ScheduleConfig = {
cron: string // required, supports seconds precision
queue: string // required, the queue to push Jobs onto
hooks?: {
// Optional hooks to customize scheduling behavior
beforeSchedule?: BeforeScheduleFn
afterSchedule?: AfterScheduleFn
}
}
```
### Example schedule
The following example demonstrates scheduling a Job to enqueue every day at midnight:
```ts
import type { TaskConfig } from 'payload'
export const SendDigestEmail: TaskConfig<'SendDigestEmail'> = {
slug: 'SendDigestEmail',
schedule: [
{
cron: '0 0 * * *', // Every day at midnight
queue: 'nightly',
},
],
handler: async () => {
await sendDigestToAllUsers()
},
}
```
This configuration only queues the Job - it does not execute it immediately. To actually run the queued Job, you configure autorun in your Payload config (note that autorun should **not** be used on serverless platforms):
```ts
export default buildConfig({
jobs: {
autoRun: [
{
cron: '* * * * *', // Runs every minute
queue: 'nightly',
},
],
tasks: [SendDigestEmail],
},
})
```
That way, Payload's scheduler will automatically enqueue the job into the `nightly` queue every day at midnight. The autorun configuration will check the `nightly` queue every minute and execute any Jobs that are due to run.
## Scheduling lifecycle
Here's how the scheduling process operates in detail:
1. **Cron evaluation**: Payload (or your external trigger in `manual` mode) identifies which schedules are due to run. To do that, it will
read the `payload-jobs-stats` global which contains information about the last time each scheduled task or workflow was run.
2. **BeforeSchedule hook**:
- The default beforeSchedule hook checks whether any active or runnable Jobs of the same type, queued by the scheduling system, currently exist.
If such a Job exists, it will skip scheduling a new one.
- You can provide your own `beforeSchedule` hook to customize this behavior. For example, you might want to allow multiple overlapping Jobs or dynamically set the Job input data.
3. **Enqueue Job**: Payload queues up a new job. This job will have `waitUntil` set to the next scheduled time based on the cron expression.
4. **AfterSchedule hook**:
- The default afterSchedule hook updates the `payload-jobs-stats` global metadata with the last scheduled time for the Job.
- You can provide your own `afterSchedule` hook for custom logging, metrics, or other post-scheduling actions.
## Customizing concurrency and input (Advanced)
You may want more control over concurrency or dynamically set Job inputs at scheduling time. For instance, allowing multiple overlapping Jobs to be scheduled, even if a previously scheduled job has not completed yet, or preparing dynamic data to pass to your Job handler:
```ts
import { countRunnableOrActiveJobsForQueue } from 'payload'
schedule: [
{
cron: '* * * * *', // every minute
queue: 'reports',
hooks: {
beforeSchedule: async ({ queueable, req }) => {
const runnableOrActiveJobsForQueue =
await countRunnableOrActiveJobsForQueue({
queue: queueable.scheduleConfig.queue,
req,
taskSlug: queueable.taskConfig?.slug,
workflowSlug: queueable.workflowConfig?.slug,
onlyScheduled: true,
})
// Allow up to 3 simultaneous scheduled jobs and set dynamic input
return {
shouldSchedule: runnableOrActiveJobsForQueue < 3,
input: { text: 'Hi there' },
}
},
},
},
]
```
This allows fine-grained control over how many Jobs can run simultaneously and provides dynamically computed input values each time a Job is scheduled.
## Scheduling in serverless environments
On serverless platforms, scheduling must be triggered externally since Payload does not automatically run cron schedules in ephemeral environments. You have a few ways to trigger scheduling manually:
- **Invoke via Payload's API:** `payload.jobs.handleSchedules()`
- **Use the REST API endpoint:** `/api/payload-jobs/handle-schedules`
- **Use the run endpoint, which also handles scheduling by default:** `GET /api/payload-jobs/run`
For example, on Vercel, you can set up a Vercel Cron to regularly trigger scheduling:
- **Vercel Cron Job:** Configure Vercel Cron to periodically call `GET /api/payload-jobs/handle-schedules`. If you would like to auto-run your scheduled jobs as well, you can use the `GET /api/payload-jobs/run` endpoint.
Once Jobs are queued, their execution depends entirely on your configured runner setup (e.g., autorun, or manual invocation).

View File

@@ -45,11 +45,13 @@ The following options are available:
| Path | Description |
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`url`** | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
| **`url`** \* | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
| **`breakpoints`** | Array of breakpoints to be used as “device sizes” in the preview window. Each item appears as an option in the toolbar. [More details](#breakpoints). |
| **`collections`** | Array of collection slugs to enable Live Preview on. |
| **`globals`** | Array of global slugs to enable Live Preview on. |
_\* An asterisk denotes that a property is required._
### URL
The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.
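In its simplest form, the value can be a static string (a sketch assuming a front-end served at `localhost:3000`):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  admin: {
    livePreview: {
      url: 'http://localhost:3000', // used as the iframe src; assumed local front-end
      collections: ['pages'],
    },
  },
})
```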
@@ -86,16 +88,17 @@ const config = buildConfig({
// ...
livePreview: {
// highlight-start
url: ({ data, collectionConfig, locale }) =>
`${data.tenant.url}${
collectionConfig.slug === 'posts'
? `/posts/${data.slug}`
: `${data.slug !== 'home' ? `/${data.slug}` : ''}`
url: ({
data,
collectionConfig,
locale
}) => `${data.tenant.url}${ // Multi-tenant top-level domain
collectionConfig.slug === 'posts' ? `/posts/${data.slug}` : `${data.slug !== 'home' ? `/${data.slug}` : ''}`
}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
collections: ['pages'],
},
// highlight-end
},
}
})
```

View File

@@ -51,7 +51,6 @@ export default async function Page() {
collection: 'pages',
id: '123',
draft: true,
trash: true, // add this if trash is enabled in your collection and want to preview trashed documents
})
return (

View File

@@ -162,11 +162,6 @@ const result = await payload.find({
})
```
<Banner type="info">
`pagination`, `page`, and `limit` are three related properties [documented
here](/docs/queries/pagination).
</Banner>
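As a sketch of those three properties together (values are illustrative):

```ts
const page2 = await payload.find({
  collection: 'posts',
  limit: 10, // documents per page
  page: 2, // which page of results to return
  // pagination: false, // disable pagination and return all matching docs
})
```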
### Find by ID#collection-find-by-id
```js
@@ -199,27 +194,6 @@ const result = await payload.count({
})
```
### FindDistinct#collection-find-distinct
```js
// Result will be an object with:
// {
// values: ['value-1', 'value-2'], // array of distinct values,
// field: 'title', // the field
// totalDocs: 10, // count of the distinct values that satisfy the query,
// perPage: 10, // count of distinct values per page (based on provided limit)
// }
const result = await payload.findDistinct({
collection: 'posts', // required
locale: 'en',
where: {}, // pass a `where` query here
user: dummyUser,
overrideAccess: false,
field: 'title',
sort: 'title',
})
```
### Update by ID#collection-update-by-id
```js

View File

@@ -58,7 +58,7 @@ To learn more, see the [Custom Components Performance](../admin/custom-component
### Block references
Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel.
For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field.
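A minimal sketch of that pattern (the slugs and field names are illustrative, and it assumes the block is also registered under the root `blocks` array of your config):

```ts
import type { Block, CollectionConfig } from 'payload'

// Define the block a single time...
export const CallToAction: Block = {
  slug: 'cta',
  fields: [{ name: 'label', type: 'text' }],
}

// ...then reference it by slug from any number of fields
export const Pages: CollectionConfig = {
  slug: 'pages',
  fields: [
    { name: 'layout', type: 'blocks', blockReferences: ['cta'], blocks: [] },
    { name: 'sidebar', type: 'blocks', blockReferences: ['cta'], blocks: [] },
  ],
}
```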
@@ -207,7 +207,7 @@ Everything mentioned above applies to local development as well, but there are a
### Enable Turbopack
<Banner type="warning">
**Note:** In the future this will be the default. Use at your own risk.
</Banner>
Add `--turbo` to your dev script to significantly speed up your local development server start time.

View File

@@ -1,7 +1,7 @@
---
title: Form Builder Plugin
label: Form Builder
order: 30
order: 40
desc: Easily build and manage forms from the Admin Panel. Send dynamic, personalized emails and even accept and process payments.
keywords: plugins, plugin, form, forms, form builder
---

View File

@@ -1,155 +0,0 @@
---
title: Import Export Plugin
label: Import Export
order: 40
desc: Add Import and export functionality to create CSV and JSON data exports
keywords: plugins, plugin, import, export, csv, JSON, data, ETL, download
---
![https://www.npmjs.com/package/@payloadcms/plugin-import-export](https://img.shields.io/npm/v/@payloadcms/plugin-import-export)
<Banner type="warning">
**Note**: This plugin is in **beta** as some aspects of it may change on any
minor releases. It is under development and currently only supports exporting
of collection data.
</Banner>
This plugin gives admin users the ability to download export data directly, or create it as an upload collection, and import it back into a project.
## Core Features
- Export data as CSV or JSON format via the admin UI
- Download the export directly through the browser
- Create a file upload of the export data
- Use the jobs queue for large exports
- (Coming soon) Import collection data
## Installation
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
```bash
pnpm add @payloadcms/plugin-import-export
```
## Basic Usage
In the `plugins` array of your [Payload Config](https://payloadcms.com/docs/configuration/overview), call the plugin with [options](#options):
```ts
import { buildConfig } from 'payload'
import { importExportPlugin } from '@payloadcms/plugin-import-export'
const config = buildConfig({
collections: [Pages, Media],
plugins: [
importExportPlugin({
collections: ['users', 'pages'],
// see below for a list of available options
}),
],
})
export default config
```
## Options
| Property | Type | Description |
| -------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------ |
| `collections` | string[] | Collections to include Import/Export controls in. Defaults to all collections. |
| `debug` | boolean | If true, enables debug logging. |
| `disableDownload` | boolean | If true, disables the download button in the export preview UI. |
| `disableJobsQueue` | boolean | If true, forces the export to run synchronously. |
| `disableSave` | boolean | If true, disables the save button in the export preview UI. |
| `format` | string | Forces a specific export format (`csv` or `json`), hides the format dropdown, and prevents the user from choosing the export format. |
| `overrideExportCollection` | function | Function to override the default export collection; takes the default export collection and allows you to modify and return it. |
## Field Options
In addition to the above plugin configuration options, you can granularly set the following field level options using the `custom['plugin-import-export']` properties in any of your collections.
| Property | Type | Description |
| ---------- | -------- | ----------------------------------------------------------------------------------------------------------------------------- |
| `disabled` | boolean | When `true` the field is completely excluded from the import-export plugin. |
| `toCSV` | function | Custom function used to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value. |
### Customizing the output of CSV data
To manipulate the data that a field exports, you can add custom `toCSV` functions. These allow you to modify the outgoing CSV data by manipulating the `data` or `siblingData`, or by returning the desired value.
The `toCSV` function argument is an object with the following properties:
| Property | Type | Description |
| ------------ | ------- | ----------------------------------------------------------------- |
| `columnName` | string | The CSV column name given to the field. |
| `doc` | object | The top level document |
| `row` | object | The object data that can be manipulated to assign data to the CSV |
| `siblingDoc` | object | The document data at the level where it belongs |
| `value` | unknown | The data for the field. |
Example function:
```ts
const pages: CollectionConfig = {
slug: 'pages',
fields: [
{
name: 'author',
type: 'relationship',
relationTo: 'users',
custom: {
'plugin-import-export': {
toCSV: ({ value, columnName, row }) => {
// add both `author_id` and the `author_email` to the csv export
if (
value &&
typeof value === 'object' &&
'id' in value &&
'email' in value
) {
row[`${columnName}_id`] = (value as { id: number | string }).id
row[`${columnName}_email`] = (value as { email: string }).email
}
},
},
},
},
],
}
```
## Exporting Data
There are four possible ways that the plugin allows for exporting documents, the first two are available in the admin UI from the list view of a collection:
1. Direct download - A `POST` to `/api/exports/download` streams the response as a file download
2. File storage - Goes to the `exports` collection as an uploads enabled collection
3. Local API - A create call to the uploads collection: `payload.create({ slug: 'uploads', ...parameters })`
4. Jobs Queue - `payload.jobs.queue({ task: 'createCollectionExport', input: parameters })`
By default, a user can use the Export drawer to create a file download by choosing `Save` or stream a downloadable file directly without persisting it by using the `Download` button. Either option can be disabled to provide the export experience you desire for your use-case.
The UI for creating exports provides options so that users can be selective about which documents to include, as well as which columns or fields.
It is necessary to add access control to the uploads collection configuration using the `overrideExportCollection` function if you have enabled this plugin on collections with data that some authenticated users should not have access to.
<Banner type="warning">
**Note**: Users who have read access to the upload collection may be able to
download data that is normally not readable due to [access
control](../access-control/overview).
</Banner>
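A hedged sketch of such an override (the access rule shown is an assumption, not a recommendation):

```ts
import { importExportPlugin } from '@payloadcms/plugin-import-export'

importExportPlugin({
  collections: ['users', 'pages'],
  overrideExportCollection: (collection) => ({
    ...collection,
    access: {
      ...collection.access,
      // Assumption: restrict reads of saved exports to authenticated users
      read: ({ req }) => Boolean(req.user),
    },
  }),
})
```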
The following parameters are used by the export function to handle requests:
| Property | Type | Description |
| ---------------- | -------- | ----------------------------------------------------------------------------------------------------------------- |
| `format` | text | Either `csv` or `json` to determine the shape of data exported |
| `limit` | number | The max number of documents to return |
| `sort` | select | The field to use for ordering documents |
| `locale` | string | The locale code to query documents or `all` |
| `draft` | string | Either `yes` or `no` to return documents with their newest drafts for drafts-enabled collections |
| `fields` | string[] | Which collection fields are used to create the export, defaults to all |
| `collectionSlug` | string | The slug to query against |
| `where` | object | The `Where` object used to query documents for export. Set by making selections or applying filters in the list view |
| `filename` | text | What to call the export being created |
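As a sketch, the same parameters can be passed programmatically, for example through the Jobs Queue option above. The collection slug, field names, and `where` clause below are illustrative assumptions, not values required by the plugin:
```ts
// A minimal sketch: queue an export using the plugin's
// `createCollectionExport` task with the parameters documented above.
await payload.jobs.queue({
  task: 'createCollectionExport',
  input: {
    collectionSlug: 'pages',
    format: 'csv',
    fields: ['id', 'title', 'author'],
    limit: 100,
    sort: 'title',
    locale: 'all',
    draft: 'no',
    where: { _status: { equals: 'published' } },
    filename: 'pages-export',
  },
})
```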

View File

@@ -1,7 +1,7 @@
---
title: Multi-Tenant Plugin
label: Multi-Tenant
order: 50
order: 40
desc: Scaffolds multi-tenancy for your Payload application
keywords: plugins, multi-tenant, multi-tenancy, plugin, payload, cms, seo, indexing, search, search engine
---
@@ -53,14 +53,6 @@ The plugin accepts an object with the following properties:
```ts
type MultiTenantPluginConfig<ConfigTypes = unknown> = {
/**
* Base path for your application
*
* https://nextjs.org/docs/app/api-reference/config/next-config-js/basePath
*
* @default undefined
*/
basePath?: string
/**
* After a tenant is deleted, the plugin will attempt to clean up related documents
* - removing documents with the tenant ID
@@ -80,30 +72,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
* @default false
*/
isGlobal?: boolean
/**
* Opt out of adding the tenant field and place
* it manually using the `tenantField` export from the plugin
*/
customTenantField?: boolean
/**
* Overrides for the tenant field, will override the entire tenantField configuration
*/
tenantFieldOverrides?: CollectionTenantFieldConfigOverrides
/**
* Set to `false` if you want to manually apply the baseListFilter
* Set to `false` if you want to manually apply the baseFilter
*
* @default true
*/
useBaseFilter?: boolean
/**
* @deprecated Use `useBaseFilter` instead. If both are defined,
* `useBaseFilter` will take precedence. This property remains only
* for backward compatibility and may be removed in a future version.
*
* Originally, `baseListFilter` was intended to filter only the List View
* in the admin panel. However, base filtering is often required in other areas
* such as internal link relationships in the Lexical editor.
*
* @default true
*/
@@ -129,37 +99,18 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
* @default true
*/
enabled?: boolean
/**
* Localization for the plugin
*/
i18n?: {
translations: {
[key in AcceptedLanguages]?: {
/**
* @default 'You are about to change ownership from <0>{{fromTenant}}</0> to <0>{{toTenant}}</0>'
*/
'confirm-modal-tenant-switch--body'?: string
/**
* `tenantLabel` defaults to the value of the `nav-tenantSelector-label` translation
*
* @default 'Confirm {{tenantLabel}} change'
*/
'confirm-modal-tenant-switch--heading'?: string
/**
* @default 'Assigned Tenant'
*/
'field-assignedTenant-label'?: string
/**
* @default 'Tenant'
*/
'nav-tenantSelector-label'?: string
}
}
}
/**
* Field configuration for the field added to all tenant enabled collections
*/
tenantField?: RootTenantFieldConfigOverrides
tenantField?: {
access?: RelationshipField['access']
/**
* The name of the field added to all tenant enabled collections
*
* @default 'tenant'
*/
name?: string
}
/**
* Field configuration for the field added to the users collection
*
@@ -212,8 +163,6 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
* Customize tenant selector label
*
* Either a string or an object where the keys are i18n codes and the values are the string labels
*
* @deprecated Use `i18n.translations` instead.
*/
tenantSelectorLabel?:
| Partial<{
@@ -232,9 +181,7 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
* Useful for super-admin type users
*/
userHasAccessToAllTenants?: (
user: ConfigTypes extends { user: unknown }
? ConfigTypes['user']
: TypedUser,
user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User,
) => boolean
/**
* Opt out of adding access constraints to the tenants collection
@@ -265,8 +212,8 @@ const config = buildConfig({
{
slug: 'tenants',
admin: {
useAsTitle: 'name',
},
useAsTitle: 'name'
}
fields: [
// remember, you own these fields
// these are merely suggestions/examples
@@ -284,7 +231,7 @@ const config = buildConfig({
name: 'domain',
type: 'text',
required: true,
},
}
],
},
],
@@ -294,7 +241,7 @@ const config = buildConfig({
pages: {},
navigation: {
isGlobal: true,
},
}
},
}),
],
@@ -380,16 +327,14 @@ type ContextType = {
/**
* Prevents a refresh when the tenant is changed
*
* If not switching tenants while viewing a "global",
* set to true
* If not switching tenants while viewing a "global", set to true
*/
setPreventRefreshOnChange: React.Dispatch<React.SetStateAction<boolean>>
/**
* Sets the selected tenant ID
*
* @param args.id - The ID of the tenant to select
* @param args.refresh - Whether to refresh the page
* after changing the tenant
* @param args.refresh - Whether to refresh the page after changing the tenant
*/
setTenant: (args: {
id: number | string | undefined

View File

@@ -1,7 +1,7 @@
---
title: Nested Docs Plugin
label: Nested Docs
order: 60
order: 40
desc: Nested documents in a parent, child, and sibling relationship.
keywords: plugins, nested, documents, parent, child, sibling, relationship
---

View File

@@ -55,7 +55,6 @@ Payload maintains a set of Official Plugins that solve for some of the common us
- [Sentry](./sentry)
- [SEO](./seo)
- [Stripe](./stripe)
- [Import/Export](./import-export)
You can also [build your own plugin](./build-your-own) to easily extend Payload's functionality in some other way. Once your plugin is ready, consider [sharing it with the community](#community-plugins).

View File

@@ -1,7 +1,7 @@
---
title: Redirects Plugin
label: Redirects
order: 70
order: 40
desc: Automatically create redirects for your Payload application
keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine
---

View File

@@ -1,7 +1,7 @@
---
title: Search Plugin
label: Search
order: 80
order: 40
desc: Generates records of your documents that are extremely fast to search on.
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input
---

View File

@@ -1,7 +1,7 @@
---
title: Sentry Plugin
label: Sentry
order: 90
order: 40
desc: Integrate Sentry error tracking into your Payload application
keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance
---

View File

@@ -2,7 +2,7 @@
description: Manage SEO metadata from your Payload admin
keywords: plugins, seo, meta, search, engine, ranking, google
label: SEO
order: 100
order: 30
title: SEO Plugin
---

View File

@@ -1,7 +1,7 @@
---
title: Stripe Plugin
label: Stripe
order: 110
order: 40
desc: Easily accept payments with Stripe
keywords: plugins, stripe, payments, ecommerce
---

View File

@@ -24,6 +24,16 @@ Payload can be deployed _anywhere that Next.js can run_ - including Vercel, Netl
But it's important to remember that most Payload projects will also need a database, file storage, an email provider, and a CDN. Make sure you have all of the requirements that your project needs, no matter what deployment platform you choose.
Often, the easiest and fastest way to deploy Payload is to use [Payload Cloud](https://payloadcms.com/new) — where you get everything you need out of the box, including:
1. A MongoDB Atlas database
1. S3 file storage
1. Resend email service
1. Cloudflare CDN
1. Blue / green deployments
1. Logs
1. And more
## Basics
Payload runs fully in Next.js, so the [Next.js build process](https://nextjs.org/docs/app/building-your-application/deploying) is used for building Payload. If you've used `create-payload-app` to create your project, executing the `build`

View File

@@ -148,12 +148,6 @@ export const Pages: CollectionConfig<'pages'> = {
}
```
<VideoDrawer
id="Snqjng_w-QU"
label="Watch default populate in action"
drawerTitle="How to easily optimize Payload CMS requests with defaultPopulate"
/>
<Banner type="warning">
**Important:** When using `defaultPopulate` on a collection with
[Uploads](/docs/fields/upload) enabled and you want to select the `url` field,

View File

@@ -474,15 +474,11 @@ const MyNodeComponent = React.lazy(() =>
)
/**
* This node is a DecoratorNode. DecoratorNodes allow
* you to render React components in the editor.
* This node is a DecoratorNode. DecoratorNodes allow you to render React components in the editor.
*
* They need both createDom and decorate functions.
* createDom => outside of the html.
* decorate => React Component inside of the html.
* They need both createDom and decorate functions. createDom => outside of the html. decorate => React Component inside of the html.
*
* If we used DecoratorBlockNode instead,
* we would only need a decorate method
* If we used DecoratorBlockNode instead, we would only need a decorate method
*/
export class MyNode extends DecoratorNode<React.ReactElement> {
static clone(node: MyNode): MyNode {
@@ -494,11 +490,9 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
}
/**
* Defines what happens if you copy a div element
* from another page and paste it into the lexical editor
* Defines what happens if you copy a div element from another page and paste it into the lexical editor
*
* This also determines the behavior of lexical's
* internal HTML -> Lexical converter
* This also determines the behavior of lexical's internal HTML -> Lexical converter
*/
static importDOM(): DOMConversionMap | null {
return {
@@ -510,18 +504,14 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
}
/**
* The data for this node is stored serialized as JSON.
* This is the "load function" of that node: it takes
* the saved data and converts it into a node.
* The data for this node is stored serialized as JSON. This is the "load function" of that node: it takes the saved data and converts it into a node.
*/
static importJSON(serializedNode: SerializedMyNode): MyNode {
return $createMyNode()
}
/**
* Determines how the hr element is rendered in the
* lexical editor. This is only the "initial" / "outer"
* HTML element.
* Determines how the hr element is rendered in the lexical editor. This is only the "initial" / "outer" HTML element.
*/
createDOM(config: EditorConfig): HTMLElement {
const element = document.createElement('div')
@@ -529,28 +519,22 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
}
/**
* Allows you to render a React component within
* whatever createDOM returns.
* Allows you to render a React component within whatever createDOM returns.
*/
decorate(): React.ReactElement {
return <MyNodeComponent nodeKey={this.__key} />
}
/**
* Opposite of importDOM, this function defines what
* happens when you copy a div element from the lexical
* editor and paste it into another page.
* Opposite of importDOM, this function defines what happens when you copy a div element from the lexical editor and paste it into another page.
*
* This also determines the behavior of lexical's
* internal Lexical -> HTML converter
* This also determines the behavior of lexical's internal Lexical -> HTML converter
*/
exportDOM(): DOMExportOutput {
return { element: document.createElement('div') }
}
/**
* Opposite of importJSON. This determines what
* data is saved in the database / in the lexical
* editor state.
* Opposite of importJSON. This determines what data is saved in the database / in the lexical editor state.
*/
exportJSON(): SerializedLexicalNode {
return {
@@ -572,23 +556,18 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
}
}
// This is used in the importDOM method. Totally optional
// if you do not want your node to be created automatically
// when copy & pasting certain dom elements into your editor.
// This is used in the importDOM method. Totally optional if you do not want your node to be created automatically when copy & pasting certain dom elements
// into your editor.
function $yourConversionMethod(): DOMConversionOutput {
return { node: $createMyNode() }
}
// This is a utility method to create a new MyNode.
// Utility methods prefixed with $ make it explicit
// that this should only be used within lexical
// This is a utility method to create a new MyNode. Utility methods prefixed with $ make it explicit that this should only be used within lexical
export function $createMyNode(): MyNode {
return $applyNodeReplacement(new MyNode())
}
// This is just a utility method you can use
// to check if a node is a MyNode. This also
// ensures correct typing.
// This is just a utility method you can use to check if a node is a MyNode. This also ensures correct typing.
export function $isMyNode(
node: LexicalNode | null | undefined,
): node is MyNode {
@@ -647,12 +626,10 @@ export const INSERT_MYNODE_COMMAND: LexicalCommand<void> = createCommand(
)
/**
* Plugin which registers a lexical command to
* insert a new MyNode into the editor
* Plugin which registers a lexical command to insert a new MyNode into the editor
*/
export const MyNodePlugin: PluginComponent = () => {
// The useLexicalComposerContext hook can be used
// to access the lexical editor instance
// The useLexicalComposerContext hook can be used to access the lexical editor instance
const [editor] = useLexicalComposerContext()
useEffect(() => {

View File

@@ -124,15 +124,12 @@ HeadingFeature({
```ts
type IndentFeatureProps = {
/**
* The nodes that should not be indented. "type"
* property of the nodes you don't want to be indented.
* These can be: "paragraph", "heading", "listitem",
* "quote" or other indentable nodes if they exist.
* The nodes that should not be indented. "type" property of the nodes you don't want to be indented.
* These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist.
*/
disabledNodes?: string[]
/**
* If true, pressing Tab in the middle of a block such
* as a paragraph or heading will not insert a tabNode.
* If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode.
* Instead, Tab will only be used for block-level indentation.
* @default false
*/
@@ -183,8 +180,7 @@ type LinkFeatureServerProps = {
*/
disableAutoLinks?: 'creationOnly' | true
/**
* A function or array defining additional
* fields for the link feature.
* A function or array defining additional fields for the link feature.
* These will be displayed in the link editor drawer.
*/
fields?:
@@ -239,9 +235,7 @@ LinkFeature({
```ts
type RelationshipFeatureProps = {
/**
* Sets a maximum population depth for this relationship,
* regardless of the remaining depth when the respective
* field is reached.
* Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached.
*/
maxDepth?: number
} & ExclusiveRelationshipFeatureProps
@@ -280,10 +274,7 @@ type UploadFeatureProps = {
}
}
/**
* Sets a maximum population depth for this upload
* (not the fields for this upload), regardless of
* the remaining depth when the respective field is
* reached.
* Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached.
*/
maxDepth?: number
}

View File

@@ -1,200 +0,0 @@
---
title: Trash
label: Overview
order: 10
desc: Enable soft deletes for your collections to mark documents as deleted without permanently removing them.
keywords: trash, soft delete, deletedAt, recovery, restore
---
Trash (also known as soft delete) allows documents to be marked as deleted without being permanently removed. When enabled on a collection, deleted documents will receive a `deletedAt` timestamp, making it possible to restore them later, view them in a dedicated Trash view, or permanently delete them.
Soft delete is a safer way to manage the content lifecycle, giving editors a chance to review and recover documents that may have been deleted by mistake.
<Banner type="warning">
**Note:** The Trash feature is currently in beta and may be subject to change
in minor version updates.
</Banner>
## Collection Configuration
To enable soft deleting for a collection, set the `trash` property to `true`:
```ts
import type { CollectionConfig } from 'payload'
export const Posts: CollectionConfig = {
slug: 'posts',
trash: true,
fields: [
{
name: 'title',
type: 'text',
},
// other fields...
],
}
```
When enabled, Payload automatically injects a `deletedAt` field into the collection's schema. This timestamp is set when a document is soft-deleted, and cleared when the document is restored.
## Admin Panel Behavior
Once `trash` is enabled, the Admin Panel provides a dedicated Trash view for each collection:
- A new route is added at `/collections/:collectionSlug/trash`
- The `Trash` view shows all documents that have a `deletedAt` timestamp
From the Trash view, you can:
- Use bulk actions to manage trashed documents:
- **Restore** to clear the `deletedAt` timestamp and return documents to their original state
- **Delete** to permanently remove selected documents
- **Empty Trash** to select and permanently delete all trashed documents at once
- Enter each document's **edit view**, just like in the main list view. While in the edit view of a trashed document:
- All fields are in a **read-only** state
- Standard document actions (e.g., Save, Publish, Restore Version) are hidden and disabled.
- The available actions are **Restore** and **Permanently Delete**.
- Access to the **API**, **Versions**, and **Preview** views is preserved.
When deleting a document from the main collection List View, Payload will soft-delete the document by default. A checkbox in the delete confirmation modal allows users to skip the trash and permanently delete instead.
## API Support
Soft deletes are fully supported across all Payload APIs: **Local**, **REST**, and **GraphQL**.
The following operations respect and support the `trash` functionality:
- `find`
- `findByID`
- `update`
- `updateByID`
- `delete`
- `deleteByID`
- `findVersions`
- `findVersionByID`
### Understanding `trash` Behavior
Passing `trash: true` to these operations will **include soft-deleted documents** in the query results.
To return _only_ soft-deleted documents, you must combine `trash: true` with a `where` clause that checks if `deletedAt` exists.
### Examples
#### Local API
Return all documents including trashed:
```ts
const result = await payload.find({
collection: 'posts',
trash: true,
})
```
Return only trashed documents:
```ts
const result = await payload.find({
collection: 'posts',
trash: true,
where: {
deletedAt: {
exists: true,
},
},
})
```
Return only non-trashed documents:
```ts
const result = await payload.find({
collection: 'posts',
trash: false,
})
```
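A trashed document can also be permanently removed through the same APIs. As a sketch with the Local API, passing `trash: true` lets the operation target a soft-deleted document (`postId` is an assumed variable):
```ts
// Permanently delete a document that currently sits in the trash.
await payload.delete({
  collection: 'posts',
  id: postId,
  trash: true, // without this, the operation would not match a trashed doc
})
```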
#### REST
Return **all** documents including trashed:
```http
GET /api/posts?trash=true
```
Return **only trashed** documents:
```http
GET /api/posts?trash=true&where[deletedAt][exists]=true
```
Return only non-trashed documents:
```http
GET /api/posts?trash=false
```
#### GraphQL
Return all documents including trashed:
```graphql
query {
Posts(trash: true) {
docs {
id
deletedAt
}
}
}
```
Return only trashed documents:
```graphql
query {
Posts(
trash: true
where: { deletedAt: { exists: true } }
) {
docs {
id
deletedAt
}
}
}
```
Return only non-trashed documents:
```graphql
query {
Posts(trash: false) {
docs {
id
deletedAt
}
}
}
```
## Access Control
All trash-related actions (delete, permanent delete) respect the `delete` access control defined in your collection config.
This means:
- If a user is denied delete access, they cannot soft delete or permanently delete documents.
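For illustration, a minimal sketch of a trash-enabled collection where only admins may delete (and therefore trash) documents; the `roles` property on the user is an assumption for this example:
```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  trash: true,
  access: {
    // A user denied delete access can neither soft delete nor
    // permanently delete documents in this collection.
    // `roles` is assumed to exist on the users collection.
    delete: ({ req: { user } }) =>
      Boolean((user as null | { roles?: string[] })?.roles?.includes('admin')),
  },
  fields: [{ name: 'title', type: 'text' }],
}
```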
## Versions and Trash
When a document is soft-deleted:
- It can no longer have a version **restored** until it is first restored from trash
- Attempting to restore a version while the document is in trash will result in an error
- This ensures consistency between the current document state and its version history
However, versions are still fully **visible and accessible** from the **edit view** of a trashed document. You can view the full version history, but must restore the document itself before restoring any individual version.

View File

@@ -6,112 +6,9 @@ desc: Troubleshooting Common Issues in Payload
keywords: admin, components, custom, customize, documentation, Content Management System, cms, headless, javascript, node, react, nextjs, troubleshooting
---
## Dependency mismatches
## Common Issues
All `payload` and `@payloadcms/*` packages must be on exactly the same version and installed only once.
When two copies—or two different versions—of any of these packages (or of `react` / `react-dom`) appear in your dependency graph, you can see puzzling runtime errors. The most frequent is a broken React context:
```bash
TypeError: Cannot destructure property 'config' of...
```
This happens because one package imports a hook (most commonly `useConfig`) from _version A_ while the context provider comes from _version B_. The fix is always the same: make sure every Payload-related and React package resolves to the same module.
### Confirm whether duplicates exist
The first thing to do is to confirm whether duplicate dependencies do in fact exist.
There are two ways to do this:
1. Using pnpm's built-in inspection tool
```bash
pnpm why @payloadcms/ui
```
This prints the dependency tree and shows which versions are being installed. If you see more than one distinct version—or the same version listed under different paths—you have duplication.
2. Manual check (works with any package manager)
```bash
find node_modules -name package.json \
-exec grep -H '"name": "@payloadcms/ui"' {} \;
```
Most of these hits are likely symlinks created by pnpm. Edit the matching package.json files (temporarily add a comment or change a description) to confirm whether they point to the same physical folder or to multiple copies.
Perform the same two checks for react and react-dom; a second copy of React can cause identical symptoms.
#### If no duplicates are found
`@payloadcms/ui` intentionally contains two bundles of itself, so you may see dual paths even when everything is correct. Inside the Payload Admin UI you must import only:
- `@payloadcms/ui`
- `@payloadcms/ui/rsc`
- `@payloadcms/ui/shared`
Any other deep import such as `@payloadcms/ui/elements/Button` should **only** be used in your own frontend, outside of the Payload Admin Panel. Those deep entries are published unbundled to help you tree-shake and ship a smaller client bundle if you only need a few components from `@payloadcms/ui`.
### Fixing dependency issues
These steps assume `pnpm`, which the Payload team recommends and uses internally. The principles apply to other package managers like npm and yarn as well. Do note that yarn 1.x is not supported by Payload.
1. Pin every critical package to an exact version
In `package.json`, remove `^` or `~` from all versions of:
- `payload`
- `@payloadcms/*`
- `react`
- `react-dom`
Prefixes allow your package manager to float to a newer minor/patch release, causing mismatches.
2. Delete node_modules
Old packages often linger even after you change versions or remove them from your `package.json`. Deleting node_modules ensures a clean slate.
3. Re-install dependencies
```bash
pnpm install
```
#### If the error persists
1. Clean the global store (pnpm only)
```bash
pnpm store prune
```
2. Delete the lockfile
Depending on your package manager, this could be `pnpm-lock.yaml`, `package-lock.json`, or `yarn.lock`.
Make sure you delete the lockfile **and** the node_modules folder at the same time, then run `pnpm install`. This forces a fresh, consistent resolution for all packages. It will also update all packages with dynamic versions to the latest version.
While it's best practice to manage dependencies in such a way that the lockfile can easily be regenerated (often this is the easiest way to resolve dependency issues), doing so may break your project if you have not tested the latest versions of your dependencies.
If you are using a version control system, make sure to commit your lockfile after this step.
3. Deduplicate anything that slipped through
```bash
pnpm dedupe
```
**Still stuck?**
- Switch to `pnpm` if you are on npm. Its symlinked store helps reduce accidental duplication.
- Inspect the lockfile directly for peer-dependency violations.
- Check project-level .npmrc / .pnpmfile.cjs overrides.
- Run [Syncpack](https://www.npmjs.com/package/syncpack) to enforce identical versions of every `@payloadcms/*`, `react`, and `react-dom` reference.
Absolute last resort: add Webpack aliases so that all imports of a given package resolve to the same path (e.g. `resolve.alias['react'] = path.resolve('./node_modules/react')`). Keep this only until you can fix the underlying version skew.
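As a sketch, in a Next.js project such aliases can be applied through the webpack override in `next.config.mjs`; the paths are assumptions to adapt to your layout:
```ts
// next.config.mjs — last-resort aliases forcing every import of
// react and react-dom to resolve to a single physical copy.
// Remove this once the underlying version skew is fixed.
import path from 'node:path'

const nextConfig = {
  webpack: (config) => {
    config.resolve.alias['react'] = path.resolve('./node_modules/react')
    config.resolve.alias['react-dom'] = path.resolve('./node_modules/react-dom')
    return config
  },
}

export default nextConfig
```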
## "Unauthorized, you must be logged in to make this request" when attempting to log in
### "Unauthorized, you must be logged in to make this request" when attempting to log in
This means that your auth cookie is not being set or accepted correctly upon logging in. To resolve, check the following settings in your Payload Config:

View File

@@ -13,8 +13,8 @@ keywords: uploads, images, media, overview, documentation, Content Management Sy
</Banner>
<LightDarkImage
srcLight="https://payloadcms.com/images/docs/uploads-overview.jpg"
srcDark="https://payloadcms.com/images/docs/uploads-overview.jpg"
srcLight="https://payloadcms.com/images/docs/upload-admin.jpg"
srcDark="https://payloadcms.com/images/docs/upload-admin.jpg"
alt="Shows an Upload enabled collection in the Payload Admin Panel"
caption="Admin Panel screenshot depicting a Media Collection with Upload enabled"
/>
@@ -91,7 +91,7 @@ export const Media: CollectionConfig = {
_An asterisk denotes that an option is required._
| Option | Description |
| ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| ------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
@@ -99,7 +99,7 @@ _An asterisk denotes that an option is required._
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. If using this option, you should handle the removal of any sensitive cookies (like payload-prefixed cookies) to prevent leaking session information to external services. By default, Payload automatically filters out payload-prefixed cookies when this option is not defined. |
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. |
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. |
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |

View File

@@ -292,8 +292,7 @@ Reference any of the existing storage adapters for guidance on how this should b
```ts
export interface GeneratedAdapter {
/**
* Additional fields to be injected into the base
* collection and image sizes
* Additional fields to be injected into the base collection and image sizes
*/
fields?: Field[]
/**

View File

@@ -12,7 +12,7 @@ Extending on Payload's [Draft](/docs/versions/drafts) functionality, you can con
Autosave relies on Versions and Drafts being enabled in order to function.
</Banner>
![Autosave Enabled](/images/docs/autosave-v3.jpg)
![Autosave Enabled](/images/docs/autosave-enabled.png)
_If Autosave is enabled, drafts will be created automatically as the document is modified and the Admin UI adds an indicator describing when the document was last saved to the top right of the sidebar._
## Options

View File

@@ -14,7 +14,7 @@ Payload's Draft functionality builds on top of the Versions functionality to all
By enabling Versions with Drafts, your collections and globals can maintain _newer_, and _unpublished_ versions of your documents. It's perfect for cases where you might want to work on a document, update it and save your progress, but not necessarily make it publicly published right away. Drafts are extremely helpful when building preview implementations.
![Drafts Enabled](/images/docs/autosave-drafts.jpg)
![Drafts Enabled](/images/docs/drafts-enabled.png)
_If Drafts are enabled, the typical Save button is replaced with new actions which allow you to either save a draft, or publish your changes._
## Options

View File

@@ -13,7 +13,7 @@ keywords: version history, revisions, audit log, draft, publish, restore, autosa
When enabled, Payload will automatically scaffold a new Collection in your database to store versions of your document(s) over time, and the Admin UI will be extended with additional views that allow you to browse document versions, view diffs in order to see exactly what has changed in your documents (and when they changed), and restore documents back to prior versions easily.
![Versions](/images/docs/versions-v3.jpg)
![Versions](/images/docs/versions.png)
_Comparing an old version to a newer version of a document_
**With Versions, you can:**

View File

@@ -6,8 +6,6 @@ import { anyone } from './access/anyone'
import { checkRole } from './access/checkRole'
import { loginAfterCreate } from './hooks/loginAfterCreate'
import { protectRoles } from './hooks/protectRoles'
import { access } from 'fs'
import { create } from 'domain'
export const Users: CollectionConfig = {
slug: 'users',
@@ -34,34 +32,6 @@ export const Users: CollectionConfig = {
afterChange: [loginAfterCreate],
},
fields: [
{
name: 'email',
type: 'email',
required: true,
unique: true,
access: {
read: adminsAndUser,
update: adminsAndUser,
},
},
{
name: 'password',
type: 'password',
required: true,
admin: {
description: 'Leave blank to keep the current password.',
},
},
{
name: 'resetPasswordToken',
type: 'text',
hidden: true,
},
{
name: 'resetPasswordExpiration',
type: 'date',
hidden: true,
},
{
name: 'firstName',
type: 'text',
@@ -75,11 +45,6 @@ export const Users: CollectionConfig = {
type: 'select',
hasMany: true,
saveToJWT: true,
access: {
read: admins,
update: admins,
create: admins,
},
hooks: {
beforeChange: [protectRoles],
},

View File

@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
"version": "3.53.0",
"version": "3.47.0",
"private": true,
"type": "module",
"workspaces": [
@@ -76,6 +76,8 @@
"dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
"dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
"devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
"docker:postgres": "docker compose -f test/docker-compose.yml up -d postgres",
"docker:postgres:stop": "docker compose -f test/docker-compose.yml down postgres",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker compose -f test/docker-compose.yml up -d",
"docker:stop": "docker compose -f test/docker-compose.yml down",
@@ -112,7 +114,6 @@
"test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod",
"test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo",
"test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:types": "tstyche",
@@ -132,12 +133,12 @@
"devDependencies": {
"@jest/globals": "29.7.0",
"@libsql/client": "0.14.0",
"@next/bundle-analyzer": "15.4.4",
"@next/bundle-analyzer": "15.3.2",
"@payloadcms/db-postgres": "workspace:*",
"@payloadcms/eslint-config": "workspace:*",
"@payloadcms/eslint-plugin": "workspace:*",
"@payloadcms/live-preview-react": "workspace:*",
"@playwright/test": "1.54.1",
"@playwright/test": "1.50.0",
"@sentry/nextjs": "^8.33.1",
"@sentry/node": "^8.33.1",
"@swc-node/register": "1.10.10",
@@ -147,8 +148,8 @@
"@types/jest": "29.5.12",
"@types/minimist": "1.2.5",
"@types/node": "22.15.30",
"@types/react": "19.1.8",
"@types/react-dom": "19.1.6",
"@types/react": "19.1.0",
"@types/react-dom": "19.1.2",
"@types/shelljs": "0.8.15",
"chalk": "^4.1.2",
"comment-json": "^4.2.3",
@@ -168,12 +169,12 @@
"lint-staged": "15.2.7",
"minimist": "1.2.8",
"mongodb-memory-server": "10.1.4",
"next": "15.4.4",
"next": "15.3.2",
"open": "^10.1.0",
"p-limit": "^5.0.0",
"pg": "8.16.3",
"playwright": "1.54.1",
"playwright-core": "1.54.1",
"playwright": "1.50.0",
"playwright-core": "1.50.0",
"prettier": "3.5.3",
"react": "19.1.0",
"react-dom": "19.1.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/admin-bar",
"version": "3.53.0",
"version": "3.47.0",
"description": "An admin bar for React apps using Payload",
"homepage": "https://payloadcms.com",
"repository": {
@@ -42,8 +42,8 @@
},
"devDependencies": {
"@payloadcms/eslint-config": "workspace:*",
"@types/react": "19.1.8",
"@types/react-dom": "19.1.6",
"@types/react": "19.1.0",
"@types/react-dom": "19.1.2",
"payload": "workspace:*"
},
"peerDependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.53.0",
"version": "3.47.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.53.0",
"version": "3.47.0",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -35,31 +35,7 @@ export const connect: Connect = async function connect(
}
try {
if (!this.connection) {
this.connection = await mongoose.createConnection(urlToConnect, connectionOptions).asPromise()
}
await this.connection.openUri(urlToConnect, connectionOptions)
if (this.useAlternativeDropDatabase) {
if (this.connection.db) {
// Firestore doesn't support dropDatabase, so we monkey patch
// dropDatabase to delete all documents from all collections instead
this.connection.db.dropDatabase = async function (): Promise<boolean> {
const existingCollections = await this.listCollections().toArray()
await Promise.all(
existingCollections.map(async (collectionInfo) => {
const collection = this.collection(collectionInfo.name)
await collection.deleteMany({})
}),
)
return true
}
this.connection.dropDatabase = async function () {
await this.db?.dropDatabase()
}
}
}
this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
// If we are running a replica set with MongoDB Memory Server,
// wait until the replica set elects a primary before proceeding
@@ -80,8 +56,7 @@ export const connect: Connect = async function connect(
if (!hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----')
await this.connection.dropDatabase()
await mongoose.connection.dropDatabase()
this.payload.logger.info('---- DROPPED DATABASE ----')
}
}

View File

@@ -17,16 +17,10 @@ export const create: Create = async function create(
const options: CreateOptions = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
let doc
if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}
transform({
adapter: this,
data,

View File

@@ -14,10 +14,6 @@ export const createGlobal: CreateGlobal = async function createGlobal(
) {
const { globalConfig, Model } = getGlobal({ adapter: this, globalSlug })
if (!data.createdAt) {
;(data as any).createdAt = new Date().toISOString()
}
transform({
adapter: this,
data,
@@ -28,8 +24,6 @@ export const createGlobal: CreateGlobal = async function createGlobal(
const options: CreateOptions = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
let [result] = (await Model.create([data], options)) as any

View File

@@ -25,8 +25,6 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
const options = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
const data = {
@@ -39,9 +37,6 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
updatedAt,
version: versionData,
}
if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}
const fields = buildVersionGlobalFields(this.payload.config, globalConfig)

View File

@@ -29,8 +29,6 @@ export const createVersion: CreateVersion = async function createVersion(
const options = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
const data = {
@@ -43,9 +41,6 @@ export const createVersion: CreateVersion = async function createVersion(
updatedAt,
version: versionData,
}
if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}
const fields = buildVersionCollectionFields(this.payload.config, collectionConfig)

View File

@@ -1,5 +1,5 @@
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { DeleteOne, SelectType } from 'payload'
import type { DeleteOne } from 'payload'
import type { MongooseAdapter } from './index.js'
@@ -11,17 +11,10 @@ import { transform } from './utilities/transform.js'
export const deleteOne: DeleteOne = async function deleteOne(
this: MongooseAdapter,
{ collection: collectionSlug, req, returning, select: selectArg, where },
{ collection: collectionSlug, req, returning, select, where },
) {
const { collectionConfig, Model } = getCollection({ adapter: this, collectionSlug })
const select: SelectType | undefined =
returning === false
? {
id: true,
}
: selectArg
const options: MongooseUpdateQueryOptions = {
projection: buildProjectionFromSelect({
adapter: this,

View File

@@ -1,11 +1,11 @@
import type { Destroy } from 'payload'
import mongoose from 'mongoose'
import type { MongooseAdapter } from './index.js'
export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
await this.connection.close()
await mongoose.disconnect()
for (const name of Object.keys(this.connection.models)) {
this.connection.deleteModel(name)
}
Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
}

View File

@@ -12,7 +12,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const find: Find = async function find(
@@ -156,16 +155,6 @@ export const find: Find = async function find(
result = await Model.paginate(query, paginationOptions)
}
if (!this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: result.docs as Record<string, unknown>[],
joins,
locale,
})
}
transform({
adapter: this,
data: result.docs,

View File

@@ -1,141 +0,0 @@
import type { PipelineStage } from 'mongoose'
import { type FindDistinct, getFieldByPath } from 'payload'
import type { MongooseAdapter } from './index.js'
import { buildQuery } from './queries/buildQuery.js'
import { buildSortParam } from './queries/buildSortParam.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) {
const { collectionConfig, Model } = getCollection({
adapter: this,
collectionSlug: args.collection,
})
const session = await getSession(this, args.req)
const { where = {} } = args
const sortAggregation: PipelineStage[] = []
const sort = buildSortParam({
adapter: this,
config: this.payload.config,
fields: collectionConfig.flattenedFields,
locale: args.locale,
sort: args.sort ?? args.field,
sortAggregation,
timestamps: true,
})
const query = await buildQuery({
adapter: this,
collectionSlug: args.collection,
fields: collectionConfig.flattenedFields,
locale: args.locale,
where,
})
const fieldPathResult = getFieldByPath({
fields: collectionConfig.flattenedFields,
path: args.field,
})
let fieldPath = args.field
if (fieldPathResult?.pathHasLocalized && args.locale) {
fieldPath = fieldPathResult.localizedPath.replace('<locale>', args.locale)
}
const page = args.page || 1
const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key.
const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1
const pipeline: PipelineStage[] = [
{
$match: query,
},
...(sortAggregation.length > 0 ? sortAggregation : []),
{
$group: {
_id: {
_field: `$${fieldPath}`,
...(sortProperty === fieldPath
? {}
: {
_sort: `$${sortProperty}`,
}),
},
},
},
{
$sort: {
[sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection,
},
},
]
const getValues = async () => {
return Model.aggregate(pipeline, { session }).then((res) =>
res.map((each) => ({
[args.field]: JSON.parse(JSON.stringify(each._id._field)),
})),
)
}
if (args.limit) {
pipeline.push({
$skip: (page - 1) * args.limit,
})
pipeline.push({ $limit: args.limit })
const totalDocs = await Model.aggregate(
[
{
$match: query,
},
{
$group: {
_id: `$${fieldPath}`,
},
},
{ $count: 'count' },
],
{
session,
},
).then((res) => res[0]?.count ?? 0)
const totalPages = Math.ceil(totalDocs / args.limit)
const hasPrevPage = page > 1
const hasNextPage = totalPages > page
const pagingCounter = (page - 1) * args.limit + 1
return {
hasNextPage,
hasPrevPage,
limit: args.limit,
nextPage: hasNextPage ? page + 1 : null,
page,
pagingCounter,
prevPage: hasPrevPage ? page - 1 : null,
totalDocs,
totalPages,
values: await getValues(),
}
}
const values = await getValues()
return {
hasNextPage: false,
hasPrevPage: false,
limit: 0,
page: 1,
pagingCounter: 1,
totalDocs: values.length,
totalPages: 1,
values,
}
}

View File

@@ -10,7 +10,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const findOne: FindOne = async function findOne(
@@ -68,16 +67,6 @@ export const findOne: FindOne = async function findOne(
doc = await Model.findOne(query, {}, options)
}
if (doc && !this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: [doc] as Record<string, unknown>[],
joins,
locale,
})
}
if (!doc) {
return null
}

View File

@@ -42,7 +42,6 @@ import { deleteOne } from './deleteOne.js'
import { deleteVersions } from './deleteVersions.js'
import { destroy } from './destroy.js'
import { find } from './find.js'
import { findDistinct } from './findDistinct.js'
import { findGlobal } from './findGlobal.js'
import { findGlobalVersions } from './findGlobalVersions.js'
import { findOne } from './findOne.js'
@@ -144,29 +143,6 @@ export interface Args {
/** The URL to connect to MongoDB or false to start payload and prevent connecting */
url: false | string
/**
* Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command.
* Payload only uses `dropDatabase` for testing purposes.
* @default false
*/
useAlternativeDropDatabase?: boolean
/**
* Set to `true` to use `BigInt` for custom ID fields of type `'number'`.
* Useful for databases that don't support `double` or `int32` IDs.
* @default false
*/
useBigIntForNumberIDs?: boolean
/**
* Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries.
* @default true
*/
useJoinAggregations?: boolean
/**
* Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting.
* @default true
*/
usePipelineInSortLookup?: boolean
}
export type MongooseAdapter = {
@@ -183,10 +159,6 @@ export type MongooseAdapter = {
up: (args: MigrateUpArgs) => Promise<void>
}[]
sessions: Record<number | string, ClientSession>
useAlternativeDropDatabase: boolean
useBigIntForNumberIDs: boolean
useJoinAggregations: boolean
usePipelineInSortLookup: boolean
versions: {
[slug: string]: CollectionModel
}
@@ -222,10 +194,6 @@ declare module 'payload' {
updateVersion: <T extends TypeWithID = TypeWithID>(
args: { options?: QueryOptions } & UpdateVersionArgs<T>,
) => Promise<TypeWithVersion<T>>
useAlternativeDropDatabase: boolean
useBigIntForNumberIDs: boolean
useJoinAggregations: boolean
usePipelineInSortLookup: boolean
versions: {
[slug: string]: CollectionModel
}
@@ -246,10 +214,6 @@ export function mongooseAdapter({
prodMigrations,
transactionOptions = {},
url,
useAlternativeDropDatabase = false,
useBigIntForNumberIDs = false,
useJoinAggregations = true,
usePipelineInSortLookup = true,
}: Args): DatabaseAdapterObj {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(migrationDirArg)
@@ -298,7 +262,6 @@ export function mongooseAdapter({
destroy,
disableFallbackSort,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findOne,
@@ -316,10 +279,6 @@ export function mongooseAdapter({
updateOne,
updateVersion,
upsert,
useAlternativeDropDatabase,
useBigIntForNumberIDs,
useJoinAggregations,
usePipelineInSortLookup,
})
}
@@ -331,8 +290,6 @@ export function mongooseAdapter({
}
}
export { compatibilityOptions } from './utilities/compatibilityOptions.js'
/**
* Attempt to find migrations directory.
*

View File

@@ -19,14 +19,11 @@ import { getBuildQueryPlugin } from './queries/getBuildQueryPlugin.js'
import { getDBName } from './utilities/getDBName.js'
export const init: Init = function init(this: MongooseAdapter) {
// Always create a scoped, **unopened** connection object
// (no URI here; models compile per-connection and do not require an open socket)
this.connection ??= mongoose.createConnection()
this.payload.config.collections.forEach((collection: SanitizedCollectionConfig) => {
const schemaOptions = this.collectionsSchemaOptions?.[collection.slug]
const schema = buildCollectionSchema(collection, this.payload, schemaOptions)
if (collection.versions) {
const versionModelName = getDBName({ config: collection, versions: true })
@@ -58,7 +55,7 @@ export const init: Init = function init(this: MongooseAdapter) {
const versionCollectionName =
this.autoPluralization === true && !collection.dbName ? undefined : versionModelName
this.versions[collection.slug] = this.connection.model(
this.versions[collection.slug] = mongoose.model(
versionModelName,
versionSchema,
versionCollectionName,
@@ -69,14 +66,14 @@ export const init: Init = function init(this: MongooseAdapter) {
const collectionName =
this.autoPluralization === true && !collection.dbName ? undefined : modelName
this.collections[collection.slug] = this.connection.model<any>(
this.collections[collection.slug] = mongoose.model<any>(
modelName,
schema,
collectionName,
) as CollectionModel
})
this.globals = buildGlobalModel(this) as GlobalModel
this.globals = buildGlobalModel(this.payload) as GlobalModel
this.payload.config.globals.forEach((global) => {
if (global.versions) {
@@ -104,7 +101,7 @@ export const init: Init = function init(this: MongooseAdapter) {
}),
)
this.versions[global.slug] = this.connection.model<any>(
this.versions[global.slug] = mongoose.model<any>(
versionModelName,
versionSchema,
versionModelName,

View File

@@ -1,13 +1,14 @@
import type { Payload } from 'payload'
import mongoose from 'mongoose'
import type { MongooseAdapter } from '../index.js'
import type { GlobalModel } from '../types.js'
import { getBuildQueryPlugin } from '../queries/getBuildQueryPlugin.js'
import { buildSchema } from './buildSchema.js'
export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null => {
if (adapter.payload.config.globals && adapter.payload.config.globals.length > 0) {
export const buildGlobalModel = (payload: Payload): GlobalModel | null => {
if (payload.config.globals && payload.config.globals.length > 0) {
const globalsSchema = new mongoose.Schema(
{},
{ discriminatorKey: 'globalType', minimize: false, timestamps: true },
@@ -15,13 +16,9 @@ export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null =
globalsSchema.plugin(getBuildQueryPlugin())
const Globals = adapter.connection.model(
'globals',
globalsSchema,
'globals',
) as unknown as GlobalModel
const Globals = mongoose.model('globals', globalsSchema, 'globals') as unknown as GlobalModel
Object.values(adapter.payload.config.globals).forEach((globalConfig) => {
Object.values(payload.config.globals).forEach((globalConfig) => {
const globalSchema = buildSchema({
buildSchemaOptions: {
options: {
@@ -29,7 +26,7 @@ export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null =
},
},
configFields: globalConfig.fields,
payload: adapter.payload,
payload,
})
Globals.discriminator(globalConfig.slug, globalSchema)
})

View File

@@ -143,12 +143,7 @@ export const buildSchema = (args: {
const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id')
if (idField) {
fields = {
_id:
idField.type === 'number'
? payload.db.useBigIntForNumberIDs
? mongoose.Schema.Types.BigInt
: Number
: String,
_id: idField.type === 'number' ? Number : String,
}
schemaFields = schemaFields.filter(
(field) => !(fieldAffectsData(field) && field.name === 'id'),
@@ -905,12 +900,8 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payloa
}
if (customIDType === 'number') {
if (payload.db.useBigIntForNumberIDs) {
return mongoose.Schema.Types.BigInt
} else {
return mongoose.Schema.Types.Number
}
}
return mongoose.Schema.Types.String
}

View File

@@ -63,10 +63,7 @@ const migrateModelWithBatching = async ({
},
},
})),
{
session, // Timestamps are manually added by the write transform
timestamps: false,
},
{ session },
)
skip += batchSize

View File

@@ -99,29 +99,17 @@ const relationshipSort = ({
sortFieldPath = foreignFieldPath.localizedPath.replace('<locale>', locale)
}
const as = `__${relationshipPath.replace(/\./g, '__')}`
// If we have not already sorted on this relationship yet, we need to add a lookup stage
if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) {
let localField = versions ? `version.${relationshipPath}` : relationshipPath
if (adapter.usePipelineInSortLookup) {
const flattenedField = `__${localField.replace(/\./g, '__')}_lookup`
sortAggregation.push({
$addFields: {
[flattenedField]: `$${localField}`,
},
if (
!sortAggregation.some((each) => {
return '$lookup' in each && each.$lookup.as === `__${path}`
})
localField = flattenedField
}
) {
sortAggregation.push({
$lookup: {
as,
as: `__${path}`,
foreignField: '_id',
from: foreignCollection.Model.collection.name,
localField,
...(!adapter.usePipelineInSortLookup && {
localField: versions ? `version.${relationshipPath}` : relationshipPath,
pipeline: [
{
$project: {
@@ -129,29 +117,15 @@ const relationshipSort = ({
},
},
],
}),
},
})
if (adapter.usePipelineInSortLookup) {
sortAggregation.push({
$unset: localField,
})
}
}
sort[`__${path}.${sortFieldPath}`] = sortDirection
if (!adapter.usePipelineInSortLookup) {
const lookup = sortAggregation.find(
(each) => '$lookup' in each && each.$lookup.as === as,
) as PipelineStage.Lookup
const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project
pipeline.$project[sortFieldPath] = true
}
sort[`${as}.${sortFieldPath}`] = sortDirection
return true
}
}
}
return false
}

View File

@@ -12,7 +12,6 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'
export const queryDrafts: QueryDrafts = async function queryDrafts(
@@ -159,17 +158,6 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
result = await Model.paginate(versionQuery, paginationOptions)
}
if (!this.useJoinAggregations) {
await resolveJoins({
adapter: this,
collectionSlug,
docs: result.docs as Record<string, unknown>[],
joins,
locale,
versions: true,
})
}
transform({
adapter: this,
data: result.docs,

View File

@@ -26,8 +26,6 @@ export const updateGlobal: UpdateGlobal = async function updateGlobal(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
transform({ adapter: this, data, fields, globalSlug, operation: 'write' })

View File

@@ -39,8 +39,6 @@ export async function updateGlobalVersion<T extends TypeWithID>(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
const query = await buildQuery({

View File

@@ -1,4 +1,4 @@
import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { Job, UpdateJobs, Where } from 'payload'
import type { MongooseAdapter } from './index.js'
@@ -14,13 +14,9 @@ export const updateJobs: UpdateJobs = async function updateMany(
this: MongooseAdapter,
{ id, data, limit, req, returning, sort: sortArg, where: whereArg },
) {
if (
!(data?.log as object[])?.length &&
!(data.log && typeof data.log === 'object' && '$push' in data.log)
) {
if (!(data?.log as object[])?.length) {
delete data.log
}
const where = id ? { id: { equals: id } } : (whereArg as Where)
const { collectionConfig, Model } = getCollection({
@@ -40,8 +36,6 @@ export const updateJobs: UpdateJobs = async function updateMany(
lean: true,
new: true,
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
let query = await buildQuery({
@@ -51,44 +45,17 @@ export const updateJobs: UpdateJobs = async function updateMany(
where,
})
let updateData: UpdateQuery<any> = data
const $inc: Record<string, number> = {}
const $push: Record<string, { $each: any[] } | any> = {}
transform({
$inc,
$push,
adapter: this,
data,
fields: collectionConfig.fields,
operation: 'write',
})
const updateOps: UpdateQuery<any> = {}
if (Object.keys($inc).length) {
updateOps.$inc = $inc
}
if (Object.keys($push).length) {
updateOps.$push = $push
}
if (Object.keys(updateOps).length) {
updateOps.$set = updateData
updateData = updateOps
}
transform({ adapter: this, data, fields: collectionConfig.fields, operation: 'write' })
let result: Job[] = []
try {
if (id) {
if (returning === false) {
await Model.updateOne(query, updateData, options)
transform({ adapter: this, data, fields: collectionConfig.fields, operation: 'read' })
await Model.updateOne(query, data, options)
return null
} else {
const doc = await Model.findOneAndUpdate(query, updateData, options)
const doc = await Model.findOneAndUpdate(query, data, options)
result = doc ? [doc] : []
}
} else {
@@ -105,7 +72,7 @@ export const updateJobs: UpdateJobs = async function updateMany(
query = { _id: { $in: documentsToUpdate.map((doc) => doc._id) } }
}
await Model.updateMany(query, updateData, options)
await Model.updateMany(query, data, options)
if (returning === false) {
return null
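
For context, a hedged sketch of the atomic-update shape the removed lines handled (field and variable names are illustrative):

// Incoming data could carry per-field Mongo operators:
const data = {
  processing: false,
  totalTried: { $inc: 1 },     // collected into $inc by transform
  log: { $push: newLogEntry }, // collected into $push by transform
}
// which the removed code rewrote into a single update document, roughly:
// { $inc: { totalTried: 1 }, $push: { log: newLogEntry }, $set: { processing: false } }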

View File

@@ -58,8 +58,6 @@ export const updateMany: UpdateMany = async function updateMany(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
let query = await buildQuery({

View File

@@ -1,4 +1,4 @@
import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { UpdateOne } from 'payload'
import type { MongooseAdapter } from './index.js'
@@ -38,8 +38,6 @@ export const updateOne: UpdateOne = async function updateOne(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
const query = await buildQuery({
@@ -52,33 +50,15 @@ export const updateOne: UpdateOne = async function updateOne(
let result
let updateData: UpdateQuery<any> = data
const $inc: Record<string, number> = {}
const $push: Record<string, { $each: any[] } | any> = {}
transform({ $inc, $push, adapter: this, data, fields, operation: 'write' })
const updateOps: UpdateQuery<any> = {}
if (Object.keys($inc).length) {
updateOps.$inc = $inc
}
if (Object.keys($push).length) {
updateOps.$push = $push
}
if (Object.keys(updateOps).length) {
updateOps.$set = updateData
updateData = updateOps
}
transform({ adapter: this, data, fields, operation: 'write' })
try {
if (returning === false) {
await Model.updateOne(query, updateData, options)
await Model.updateOne(query, data, options)
transform({ adapter: this, data, fields, operation: 'read' })
return null
} else {
result = await Model.findOneAndUpdate(query, updateData, options)
result = await Model.findOneAndUpdate(query, data, options)
}
} catch (error) {
handleError({ collection: collectionSlug, error, req })

View File

@@ -45,8 +45,6 @@ export const updateVersion: UpdateVersion = async function updateVersion(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}
const query = await buildQuery({

View File

@@ -76,11 +76,7 @@ export const aggregatePaginate = async ({
countPromise = Model.estimatedDocumentCount(query)
} else {
const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined
countPromise = Model.countDocuments(query, {
collation,
session,
...(hint ? { hint } : {}),
})
countPromise = Model.countDocuments(query, { collation, hint, session })
}
}
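
A hedged note on the conditional spread: some Mongo-compatible servers reject an explicit hint: undefined, so the key is omitted entirely rather than passed with an undefined value:

// disableIndexHints !== true => countDocuments(query, { collation, session, hint: { _id: 1 } })
// disableIndexHints === true => countDocuments(query, { collation, session })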

View File

@@ -44,9 +44,6 @@ export const buildJoinAggregation = async ({
projection,
versions,
}: BuildJoinAggregationArgs): Promise<PipelineStage[] | undefined> => {
if (!adapter.useJoinAggregations) {
return
}
if (
(Object.keys(collectionConfig.joins).length === 0 &&
collectionConfig.polymorphicJoins.length == 0) ||

View File

@@ -1,26 +0,0 @@
import type { Args } from '../index.js'
/**
* Each key is a mongo-compatible database and the value
* is the recommended `mongooseAdapter` settings for compatibility.
*/
export const compatibilityOptions = {
cosmosdb: {
transactionOptions: false,
useJoinAggregations: false,
usePipelineInSortLookup: false,
},
documentdb: {
disableIndexHints: true,
useJoinAggregations: false,
},
firestore: {
disableIndexHints: true,
ensureIndexes: false,
transactionOptions: false,
useAlternativeDropDatabase: true,
useBigIntForNumberIDs: true,
useJoinAggregations: false,
usePipelineInSortLookup: false,
},
} satisfies Record<string, Partial<Args>>
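
A minimal usage sketch, assuming compatibilityOptions is exported from the adapter package (the import path and env var are assumptions):

import { compatibilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  // Spread the recommended settings for the target database, then override as needed:
  ...compatibilityOptions.firestore,
  url: process.env.DATABASE_URI || '',
})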

View File

@@ -2,15 +2,6 @@ import type { PayloadRequest } from 'payload'
import { ValidationError } from 'payload'
function extractFieldFromMessage(message: string) {
// eslint-disable-next-line regexp/no-super-linear-backtracking
const match = message.match(/index:\s*(.*?)_/)
if (match && match[1]) {
return match[1] // e.g., returns "email" from "index: email_1"
}
return null
}
export const handleError = ({
collection,
error,
@@ -27,22 +18,20 @@ export const handleError = ({
}
// Handle uniqueness error from MongoDB
if ('code' in error && error.code === 11000) {
let path: null | string = null
if ('keyValue' in error && error.keyValue && typeof error.keyValue === 'object') {
path = Object.keys(error.keyValue)[0] ?? ''
} else if ('message' in error && typeof error.message === 'string') {
path = extractFieldFromMessage(error.message)
}
if (
'code' in error &&
error.code === 11000 &&
'keyValue' in error &&
error.keyValue &&
typeof error.keyValue === 'object'
) {
throw new ValidationError(
{
collection,
errors: [
{
message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
path: path ?? '',
path: Object.keys(error.keyValue)[0] ?? '',
},
],
global,
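
For reference, a hedged sketch of the duplicate-key error both branches parse (values illustrative):

// Newer drivers expose the offending field directly:
// { code: 11000, keyValue: { email: 'a@b.com' }, message: '...' }
// Older messages only embed the index name, which extractFieldFromMessage recovers:
// 'E11000 duplicate key error collection: db.users index: email_1 dup key: { email: "a@b.com" }'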

View File

@@ -1,647 +0,0 @@
import type { JoinQuery, SanitizedJoins, Where } from 'payload'
import {
appendVersionToQueryKey,
buildVersionCollectionFields,
combineQueries,
getQueryDraftsSort,
} from 'payload'
import { fieldShouldBeLocalized } from 'payload/shared'
import type { MongooseAdapter } from '../index.js'
import { buildQuery } from '../queries/buildQuery.js'
import { buildSortParam } from '../queries/buildSortParam.js'
import { transform } from './transform.js'
export type ResolveJoinsArgs = {
/** The MongoDB adapter instance */
adapter: MongooseAdapter
/** The slug of the collection being queried */
collectionSlug: string
/** Array of documents to resolve joins for */
docs: Record<string, unknown>[]
/** Join query specifications (which joins to resolve and how) */
joins?: JoinQuery
/** Optional locale for localized queries */
locale?: string
/** Optional projection for the join query */
projection?: Record<string, true>
/** Whether to resolve versions instead of published documents */
versions?: boolean
}
/**
* Resolves join relationships for a collection of documents.
* This function fetches related documents based on join configurations and
* attaches them to the original documents with pagination support.
*/
export async function resolveJoins({
adapter,
collectionSlug,
docs,
joins,
locale,
projection,
versions = false,
}: ResolveJoinsArgs): Promise<void> {
// Early return if no joins are specified or no documents to process
if (!joins || docs.length === 0) {
return
}
// Get the collection configuration from the adapter
const collectionConfig = adapter.payload.collections[collectionSlug]?.config
if (!collectionConfig) {
return
}
// Build a map of join paths to their configurations for quick lookup
// This flattens the nested join structure into a single map keyed by join path
const joinMap: Record<string, { targetCollection: string } & SanitizedJoin> = {}
// Add regular joins
for (const [target, joinList] of Object.entries(collectionConfig.joins)) {
for (const join of joinList) {
joinMap[join.joinPath] = { ...join, targetCollection: target }
}
}
// Add polymorphic joins
for (const join of collectionConfig.polymorphicJoins || []) {
// For polymorphic joins, we use the collections array as the target
joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string }
}
// Process each requested join concurrently
const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => {
if (!joinQuery) {
return null
}
// If a projection is provided, and the join path is not in the projection, skip it
if (projection && !projection[joinPath]) {
return null
}
// Get the join definition from our map
const joinDef = joinMap[joinPath]
if (!joinDef) {
return null
}
// Normalize collections to always be an array for unified processing
const allCollections = Array.isArray(joinDef.field.collection)
? joinDef.field.collection
: [joinDef.field.collection]
// Use the provided locale or fall back to the default locale for localized fields
const localizationConfig = adapter.payload.config.localization
const effectiveLocale =
locale ||
(typeof localizationConfig === 'object' &&
localizationConfig &&
localizationConfig.defaultLocale)
// Extract relationTo filter from the where clause to determine which collections to query
const relationToFilter = extractRelationToFilter(joinQuery.where || {})
// Determine which collections to query based on relationTo filter
const collections = relationToFilter
? allCollections.filter((col) => relationToFilter.includes(col))
: allCollections
// Check if this is a polymorphic collection join (where field.collection is an array)
const isPolymorphicJoin = Array.isArray(joinDef.field.collection)
// Apply pagination settings
const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10
const page = joinQuery.page ?? 1
const skip = (page - 1) * limit
// Process collections concurrently
const collectionPromises = collections.map(async (joinCollectionSlug) => {
const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config
if (!targetConfig) {
return null
}
const useDrafts = versions && Boolean(targetConfig.versions?.drafts)
let JoinModel
if (useDrafts) {
JoinModel = adapter.versions[targetConfig.slug]
} else {
JoinModel = adapter.collections[targetConfig.slug]
}
if (!JoinModel) {
return null
}
// Extract all parent document IDs to use in the join query
const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id)))
// Build the base query
let whereQuery: null | Record<string, unknown> = null
whereQuery = isPolymorphicJoin
? filterWhereForCollection(
joinQuery.where || {},
targetConfig.flattenedFields,
true, // exclude relationTo for individual collections
)
: joinQuery.where || {}
// Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins
if (whereQuery === null) {
return null
}
whereQuery = useDrafts
? await JoinModel.buildQuery({
locale,
payload: adapter.payload,
where: combineQueries(appendVersionToQueryKey(whereQuery as Where), {
latest: {
equals: true,
},
}),
})
: await buildQuery({
adapter,
collectionSlug: joinCollectionSlug,
fields: targetConfig.flattenedFields,
locale,
where: whereQuery as Where,
})
// Handle localized paths and version prefixes
let dbFieldName = joinDef.field.on
if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) {
const pathSegments = joinDef.field.on.split('.')
const transformedSegments: string[] = []
const fields = useDrafts
? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
: targetConfig.flattenedFields
for (let i = 0; i < pathSegments.length; i++) {
const segment = pathSegments[i]!
transformedSegments.push(segment)
// Check if this segment corresponds to a localized field
const fieldAtSegment = fields.find((f) => f.name === segment)
if (fieldAtSegment && fieldAtSegment.localized) {
transformedSegments.push(effectiveLocale)
}
}
dbFieldName = transformedSegments.join('.')
}
// Add version prefix for draft queries
if (useDrafts) {
dbFieldName = `version.${dbFieldName}`
}
// Check if the target field is a polymorphic relationship
const isPolymorphic = joinDef.targetField
? Array.isArray(joinDef.targetField.relationTo)
: false
if (isPolymorphic) {
// For polymorphic relationships, we need to match both relationTo and value
whereQuery[`${dbFieldName}.relationTo`] = collectionSlug
whereQuery[`${dbFieldName}.value`] = { $in: parentIDs }
} else {
// For regular relationships and polymorphic collection joins
whereQuery[dbFieldName] = { $in: parentIDs }
}
// Build the sort parameters for the query
const fields = useDrafts
? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
: targetConfig.flattenedFields
const sort = buildSortParam({
adapter,
config: adapter.payload.config,
fields,
locale,
sort: useDrafts
? getQueryDraftsSort({
collectionConfig: targetConfig,
sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
})
: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
timestamps: true,
})
const projection = buildJoinProjection(dbFieldName, useDrafts, sort)
const [results, dbCount] = await Promise.all([
JoinModel.find(whereQuery, projection, {
sort,
...(isPolymorphicJoin ? {} : { limit, skip }),
}).lean(),
isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery),
])
const count = isPolymorphicJoin ? results.length : dbCount
transform({
adapter,
data: results,
fields: useDrafts
? buildVersionCollectionFields(adapter.payload.config, targetConfig, false)
: targetConfig.fields,
operation: 'read',
})
// Return results with collection info for grouping
return {
collectionSlug: joinCollectionSlug,
count,
dbFieldName,
results,
sort,
useDrafts,
}
})
const collectionResults = await Promise.all(collectionPromises)
// Group the results by parent ID
const grouped: Record<
string,
{
docs: Record<string, unknown>[]
sort: Record<string, string>
}
> = {}
let totalCount = 0
for (const collectionResult of collectionResults) {
if (!collectionResult) {
continue
}
const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult
totalCount += count
for (const result of results) {
if (useDrafts) {
result.id = result.parent
}
const parentValues = getByPathWithArrays(result, dbFieldName) as (
| { relationTo: string; value: number | string }
| number
| string
)[]
if (parentValues.length === 0) {
continue
}
for (let parentValue of parentValues) {
if (!parentValue) {
continue
}
if (typeof parentValue === 'object') {
parentValue = parentValue.value
}
const joinData = {
relationTo: collectionSlug,
value: result.id,
}
const parentKey = parentValue as string
if (!grouped[parentKey]) {
grouped[parentKey] = {
docs: [],
sort,
}
}
// Always store the ObjectID reference in polymorphic format
grouped[parentKey].docs.push({
...result,
__joinData: joinData,
})
}
}
}
for (const results of Object.values(grouped)) {
results.docs.sort((a, b) => {
for (const [fieldName, sortOrder] of Object.entries(results.sort)) {
const sort = sortOrder === 'asc' ? 1 : -1
const aValue = a[fieldName] as Date | number | string
const bValue = b[fieldName] as Date | number | string
if (aValue < bValue) {
return -1 * sort
}
if (aValue > bValue) {
return 1 * sort
}
}
return 0
})
results.docs = results.docs.map(
(doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record<string, unknown>,
)
}
// Determine if the join field should be localized
const localeSuffix =
fieldShouldBeLocalized({
field: joinDef.field,
parentIsLocalized: joinDef.parentIsLocalized,
}) &&
adapter.payload.config.localization &&
effectiveLocale
? `.${effectiveLocale}`
: ''
// Adjust the join path with locale suffix if needed
const localizedJoinPath = `${joinPath}${localeSuffix}`
return {
grouped,
isPolymorphicJoin,
joinQuery,
limit,
localizedJoinPath,
page,
skip,
totalCount,
}
})
// Wait for all join operations to complete
const joinResults = await Promise.all(joinPromises)
// Process the results and attach them to documents
for (const joinResult of joinResults) {
if (!joinResult) {
continue
}
const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } =
joinResult
// Attach the joined data to each parent document
for (const doc of docs) {
const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string
const all = grouped[id]?.docs || []
// Calculate the slice for pagination
// When limit is 0, it means unlimited - return all results
const slice = isPolymorphicJoin
? limit === 0
? all
: all.slice(skip, skip + limit)
: // For non-polymorphic joins, we assume that page and limit were applied at the database level
all
// Create the join result object with pagination metadata
const value: Record<string, unknown> = {
docs: slice,
hasNextPage: limit === 0 ? false : totalCount > skip + slice.length,
}
// Include total count if requested
if (joinQuery.count) {
value.totalDocs = totalCount
}
// Navigate to the correct nested location in the document and set the join data
// This handles nested join paths like "user.posts" by creating intermediate objects
const segments = localizedJoinPath.split('.')
let ref: Record<string, unknown>
if (versions) {
if (!doc.version) {
doc.version = {}
}
ref = doc.version as Record<string, unknown>
} else {
ref = doc
}
for (let i = 0; i < segments.length - 1; i++) {
const seg = segments[i]!
if (!ref[seg]) {
ref[seg] = {}
}
ref = ref[seg] as Record<string, unknown>
}
// Set the final join data at the target path
ref[segments[segments.length - 1]!] = value
}
}
}
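
To make the attachment step concrete, a hedged sketch of a parent document after resolveJoins runs (join path and IDs hypothetical):

// doc.relatedPosts = {
//   docs: [{ relationTo: 'posts', value: '66b2f0...' }], // polymorphic joins keep the { relationTo, value } shape
//   hasNextPage: false,                                  // non-polymorphic joins store plain IDs instead
//   totalDocs: 1,                                        // present only when joinQuery.count is true
// }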
/**
* Extracts relationTo filter values from a WHERE clause
* @param where - The WHERE clause to search
* @returns Array of collection slugs if relationTo filter found, null otherwise
*/
function extractRelationToFilter(where: Record<string, unknown>): null | string[] {
if (!where || typeof where !== 'object') {
return null
}
// Check for direct relationTo conditions
if (where.relationTo && typeof where.relationTo === 'object') {
const relationTo = where.relationTo as Record<string, unknown>
if (relationTo.in && Array.isArray(relationTo.in)) {
return relationTo.in as string[]
}
if (relationTo.equals) {
return [relationTo.equals as string]
}
}
// Check for relationTo in logical operators
if (where.and && Array.isArray(where.and)) {
for (const condition of where.and) {
const result = extractRelationToFilter(condition)
if (result) {
return result
}
}
}
if (where.or && Array.isArray(where.or)) {
for (const condition of where.or) {
const result = extractRelationToFilter(condition)
if (result) {
return result
}
}
}
return null
}
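
Illustrative inputs and outputs (assumed, not taken from the source):

extractRelationToFilter({ relationTo: { in: ['posts', 'pages'] } }) // => ['posts', 'pages']
extractRelationToFilter({ and: [{ relationTo: { equals: 'posts' } }] }) // => ['posts']
extractRelationToFilter({ title: { equals: 'Hello' } }) // => null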
/**
* Filters a WHERE clause to only include fields that exist in the target collection
* This is needed for polymorphic joins where different collections have different fields
* @param where - The original WHERE clause
* @param availableFields - The fields available in the target collection
* @param excludeRelationTo - Whether to exclude relationTo field (for individual collections)
* @returns A filtered WHERE clause, or null if the query cannot match this collection
*/
function filterWhereForCollection(
where: Record<string, unknown>,
availableFields: Array<{ name: string }>,
excludeRelationTo: boolean = false,
): null | Record<string, unknown> {
if (!where || typeof where !== 'object') {
return where
}
const fieldNames = new Set(availableFields.map((f) => f.name))
// Add special fields that are available in polymorphic relationships
if (!excludeRelationTo) {
fieldNames.add('relationTo')
}
const filtered: Record<string, unknown> = {}
for (const [key, value] of Object.entries(where)) {
if (key === 'and') {
// Handle AND operator - all conditions must be satisfiable
if (Array.isArray(value)) {
const filteredConditions: Record<string, unknown>[] = []
for (const condition of value) {
const filteredCondition = filterWhereForCollection(
condition,
availableFields,
excludeRelationTo,
)
// If any condition in AND cannot be satisfied, the whole AND fails
if (filteredCondition === null) {
return null
}
if (Object.keys(filteredCondition).length > 0) {
filteredConditions.push(filteredCondition)
}
}
if (filteredConditions.length > 0) {
filtered[key] = filteredConditions
}
}
} else if (key === 'or') {
// Handle OR operator - at least one condition must be satisfiable
if (Array.isArray(value)) {
const filteredConditions = value
.map((condition) =>
filterWhereForCollection(condition, availableFields, excludeRelationTo),
)
.filter((condition) => condition !== null && Object.keys(condition).length > 0)
if (filteredConditions.length > 0) {
filtered[key] = filteredConditions
}
// If no OR conditions can be satisfied, we still continue (OR is more permissive)
}
} else if (key === 'relationTo' && excludeRelationTo) {
// Skip relationTo field for non-polymorphic collections
continue
} else if (fieldNames.has(key)) {
// Include the condition if the field exists in this collection
filtered[key] = value
} else {
// Field doesn't exist in this collection - this makes the query unsatisfiable
return null
}
}
return filtered
}
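
Illustrative behavior (field names hypothetical):

filterWhereForCollection({ title: { equals: 'A' } }, [{ name: 'title' }])
// => { title: { equals: 'A' } }
filterWhereForCollection({ rating: { equals: 5 } }, [{ name: 'title' }])
// => null (rating does not exist on this collection, so the query cannot match)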
type SanitizedJoin = SanitizedJoins[string][number]
/**
* Builds projection for join queries
*/
function buildJoinProjection(
baseFieldName: string,
useDrafts: boolean,
sort: Record<string, string>,
): Record<string, 1> {
const projection: Record<string, 1> = {
_id: 1,
[baseFieldName]: 1,
}
if (useDrafts) {
projection.parent = 1
}
for (const fieldName of Object.keys(sort)) {
projection[fieldName] = 1
}
return projection
}
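
Illustrative output (paths hypothetical):

buildJoinProjection('version.author', true, { 'version.createdAt': 'desc' })
// => { _id: 1, 'version.author': 1, parent: 1, 'version.createdAt': 1 }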
/**
* Enhanced utility function to safely traverse nested object properties using dot notation
* Handles arrays by searching through array elements for matching values
* @param doc - The document to traverse
* @param path - Dot-separated path (e.g., "array.category")
* @returns Array of values found at the specified path (for arrays) or single value
*/
function getByPathWithArrays(doc: unknown, path: string): unknown[] {
const segments = path.split('.')
let current = doc
for (let i = 0; i < segments.length; i++) {
const segment = segments[i]!
if (current === undefined || current === null) {
return []
}
// Get the value at the current segment
const value = (current as Record<string, unknown>)[segment]
if (value === undefined || value === null) {
return []
}
// If this is the last segment, return the value(s)
if (i === segments.length - 1) {
return Array.isArray(value) ? value : [value]
}
// If the value is an array and we have more segments to traverse
if (Array.isArray(value)) {
const remainingPath = segments.slice(i + 1).join('.')
const results: unknown[] = []
// Search through each array element
for (const item of value) {
if (item && typeof item === 'object') {
const subResults = getByPathWithArrays(item, remainingPath)
results.push(...subResults)
}
}
return results
}
// Continue traversing
current = value
}
return []
}
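
Illustrative traversals (data hypothetical):

getByPathWithArrays({ array: [{ category: 'a' }, { category: 'b' }] }, 'array.category')
// => ['a', 'b']
getByPathWithArrays({ author: 'u1' }, 'author')
// => ['u1']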

View File

@@ -395,10 +395,6 @@ describe('transform', () => {
data,
fields: config.collections[0].fields,
})
if ('updatedAt' in data) {
delete data.updatedAt
}
const flattenValuesAfter = Object.values(flattenRelationshipValues(data))
flattenValuesAfter.forEach((value, i) => {

View File

@@ -208,8 +208,6 @@ const sanitizeDate = ({
}
type Args = {
$inc?: Record<string, number>
$push?: Record<string, { $each: any[] } | any>
/** instance of the adapter */
adapter: MongooseAdapter
/** data to transform, can be an array of documents or a single document */
@@ -398,8 +396,6 @@ const stripFields = ({
}
export const transform = ({
$inc,
$push,
adapter,
data,
fields,
@@ -408,22 +404,9 @@ export const transform = ({
parentIsLocalized = false,
validateRelationships = true,
}: Args) => {
if (!data) {
return null
}
if (Array.isArray(data)) {
for (const item of data) {
transform({
$inc,
$push,
adapter,
data: item,
fields,
globalSlug,
operation,
validateRelationships,
})
transform({ adapter, data: item, fields, globalSlug, operation, validateRelationships })
}
return
}
@@ -441,11 +424,6 @@ export const transform = ({
data.id = data.id.toHexString()
}
// Handle BigInt conversion for custom ID fields of type 'number'
if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') {
data.id = Number(data.id)
}
if (!adapter.allowAdditionalKeys) {
stripFields({
config,
@@ -460,60 +438,13 @@ export const transform = ({
data.globalType = globalSlug
}
const sanitize: TraverseFieldsCallback = ({ field, parentPath, ref: incomingRef }) => {
const sanitize: TraverseFieldsCallback = ({ field, ref: incomingRef }) => {
if (!incomingRef || typeof incomingRef !== 'object') {
return
}
const ref = incomingRef as Record<string, unknown>
if (
$inc &&
field.type === 'number' &&
operation === 'write' &&
field.name in ref &&
ref[field.name]
) {
const value = ref[field.name]
if (value && typeof value === 'object' && '$inc' in value && typeof value.$inc === 'number') {
$inc[`${parentPath}${field.name}`] = value.$inc
delete ref[field.name]
}
}
if (
$push &&
field.type === 'array' &&
operation === 'write' &&
field.name in ref &&
ref[field.name]
) {
const value = ref[field.name]
if (value && typeof value === 'object' && '$push' in value) {
const push = value.$push
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
if (typeof push === 'object' && push !== null) {
Object.entries(push).forEach(([localeKey, localeData]) => {
if (Array.isArray(localeData)) {
$push[`${parentPath}${field.name}.${localeKey}`] = { $each: localeData }
} else if (typeof localeData === 'object') {
$push[`${parentPath}${field.name}.${localeKey}`] = localeData
}
})
}
} else {
if (Array.isArray(push)) {
$push[`${parentPath}${field.name}`] = { $each: push }
} else if (typeof push === 'object') {
$push[`${parentPath}${field.name}`] = push
}
}
delete ref[field.name]
}
}
if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
const fieldRef = ref[field.name] as Record<string, unknown>
@@ -592,15 +523,4 @@ export const transform = ({
parentIsLocalized,
ref: data,
})
if (operation === 'write') {
if (typeof data.updatedAt === 'undefined') {
// If updatedAt was not provided at all, stamp the current time.
data.updatedAt = new Date().toISOString()
} else if (data.updatedAt === null) {
// `updatedAt` may be explicitly set to null to disable updating it - if that is the case, we need to delete the property. Keeping it as null will
// cause the database to think we want to set it to null, which we don't.
delete data.updatedAt
}
}
}
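
A hedged usage sketch of the convention the removed lines implemented (collection name hypothetical):

// Omitting updatedAt lets the write transform stamp the current time,
// while passing null explicitly preserves the stored value:
await payload.db.updateOne({
  collection: 'posts',
  data: { title: 'Renamed', updatedAt: null },
  where: { id: { equals: id } },
})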

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "3.53.0",
"version": "3.47.0",
"description": "The officially supported Postgres database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -17,7 +17,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -121,7 +120,6 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
json: true,
},
fieldConstraints: {},
findDistinct,
generateSchema: createSchemaGenerator({
columnToCodeConverter,
corePackageSuffix: 'pg-core',

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-sqlite",
"version": "3.53.0",
"version": "3.47.0",
"description": "The officially supported SQLite database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js'
export const countDistinct: CountDistinct = async function countDistinct(
this: SQLiteAdapter,
{ column, db, joins, tableName, where },
{ db, joins, tableName, where },
) {
// When there are no joins, use a simple COUNT(*) query.
if (joins.length === 0) {
const countResult = await db
.select({
count: column ? count(sql`DISTINCT ${column}`) : count(),
count: count(),
})
.from(this.tables[tableName])
.where(where)
@@ -25,12 +25,12 @@ export const countDistinct: CountDistinct = async function countDistinct(
})
.from(this.tables[tableName])
.where(where)
.groupBy(column ?? this.tables[tableName].id)
.groupBy(this.tables[tableName].id)
.limit(1)
.$dynamic()
joins.forEach(({ type, condition, table }) => {
query = query[type ?? 'leftJoin'](table, condition)
joins.forEach(({ condition, table }) => {
query = query.leftJoin(table, condition)
})
// When we have any joins, we need to count each individual ID only once.

View File

@@ -60,10 +60,6 @@ const createConstraint = ({
formattedOperator = '='
}
if (pathSegments.length === 1) {
return `EXISTS (SELECT 1 FROM json_each("${pathSegments[0]}") AS ${newAlias} WHERE ${newAlias}.value ${formattedOperator} '${formattedValue}')`
}
return `EXISTS (
SELECT 1
FROM json_each(${alias}.value -> '${pathSegments[0]}') AS ${newAlias}
@@ -72,38 +68,21 @@ const createConstraint = ({
}
export const createJSONQuery = ({
column,
operator,
pathSegments,
rawColumn,
table,
treatAsArray,
treatRootAsArray,
value,
}: CreateJSONQueryArgs): string => {
if ((operator === 'in' || operator === 'not_in') && Array.isArray(value)) {
let sql = ''
for (const [i, v] of value.entries()) {
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, rawColumn, table, treatAsArray, treatRootAsArray, value: v })} ${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
}
return sql
}
if (treatAsArray?.includes(pathSegments[1]!) && table) {
return fromArray({
operator,
pathSegments,
table,
treatAsArray,
value: value as CreateConstraintArgs['value'],
value,
})
}
return createConstraint({
alias: table,
operator,
pathSegments,
treatAsArray,
value: value as CreateConstraintArgs['value'],
})
return createConstraint({ alias: table, operator, pathSegments, treatAsArray, value })
}
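
For orientation, the removed in/not_in handling expanded a single constraint into a chain of equality clauses (a hedged sketch, values hypothetical):

// { tags: { in: ['a', 'b'] } } expands to roughly:
//   <equals 'a'> OR <equals 'b'>
// { tags: { not_in: ['a', 'b'] } } expands to roughly:
//   <not_equals 'a'> AND <not_equals 'b'>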

View File

@@ -18,7 +18,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -102,7 +101,6 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
json: true,
},
fieldConstraints: {},
findDistinct,
generateSchema: createSchemaGenerator({
columnToCodeConverter,
corePackageSuffix: 'sqlite-core',

View File

@@ -5,7 +5,6 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm'
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type {
AnySQLiteColumn,
SQLiteColumn,
SQLiteInsertOnConflictDoUpdateConfig,
SQLiteTableWithColumns,
SQLiteTransactionConfig,
@@ -88,7 +87,6 @@ export type GenericTable = SQLiteTableWithColumns<{
export type GenericRelation = Relations<string, Record<string, Relation<string>>>
export type CountDistinct = (args: {
column?: SQLiteColumn<any>
db: LibSQLDatabase
joins: BuildQueryJoinAliases
tableName: string

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-vercel-postgres",
"version": "3.53.0",
"version": "3.47.0",
"description": "Vercel Postgres adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -18,7 +18,6 @@ import {
deleteVersions,
destroy,
find,
findDistinct,
findGlobal,
findGlobalVersions,
findMigrationDir,
@@ -175,7 +174,6 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
dropDatabase,
execute,
find,
findDistinct,
findGlobal,
findGlobalVersions,
readReplicaOptions: args.readReplicas,

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/drizzle",
"version": "3.53.0",
"version": "3.47.0",
"description": "A library of shared functions used by different payload database adapters",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -6,33 +6,17 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'
import { findMany } from './find/findMany.js'
import { buildQuery } from './queries/buildQuery.js'
import { getTransaction } from './utilities/getTransaction.js'
export const deleteMany: DeleteMany = async function deleteMany(
this: DrizzleAdapter,
{ collection, req, where: whereArg },
{ collection, req, where },
) {
const db = await getTransaction(this, req)
const collectionConfig = this.payload.collections[collection].config
const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))
const table = this.tables[tableName]
const { joins, where } = buildQuery({
adapter: this,
fields: collectionConfig.flattenedFields,
locale: req?.locale,
tableName,
where: whereArg,
})
let whereToUse = where
if (joins?.length) {
// Joins (where clauses that reference other tables) are difficult to support in deleteMany, so we fall back to two separate queries.
// We may support this with a single query (via a subquery) in the future, though that is hard to do generically.
const result = await findMany({
adapter: this,
fields: collectionConfig.flattenedFields,
@@ -42,22 +26,21 @@ export const deleteMany: DeleteMany = async function deleteMany(
page: 1,
pagination: false,
req,
select: {
id: true,
},
tableName,
where: whereArg,
where,
})
whereToUse = inArray(
table.id,
result.docs.map((doc) => doc.id),
)
}
const ids = []
result.docs.forEach((data) => {
ids.push(data.id)
})
if (ids.length > 0) {
await this.deleteWhere({
db,
tableName,
where: whereToUse,
where: inArray(this.tables[tableName].id, ids),
})
}
}

View File

@@ -1,31 +1,28 @@
import type { DeleteOne, SelectType } from 'payload'
import type { DeleteOne } from 'payload'
import { eq } from 'drizzle-orm'
import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { transform } from './transform/read/index.js'
import { getTransaction } from './utilities/getTransaction.js'
export const deleteOne: DeleteOne = async function deleteOne(
this: DrizzleAdapter,
{ collection: collectionSlug, req, returning, select: selectArg, where: whereArg },
{ collection: collectionSlug, req, returning, select, where: whereArg },
) {
const select: SelectType | undefined =
returning === false
? {
id: true,
}
: selectArg
const db = await getTransaction(this, req)
const collection = this.payload.collections[collectionSlug].config
const tableName = this.tableNameMap.get(toSnakeCase(collection.slug))
const { joins, where } = buildQuery({
let docToDelete: Record<string, unknown>
const { joins, selectFields, where } = buildQuery({
adapter: this,
fields: collection.flattenedFields,
locale: req?.locale,
@@ -33,24 +30,37 @@ export const deleteOne: DeleteOne = async function deleteOne(
where: whereArg,
})
let whereToUse = where
let docToDelete: Record<string, unknown> = null
if (joins?.length || returning !== false) {
// Joins (where clauses that reference other tables) are difficult to support in this.deleteWhere of deleteOne, so we fall back to two separate queries.
// We may support this with a single query (via a subquery) in the future, though that is hard to do generically.
docToDelete = await this.findOne({
collection: collectionSlug,
req,
select,
where: whereArg,
const selectDistinctResult = await selectDistinct({
adapter: this,
db,
joins,
query: ({ query }) => query.limit(1),
selectFields,
tableName,
where,
})
if (!docToDelete) {
return null
if (selectDistinctResult?.[0]?.id) {
docToDelete = await db.query[tableName].findFirst({
where: eq(this.tables[tableName].id, selectDistinctResult[0].id),
})
} else {
const findManyArgs = buildFindManyArgs({
adapter: this,
depth: 0,
fields: collection.flattenedFields,
joinQuery: false,
select,
tableName,
})
findManyArgs.where = where
docToDelete = await db.query[tableName].findFirst(findManyArgs)
}
whereToUse = eq(this.tables[tableName].id, docToDelete.id)
if (!docToDelete) {
return null
}
const result =
@@ -64,10 +74,11 @@ export const deleteOne: DeleteOne = async function deleteOne(
joinQuery: false,
tableName,
})
await this.deleteWhere({
db,
tableName,
where: whereToUse,
where: eq(this.tables[tableName].id, docToDelete.id),
})
return result

View File

@@ -44,7 +44,7 @@ export const buildFindManyArgs = ({
select,
tableName,
versions,
}: BuildFindQueryArgs): Result => {
}: BuildFindQueryArgs): Record<string, unknown> => {
const result: Result = {
extras: {},
with: {},
@@ -134,12 +134,5 @@ export const buildFindManyArgs = ({
result.with._locales = _locales
}
// Delete properties that are empty
for (const key of Object.keys(result)) {
if (!Object.keys(result[key]).length) {
delete result[key]
}
}
return result
}

View File

@@ -1,14 +1,12 @@
import type { SQL } from 'drizzle-orm'
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type { SQLiteSelect, SQLiteSelectBase } from 'drizzle-orm/sqlite-core'
import { and, asc, count, desc, eq, getTableName, or, sql } from 'drizzle-orm'
import { and, asc, count, desc, eq, or, sql } from 'drizzle-orm'
import {
appendVersionToQueryKey,
buildVersionCollectionFields,
combineQueries,
type FlattenedField,
getFieldByPath,
getQueryDraftsSort,
type JoinQuery,
type SelectMode,
@@ -33,7 +31,7 @@ import {
resolveBlockTableName,
} from '../utilities/validateExistingBlockIsIdentical.js'
const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[]) => {
const flattenAllWherePaths = (where: Where, paths: string[]) => {
for (const k in where) {
if (['AND', 'OR'].includes(k.toUpperCase())) {
if (Array.isArray(where[k])) {
@@ -43,7 +41,7 @@ const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[])
}
} else {
// TODO: explore how to support arrays/relationship querying.
paths.push({ path: k.split('.').join('_'), ref: where })
paths.push(k.split('.').join('_'))
}
}
}
@@ -61,11 +59,7 @@ const buildSQLWhere = (where: Where, alias: string) => {
}
} else {
const payloadOperator = Object.keys(where[k])[0]
const value = where[k][payloadOperator]
if (payloadOperator === '$raw') {
return sql.raw(value)
}
return operatorMap[payloadOperator](sql.raw(`"${alias}"."${k.split('.').join('_')}"`), value)
}
@@ -478,7 +472,7 @@ export const traverseFields = ({
const sortPath = sanitizedSort.split('.').join('_')
const wherePaths: { path: string; ref: any }[] = []
const wherePaths: string[] = []
if (where) {
flattenAllWherePaths(where, wherePaths)
@@ -498,50 +492,9 @@ export const traverseFields = ({
sortPath: sql`${sortColumn ? sortColumn : null}`.as('sortPath'),
}
const collectionQueryWhere: any[] = []
// Select for WHERE and Fallback NULL
for (const { path, ref } of wherePaths) {
const collectionConfig = adapter.payload.collections[collection].config
const field = getFieldByPath({ fields: collectionConfig.flattenedFields, path })
if (field && field.field.type === 'select' && field.field.hasMany) {
let tableName = adapter.tableNameMap.get(
`${toSnakeCase(collection)}_${toSnakeCase(path)}`,
)
let parentTable = getTableName(table)
if (adapter.schemaName) {
tableName = `"${adapter.schemaName}"."${tableName}"`
parentTable = `"${adapter.schemaName}"."${parentTable}"`
}
if (adapter.name === 'postgres') {
selectFields[path] = sql
.raw(
`(select jsonb_agg(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
)
.as(path)
} else {
selectFields[path] = sql
.raw(
`(select json_group_array(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
)
.as(path)
}
const constraint = ref[path]
const operator = Object.keys(constraint)[0]
const value: any = Object.values(constraint)[0]
const query = adapter.createJSONQuery({
column: `"${path}"`,
operator,
pathSegments: [field.field.name],
table: parentTable,
value,
})
ref[path] = { $raw: query }
} else if (adapter.tables[joinCollectionTableName][path]) {
for (const path of wherePaths) {
if (adapter.tables[joinCollectionTableName][path]) {
selectFields[path] = sql`${adapter.tables[joinCollectionTableName][path]}`.as(path)
// Allow to filter by collectionSlug
} else if (path !== 'relationTo') {
@@ -549,10 +502,7 @@ export const traverseFields = ({
}
}
let query: any = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
if (collectionQueryWhere.length) {
query = query.where(and(...collectionQueryWhere))
}
const query = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
if (currentQuery === null) {
currentQuery = query as unknown as SQLSelect
} else {
@@ -791,14 +741,9 @@ export const traverseFields = ({
} else {
shouldSelect = true
}
const tableName = fieldShouldBeLocalized({ field, parentIsLocalized })
? `${currentTableName}${adapter.localesSuffix}`
: currentTableName
if (shouldSelect) {
args.extras[name] = sql
.raw(`ST_AsGeoJSON("${adapter.tables[tableName][name].name}")::jsonb`)
.as(name)
args.extras[name] = sql.raw(`ST_AsGeoJSON(${toSnakeCase(name)})::jsonb`).as(name)
}
break
}
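
For orientation, a hedged sketch of the subselect the removed hasMany-select branch (above) generated per dialect (identifiers hypothetical):

// postgres:
//   (select jsonb_agg(posts_tags.value) from posts_tags
//     where posts_tags.parent_id = posts.id) as "tags"
// sqlite:
//   (select json_group_array(posts_tags.value) from posts_tags
//     where posts_tags.parent_id = posts.id) as "tags"
// The original constraint on the path was then rewritten into a raw JSON
// query against the aliased column via adapter.createJSONQuery.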

View File

@@ -1,108 +0,0 @@
import type { FindDistinct, SanitizedCollectionConfig } from 'payload'
import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter, GenericColumn } from './types.js'
import { buildQuery } from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { getTransaction } from './utilities/getTransaction.js'
import { DistinctSymbol } from './utilities/rawConstraint.js'
export const findDistinct: FindDistinct = async function (this: DrizzleAdapter, args) {
const db = await getTransaction(this, args.req)
const collectionConfig: SanitizedCollectionConfig =
this.payload.collections[args.collection].config
const page = args.page || 1
const offset = args.limit ? (page - 1) * args.limit : undefined
const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))
const { joins, orderBy, selectFields, where } = buildQuery({
adapter: this,
fields: collectionConfig.flattenedFields,
locale: args.locale,
sort: args.sort ?? args.field,
tableName,
where: {
and: [
args.where ?? {},
{
[args.field]: {
equals: DistinctSymbol,
},
},
],
},
})
orderBy.pop()
const selectDistinctResult = await selectDistinct({
adapter: this,
db,
forceRun: true,
joins,
query: ({ query }) => {
query = query.orderBy(() => orderBy.map(({ column, order }) => order(column)))
if (args.limit) {
if (offset) {
query = query.offset(offset)
}
query = query.limit(args.limit)
}
return query
},
selectFields: {
_selected: selectFields['_selected'],
...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }),
} as Record<string, GenericColumn>,
tableName,
where,
})
const values = selectDistinctResult.map((each) => ({
[args.field]: (each as Record<string, any>)._selected,
}))
if (args.limit) {
const totalDocs = await this.countDistinct({
column: selectFields['_selected'],
db,
joins,
tableName,
where,
})
const totalPages = Math.ceil(totalDocs / args.limit)
const hasPrevPage = page > 1
const hasNextPage = totalPages > page
const pagingCounter = (page - 1) * args.limit + 1
return {
hasNextPage,
hasPrevPage,
limit: args.limit,
nextPage: hasNextPage ? page + 1 : null,
page,
pagingCounter,
prevPage: hasPrevPage ? page - 1 : null,
totalDocs,
totalPages,
values,
}
}
return {
hasNextPage: false,
hasPrevPage: false,
limit: 0,
page: 1,
pagingCounter: 1,
totalDocs: values.length,
totalPages: 1,
values,
}
}
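
A hedged sketch of how the removed function was consumed (API surface assumed):

const { totalDocs, values } = await payload.db.findDistinct({
  collection: 'posts', // hypothetical slug
  field: 'category',
  limit: 10,
  page: 1,
})
// values => [{ category: 'news' }, { category: 'tech' }, ...]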

Some files were not shown because too many files have changed in this diff.