Compare commits: postgres-d...v3.51.0 (121 commits)
| SHA1 |
| --- |
| 0688050eb6 |
| 5a99d8c5f4 |
| 35ca98e70e |
| 255bba9606 |
| 8173180d1d |
| 3258e78596 |
| ad2564e5fa |
| 995f96bc70 |
| 306b7f6943 |
| 72f5763c25 |
| a374aabd8d |
| 2bc9a2def4 |
| 1d81b0c6dd |
| 9c8f3202e4 |
| 161769e50c |
| c9a1590fc4 |
| e870be094e |
| d4f198651c |
| 5d8f8dc0a5 |
| 7344d64be3 |
| 2211f3dd1c |
| ac40185158 |
| d622d3c5e7 |
| b74f4fb9b2 |
| 8401b2166d |
| 20b4de94ee |
| 43b4b22af9 |
| 3b9dba8641 |
| 1d70d4d36c |
| 1b31c74d32 |
| f432cc1956 |
| 2903486974 |
| b965db881e |
| 1b93c4becc |
| 9031f3bf23 |
| df91321f4a |
| 11755089f8 |
| a8b6983ab5 |
| f2d4004237 |
| 8a489410ad |
| 095e7d904f |
| c48b57fdbf |
| b26a73be4a |
| 3114b89d4c |
| 227a20e94b |
| a22f27de1c |
| e7124f6176 |
| 183f313387 |
| b1fa76e397 |
| 08942494e3 |
| da8bf69054 |
| 26d9daeccf |
| fc5944840e |
| 9e04dbb1ca |
| 72954ce9f2 |
| e50220374e |
| 61ee8fadca |
| 8d84352ee9 |
| 4beb27b9ad |
| c5c8c13057 |
| a888d5cc53 |
| 72349245ca |
| 4fde0f23ce |
| aff2ce1b9b |
| 5c94d2dc71 |
| b1aac19668 |
| d093bb1f00 |
| 2e9ba10fb5 |
| 8518141a5e |
| 6d6c9ebc56 |
| 7cd4a8a602 |
| bc802846c5 |
| e8f6cb5ed1 |
| 23bd67515c |
| e29d1d98d4 |
| 4ac428d250 |
| 75385de01f |
| f63dc2a10c |
| 4a712b3483 |
| fa7d209cc9 |
| bccf6ab16f |
| 14322a71bb |
| 7e81d30808 |
| a83ed5ebb5 |
| 8f85da8931 |
| e48427e59a |
| 7ae4f8c709 |
| 1ad7b55e05 |
| aeee0704dd |
| 29fb9ee5b4 |
| 0eac58ed72 |
| 380ce04d5c |
| 94f5e790f6 |
| 3f8fb6734c |
| 412bf4ff73 |
| 246a42b727 |
| e7a652f0a8 |
| 77f279e768 |
| c1cfceb7dc |
| 0eb8f75946 |
| af2ddff203 |
| dce898d7ca |
| 7f9de6d101 |
| d6e21adaf0 |
| d7a3faa4e9 |
| 46d8a26b0d |
| c08b2aea89 |
| 4ae503d700 |
| a3361356b2 |
| 95e373e60b |
| 12539c61d4 |
| 6ae730b33b |
| a20b43624b |
| cab7ba4a8a |
| 41cff6d436 |
| e6da384a43 |
| 7cd682c66a |
| be8e8d9c7f |
| 841bf891d0 |
| 2a59c5bf8c |
| 64d76a3869 |
.github/workflows/audit-dependencies.sh (vendored, 14 changed lines)

@@ -1,18 +1,20 @@
 #!/bin/bash
 
-severity=${1:-"critical"}
-audit_json=$(pnpm audit --prod --json)
+severity=${1:-"high"}
 output_file="audit_output.json"
 
 echo "Auditing for ${severity} vulnerabilities..."
 
+audit_json=$(pnpm audit --prod --json)
+
 echo "${audit_json}" | jq --arg severity "${severity}" '
 .advisories | to_entries |
-map(select(.value.patched_versions != "<0.0.0" and .value.severity == $severity) |
+map(select(.value.patched_versions != "<0.0.0" and (.value.severity == $severity or ($severity == "high" and .value.severity == "critical"))) |
 {
 package: .value.module_name,
 vulnerable: .value.vulnerable_versions,
-fixed_in: .value.patched_versions
+fixed_in: .value.patched_versions,
+findings: .value.findings
 }
 )
 ' >$output_file

@@ -22,7 +24,11 @@ audit_length=$(jq 'length' $output_file)
 if [[ "${audit_length}" -gt "0" ]]; then
 echo "Actionable vulnerabilities found in the following packages:"
+jq -r '.[] | "\u001b[1m\(.package)\u001b[0m vulnerable in \u001b[31m\(.vulnerable)\u001b[0m fixed in \u001b[32m\(.fixed_in)\u001b[0m"' $output_file | while read -r line; do echo -e "$line"; done
+echo ""
 echo "Output written to ${output_file}"
 cat $output_file
+echo ""
+echo "This script can be rerun with: './.github/workflows/audit-dependencies.sh $severity'"
 exit 1
 else
 echo "No actionable vulnerabilities"
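For local use, the script can be invoked directly from the repo root; a sketch, matching the rerun hint the script itself prints (the severity argument now defaults to "high" per the change above):

```sh
./.github/workflows/audit-dependencies.sh critical
```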
.github/workflows/audit-dependencies.yml (vendored, 4 changed lines)

@@ -9,7 +9,7 @@ on:
 audit-level:
 description: The level of audit to run (low, moderate, high, critical)
 required: false
-default: critical
+default: high
 debug:
 description: Enable debug logging
 required: false

@@ -46,7 +46,7 @@ jobs:
 "type": "section",
 "text": {
 "type": "mrkdwn",
-"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Details>"
+"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Script Run Details>"
 }
 },
 ]
.github/workflows/main.yml (vendored, 9 changed lines)

@@ -153,6 +153,7 @@ jobs:
 matrix:
 database:
 - mongodb
+- firestore
 - postgres
 - postgres-custom-schema
 - postgres-uuid

@@ -283,6 +284,8 @@ jobs:
 - fields__collections__Text
 - fields__collections__UI
 - fields__collections__Upload
+- group-by
+- folders
 - hooks
 - lexical__collections__Lexical__e2e__main
 - lexical__collections__Lexical__e2e__blocks

@@ -301,6 +304,7 @@ jobs:
 - plugin-nested-docs
 - plugin-seo
 - sort
+- trash
 - versions
 - uploads
 env:

@@ -417,6 +421,8 @@ jobs:
 - fields__collections__Text
 - fields__collections__UI
 - fields__collections__Upload
+- group-by
+- folders
 - hooks
 - lexical__collections__Lexical__e2e__main
 - lexical__collections__Lexical__e2e__blocks

@@ -435,6 +441,7 @@ jobs:
 - plugin-nested-docs
 - plugin-seo
 - sort
+- trash
 - versions
 - uploads
 env:

@@ -718,6 +725,8 @@ jobs:
 DO_NOT_TRACK: 1 # Disable Turbopack telemetry
 
 - name: Analyze esbuild bundle size
+# Temporarily disable this for community PRs until this can be implemented in a separate workflow
+if: github.event.pull_request.head.repo.fork == false
 uses: exoego/esbuild-bundle-analyzer@v1
 with:
 metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json'
.github/workflows/post-release.yml (vendored, 3 changed lines)

@@ -17,6 +17,9 @@ env:
 
 jobs:
 post_release:
+permissions:
+issues: write
+pull-requests: write
 runs-on: ubuntu-24.04
 if: ${{ github.event_name != 'workflow_dispatch' }}
 steps:
.vscode/launch.json (vendored, 7 changed lines)

@@ -139,6 +139,13 @@
 "request": "launch",
 "type": "node-terminal"
 },
+{
+"command": "pnpm tsx --no-deprecation test/dev.ts trash",
+"cwd": "${workspaceFolder}",
+"name": "Run Dev Trash",
+"request": "launch",
+"type": "node-terminal"
+},
 {
 "command": "pnpm tsx --no-deprecation test/dev.ts uploads",
 "cwd": "${workspaceFolder}",
@@ -77,13 +77,9 @@ If you wish to use your own MongoDB database for the `test` directory instead of
 
 ### Using Postgres
 
-Our test suites supports automatic PostgreSQL + PostGIS setup using Docker. No local PostgreSQL installation required. By default, mongodb is used.
+If you have postgres installed on your system, you can also run the test suites using postgres. By default, mongodb is used.
 
-To use postgres, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
-
-```bash
-PAYLOAD_DATABASE=postgres pnpm dev {suite}
-```
+To do that, simply set the `PAYLOAD_DATABASE` environment variable to `postgres`.
 
 ### Running the e2e and int tests
 
@@ -77,7 +77,7 @@ All auto-generated files will contain the following comments at the top of each
 
 ## Admin Options
 
-All options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property:
+All root-level options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property:
 
 ```ts
 import { buildConfig } from 'payload'

@@ -739,7 +739,7 @@ The `useDocumentInfo` hook provides information about the current document being
 | **`lastUpdateTime`** | Timestamp of the last update to the document. |
 | **`mostRecentVersionIsAutosaved`** | Whether the most recent version is an autosaved version. |
 | **`preferencesKey`** | The `preferences` key to use when interacting with document-level user preferences. [More details](./preferences). |
-| **`savedDocumentData`** | The saved data of the document. |
+| **`data`** | The saved data of the document. |
 | **`setDocFieldPreferences`** | Method to set preferences for a specific field. [More details](./preferences). |
 | **`setDocumentTitle`** | Method to set the document title. |
 | **`setHasPublishedDoc`** | Method to update whether the document has been published. |
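Given the rename of `savedDocumentData` to `data` above, a minimal sketch of a custom admin component reading the hook; the component name and the `title` field are illustrative, and the import assumes the `@payloadcms/ui` package used by Payload 3.x admin components:

```tsx
'use client'
import { useDocumentInfo } from '@payloadcms/ui'

// Reads the renamed `data` property (formerly `savedDocumentData`).
export const DocumentSummary = () => {
  const { data, lastUpdateTime } = useDocumentInfo()
  return (
    <span>
      {String(data?.title ?? 'Untitled')} (last updated: {lastUpdateTime})
    </span>
  )
}
```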
@@ -1,62 +0,0 @@
---
title: Project Configuration
label: Configuration
order: 20
desc: Quickly configure and deploy your Payload Cloud project in a few simple steps.
keywords: configuration, config, settings, project, cloud, payload cloud, deploy, deployment
---

## Select your plan

Once you have created a project, you will need to select your plan. This will determine the resources that are allocated to your project and the features that are available to you.

<Banner type="success">
Note: All Payload Cloud teams that deploy a project require a card on file. This helps us prevent fraud and abuse on our platform. If you select a plan with a free trial, you will not be charged until your trial period is over. We’ll remind you 7 days before your trial ends and you can cancel anytime.
</Banner>

## Project Details

| Option | Description |
| --- | --- |
| **Region** | Select the region closest to your audience. This will ensure the fastest communication between your data and your client. |
| **Project Name** | A name for your project. You can change this at any time. |
| **Project Slug** | Choose a unique slug to identify your project. This needs to be unique for your team and you can change it any time. |
| **Team** | Select the team you want to create the project under. If this is your first project, a personal team will be created for you automatically. You can modify your team settings and invite new members at any time from the Team Settings page. |

## Build Settings

If you are deploying a new project from a template, the following settings will be automatically configured for you. If you are using your own repository, you need to make sure your build settings are accurate for your project to deploy correctly.

| Option | Description |
| --- | --- |
| **Root Directory** | The folder where your `package.json` file lives. |
| **Install Command** | The command used to install your modules, for example: `yarn install` or `npm install` |
| **Build Command** | The command used to build your application, for example: `yarn build` or `npm run build` |
| **Serve Command** | The command used to serve your application, for example: `yarn serve` or `npm run serve` |
| **Branch to Deploy** | Select the branch of your repository that you want to deploy from. This is the branch that will be used to build your project when you commit new changes. |
| **Default Domain** | Set a default domain for your project. This must be unique and you will not be able to change it. You can always add a custom domain later in your project settings. |

## Environment Variables

Any of the features in Payload Cloud that require environment variables will automatically be provided to your application. If your app requires any custom environment variables, you can set them here.

<Banner type="warning">
Note: For security reasons, any variables you wish to provide to the [Admin Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. Learn more [here](../configuration/environment-vars).
</Banner>

## Payment

Payment methods can be set per project and can be updated any time. You can use your team’s default payment method, or add a new one. Modify your payment methods in your Project settings / Team settings.

<Banner type="success">
**Note:** All Payload Cloud teams that deploy a project require a card on file. This helps us prevent fraud and abuse on our platform. If you select a plan with a free trial, you will not be charged until your trial period is over. We’ll remind you 7 days before your trial ends and you can cancel anytime.
</Banner>
@@ -1,53 +0,0 @@
---
title: Getting Started
label: Getting Started
order: 10
desc: Get started with Payload Cloud, a deployment solution specifically designed for Node + MongoDB applications.
keywords: cloud, hosted, database, storage, email, deployment, serverless, node, mongodb, s3, aws, cloudflare, atlas, resend, payload, cms
---

A deployment solution specifically designed for Node.js + MongoDB applications, offering seamless deployment of your entire stack in one place. You can get started in minutes with a one-click template or bring your own codebase with you.

Payload Cloud offers various plans tailored to meet your specific needs, including a MongoDB Atlas database, S3 file storage, and email delivery powered by [Resend](https://resend.com). To see a full breakdown of features and plans, see our [Cloud Pricing page](https://payloadcms.com/cloud-pricing).

To get started, you first need to create an account. Head over to [the login screen](https://payloadcms.com/login) and **Register for Free**.

<Banner type="success">
To create your first project, you can either select [a template](#starting-from-a-template) or [import an existing project](#importing-from-an-existing-codebase) from GitHub.
</Banner>

## Starting from a Template

Templates come preconfigured and provide a one-click solution to quickly deploy a new application.

![Payload Cloud Templates]()
_Creating a new project from a template._

After creating an account, select your desired template from the Projects page. At this point, you need to authorize the Payload Cloud application with your GitHub account. Click Continue with GitHub and follow the prompts to authorize the app.

Next, select your `GitHub Scope`. If you belong to multiple organizations, they will show up here. If you do not see the organization you are looking for, you may need to adjust your GitHub app permissions.

After selecting your scope, create a unique `repository name` and select whether you want your repository to be public or private on GitHub.

<Banner type="warning">
**Note:** Public repositories can be accessed by anyone online, while private repositories grant access only to you and anyone you explicitly authorize.
</Banner>

Once you are ready, click **Create Project**. This will clone the selected template to a new repository in your GitHub account, and take you to the configuration page to set up your project for deployment.

## Importing from an Existing Codebase

Payload Cloud works for any Node.js + MongoDB app. From the New Project page, select **import an existing Git codebase**. Choose the organization and select the repository you want to import. From here, you will be taken to the configuration page to set up your project for deployment.

![Payload Cloud New Project]()
_Creating a new project from an existing repository._

<Banner type="warning">
**Note:** In order to make use of the features of Payload Cloud in your own codebase, you will need to add the [Cloud Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) to your Payload app.
</Banner>
@@ -1,137 +0,0 @@
---
title: Cloud Projects
label: Projects
order: 40
desc: Manage your Payload Cloud projects.
keywords: cloud, payload cloud, projects, project, overview, database, file storage, build settings, environment variables, custom domains, email, developing locally
---

## Overview

<Banner>
The overview tab shows your most recent deployment, along with build and deployment logs. From here, you can see your live URL, deployment details like timestamps and commit hash, as well as the status of your deployment. You can also trigger a redeployment manually, which will rebuild your project using the current configuration.
</Banner>

![Payload Cloud Project Overview]()
_A screenshot of the Overview page for a Cloud project._

## Database

Your Payload Cloud project comes with a MongoDB serverless Atlas DB instance or a Dedicated Atlas cluster, depending on your plan. To interact with your cloud database, you will be provided with a MongoDB connection string. This can be found under the **Database** tab of your project.

`mongodb+srv://your_connection_string`

## File Storage

Payload Cloud gives you S3 file storage backed by Cloudflare as a CDN, and this plugin extends Payload so that all of your media will be stored in S3 rather than locally.

AWS Cognito is used for authentication to your S3 bucket. The [Payload Cloud Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) will automatically pick up these values. These values are only needed if you'd like to access your files directly, outside of Payload Cloud.

### Accessing Files Outside of Payload Cloud

If you'd like to access your files outside of Payload Cloud, you'll need to retrieve some values from your project's settings and put them into your environment variables. In Payload Cloud, navigate to the File Storage tab and copy the values using the copy button. Put these values in your .env file. Also copy the Cognito Password value separately and put it into your .env file as well.

When you are done, you should have the following values in your .env file:

```env
PAYLOAD_CLOUD=true
PAYLOAD_CLOUD_ENVIRONMENT=prod
PAYLOAD_CLOUD_COGNITO_USER_POOL_CLIENT_ID=
PAYLOAD_CLOUD_COGNITO_USER_POOL_ID=
PAYLOAD_CLOUD_COGNITO_IDENTITY_POOL_ID=
PAYLOAD_CLOUD_PROJECT_ID=
PAYLOAD_CLOUD_BUCKET=
PAYLOAD_CLOUD_BUCKET_REGION=
PAYLOAD_CLOUD_COGNITO_PASSWORD=
```

The plugin will pick up these values and use them to access your files.

## Build Settings

You can update settings from your Project’s Settings tab. Changes to your build settings will trigger a redeployment of your project.

## Environment Variables

From the Environment Variables page of the Settings tab, you can add, update and delete variables for use in your project. Like build settings, these changes will trigger a redeployment of your project.

<Banner>
Note: For security reasons, any variables you wish to provide to the [Admin Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. [More details](../configuration/environment-vars).
</Banner>

## Custom Domains

With Payload Cloud, you can add custom domain names to your project. To do so, first go to the Domains page of the Settings tab of your project. Here you can see your default domain. To add a new domain, type in the domain name you wish to use.

<Banner>
Note: do not include the protocol (http:// or https://) or any paths (/page). Only include the domain name and extension, and optionally a subdomain, e.g. your-domain.com or backend.your-domain.com.
</Banner>

Once you click save, a DNS record will be generated for your domain name to point to your live project. Add this record into your DNS provider’s records, and once the records are resolving properly (this can take 1hr to 48hrs in some cases), your domain will now point to your live project.

You will also need to configure your Payload project to use your specified domain. In your `payload.config.ts` file, specify your `serverURL` with your domain:

```ts
export default buildConfig({
  serverURL: 'https://example.com',
  // the rest of your config,
})
```

## Email

Powered by [Resend](https://resend.com), Payload Cloud comes with integrated email support out of the box. No configuration is needed, and you can use `payload.sendEmail()` to send email right from your Payload app. To learn more about sending email with Payload, check out the [Email Configuration](../email/overview) overview.

If you are on the Pro or Enterprise plan, you can add your own custom Email domain name. From the Email page of your project’s Settings, add the domain you wish to use for email delivery. This will generate a set of DNS records. Add these records to your DNS provider and click verify to check that your records are resolving properly. Once verified, your emails will now be sent from your custom domain name.

## Developing Locally

To make changes to your project, you will need to clone the repository defined in your project settings to your local machine. In order to run your project locally, you will need to configure your local environment first. Refer to your repository’s `README.md` file to see the steps needed for your specific template.

From there, you are ready to make updates to your project. When you are ready to make your changes live, commit your changes to the branch you specified in your Project settings, and your application will automatically trigger a redeploy and build from your latest commit.

## Cloud Plugin

Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add this into your project. To do so, install the package and add the plugin to your Payload Config:

`pnpm add @payloadcms/payload-cloud`

```js
import { payloadCloudPlugin } from '@payloadcms/payload-cloud'
import { buildConfig } from 'payload'

export default buildConfig({
  plugins: [payloadCloudPlugin()],
  // rest of config
})
```

<Banner type="warning">
**Note:** If your Payload Config already has an email with transport, this will take precedence over Payload Cloud's email service.
</Banner>

<Banner type="info">
Good to know: the Payload Cloud Plugin was previously named `@payloadcms/plugin-cloud`. If you are using this plugin, you should update to the new package name.
</Banner>

#### **Optional configuration**

If you wish to opt out of any Payload Cloud features, the plugin also accepts options to do so.

```js
payloadCloud({
  storage: false, // Disable file storage
  email: false, // Disable email delivery
})
```
@@ -1,35 +0,0 @@
---
title: Cloud Teams
label: Teams
order: 30
desc: Manage your Payload Cloud team and billing settings.
keywords: team, teams, billing, subscription, payment, plan, plans, cloud, payload cloud
---

<Banner>
Within Payload Cloud, the team management feature offers you the ability to manage your organization, team members, billing, and subscription settings.
</Banner>

![Team Settings]()
_A screenshot of the Team Settings page._

## Members

Each team has members that can interact with your projects. You can invite multiple people to your team and each individual can belong to more than one team. You can assign them either `owner` or `user` permissions. Owners are able to make admin-only changes, such as deleting projects and editing billing information.

## Adding Members

To add a new member to your team, visit your Team’s Settings page, and click “Invite Teammate”. You can then add their email address, and assign their role. Press “Save” to send the invitations, which will send an email to the invited team member where they can create a new account.

## Billing

Users can update billing settings and subscriptions for any teams where they are designated as an `owner`. To make updates to the team’s payment methods, visit the Billing page under the Team Settings tab. You can add new cards, delete cards, and set a payment method as a default. The default payment method will be used in the event that another payment method fails.

## Subscriptions

From the Subscriptions page, a team owner can see all current plans for their team. From here, you can see the price of each plan, if there is an active trial, and when you will be billed next.

## Invoices

The Invoices page will show you the invoices for your account, as well as the status of their payment.
@@ -60,32 +60,33 @@ export const Posts: CollectionConfig = {
 
 The following options are available:
 
 | Option | Description |
 | --- | --- |
 | `admin` | The configuration options for the Admin Panel. [More details](#admin-options). |
 | `access` | Provide Access Control functions to define exactly who should be able to do what with Documents in this Collection. [More details](../access-control/collections). |
 | `auth` | Specify options if you would like this Collection to feature authentication. [More details](../authentication/overview). |
 | `custom` | Extension point for adding custom data (e.g. for plugins) |
 | `disableDuplicate` | When true, do not show the "Duplicate" button while editing documents within this Collection and prevent `duplicate` from all APIs. |
 | `defaultSort` | Pass a top-level field to sort by default in the Collection List View. Prefix the name of the field with a minus symbol ("-") to sort in descending order. Multiple fields can be specified by using a string array. |
 | `dbName` | Custom table or Collection name depending on the Database Adapter. Auto-generated from slug if not defined. |
 | `endpoints` | Add custom routes to the REST API. Set to `false` to disable routes. [More details](../rest-api/overview#custom-endpoints). |
 | `fields` \* | Array of field types that will determine the structure and functionality of the data stored within this Collection. [More details](../fields/overview). |
 | `graphQL` | Manage GraphQL-related properties for this collection. [More](#graphql) |
 | `hooks` | Entry point for Hooks. [More details](../hooks/overview#collection-hooks). |
 | `orderable` | If true, enables custom ordering for the collection, and documents can be reordered via drag and drop. Uses [fractional indexing](https://observablehq.com/@dgreensp/implementing-fractional-indexing) for efficient reordering. |
 | `labels` | Singular and plural labels for use in identifying this Collection throughout Payload. Auto-generated from slug if not defined. |
 | `enableQueryPresets` | Enable query presets for this Collection. [More details](../query-presets/overview). |
 | `lockDocuments` | Enables or disables document locking. By default, document locking is enabled. Set to an object to configure, or set to `false` to disable locking. [More details](../admin/locked-documents). |
 | `slug` \* | Unique, URL-friendly string that will act as an identifier for this Collection. |
 | `timestamps` | Set to false to disable documents' automatically generated `createdAt` and `updatedAt` timestamps. |
+| `trash` | A boolean to enable soft deletes for this collection. Defaults to `false`. [More details](../trash/overview). |
 | `typescript` | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. |
 | `upload` | Specify options if you would like this Collection to support file uploads. For more, consult the [Uploads](../upload/overview) documentation. |
 | `versions` | Set to true to enable default options, or configure with object properties. [More details](../versions/overview#collection-config). |
 | `defaultPopulate` | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). |
-| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. [More details](../database/indexes#compound-indexes). |
+| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. |
 | `forceSelect` | Specify which fields should always be selected, regardless of the `select` query, which can be useful to ensure the field exists for access control / hooks |
 | `disableBulkEdit` | Disable the bulk edit operation for the collection in the admin panel and the REST API |
 
 _\* An asterisk denotes that a property is required._

@@ -130,6 +131,7 @@ The following options are available:
 | `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
 | `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. |
 | `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. |
+| `groupBy` | Beta. Enable grouping by a field in the list view. |
 | `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
 | `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
 | `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |

@@ -140,7 +142,7 @@ The following options are available:
 | `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
 | `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
 | `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
-| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
+| `baseFilter` | Defines a default base filter which will be applied to the List View (along with any other filters applied by the user) and internal links in the Lexical Editor. |
 
 <Banner type="warning">
 **Note:** If you set `useAsTitle` to a relationship or join field, it will use
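A minimal sketch of the new `trash` option in use; the slug and field shown are illustrative:

```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  trash: true, // enable soft deletes: deleted documents are kept and can be restored
  fields: [{ name: 'title', type: 'text' }],
}
```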
@@ -30,18 +30,22 @@ export default buildConfig({
 
 ## Options
 
 | Option | Description |
 | --- | --- |
 | `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. |
 | `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. |
 | `collectionsSchemaOptions` | Customize Mongoose schema options for collections. |
 | `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false |
 | `migrationDir` | Customize the directory that migrations are stored. |
 | `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. |
 | `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). |
 | `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include in the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. |
 | `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
 | `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields. This can affect performance in some cases, but it ensures a consistent order of results. |
+| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. |
+| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. |
+| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. |
+| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. |
 
 ## Access to Mongoose models
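For example, a sketch of passing the new flags to the adapter; the values shown are illustrative, not recommendations:

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload'

export default buildConfig({
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
    useBigIntForNumberIDs: true, // BigInt for 'number' custom ID fields
    useJoinAggregations: false, // populate join fields via multiple find queries
  }),
})
```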
@@ -56,9 +60,21 @@ You can access Mongoose models as follows:
 
 ## Using other MongoDB implementations
 
-Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db):
+You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by Payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):
 
-- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
-- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
-- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future.
-- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB.
+```ts
+import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb'
+
+export default buildConfig({
+  db: mongooseAdapter({
+    url: process.env.DATABASE_URI,
+    // For example, if you're using firestore:
+    ...compatabilityOptions.firestore,
+  }),
+})
+```
+
+We export compatability options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:
+
+- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
+- For Azure Cosmos DB, the root config property `indexSortableFields` must be set to `true`.
@@ -296,11 +296,16 @@ query {
 sort: "createdAt"
 limit: 5
 where: { author: { equals: "66e3431a3f23e684075aaeb9" } }
+"""
+Optionally pass count: true if you want to retrieve totalDocs
+"""
+count: true
 ) {
 docs {
 title
 }
 hasNextPage
+totalDocs
 }
 }
}
@@ -157,6 +157,7 @@ The following field names are forbidden and cannot be used:
 - `salt`
 - `hash`
 - `file`
+- `status` - with Postgres Adapter and when drafts are enabled
 
 ### Field-level Hooks
 
@@ -34,20 +34,20 @@ npm i @payloadcms/plugin-csm
 Then in the `plugins` array of your Payload Config, call the plugin and enable any collections that require Content Source Maps.
 
 ```ts
-import { buildConfig } from "payload/config"
-import contentSourceMaps from "@payloadcms/plugin-csm"
+import { buildConfig } from 'payload/config'
+import contentSourceMaps from '@payloadcms/plugin-csm'
 
 const config = buildConfig({
 collections: [
 {
-slug: "pages",
+slug: 'pages',
 fields: [
 {
 name: 'slug',
 type: 'text',
 },
 {
-name: 'title,'
+name: 'title',
 type: 'text',
 },
 ],

@@ -55,7 +55,7 @@ const config = buildConfig({
 ],
 plugins: [
 contentSourceMaps({
-collections: ["pages"],
+collections: ['pages'],
 }),
 ],
})
@@ -51,7 +51,7 @@ export default buildConfig({
 // add as many cron jobs as you want
 ],
 shouldAutoRun: async (payload) => {
-// Tell Payload if it should run jobs or not.
+// Tell Payload if it should run jobs or not. This function is optional and will return true by default.
 // This function will be invoked each time Payload goes to pick up and run jobs.
 // If this function ever returns false, the cron schedule will be stopped.
 return true
docs/jobs-queue/schedules.mdx (new file, 155 lines)

@@ -0,0 +1,155 @@
---
title: Job Schedules
label: Schedules
order: 60
desc: Payload allows you to schedule jobs to run periodically
keywords: jobs queue, application framework, typescript, node, react, nextjs, scheduling, cron, schedule
---

Payload's `schedule` property lets you enqueue Jobs regularly according to a cron schedule - daily, weekly, hourly, or any custom interval. This is ideal for tasks or workflows that must repeat automatically and without manual intervention.

Scheduling Jobs differs significantly from running them:

- **Queueing**: Scheduling only creates (enqueues) the Job according to your cron expression. It does not immediately execute any business logic.
- **Running**: Execution happens separately through your Jobs runner - such as autorun, or manual invocation using `payload.jobs.run()` or the `payload-jobs/run` endpoint.

Use the `schedule` property specifically when you have recurring tasks or workflows. To enqueue a single Job to run once in the future, use the `waitUntil` property instead.

## Example use cases

**Regular emails or notifications**

Send nightly digests, weekly newsletters, or hourly updates.

**Batch processing during off-hours**

Process analytics data or rebuild static sites during low-traffic times.

**Periodic data synchronization**

Regularly push or pull updates to or from external APIs.

## Handling schedules

Something needs to actually trigger the scheduling of jobs (that is, execute the scheduling lifecycle described below). By default, the `jobs.autorun` configuration, as well as the `/api/payload-jobs/run` endpoint, will also handle scheduling for the queue specified in the `autorun` configuration.

You can disable this behavior by setting `disableScheduling: true` in your `autorun` configuration, or by passing `disableScheduling=true` to the `/api/payload-jobs/run` endpoint. This is useful if you want to handle scheduling manually, for example, by using a cron job or a serverless function that calls the `/api/payload-jobs/handle-schedules` endpoint or the `payload.jobs.handleSchedules()` local API method.
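A minimal sketch of that opt-out, assuming `disableScheduling` is set per autorun entry (the text above names the property but not its exact placement):

```ts
export default buildConfig({
  jobs: {
    autoRun: [
      {
        cron: '* * * * *',
        queue: 'nightly',
        disableScheduling: true, // autorun still executes due jobs, but no longer enqueues scheduled ones
      },
    ],
  },
})
```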
## Defining schedules on Tasks or Workflows

Schedules are defined using the `schedule` property:

```ts
export type ScheduleConfig = {
  cron: string // required, supports seconds precision
  queue: string // required, the queue to push Jobs onto
  hooks?: {
    // Optional hooks to customize scheduling behavior
    beforeSchedule?: BeforeScheduleFn
    afterSchedule?: AfterScheduleFn
  }
}
```

### Example schedule

The following example demonstrates scheduling a Job to enqueue every day at midnight:

```ts
import type { TaskConfig } from 'payload'

export const SendDigestEmail: TaskConfig<'SendDigestEmail'> = {
  slug: 'SendDigestEmail',
  schedule: [
    {
      cron: '0 0 * * *', // Every day at midnight
      queue: 'nightly',
    },
  ],
  handler: async () => {
    await sendDigestToAllUsers()
  },
}
```

This configuration only queues the Job - it does not execute it immediately. To actually run the queued Job, you configure autorun in your Payload config (note that autorun should **not** be used on serverless platforms):

```ts
export default buildConfig({
  jobs: {
    autoRun: [
      {
        cron: '* * * * *', // Runs every minute
        queue: 'nightly',
      },
    ],
    tasks: [SendDigestEmail],
  },
})
```

That way, Payload's scheduler will automatically enqueue the job into the `nightly` queue every day at midnight. The autorun configuration will check the `nightly` queue every minute and execute any Jobs that are due to run.

## Scheduling lifecycle

Here's how the scheduling process operates in detail:

1. **Cron evaluation**: Payload (or your external trigger in `manual` mode) identifies which schedules are due to run. To do that, it will read the `payload-jobs-stats` global which contains information about the last time each scheduled task or workflow was run.
2. **BeforeSchedule hook**:
   - The default beforeSchedule hook checks whether any active or runnable jobs of the same type, queued by the scheduling system, currently exist. If such a job exists, it will skip scheduling a new one.
   - You can provide your own `beforeSchedule` hook to customize this behavior. For example, you might want to allow multiple overlapping Jobs or dynamically set the Job input data.
3. **Enqueue Job**: Payload queues up a new job. This job will have `waitUntil` set to the next scheduled time based on the cron expression.
4. **AfterSchedule hook**:
   - The default afterSchedule hook updates the `payload-jobs-stats` global metadata with the last scheduled time for the Job.
   - You can provide your own afterSchedule hook for custom logging, metrics, or other post-scheduling actions.

## Customizing concurrency and input (Advanced)

You may want more control over concurrency or dynamically set Job inputs at scheduling time. For instance, allowing multiple overlapping Jobs to be scheduled, even if a previously scheduled job has not completed yet, or preparing dynamic data to pass to your Job handler:

```ts
import { countRunnableOrActiveJobsForQueue } from 'payload'

schedule: [
  {
    cron: '* * * * *', // every minute
    queue: 'reports',
    hooks: {
      beforeSchedule: async ({ queueable, req }) => {
        const runnableOrActiveJobsForQueue =
          await countRunnableOrActiveJobsForQueue({
            queue: queueable.scheduleConfig.queue,
            req,
            taskSlug: queueable.taskConfig?.slug,
            workflowSlug: queueable.workflowConfig?.slug,
            onlyScheduled: true,
          })

        // Allow up to 3 simultaneous scheduled jobs and set dynamic input
        return {
          shouldSchedule: runnableOrActiveJobsForQueue < 3,
          input: { text: 'Hi there' },
        }
      },
    },
  },
]
```

This allows fine-grained control over how many Jobs can run simultaneously and provides dynamically computed input values each time a Job is scheduled.

## Scheduling in serverless environments

On serverless platforms, scheduling must be triggered externally since Payload does not automatically run cron schedules in ephemeral environments. You have three main ways to trigger scheduling manually:

- **Invoke via Payload's Local API:** `payload.jobs.handleSchedules()`
- **Use the REST API endpoint:** `/api/payload-jobs/handle-schedules`
- **Use the run endpoint, which also handles scheduling by default:** `GET /api/payload-jobs/run`

For example, on Vercel, you can set up a Vercel Cron to regularly trigger scheduling:

- **Vercel Cron Job:** Configure Vercel Cron to periodically call `GET /api/payload-jobs/handle-schedules`. If you would like to auto-run your scheduled jobs as well, you can use the `GET /api/payload-jobs/run` endpoint.

Once Jobs are queued, their execution depends entirely on your configured runner setup (e.g., autorun, or manual invocation).
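A sketch of the corresponding `vercel.json` entry; the daily schedule shown is illustrative:

```json
{
  "crons": [
    {
      "path": "/api/payload-jobs/handle-schedules",
      "schedule": "0 0 * * *"
    }
  ]
}
```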
@@ -45,13 +45,11 @@ The following options are available:
 
 | Path | Description |
 | --- | --- |
-| **`url`** \* | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
+| **`url`** | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
 | **`breakpoints`** | Array of breakpoints to be used as “device sizes” in the preview window. Each item appears as an option in the toolbar. [More details](#breakpoints). |
 | **`collections`** | Array of collection slugs to enable Live Preview on. |
 | **`globals`** | Array of global slugs to enable Live Preview on. |
 
-_\* An asterisk denotes that a property is required._
-
 ### URL
 
 The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.

@@ -88,17 +86,16 @@ const config = buildConfig({
 // ...
 livePreview: {
 // highlight-start
-url: ({
-data,
-collectionConfig,
-locale
-}) => `${data.tenant.url}${ // Multi-tenant top-level domain
-collectionConfig.slug === 'posts' ? `/posts/${data.slug}` : `${data.slug !== 'home' : `/${data.slug}` : ''}`
-}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
+url: ({ data, collectionConfig, locale }) =>
+  `${data.tenant.url}${
+    collectionConfig.slug === 'posts'
+      ? `/posts/${data.slug}`
+      : `${data.slug !== 'home' ? `/${data.slug}` : ''}`
+  }${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
 collections: ['pages'],
 },
 // highlight-end
 }
 },
})
@@ -51,6 +51,7 @@ export default async function Page() {
    collection: 'pages',
    id: '123',
    draft: true,
    trash: true, // add this if trash is enabled in your collection and you want to preview trashed documents
  })

  return (
@@ -194,6 +194,27 @@ const result = await payload.count({
})
```

### FindDistinct#collection-find-distinct

```js
// The result will be an object with:
// {
//   values: ['value-1', 'value-2'], // array of distinct values
//   field: 'title', // the field the values were collected from
//   totalDocs: 10, // count of distinct values satisfying the query
//   perPage: 10, // count of distinct values per page (based on the provided limit)
// }
const result = await payload.findDistinct({
  collection: 'posts', // required
  locale: 'en',
  where: {}, // pass a `where` query here
  user: dummyUser,
  overrideAccess: false,
  field: 'title',
  sort: 'title',
})
```

### Update by ID#collection-update-by-id

```js
@@ -58,7 +58,7 @@ To learn more, see the [Custom Components Performance](../admin/custom-component

### Block references

Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can can significantly reduce the amount of data sent from the server to the client in the Admin Panel.
Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel.

For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field, as in the sketch below.
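
A minimal sketch of that pattern follows. The block definition and slugs are illustrative, and the exact property names (`blocks` at the config root, `blockReferences` on the field) are taken from the Blocks field docs linked above; treat this as a sketch rather than a complete config:

```ts
import { buildConfig } from 'payload'
import type { Block } from 'payload'

// Illustrative block, defined a single time
const TextBlock: Block = {
  slug: 'textBlock',
  fields: [{ name: 'text', type: 'text' }],
}

export default buildConfig({
  // Register the block once at the root of the config
  blocks: [TextBlock],
  collections: [
    {
      slug: 'pages',
      fields: [
        // Both fields point at the shared block by slug
        // instead of duplicating its definition
        {
          name: 'hero',
          type: 'blocks',
          blockReferences: ['textBlock'],
          blocks: [], // required alongside blockReferences
        },
        {
          name: 'layout',
          type: 'blocks',
          blockReferences: ['textBlock'],
          blocks: [],
        },
      ],
    },
  ],
})
```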
@@ -1,7 +1,7 @@
---
title: Form Builder Plugin
label: Form Builder
order: 40
order: 30
desc: Easily build and manage forms from the Admin Panel. Send dynamic, personalized emails and even accept and process payments.
keywords: plugins, plugin, form, forms, form builder
---

155 docs/plugins/import-export.mdx Normal file
@@ -0,0 +1,155 @@
---
title: Import Export Plugin
label: Import Export
order: 40
desc: Add import and export functionality to create CSV and JSON data exports
keywords: plugins, plugin, import, export, csv, JSON, data, ETL, download
---

<Banner type="warning">
  **Note**: This plugin is in **beta**, as some aspects of it may change in any
  minor release. It is under development and currently only supports exporting
  of collection data.
</Banner>

This plugin gives admin users the ability to export collection data as a direct download or as an upload collection document, and to import it back into a project.

## Core Features

- Export data in CSV or JSON format via the admin UI
- Download the export directly through the browser
- Create a file upload of the export data
- Use the jobs queue for large exports
- (Coming soon) Import collection data

## Installation

Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):

```bash
pnpm add @payloadcms/plugin-import-export
```

## Basic Usage

In the `plugins` array of your [Payload Config](https://payloadcms.com/docs/configuration/overview), call the plugin with [options](#options):

```ts
import { buildConfig } from 'payload'
import { importExportPlugin } from '@payloadcms/plugin-import-export'

const config = buildConfig({
  collections: [Pages, Media],
  plugins: [
    importExportPlugin({
      collections: ['users', 'pages'],
      // see below for a list of available options
    }),
  ],
})

export default config
```

## Options

| Property                   | Type     | Description |
| -------------------------- | -------- | ----------- |
| `collections`              | string[] | Collections to include Import/Export controls in. Defaults to all collections. |
| `debug`                    | boolean  | If true, enables debug logging. |
| `disableDownload`          | boolean  | If true, disables the download button in the export preview UI. |
| `disableJobsQueue`         | boolean  | If true, forces the export to run synchronously. |
| `disableSave`              | boolean  | If true, disables the save button in the export preview UI. |
| `format`                   | string   | Forces a specific export format (`csv` or `json`), hides the format dropdown, and prevents the user from choosing the export format. |
| `overrideExportCollection` | function | Function to override the default export collection; takes the default export collection and allows you to modify and return it. |

## Field Options

In addition to the plugin configuration options above, you can granularly set the following field-level options using the `custom['plugin-import-export']` properties in any of your collections.

| Property   | Type     | Description |
| ---------- | -------- | ----------- |
| `disabled` | boolean  | When `true`, the field is completely excluded from the import-export plugin. |
| `toCSV`    | function | Custom function used to modify the outgoing CSV data by manipulating the data or siblingData, or by returning the desired value. |

### Customizing the output of CSV data

To manipulate the data that a field exports, you can add custom `toCSV` functions. These allow you to modify the outgoing CSV data by manipulating the data or siblingData, or by returning the desired value.

The `toCSV` function argument is an object with the following properties:

| Property     | Type    | Description |
| ------------ | ------- | ----------- |
| `columnName` | string  | The CSV column name given to the field. |
| `doc`        | object  | The top-level document. |
| `row`        | object  | The object data that can be manipulated to assign data to the CSV. |
| `siblingDoc` | object  | The document data at the level where the field belongs. |
| `value`      | unknown | The data for the field. |

Example function:

```ts
const pages: CollectionConfig = {
  slug: 'pages',
  fields: [
    {
      name: 'author',
      type: 'relationship',
      relationTo: 'users',
      custom: {
        'plugin-import-export': {
          toCSV: ({ value, columnName, row }) => {
            // add both `author_id` and `author_email` to the csv export
            if (
              value &&
              typeof value === 'object' &&
              'id' in value &&
              'email' in value
            ) {
              row[`${columnName}_id`] = (value as { id: number | string }).id
              row[`${columnName}_email`] = (value as { email: string }).email
            }
          },
        },
      },
    },
  ],
}
```

## Exporting Data

The plugin allows for exporting documents in four possible ways; the first two are available in the admin UI from the list view of a collection:

1. Direct download - sends a `POST` to `/api/exports/download` and streams the response as a file download
2. File storage - saves the export to the `exports` collection, which is an uploads-enabled collection
3. Local API - a create call to the export collection: `payload.create({ collection: 'exports', ...parameters })`
4. Jobs Queue - `payload.jobs.queue({ task: 'createCollectionExport', input: parameters })`

By default, a user can use the Export drawer to create a file download by choosing `Save`, or stream a downloadable file directly without persisting it by using the `Download` button. Either option can be disabled to provide the export experience you desire for your use case.

The UI for creating exports provides options so that users can be selective about which documents to include, and also which columns or fields to include.

If you have enabled this plugin on collections with data that some authenticated users should not have access to, it is necessary to add access control to the export collection configuration using the `overrideExportCollection` function.

<Banner type="warning">
  **Note**: Users who have read access to the upload collection may be able to
  download data that is normally not readable due to [access
  control](../access-control/overview).
</Banner>

The following parameters are used by the export function to handle requests:

| Property         | Type     | Description |
| ---------------- | -------- | ----------- |
| `format`         | text     | Either `csv` or `json`, determining the shape of the exported data |
| `limit`          | number   | The max number of documents to return |
| `sort`           | select   | The field to use for ordering documents |
| `locale`         | string   | The locale code to query documents with, or `all` |
| `draft`          | string   | Either `yes` or `no`; returns documents with their newest drafts for drafts-enabled collections |
| `fields`         | string[] | Which collection fields are used to create the export; defaults to all |
| `collectionSlug` | string   | The slug to query against |
| `where`          | object   | The WhereObject used to query documents to export. This is set by making selections or filters from the list view |
| `filename`       | text     | What to call the export being created |
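
As a rough sketch, an export queued programmatically through the jobs queue (method 4 above) might pass those parameters like this. The exact `input` shape is an assumption based on the parameter table; the slugs, values, and filename are illustrative:

```ts
// Queues an export job; a worker (autorun or manual) will pick it up
await payload.jobs.queue({
  task: 'createCollectionExport',
  input: {
    collectionSlug: 'pages',
    format: 'csv',
    limit: 1000,
    sort: 'createdAt',
    locale: 'all',
    draft: 'no',
    fields: ['id', 'title', 'author'],
    where: { title: { exists: true } },
    filename: 'pages-export',
  },
})
```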
@@ -1,7 +1,7 @@
---
title: Multi-Tenant Plugin
label: Multi-Tenant
order: 40
order: 50
desc: Scaffolds multi-tenancy for your Payload application
keywords: plugins, multi-tenant, multi-tenancy, plugin, payload, cms, seo, indexing, search, search engine
---
@@ -53,6 +53,14 @@ The plugin accepts an object with the following properties:

```ts
type MultiTenantPluginConfig<ConfigTypes = unknown> = {
  /**
   * Base path for your application
   *
   * https://nextjs.org/docs/app/api-reference/config/next-config-js/basePath
   *
   * @default undefined
   */
  basePath?: string
  /**
   * After a tenant is deleted, the plugin will attempt to clean up related documents
   * - removing documents with the tenant ID
@@ -72,8 +80,25 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   * @default false
   */
  isGlobal?: boolean
  /**
   * Overrides for the tenant field, will override the entire tenantField configuration
   */
  tenantFieldOverrides?: CollectionTenantFieldConfigOverrides
  /**
   * Set to `false` if you want to manually apply the baseListFilter
   * Set to `false` if you want to manually apply the baseFilter
   *
   * @default true
   */
  useBaseFilter?: boolean
  /**
   * @deprecated Use `useBaseFilter` instead. If both are defined,
   * `useBaseFilter` will take precedence. This property remains only
   * for backward compatibility and may be removed in a future version.
   *
   * Originally, `baseListFilter` was intended to filter only the List View
   * in the admin panel. However, base filtering is often required in other areas
   * such as internal link relationships in the Lexical editor.
   *
   * @default true
   */
@@ -99,18 +124,37 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   * @default true
   */
  enabled?: boolean
  /**
   * Localization for the plugin
   */
  i18n?: {
    translations: {
      [key in AcceptedLanguages]?: {
        /**
         * @default 'You are about to change ownership from <0>{{fromTenant}}</0> to <0>{{toTenant}}</0>'
         */
        'confirm-modal-tenant-switch--body'?: string
        /**
         * `tenantLabel` defaults to the value of the `nav-tenantSelector-label` translation
         *
         * @default 'Confirm {{tenantLabel}} change'
         */
        'confirm-modal-tenant-switch--heading'?: string
        /**
         * @default 'Assigned Tenant'
         */
        'field-assignedTenant-label'?: string
        /**
         * @default 'Tenant'
         */
        'nav-tenantSelector-label'?: string
      }
    }
  }
  /**
   * Field configuration for the field added to all tenant enabled collections
   */
  tenantField?: {
    access?: RelationshipField['access']
    /**
     * The name of the field added to all tenant enabled collections
     *
     * @default 'tenant'
     */
    name?: string
  }
  tenantField?: RootTenantFieldConfigOverrides
  /**
   * Field configuration for the field added to the users collection
   *
@@ -163,6 +207,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   * Customize tenant selector label
   *
   * Either a string or an object where the keys are i18n codes and the values are the string labels
   *
   * @deprecated Use `i18n.translations` instead.
   */
  tenantSelectorLabel?:
    | Partial<{
@@ -181,7 +227,9 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   * Useful for super-admin type users
   */
  userHasAccessToAllTenants?: (
    user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User,
    user: ConfigTypes extends { user: unknown }
      ? ConfigTypes['user']
      : TypedUser,
  ) => boolean
  /**
   * Opt out of adding access constraints to the tenants collection
@@ -212,15 +260,15 @@ const config = buildConfig({
    {
      slug: 'tenants',
      admin: {
        useAsTitle: 'name'
      }
        useAsTitle: 'name',
      },
      fields: [
        // remember, you own these fields
        // these are merely suggestions/examples
        {
          name: 'name',
          type: 'text',
          required: true,
          name: 'name',
          type: 'text',
          required: true,
        },
        {
          name: 'slug',
@@ -231,7 +279,7 @@ const config = buildConfig({
          name: 'domain',
          type: 'text',
          required: true,
        }
        },
      ],
    },
  ],
@@ -241,7 +289,7 @@ const config = buildConfig({
      pages: {},
      navigation: {
        isGlobal: true,
      }
      },
    },
  }),
],
@@ -327,14 +375,16 @@ type ContextType = {
  /**
   * Prevents a refresh when the tenant is changed
   *
   * If not switching tenants while viewing a "global", set to true
   * If not switching tenants while viewing a "global",
   * set to true
   */
  setPreventRefreshOnChange: React.Dispatch<React.SetStateAction<boolean>>
  /**
   * Sets the selected tenant ID
   *
   * @param args.id - The ID of the tenant to select
   * @param args.refresh - Whether to refresh the page after changing the tenant
   * @param args.refresh - Whether to refresh the page
   * after changing the tenant
   */
  setTenant: (args: {
    id: number | string | undefined
@@ -1,7 +1,7 @@
---
title: Nested Docs Plugin
label: Nested Docs
order: 40
order: 60
desc: Nested documents in a parent, child, and sibling relationship.
keywords: plugins, nested, documents, parent, child, sibling, relationship
---

@@ -55,6 +55,7 @@ Payload maintains a set of Official Plugins that solve for some of the common us

- [Sentry](./sentry)
- [SEO](./seo)
- [Stripe](./stripe)
- [Import/Export](./import-export)

You can also [build your own plugin](./build-your-own) to easily extend Payload's functionality in some other way. Once your plugin is ready, consider [sharing it with the community](#community-plugins).
@@ -1,7 +1,7 @@
---
title: Redirects Plugin
label: Redirects
order: 40
order: 70
desc: Automatically create redirects for your Payload application
keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine
---

@@ -1,7 +1,7 @@
---
title: Search Plugin
label: Search
order: 40
order: 80
desc: Generates records of your documents that are extremely fast to search on.
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input
---

@@ -1,7 +1,7 @@
---
title: Sentry Plugin
label: Sentry
order: 40
order: 90
desc: Integrate Sentry error tracking into your Payload application
keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance
---

@@ -2,7 +2,7 @@
description: Manage SEO metadata from your Payload admin
keywords: plugins, seo, meta, search, engine, ranking, google
label: SEO
order: 30
order: 100
title: SEO Plugin
---

@@ -1,7 +1,7 @@
---
title: Stripe Plugin
label: Stripe
order: 40
order: 110
desc: Easily accept payments with Stripe
keywords: plugins, stripe, payments, ecommerce
---
@@ -24,16 +24,6 @@ Payload can be deployed _anywhere that Next.js can run_ - including Vercel, Netl

But it's important to remember that most Payload projects will also need a database, file storage, an email provider, and a CDN. Make sure you have all of the requirements that your project needs, no matter what deployment platform you choose.

Often, the easiest and fastest way to deploy Payload is to use [Payload Cloud](https://payloadcms.com/new) — where you get everything you need out of the box, including:

1. A MongoDB Atlas database
1. S3 file storage
1. Resend email service
1. Cloudflare CDN
1. Blue / green deployments
1. Logs
1. And more

## Basics

Payload runs fully in Next.js, so the [Next.js build process](https://nextjs.org/docs/app/building-your-application/deploying) is used for building Payload. If you've used `create-payload-app` to create your project, executing the `build`
@@ -474,11 +474,15 @@ const MyNodeComponent = React.lazy(() =>
)

/**
 * This node is a DecoratorNode. DecoratorNodes allow you to render React components in the editor.
 * This node is a DecoratorNode. DecoratorNodes allow
 * you to render React components in the editor.
 *
 * They need both createDom and decorate functions. createDom => outside of the html. decorate => React Component inside of the html.
 * They need both createDom and decorate functions.
 * createDom => outside of the html.
 * decorate => React Component inside of the html.
 *
 * If we used DecoratorBlockNode instead, we would only need a decorate method
 * If we used DecoratorBlockNode instead,
 * we would only need a decorate method
 */
export class MyNode extends DecoratorNode<React.ReactElement> {
  static clone(node: MyNode): MyNode {
@@ -490,9 +494,11 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
  }

  /**
   * Defines what happens if you copy a div element from another page and paste it into the lexical editor
   * Defines what happens if you copy a div element
   * from another page and paste it into the lexical editor
   *
   * This also determines the behavior of lexical's internal HTML -> Lexical converter
   * This also determines the behavior of lexical's
   * internal HTML -> Lexical converter
   */
  static importDOM(): DOMConversionMap | null {
    return {
@@ -504,14 +510,18 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
  }

  /**
   * The data for this node is stored serialized as JSON. This is the "load function" of that node: it takes the saved data and converts it into a node.
   * The data for this node is stored serialized as JSON.
   * This is the "load function" of that node: it takes
   * the saved data and converts it into a node.
   */
  static importJSON(serializedNode: SerializedMyNode): MyNode {
    return $createMyNode()
  }

  /**
   * Determines how the hr element is rendered in the lexical editor. This is only the "initial" / "outer" HTML element.
   * Determines how the hr element is rendered in the
   * lexical editor. This is only the "initial" / "outer"
   * HTML element.
   */
  createDOM(config: EditorConfig): HTMLElement {
    const element = document.createElement('div')
@@ -519,22 +529,28 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
  }

  /**
   * Allows you to render a React component within whatever createDOM returns.
   * Allows you to render a React component within
   * whatever createDOM returns.
   */
  decorate(): React.ReactElement {
    return <MyNodeComponent nodeKey={this.__key} />
  }

  /**
   * Opposite of importDOM, this function defines what happens when you copy a div element from the lexical editor and paste it into another page.
   * Opposite of importDOM, this function defines what
   * happens when you copy a div element from the lexical
   * editor and paste it into another page.
   *
   * This also determines the behavior of lexical's internal Lexical -> HTML converter
   * This also determines the behavior of lexical's
   * internal Lexical -> HTML converter
   */
  exportDOM(): DOMExportOutput {
    return { element: document.createElement('div') }
  }
  /**
   * Opposite of importJSON. This determines what data is saved in the database / in the lexical editor state.
   * Opposite of importJSON. This determines what
   * data is saved in the database / in the lexical
   * editor state.
   */
  exportJSON(): SerializedLexicalNode {
    return {
@@ -556,18 +572,23 @@ export class MyNode extends DecoratorNode<React.ReactElement> {
  }
}

// This is used in the importDOM method. Totally optional if you do not want your node to be created automatically when copy & pasting certain dom elements
// into your editor.
// This is used in the importDOM method. Totally optional
// if you do not want your node to be created automatically
// when copy & pasting certain dom elements into your editor.
function $yourConversionMethod(): DOMConversionOutput {
  return { node: $createMyNode() }
}

// This is a utility method to create a new MyNode. Utility methods prefixed with $ make it explicit that this should only be used within lexical
// This is a utility method to create a new MyNode.
// Utility methods prefixed with $ make it explicit
// that this should only be used within lexical
export function $createMyNode(): MyNode {
  return $applyNodeReplacement(new MyNode())
}

// This is just a utility method you can use to check if a node is a MyNode. This also ensures correct typing.
// This is just a utility method you can use
// to check if a node is a MyNode. This also
// ensures correct typing.
export function $isMyNode(
  node: LexicalNode | null | undefined,
): node is MyNode {
@@ -626,10 +647,12 @@ export const INSERT_MYNODE_COMMAND: LexicalCommand<void> = createCommand(
)

/**
 * Plugin which registers a lexical command to insert a new MyNode into the editor
 * Plugin which registers a lexical command to
 * insert a new MyNode into the editor
 */
export const MyNodePlugin: PluginComponent = () => {
  // The useLexicalComposerContext hook can be used to access the lexical editor instance
  // The useLexicalComposerContext hook can be used
  // to access the lexical editor instance
  const [editor] = useLexicalComposerContext()

  useEffect(() => {
@@ -124,12 +124,15 @@ HeadingFeature({

```ts
type IndentFeatureProps = {
  /**
   * The nodes that should not be indented. "type" property of the nodes you don't want to be indented.
   * These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist.
   * The nodes that should not be indented. "type"
   * property of the nodes you don't want to be indented.
   * These can be: "paragraph", "heading", "listitem",
   * "quote" or other indentable nodes if they exist.
   */
  disabledNodes?: string[]
  /**
   * If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode.
   * If true, pressing Tab in the middle of a block such
   * as a paragraph or heading will not insert a tabNode.
   * Instead, Tab will only be used for block-level indentation.
   * @default false
   */
@@ -180,7 +183,8 @@ type LinkFeatureServerProps = {
   */
  disableAutoLinks?: 'creationOnly' | true
  /**
   * A function or array defining additional fields for the link feature.
   * A function or array defining additional
   * fields for the link feature.
   * These will be displayed in the link editor drawer.
   */
  fields?:
@@ -235,7 +239,9 @@ LinkFeature({

```ts
type RelationshipFeatureProps = {
  /**
   * Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached.
   * Sets a maximum population depth for this relationship,
   * regardless of the remaining depth when the respective
   * field is reached.
   */
  maxDepth?: number
} & ExclusiveRelationshipFeatureProps
@@ -274,7 +280,10 @@ type UploadFeatureProps = {
    }
  }
  /**
   * Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached.
   * Sets a maximum population depth for this upload
   * (not the fields for this upload), regardless of
   * the remaining depth when the respective field is
   * reached.
   */
  maxDepth?: number
}
200 docs/trash/overview.mdx Normal file
@@ -0,0 +1,200 @@
---
title: Trash
label: Overview
order: 10
desc: Enable soft deletes for your collections to mark documents as deleted without permanently removing them.
keywords: trash, soft delete, deletedAt, recovery, restore
---

Trash (also known as soft delete) allows documents to be marked as deleted without being permanently removed. When enabled on a collection, deleted documents will receive a `deletedAt` timestamp, making it possible to restore them later, view them in a dedicated Trash view, or permanently delete them.

Soft delete is a safer way to manage the content lifecycle, giving editors a chance to review and recover documents that may have been deleted by mistake.

<Banner type="warning">
  **Note:** The Trash feature is currently in beta and may be subject to change
  in minor version updates.
</Banner>

## Collection Configuration

To enable soft deleting for a collection, set the `trash` property to `true`:

```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  trash: true,
  fields: [
    {
      name: 'title',
      type: 'text',
    },
    // other fields...
  ],
}
```

When enabled, Payload automatically injects a `deletedAt` field into the collection's schema. This timestamp is set when a document is soft-deleted, and cleared when the document is restored.

## Admin Panel behavior

Once `trash` is enabled, the Admin Panel provides a dedicated Trash view for each collection:

- A new route is added at `/collections/:collectionSlug/trash`
- The `Trash` view shows all documents that have a `deletedAt` timestamp

From the Trash view, you can:

- Use bulk actions to manage trashed documents:

  - **Restore** to clear the `deletedAt` timestamp and return documents to their original state
  - **Delete** to permanently remove selected documents
  - **Empty Trash** to select and permanently delete all trashed documents at once

- Enter each document's **edit view**, just like in the main list view. While in the edit view of a trashed document:
  - All fields are in a **read-only** state
  - Standard document actions (e.g., Save, Publish, Restore Version) are hidden and disabled
  - The available actions are **Restore** and **Permanently Delete**
  - Access to the **API**, **Versions**, and **Preview** views is preserved

When deleting a document from the main collection List View, Payload will soft-delete the document by default. A checkbox in the delete confirmation modal allows users to skip the trash and permanently delete instead.

## API Support

Soft deletes are fully supported across all Payload APIs: **Local**, **REST**, and **GraphQL**.

The following operations respect and support the `trash` functionality:

- `find`
- `findByID`
- `update`
- `updateByID`
- `delete`
- `deleteByID`
- `findVersions`
- `findVersionByID`

### Understanding `trash` Behavior

Passing `trash: true` to these operations will **include soft-deleted documents** in the query results.

To return _only_ soft-deleted documents, you must combine `trash: true` with a `where` clause that checks whether `deletedAt` exists.

### Examples

#### Local API

Return all documents including trashed:

```ts
const result = await payload.find({
  collection: 'posts',
  trash: true,
})
```

Return only trashed documents:

```ts
const result = await payload.find({
  collection: 'posts',
  trash: true,
  where: {
    deletedAt: {
      exists: true,
    },
  },
})
```

Return only non-trashed documents:

```ts
const result = await payload.find({
  collection: 'posts',
  trash: false,
})
```

#### REST

Return **all** documents including trashed:

```http
GET /api/posts?trash=true
```

Return **only trashed** documents:

```http
GET /api/posts?trash=true&where[deletedAt][exists]=true
```

Return only non-trashed documents:

```http
GET /api/posts?trash=false
```

#### GraphQL

Return all documents including trashed:

```graphql
query {
  Posts(trash: true) {
    docs {
      id
      deletedAt
    }
  }
}
```

Return only trashed documents:

```graphql
query {
  Posts(
    trash: true
    where: { deletedAt: { exists: true } }
  ) {
    docs {
      id
      deletedAt
    }
  }
}
```

Return only non-trashed documents:

```graphql
query {
  Posts(trash: false) {
    docs {
      id
      deletedAt
    }
  }
}
```

## Access Control

All trash-related actions (delete, permanent delete) respect the `delete` access control defined in your collection config.

This means:

- If a user is denied delete access, they cannot soft delete or permanently delete documents

## Versions and Trash

When a document is soft-deleted:

- It can no longer have a version **restored** until it is first restored from trash
- Attempting to restore a version while the document is in trash will result in an error
- This ensures consistency between the current document state and its version history

However, versions are still fully **visible and accessible** from the **edit view** of a trashed document. You can view the full version history, but you must restore the document itself before restoring any individual version.
@@ -6,9 +6,112 @@ desc: Troubleshooting Common Issues in Payload
keywords: admin, components, custom, customize, documentation, Content Management System, cms, headless, javascript, node, react, nextjs, troubleshooting
---

## Common Issues
## Dependency mismatches

### "Unauthorized, you must be logged in to make this request" when attempting to log in
All `payload` and `@payloadcms/*` packages must be on exactly the same version and installed only once.

When two copies—or two different versions—of any of these packages (or of `react` / `react-dom`) appear in your dependency graph, you can see puzzling runtime errors. The most frequent is a broken React context:

```bash
TypeError: Cannot destructure property 'config' of...
```

This happens because one package imports a hook (most commonly `useConfig`) from _version A_ while the context provider comes from _version B_. The fix is always the same: make sure every Payload-related and React package resolves to the same module.

### Confirm whether duplicates exist

The first thing to do is confirm whether duplicate dependencies do in fact exist.

There are two ways to do this:

1. Using pnpm's built-in inspection tool

   ```bash
   pnpm why @payloadcms/ui
   ```

   This prints the dependency tree and shows which versions are being installed. If you see more than one distinct version—or the same version listed under different paths—you have duplication.

2. Manual check (works with any package manager)

   ```bash
   find node_modules -name package.json \
     -exec grep -H '"name": "@payloadcms/ui"' {} \;
   ```

   Most of these hits are likely symlinks created by pnpm. Edit the matching package.json files (temporarily add a comment or change a description) to confirm whether they point to the same physical folder or to multiple copies.

Perform the same two checks for `react` and `react-dom`; a second copy of React can cause identical symptoms.
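
For instance, the same pnpm inspection applied to React (the output shape will vary by project):

```bash
pnpm why react
pnpm why react-dom
```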
#### If no duplicates are found

`@payloadcms/ui` intentionally contains two bundles of itself, so you may see dual paths even when everything is correct. Inside the Payload Admin UI you must import only:

- `@payloadcms/ui`
- `@payloadcms/ui/rsc`
- `@payloadcms/ui/shared`

Any other deep import such as `@payloadcms/ui/elements/Button` should **only** be used in your own frontend, outside of the Payload Admin Panel. Those deep entries are published un-bundled to help you tree-shake and ship a smaller client bundle if you only need a few components from `@payloadcms/ui`.

### Fixing dependency issues

These steps assume `pnpm`, which the Payload team recommends and uses internally. The principles apply to other package managers like npm and yarn as well. Do note that yarn 1.x is not supported by Payload.

1. Pin every critical package to an exact version

   In package.json, remove `^` or `~` from all versions of:

   - `payload`
   - `@payloadcms/*`
   - `react`
   - `react-dom`

   Version-range prefixes allow your package manager to float to a newer minor/patch release, causing mismatches. A minimal sketch of fully pinned versions follows.
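
The versions below are purely illustrative; pin to whatever exact versions your project has actually tested:

```json
{
  "dependencies": {
    "payload": "3.51.0",
    "@payloadcms/next": "3.51.0",
    "@payloadcms/ui": "3.51.0",
    "react": "19.1.0",
    "react-dom": "19.1.0"
  }
}
```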
2. Delete node_modules

   Old packages often linger even after you change versions or remove them from your package.json. Deleting node_modules ensures a clean slate.

3. Re-install dependencies

   ```bash
   pnpm install
   ```

#### If the error persists

1. Clean the global store (pnpm only)

   ```bash
   pnpm store prune
   ```

2. Delete the lockfile

   Depending on your package manager, this could be `pnpm-lock.yaml`, `package-lock.json`, or `yarn.lock`.

   Make sure you delete the lockfile **and** the node_modules folder at the same time, then run `pnpm install` (a sketch follows this list). This forces a fresh, consistent resolution for all packages. It will also update all packages with dynamic versions to the latest version.

   While it's best practice to manage dependencies in such a way that the lockfile can easily be re-generated (often the easiest way to resolve dependency issues), this may break your project if you have not tested the latest versions of your dependencies.

   If you are using a version control system, make sure to commit your lockfile after this step.

3. Deduplicate anything that slipped through

   ```bash
   pnpm dedupe
   ```
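
A minimal sketch of the lockfile reset from step 2 above, assuming pnpm on a POSIX shell:

```bash
# Remove installed packages and the lockfile together
rm -rf node_modules pnpm-lock.yaml

# Force a fresh, consistent resolution of every package
pnpm install
```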
**Still stuck?**

- Switch to `pnpm` if you are on npm. Its symlinked store helps reduce accidental duplication.
- Inspect the lockfile directly for peer-dependency violations.
- Check project-level `.npmrc` / `.pnpmfile.cjs` overrides.
- Run [Syncpack](https://www.npmjs.com/package/syncpack) to enforce identical versions of every `@payloadcms/*`, `react`, and `react-dom` reference.

Absolute last resort: add Webpack aliases so that all imports of a given package resolve to the same path (e.g. `resolve.alias['react'] = path.resolve('./node_modules/react')`). Keep this only until you can fix the underlying version skew.

## "Unauthorized, you must be logged in to make this request" when attempting to log in

This means that your auth cookie is not being set or accepted correctly upon logging in. To resolve this, check the following settings in your Payload Config:
@@ -90,33 +90,33 @@ export const Media: CollectionConfig = {

_An asterisk denotes that an option is required._

| Option | Description |
| ------ | ----------- |
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
| **`constructorOptions`** | An object passed to the Sharp image library that accepts any Constructor options and applies them to the upload file. [More](https://sharp.pixelplumbing.com/api-constructor/) |
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. If using this option, you should handle the removal of any sensitive cookies (like payload-prefixed cookies) to prevent leaking session information to external services. By default, Payload automatically filters out payload-prefixed cookies when this option is not defined. |
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. |
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |
| **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) |
| **`handlers`** | Array of Request handlers to execute when fetching a file. If a handler returns a Response, it will be sent to the client. Otherwise Payload will retrieve and send back the file. |
| **`imageSizes`** | If specified, image uploads will be automatically resized in accordance with these image sizes. [More](#image-sizes) |
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards. [More](#mimetypes) |
| **`pasteURL`** | Controls whether files can be uploaded from remote URLs by pasting them into the Upload field. **Enabled by default.** Accepts `false` to disable or an object with an `allowList` of valid remote URLs. [More](#uploading-files-from-remote-urls) |
| **`resizeOptions`** | An object passed to the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
| **`skipSafeFetch`** | Set to an `allowList` to skip the safe fetch check when fetching external files. Set to `true` to skip the safe fetch for all documents in this collection. Defaults to `false`. |
| **`allowRestrictedFileTypes`** | Set to `true` to allow restricted file types. If your Collection has defined [mimeTypes](#mimetypes), restricted file verification will be skipped. Defaults to `false`. [More](#restricted-file-types) |
| **`staticDir`** | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. Defaults to your collection slug. |
| **`trimOptions`** | An object passed to the Sharp image library to trim the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize#trim) |
| **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. |
| **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. |
| **`hideRemoveFile`** | Set to `true` to prevent the admin UI having a way to remove an existing file while editing. |
| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) |

### Payload-wide Upload Options
@@ -292,7 +292,8 @@ Reference any of the existing storage adapters for guidance on how this should b

```ts
export interface GeneratedAdapter {
  /**
   * Additional fields to be injected into the base collection and image sizes
   * Additional fields to be injected into the base
   * collection and image sizes
   */
  fields?: Field[]
  /**
@@ -6,6 +6,8 @@ import { anyone } from './access/anyone'
import { checkRole } from './access/checkRole'
import { loginAfterCreate } from './hooks/loginAfterCreate'
import { protectRoles } from './hooks/protectRoles'
import { access } from 'fs'
import { create } from 'domain'

export const Users: CollectionConfig = {
  slug: 'users',
@@ -32,6 +34,34 @@ export const Users: CollectionConfig = {
    afterChange: [loginAfterCreate],
  },
  fields: [
    {
      name: 'email',
      type: 'email',
      required: true,
      unique: true,
      access: {
        read: adminsAndUser,
        update: adminsAndUser,
      },
    },
    {
      name: 'password',
      type: 'password',
      required: true,
      admin: {
        description: 'Leave blank to keep the current password.',
      },
    },
    {
      name: 'resetPasswordToken',
      type: 'text',
      hidden: true,
    },
    {
      name: 'resetPasswordExpiration',
      type: 'date',
      hidden: true,
    },
    {
      name: 'firstName',
      type: 'text',
@@ -45,6 +75,11 @@ export const Users: CollectionConfig = {
      type: 'select',
      hasMany: true,
      saveToJWT: true,
      access: {
        read: admins,
        update: admins,
        create: admins,
      },
      hooks: {
        beforeChange: [protectRoles],
      },
19 package.json
@@ -1,6 +1,6 @@
{
  "name": "payload-monorepo",
  "version": "3.47.0",
  "version": "3.51.0",
  "private": true,
  "type": "module",
  "workspaces": [
@@ -76,8 +76,6 @@
  "dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
  "dev:vercel-postgres": "cross-env PAYLOAD_DATABASE=vercel-postgres pnpm runts ./test/dev.ts",
  "devsafe": "node ./scripts/delete-recursively.js '**/.next' && pnpm dev",
  "docker:postgres": "docker compose -f test/docker-compose.yml up -d postgres",
  "docker:postgres:stop": "docker compose -f test/docker-compose.yml down postgres",
  "docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
  "docker:start": "docker compose -f test/docker-compose.yml up -d",
  "docker:stop": "docker compose -f test/docker-compose.yml down",
@@ -114,6 +112,7 @@
  "test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod",
  "test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo",
  "test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
  "test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
  "test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
  "test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
  "test:types": "tstyche",
@@ -133,12 +132,12 @@
  "devDependencies": {
    "@jest/globals": "29.7.0",
    "@libsql/client": "0.14.0",
    "@next/bundle-analyzer": "15.3.2",
    "@next/bundle-analyzer": "15.4.4",
    "@payloadcms/db-postgres": "workspace:*",
    "@payloadcms/eslint-config": "workspace:*",
    "@payloadcms/eslint-plugin": "workspace:*",
    "@payloadcms/live-preview-react": "workspace:*",
    "@playwright/test": "1.50.0",
    "@playwright/test": "1.54.1",
    "@sentry/nextjs": "^8.33.1",
    "@sentry/node": "^8.33.1",
    "@swc-node/register": "1.10.10",
@@ -148,8 +147,8 @@
    "@types/jest": "29.5.12",
    "@types/minimist": "1.2.5",
    "@types/node": "22.15.30",
    "@types/react": "19.1.0",
    "@types/react-dom": "19.1.2",
    "@types/react": "19.1.8",
    "@types/react-dom": "19.1.6",
    "@types/shelljs": "0.8.15",
    "chalk": "^4.1.2",
    "comment-json": "^4.2.3",
@@ -169,12 +168,12 @@
    "lint-staged": "15.2.7",
    "minimist": "1.2.8",
    "mongodb-memory-server": "10.1.4",
    "next": "15.3.2",
    "next": "15.4.4",
    "open": "^10.1.0",
    "p-limit": "^5.0.0",
    "pg": "8.16.3",
    "playwright": "1.50.0",
    "playwright-core": "1.50.0",
    "playwright": "1.54.1",
    "playwright-core": "1.54.1",
    "prettier": "3.5.3",
    "react": "19.1.0",
    "react-dom": "19.1.0",
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/admin-bar",
|
||||
"version": "3.47.0",
|
||||
"version": "3.51.0",
|
||||
"description": "An admin bar for React apps using Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -42,8 +42,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "name": "create-payload-app",
  "version": "3.47.0",
  "version": "3.51.0",
  "homepage": "https://payloadcms.com",
  "repository": {
    "type": "git",
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-mongodb",
  "version": "3.47.0",
  "version": "3.51.0",
  "description": "The officially supported MongoDB database adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -36,6 +36,25 @@ export const connect: Connect = async function connect(

  try {
    this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
    if (this.useAlternativeDropDatabase) {
      if (this.connection.db) {
        // Firestore doesn't support dropDatabase, so we monkey patch
        // dropDatabase to delete all documents from all collections instead
        this.connection.db.dropDatabase = async function (): Promise<boolean> {
          const existingCollections = await this.listCollections().toArray()
          await Promise.all(
            existingCollections.map(async (collectionInfo) => {
              const collection = this.collection(collectionInfo.name)
              await collection.deleteMany({})
            }),
          )
          return true
        }
        this.connection.dropDatabase = async function () {
          await this.db?.dropDatabase()
        }
      }
    }

    // If we are running a replica set with MongoDB Memory Server,
    // wait until the replica set elects a primary before proceeding
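For orientation, a minimal sketch of what the patched method does at teardown time, assuming an initialized Payload instance backed by this adapter with `useAlternativeDropDatabase: true` (the option itself is documented in `index.ts` further down):

// Hypothetical test cleanup: with useAlternativeDropDatabase enabled,
// this clears every collection via deleteMany({}) rather than issuing
// a real dropDatabase command, which Firestore rejects.
await payload.db.connection.db?.dropDatabase()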
@@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const find: Find = async function find(
@@ -155,6 +156,16 @@ export const find: Find = async function find(
    result = await Model.paginate(query, paginationOptions)
  }

  if (!this.useJoinAggregations) {
    await resolveJoins({
      adapter: this,
      collectionSlug,
      docs: result.docs as Record<string, unknown>[],
      joins,
      locale,
    })
  }

  transform({
    adapter: this,
    data: result.docs,
141 packages/db-mongodb/src/findDistinct.ts Normal file
@@ -0,0 +1,141 @@
import type { PipelineStage } from 'mongoose'

import { type FindDistinct, getFieldByPath } from 'payload'

import type { MongooseAdapter } from './index.js'

import { buildQuery } from './queries/buildQuery.js'
import { buildSortParam } from './queries/buildSortParam.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'

export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) {
  const { collectionConfig, Model } = getCollection({
    adapter: this,
    collectionSlug: args.collection,
  })

  const session = await getSession(this, args.req)

  const { where = {} } = args

  const sortAggregation: PipelineStage[] = []

  const sort = buildSortParam({
    adapter: this,
    config: this.payload.config,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    sort: args.sort ?? args.field,
    sortAggregation,
    timestamps: true,
  })

  const query = await buildQuery({
    adapter: this,
    collectionSlug: args.collection,
    fields: collectionConfig.flattenedFields,
    locale: args.locale,
    where,
  })

  const fieldPathResult = getFieldByPath({
    fields: collectionConfig.flattenedFields,
    path: args.field,
  })
  let fieldPath = args.field
  if (fieldPathResult?.pathHasLocalized && args.locale) {
    fieldPath = fieldPathResult.localizedPath.replace('<locale>', args.locale)
  }

  const page = args.page || 1

  const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key.
  const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1

  const pipeline: PipelineStage[] = [
    {
      $match: query,
    },
    ...(sortAggregation.length > 0 ? sortAggregation : []),
    {
      $group: {
        _id: {
          _field: `$${fieldPath}`,
          ...(sortProperty === fieldPath
            ? {}
            : {
                _sort: `$${sortProperty}`,
              }),
        },
      },
    },
    {
      $sort: {
        [sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection,
      },
    },
  ]

  const getValues = async () => {
    return Model.aggregate(pipeline, { session }).then((res) =>
      res.map((each) => ({
        [args.field]: JSON.parse(JSON.stringify(each._id._field)),
      })),
    )
  }

  if (args.limit) {
    pipeline.push({
      $skip: (page - 1) * args.limit,
    })
    pipeline.push({ $limit: args.limit })
    const totalDocs = await Model.aggregate(
      [
        {
          $match: query,
        },
        {
          $group: {
            _id: `$${fieldPath}`,
          },
        },
        { $count: 'count' },
      ],
      {
        session,
      },
    ).then((res) => res[0]?.count ?? 0)
    const totalPages = Math.ceil(totalDocs / args.limit)
    const hasPrevPage = page > 1
    const hasNextPage = totalPages > page
    const pagingCounter = (page - 1) * args.limit + 1

    return {
      hasNextPage,
      hasPrevPage,
      limit: args.limit,
      nextPage: hasNextPage ? page + 1 : null,
      page,
      pagingCounter,
      prevPage: hasPrevPage ? page - 1 : null,
      totalDocs,
      totalPages,
      values: await getValues(),
    }
  }

  const values = await getValues()

  return {
    hasNextPage: false,
    hasPrevPage: false,
    limit: 0,
    page: 1,
    pagingCounter: 1,
    totalDocs: values.length,
    totalPages: 1,
    values,
  }
}
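A minimal usage sketch of the new adapter method (the `posts` collection and `category` field are hypothetical; the call goes through `payload.db`, which this adapter augments):

// Returns one entry per distinct value, paginated like a find query:
const result = await payload.db.findDistinct({
  collection: 'posts',
  field: 'category',
  limit: 10,
  page: 1,
})
// result.values => [{ category: 'news' }, { category: 'tech' }, ...]
// result.totalDocs counts distinct values, not matching documents.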
@@ -10,6 +10,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const findOne: FindOne = async function findOne(
@@ -67,6 +68,16 @@ export const findOne: FindOne = async function findOne(
    doc = await Model.findOne(query, {}, options)
  }

  if (doc && !this.useJoinAggregations) {
    await resolveJoins({
      adapter: this,
      collectionSlug,
      docs: [doc] as Record<string, unknown>[],
      joins,
      locale,
    })
  }

  if (!doc) {
    return null
  }
@@ -42,6 +42,7 @@ import { deleteOne } from './deleteOne.js'
import { deleteVersions } from './deleteVersions.js'
import { destroy } from './destroy.js'
import { find } from './find.js'
import { findDistinct } from './findDistinct.js'
import { findGlobal } from './findGlobal.js'
import { findGlobalVersions } from './findGlobalVersions.js'
import { findOne } from './findOne.js'
@@ -143,6 +144,29 @@ export interface Args {

  /** The URL to connect to MongoDB or false to start payload and prevent connecting */
  url: false | string

  /**
   * Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command.
   * Payload only uses `dropDatabase` for testing purposes.
   * @default false
   */
  useAlternativeDropDatabase?: boolean
  /**
   * Set to `true` to use `BigInt` for custom ID fields of type `'number'`.
   * Useful for databases that don't support `double` or `int32` IDs.
   * @default false
   */
  useBigIntForNumberIDs?: boolean
  /**
   * Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries.
   * @default true
   */
  useJoinAggregations?: boolean
  /**
   * Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting.
   * @default true
   */
  usePipelineInSortLookup?: boolean
}

export type MongooseAdapter = {
@@ -159,6 +183,10 @@ export type MongooseAdapter = {
    up: (args: MigrateUpArgs) => Promise<void>
  }[]
  sessions: Record<number | string, ClientSession>
  useAlternativeDropDatabase: boolean
  useBigIntForNumberIDs: boolean
  useJoinAggregations: boolean
  usePipelineInSortLookup: boolean
  versions: {
    [slug: string]: CollectionModel
  }
@@ -194,6 +222,10 @@ declare module 'payload' {
    updateVersion: <T extends TypeWithID = TypeWithID>(
      args: { options?: QueryOptions } & UpdateVersionArgs<T>,
    ) => Promise<TypeWithVersion<T>>
    useAlternativeDropDatabase: boolean
    useBigIntForNumberIDs: boolean
    useJoinAggregations: boolean
    usePipelineInSortLookup: boolean
    versions: {
      [slug: string]: CollectionModel
    }
@@ -214,6 +246,10 @@ export function mongooseAdapter({
  prodMigrations,
  transactionOptions = {},
  url,
  useAlternativeDropDatabase = false,
  useBigIntForNumberIDs = false,
  useJoinAggregations = true,
  usePipelineInSortLookup = true,
}: Args): DatabaseAdapterObj {
  function adapter({ payload }: { payload: Payload }) {
    const migrationDir = findMigrationDir(migrationDirArg)
@@ -262,6 +298,7 @@ export function mongooseAdapter({
      destroy,
      disableFallbackSort,
      find,
      findDistinct,
      findGlobal,
      findGlobalVersions,
      findOne,
@@ -279,6 +316,10 @@ export function mongooseAdapter({
      updateOne,
      updateVersion,
      upsert,
      useAlternativeDropDatabase,
      useBigIntForNumberIDs,
      useJoinAggregations,
      usePipelineInSortLookup,
    })
  }

@@ -290,6 +331,8 @@ export function mongooseAdapter({
  }
}

export { compatabilityOptions } from './utilities/compatabilityOptions.js'

/**
 * Attempt to find migrations directory.
 *
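Taken together, a sketch of an adapter configured with the new flags (the connection string is a placeholder; values shown are the documented defaults, flipped where noted):

import { mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  url: process.env.MONGODB_URI ?? false,
  useAlternativeDropDatabase: false, // default; only relevant to test teardown
  useBigIntForNumberIDs: false, // default; BigInt _id for custom number IDs
  useJoinAggregations: false, // flipped: join fields populated via extra find queries
  usePipelineInSortLookup: true, // default
})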
@@ -143,7 +143,12 @@ export const buildSchema = (args: {
  const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id')
  if (idField) {
    fields = {
      _id: idField.type === 'number' ? Number : String,
      _id:
        idField.type === 'number'
          ? payload.db.useBigIntForNumberIDs
            ? mongoose.Schema.Types.BigInt
            : Number
          : String,
    }
    schemaFields = schemaFields.filter(
      (field) => !(fieldAffectsData(field) && field.name === 'id'),
@@ -900,7 +905,11 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payloa
  }

  if (customIDType === 'number') {
    return mongoose.Schema.Types.Number
    if (payload.db.useBigIntForNumberIDs) {
      return mongoose.Schema.Types.BigInt
    } else {
      return mongoose.Schema.Types.Number
    }
  }

  return mongoose.Schema.Types.String
@@ -99,31 +99,57 @@ const relationshipSort = ({
    sortFieldPath = foreignFieldPath.localizedPath.replace('<locale>', locale)
  }

  if (
    !sortAggregation.some((each) => {
      return '$lookup' in each && each.$lookup.as === `__${path}`
    })
  ) {
  const as = `__${relationshipPath.replace(/\./g, '__')}`

  // If we have not already sorted on this relationship yet, we need to add a lookup stage
  if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) {
    let localField = versions ? `version.${relationshipPath}` : relationshipPath

    if (adapter.usePipelineInSortLookup) {
      const flattenedField = `__${localField.replace(/\./g, '__')}_lookup`
      sortAggregation.push({
        $addFields: {
          [flattenedField]: `$${localField}`,
        },
      })
      localField = flattenedField
    }

    sortAggregation.push({
      $lookup: {
        as: `__${path}`,
        as,
        foreignField: '_id',
        from: foreignCollection.Model.collection.name,
        localField: versions ? `version.${relationshipPath}` : relationshipPath,
        pipeline: [
          {
            $project: {
              [sortFieldPath]: true,
        localField,
        ...(!adapter.usePipelineInSortLookup && {
          pipeline: [
            {
              $project: {
                [sortFieldPath]: true,
              },
            },
          },
        ],
          ],
        }),
      },
    })

    sort[`__${path}.${sortFieldPath}`] = sortDirection

    return true
    if (adapter.usePipelineInSortLookup) {
      sortAggregation.push({
        $unset: localField,
      })
    }
  }

  if (!adapter.usePipelineInSortLookup) {
    const lookup = sortAggregation.find(
      (each) => '$lookup' in each && each.$lookup.as === as,
    ) as PipelineStage.Lookup
    const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project
    pipeline.$project[sortFieldPath] = true
  }

  sort[`${as}.${sortFieldPath}`] = sortDirection
  return true
}
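For orientation, a rough sketch of the stages this branch pushes when sorting by a relationship's field with `usePipelineInSortLookup: true` (a hypothetical `posts.author` relation sorted by the related `name` field):

const sortAggregation = [
  // Copy the relationship value into a temporary flattened field
  { $addFields: { __author_lookup: '$author' } },
  {
    $lookup: {
      as: '__author',
      foreignField: '_id',
      from: 'users',
      localField: '__author_lookup',
    },
  },
  // Drop the temporary field once the lookup has consumed it
  { $unset: '__author_lookup' },
]
// ...after which sort['__author.name'] is set to the requested direction.
// With the flag disabled, the $project instead accumulates inside the
// lookup's pipeline, as the branch above shows.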
@@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js'
import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
import { resolveJoins } from './utilities/resolveJoins.js'
import { transform } from './utilities/transform.js'

export const queryDrafts: QueryDrafts = async function queryDrafts(
@@ -158,6 +159,17 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
    result = await Model.paginate(versionQuery, paginationOptions)
  }

  if (!this.useJoinAggregations) {
    await resolveJoins({
      adapter: this,
      collectionSlug,
      docs: result.docs as Record<string, unknown>[],
      joins,
      locale,
      versions: true,
    })
  }

  transform({
    adapter: this,
    data: result.docs,
@@ -1,4 +1,4 @@
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { UpdateOne } from 'payload'

import type { MongooseAdapter } from './index.js'
@@ -50,15 +50,20 @@ export const updateOne: UpdateOne = async function updateOne(

  let result

  transform({ adapter: this, data, fields, operation: 'write' })
  const $inc: Record<string, number> = {}
  let updateData: UpdateQuery<any> = data
  transform({ $inc, adapter: this, data, fields, operation: 'write' })
  if (Object.keys($inc).length) {
    updateData = { $inc, $set: updateData }
  }

  try {
    if (returning === false) {
      await Model.updateOne(query, data, options)
      await Model.updateOne(query, updateData, options)
      transform({ adapter: this, data, fields, operation: 'read' })
      return null
    } else {
      result = await Model.findOneAndUpdate(query, data, options)
      result = await Model.findOneAndUpdate(query, updateData, options)
    }
  } catch (error) {
    handleError({ collection: collectionSlug, error, req })
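The `$inc` entries are collected by `transform` (see the change to `transform.ts` further down), so a caller can request an atomic increment by passing an object value on a number field. A hedged sketch, with a hypothetical `posts` collection and `views` field:

// { views: { $inc: 1 } } is lifted out of the data and becomes a MongoDB
// $inc stage: { $inc: { views: 1 }, $set: { ...remaining data } }
await payload.db.updateOne({
  collection: 'posts',
  data: { views: { $inc: 1 } },
  where: { id: { equals: postId } }, // postId is a placeholder
})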
@@ -76,7 +76,11 @@ export const aggregatePaginate = async ({
      countPromise = Model.estimatedDocumentCount(query)
    } else {
      const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined
      countPromise = Model.countDocuments(query, { collation, hint, session })
      countPromise = Model.countDocuments(query, {
        collation,
        session,
        ...(hint ? { hint } : {}),
      })
    }
  }
@@ -44,6 +44,9 @@ export const buildJoinAggregation = async ({
  projection,
  versions,
}: BuildJoinAggregationArgs): Promise<PipelineStage[] | undefined> => {
  if (!adapter.useJoinAggregations) {
    return
  }
  if (
    (Object.keys(collectionConfig.joins).length === 0 &&
      collectionConfig.polymorphicJoins.length == 0) ||
25 packages/db-mongodb/src/utilities/compatabilityOptions.ts Normal file
@@ -0,0 +1,25 @@
import type { Args } from '../index.js'

/**
 * Each key is a mongo-compatible database and the value
 * is the recommended `mongooseAdapter` settings for compatability.
 */
export const compatabilityOptions = {
  cosmosdb: {
    transactionOptions: false,
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
  documentdb: {
    disableIndexHints: true,
  },
  firestore: {
    disableIndexHints: true,
    ensureIndexes: false,
    transactionOptions: false,
    useAlternativeDropDatabase: true,
    useBigIntForNumberIDs: true,
    useJoinAggregations: false,
    usePipelineInSortLookup: false,
  },
} satisfies Record<string, Partial<Args>>
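These presets are re-exported from the package root (see the `export { compatabilityOptions }` line in the `index.ts` diff above), so they can be spread straight into the adapter config. A sketch for Firestore, with a placeholder connection string:

import { compatabilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  url: process.env.FIRESTORE_URI ?? false,
  // Disables index hints, transactions, join aggregations, etc.
  ...compatabilityOptions.firestore,
})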
@@ -2,6 +2,15 @@ import type { PayloadRequest } from 'payload'

import { ValidationError } from 'payload'

function extractFieldFromMessage(message: string) {
  // eslint-disable-next-line regexp/no-super-linear-backtracking
  const match = message.match(/index:\s*(.*?)_/)
  if (match && match[1]) {
    return match[1] // e.g., returns "email" from "index: email_1"
  }
  return null
}

export const handleError = ({
  collection,
  error,
@@ -18,20 +27,22 @@ export const handleError = ({
  }

  // Handle uniqueness error from MongoDB
  if (
    'code' in error &&
    error.code === 11000 &&
    'keyValue' in error &&
    error.keyValue &&
    typeof error.keyValue === 'object'
  ) {
  if ('code' in error && error.code === 11000) {
    let path: null | string = null

    if ('keyValue' in error && error.keyValue && typeof error.keyValue === 'object') {
      path = Object.keys(error.keyValue)[0] ?? ''
    } else if ('message' in error && typeof error.message === 'string') {
      path = extractFieldFromMessage(error.message)
    }

    throw new ValidationError(
      {
        collection,
        errors: [
          {
            message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique',
            path: Object.keys(error.keyValue)[0] ?? '',
            path: path ?? '',
          },
        ],
        global,
647 packages/db-mongodb/src/utilities/resolveJoins.ts Normal file
@@ -0,0 +1,647 @@
import type { JoinQuery, SanitizedJoins, Where } from 'payload'

import {
  appendVersionToQueryKey,
  buildVersionCollectionFields,
  combineQueries,
  getQueryDraftsSort,
} from 'payload'
import { fieldShouldBeLocalized } from 'payload/shared'

import type { MongooseAdapter } from '../index.js'

import { buildQuery } from '../queries/buildQuery.js'
import { buildSortParam } from '../queries/buildSortParam.js'
import { transform } from './transform.js'

export type ResolveJoinsArgs = {
  /** The MongoDB adapter instance */
  adapter: MongooseAdapter
  /** The slug of the collection being queried */
  collectionSlug: string
  /** Array of documents to resolve joins for */
  docs: Record<string, unknown>[]
  /** Join query specifications (which joins to resolve and how) */
  joins?: JoinQuery
  /** Optional locale for localized queries */
  locale?: string
  /** Optional projection for the join query */
  projection?: Record<string, true>
  /** Whether to resolve versions instead of published documents */
  versions?: boolean
}

/**
 * Resolves join relationships for a collection of documents.
 * This function fetches related documents based on join configurations and
 * attaches them to the original documents with pagination support.
 */
export async function resolveJoins({
  adapter,
  collectionSlug,
  docs,
  joins,
  locale,
  projection,
  versions = false,
}: ResolveJoinsArgs): Promise<void> {
  // Early return if no joins are specified or no documents to process
  if (!joins || docs.length === 0) {
    return
  }

  // Get the collection configuration from the adapter
  const collectionConfig = adapter.payload.collections[collectionSlug]?.config
  if (!collectionConfig) {
    return
  }

  // Build a map of join paths to their configurations for quick lookup
  // This flattens the nested join structure into a single map keyed by join path
  const joinMap: Record<string, { targetCollection: string } & SanitizedJoin> = {}

  // Add regular joins
  for (const [target, joinList] of Object.entries(collectionConfig.joins)) {
    for (const join of joinList) {
      joinMap[join.joinPath] = { ...join, targetCollection: target }
    }
  }

  // Add polymorphic joins
  for (const join of collectionConfig.polymorphicJoins || []) {
    // For polymorphic joins, we use the collections array as the target
    joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string }
  }

  // Process each requested join concurrently
  const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => {
    if (!joinQuery) {
      return null
    }

    // If a projection is provided, and the join path is not in the projection, skip it
    if (projection && !projection[joinPath]) {
      return null
    }

    // Get the join definition from our map
    const joinDef = joinMap[joinPath]
    if (!joinDef) {
      return null
    }

    // Normalize collections to always be an array for unified processing
    const allCollections = Array.isArray(joinDef.field.collection)
      ? joinDef.field.collection
      : [joinDef.field.collection]

    // Use the provided locale or fall back to the default locale for localized fields
    const localizationConfig = adapter.payload.config.localization
    const effectiveLocale =
      locale ||
      (typeof localizationConfig === 'object' &&
        localizationConfig &&
        localizationConfig.defaultLocale)

    // Extract relationTo filter from the where clause to determine which collections to query
    const relationToFilter = extractRelationToFilter(joinQuery.where || {})

    // Determine which collections to query based on relationTo filter
    const collections = relationToFilter
      ? allCollections.filter((col) => relationToFilter.includes(col))
      : allCollections

    // Check if this is a polymorphic collection join (where field.collection is an array)
    const isPolymorphicJoin = Array.isArray(joinDef.field.collection)

    // Apply pagination settings
    const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10
    const page = joinQuery.page ?? 1
    const skip = (page - 1) * limit

    // Process collections concurrently
    const collectionPromises = collections.map(async (joinCollectionSlug) => {
      const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config
      if (!targetConfig) {
        return null
      }

      const useDrafts = versions && Boolean(targetConfig.versions?.drafts)
      let JoinModel
      if (useDrafts) {
        JoinModel = adapter.versions[targetConfig.slug]
      } else {
        JoinModel = adapter.collections[targetConfig.slug]
      }

      if (!JoinModel) {
        return null
      }

      // Extract all parent document IDs to use in the join query
      const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id)))

      // Build the base query
      let whereQuery: null | Record<string, unknown> = null
      whereQuery = isPolymorphicJoin
        ? filterWhereForCollection(
            joinQuery.where || {},
            targetConfig.flattenedFields,
            true, // exclude relationTo for individual collections
          )
        : joinQuery.where || {}

      // Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins
      if (whereQuery === null) {
        return null
      }
      whereQuery = useDrafts
        ? await JoinModel.buildQuery({
            locale,
            payload: adapter.payload,
            where: combineQueries(appendVersionToQueryKey(whereQuery as Where), {
              latest: {
                equals: true,
              },
            }),
          })
        : await buildQuery({
            adapter,
            collectionSlug: joinCollectionSlug,
            fields: targetConfig.flattenedFields,
            locale,
            where: whereQuery as Where,
          })

      // Handle localized paths and version prefixes
      let dbFieldName = joinDef.field.on

      if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) {
        const pathSegments = joinDef.field.on.split('.')
        const transformedSegments: string[] = []
        const fields = useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
          : targetConfig.flattenedFields

        for (let i = 0; i < pathSegments.length; i++) {
          const segment = pathSegments[i]!
          transformedSegments.push(segment)

          // Check if this segment corresponds to a localized field
          const fieldAtSegment = fields.find((f) => f.name === segment)
          if (fieldAtSegment && fieldAtSegment.localized) {
            transformedSegments.push(effectiveLocale)
          }
        }

        dbFieldName = transformedSegments.join('.')
      }

      // Add version prefix for draft queries
      if (useDrafts) {
        dbFieldName = `version.${dbFieldName}`
      }

      // Check if the target field is a polymorphic relationship
      const isPolymorphic = joinDef.targetField
        ? Array.isArray(joinDef.targetField.relationTo)
        : false

      if (isPolymorphic) {
        // For polymorphic relationships, we need to match both relationTo and value
        whereQuery[`${dbFieldName}.relationTo`] = collectionSlug
        whereQuery[`${dbFieldName}.value`] = { $in: parentIDs }
      } else {
        // For regular relationships and polymorphic collection joins
        whereQuery[dbFieldName] = { $in: parentIDs }
      }

      // Build the sort parameters for the query
      const fields = useDrafts
        ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true)
        : targetConfig.flattenedFields

      const sort = buildSortParam({
        adapter,
        config: adapter.payload.config,
        fields,
        locale,
        sort: useDrafts
          ? getQueryDraftsSort({
              collectionConfig: targetConfig,
              sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
            })
          : joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort,
        timestamps: true,
      })

      const projection = buildJoinProjection(dbFieldName, useDrafts, sort)

      const [results, dbCount] = await Promise.all([
        JoinModel.find(whereQuery, projection, {
          sort,
          ...(isPolymorphicJoin ? {} : { limit, skip }),
        }).lean(),
        isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery),
      ])

      const count = isPolymorphicJoin ? results.length : dbCount

      transform({
        adapter,
        data: results,
        fields: useDrafts
          ? buildVersionCollectionFields(adapter.payload.config, targetConfig, false)
          : targetConfig.fields,
        operation: 'read',
      })

      // Return results with collection info for grouping
      return {
        collectionSlug: joinCollectionSlug,
        count,
        dbFieldName,
        results,
        sort,
        useDrafts,
      }
    })

    const collectionResults = await Promise.all(collectionPromises)

    // Group the results by parent ID
    const grouped: Record<
      string,
      {
        docs: Record<string, unknown>[]
        sort: Record<string, string>
      }
    > = {}

    let totalCount = 0
    for (const collectionResult of collectionResults) {
      if (!collectionResult) {
        continue
      }

      const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult

      totalCount += count

      for (const result of results) {
        if (useDrafts) {
          result.id = result.parent
        }

        const parentValues = getByPathWithArrays(result, dbFieldName) as (
          | { relationTo: string; value: number | string }
          | number
          | string
        )[]

        if (parentValues.length === 0) {
          continue
        }

        for (let parentValue of parentValues) {
          if (!parentValue) {
            continue
          }

          if (typeof parentValue === 'object') {
            parentValue = parentValue.value
          }

          const joinData = {
            relationTo: collectionSlug,
            value: result.id,
          }

          const parentKey = parentValue as string
          if (!grouped[parentKey]) {
            grouped[parentKey] = {
              docs: [],
              sort,
            }
          }

          // Always store the ObjectID reference in polymorphic format
          grouped[parentKey].docs.push({
            ...result,
            __joinData: joinData,
          })
        }
      }
    }

    for (const results of Object.values(grouped)) {
      results.docs.sort((a, b) => {
        for (const [fieldName, sortOrder] of Object.entries(results.sort)) {
          const sort = sortOrder === 'asc' ? 1 : -1
          const aValue = a[fieldName] as Date | number | string
          const bValue = b[fieldName] as Date | number | string
          if (aValue < bValue) {
            return -1 * sort
          }
          if (aValue > bValue) {
            return 1 * sort
          }
        }
        return 0
      })
      results.docs = results.docs.map(
        (doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record<string, unknown>,
      )
    }

    // Determine if the join field should be localized
    const localeSuffix =
      fieldShouldBeLocalized({
        field: joinDef.field,
        parentIsLocalized: joinDef.parentIsLocalized,
      }) &&
      adapter.payload.config.localization &&
      effectiveLocale
        ? `.${effectiveLocale}`
        : ''

    // Adjust the join path with locale suffix if needed
    const localizedJoinPath = `${joinPath}${localeSuffix}`

    return {
      grouped,
      isPolymorphicJoin,
      joinQuery,
      limit,
      localizedJoinPath,
      page,
      skip,
      totalCount,
    }
  })

  // Wait for all join operations to complete
  const joinResults = await Promise.all(joinPromises)

  // Process the results and attach them to documents
  for (const joinResult of joinResults) {
    if (!joinResult) {
      continue
    }

    const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } =
      joinResult

    // Attach the joined data to each parent document
    for (const doc of docs) {
      const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string
      const all = grouped[id]?.docs || []

      // Calculate the slice for pagination
      // When limit is 0, it means unlimited - return all results
      const slice = isPolymorphicJoin
        ? limit === 0
          ? all
          : all.slice(skip, skip + limit)
        : // For non-polymorphic joins, we assume that page and limit were applied at the database level
          all

      // Create the join result object with pagination metadata
      const value: Record<string, unknown> = {
        docs: slice,
        hasNextPage: limit === 0 ? false : totalCount > skip + slice.length,
      }

      // Include total count if requested
      if (joinQuery.count) {
        value.totalDocs = totalCount
      }

      // Navigate to the correct nested location in the document and set the join data
      // This handles nested join paths like "user.posts" by creating intermediate objects
      const segments = localizedJoinPath.split('.')
      let ref: Record<string, unknown>
      if (versions) {
        if (!doc.version) {
          doc.version = {}
        }
        ref = doc.version as Record<string, unknown>
      } else {
        ref = doc
      }

      for (let i = 0; i < segments.length - 1; i++) {
        const seg = segments[i]!
        if (!ref[seg]) {
          ref[seg] = {}
        }
        ref = ref[seg] as Record<string, unknown>
      }
      // Set the final join data at the target path
      ref[segments[segments.length - 1]!] = value
    }
  }
}

/**
 * Extracts relationTo filter values from a WHERE clause
 * @param where - The WHERE clause to search
 * @returns Array of collection slugs if relationTo filter found, null otherwise
 */
function extractRelationToFilter(where: Record<string, unknown>): null | string[] {
  if (!where || typeof where !== 'object') {
    return null
  }

  // Check for direct relationTo conditions
  if (where.relationTo && typeof where.relationTo === 'object') {
    const relationTo = where.relationTo as Record<string, unknown>
    if (relationTo.in && Array.isArray(relationTo.in)) {
      return relationTo.in as string[]
    }
    if (relationTo.equals) {
      return [relationTo.equals as string]
    }
  }

  // Check for relationTo in logical operators
  if (where.and && Array.isArray(where.and)) {
    for (const condition of where.and) {
      const result = extractRelationToFilter(condition)
      if (result) {
        return result
      }
    }
  }

  if (where.or && Array.isArray(where.or)) {
    for (const condition of where.or) {
      const result = extractRelationToFilter(condition)
      if (result) {
        return result
      }
    }
  }

  return null
}

/**
 * Filters a WHERE clause to only include fields that exist in the target collection
 * This is needed for polymorphic joins where different collections have different fields
 * @param where - The original WHERE clause
 * @param availableFields - The fields available in the target collection
 * @param excludeRelationTo - Whether to exclude relationTo field (for individual collections)
 * @returns A filtered WHERE clause, or null if the query cannot match this collection
 */
function filterWhereForCollection(
  where: Record<string, unknown>,
  availableFields: Array<{ name: string }>,
  excludeRelationTo: boolean = false,
): null | Record<string, unknown> {
  if (!where || typeof where !== 'object') {
    return where
  }

  const fieldNames = new Set(availableFields.map((f) => f.name))
  // Add special fields that are available in polymorphic relationships
  if (!excludeRelationTo) {
    fieldNames.add('relationTo')
  }

  const filtered: Record<string, unknown> = {}

  for (const [key, value] of Object.entries(where)) {
    if (key === 'and') {
      // Handle AND operator - all conditions must be satisfiable
      if (Array.isArray(value)) {
        const filteredConditions: Record<string, unknown>[] = []

        for (const condition of value) {
          const filteredCondition = filterWhereForCollection(
            condition,
            availableFields,
            excludeRelationTo,
          )

          // If any condition in AND cannot be satisfied, the whole AND fails
          if (filteredCondition === null) {
            return null
          }

          if (Object.keys(filteredCondition).length > 0) {
            filteredConditions.push(filteredCondition)
          }
        }

        if (filteredConditions.length > 0) {
          filtered[key] = filteredConditions
        }
      }
    } else if (key === 'or') {
      // Handle OR operator - at least one condition must be satisfiable
      if (Array.isArray(value)) {
        const filteredConditions = value
          .map((condition) =>
            filterWhereForCollection(condition, availableFields, excludeRelationTo),
          )
          .filter((condition) => condition !== null && Object.keys(condition).length > 0)

        if (filteredConditions.length > 0) {
          filtered[key] = filteredConditions
        }
        // If no OR conditions can be satisfied, we still continue (OR is more permissive)
      }
    } else if (key === 'relationTo' && excludeRelationTo) {
      // Skip relationTo field for non-polymorphic collections
      continue
    } else if (fieldNames.has(key)) {
      // Include the condition if the field exists in this collection
      filtered[key] = value
    } else {
      // Field doesn't exist in this collection - this makes the query unsatisfiable
      return null
    }
  }

  return filtered
}

type SanitizedJoin = SanitizedJoins[string][number]

/**
 * Builds projection for join queries
 */
function buildJoinProjection(
  baseFieldName: string,
  useDrafts: boolean,
  sort: Record<string, string>,
): Record<string, 1> {
  const projection: Record<string, 1> = {
    _id: 1,
    [baseFieldName]: 1,
  }

  if (useDrafts) {
    projection.parent = 1
  }

  for (const fieldName of Object.keys(sort)) {
    projection[fieldName] = 1
  }

  return projection
}

/**
 * Enhanced utility function to safely traverse nested object properties using dot notation
 * Handles arrays by searching through array elements for matching values
 * @param doc - The document to traverse
 * @param path - Dot-separated path (e.g., "array.category")
 * @returns Array of values found at the specified path (for arrays) or single value
 */
function getByPathWithArrays(doc: unknown, path: string): unknown[] {
  const segments = path.split('.')
  let current = doc

  for (let i = 0; i < segments.length; i++) {
    const segment = segments[i]!

    if (current === undefined || current === null) {
      return []
    }

    // Get the value at the current segment
    const value = (current as Record<string, unknown>)[segment]

    if (value === undefined || value === null) {
      return []
    }

    // If this is the last segment, return the value(s)
    if (i === segments.length - 1) {
      return Array.isArray(value) ? value : [value]
    }

    // If the value is an array and we have more segments to traverse
    if (Array.isArray(value)) {
      const remainingPath = segments.slice(i + 1).join('.')
      const results: unknown[] = []

      // Search through each array element
      for (const item of value) {
        if (item && typeof item === 'object') {
          const subResults = getByPathWithArrays(item, remainingPath)
          results.push(...subResults)
        }
      }

      return results
    }

    // Continue traversing
    current = value
  }

  return []
}
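To make the array handling in `getByPathWithArrays` concrete, a small illustration with a hypothetical document shape:

const doc = {
  items: [{ category: 'a' }, { category: 'b' }, { note: 'no category here' }],
}
// getByPathWithArrays(doc, 'items.category') => ['a', 'b']
// Missing or null segments short-circuit to [], so callers can treat the
// result as "all parent values reachable at this path".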
@@ -208,6 +208,7 @@ const sanitizeDate = ({
}

type Args = {
  $inc?: Record<string, number>
  /** instance of the adapter */
  adapter: MongooseAdapter
  /** data to transform, can be an array of documents or a single document */
@@ -396,6 +397,7 @@ const stripFields = ({
}

export const transform = ({
  $inc,
  adapter,
  data,
  fields,
@@ -404,9 +406,13 @@ export const transform = ({
  parentIsLocalized = false,
  validateRelationships = true,
}: Args) => {
  if (!data) {
    return null
  }

  if (Array.isArray(data)) {
    for (const item of data) {
      transform({ adapter, data: item, fields, globalSlug, operation, validateRelationships })
      transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
    }
    return
  }
@@ -424,6 +430,11 @@ export const transform = ({
    data.id = data.id.toHexString()
  }

  // Handle BigInt conversion for custom ID fields of type 'number'
  if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') {
    data.id = Number(data.id)
  }

  if (!adapter.allowAdditionalKeys) {
    stripFields({
      config,
@@ -438,13 +449,27 @@ export const transform = ({
    data.globalType = globalSlug
  }

  const sanitize: TraverseFieldsCallback = ({ field, ref: incomingRef }) => {
  const sanitize: TraverseFieldsCallback = ({ field, parentPath, ref: incomingRef }) => {
    if (!incomingRef || typeof incomingRef !== 'object') {
      return
    }

    const ref = incomingRef as Record<string, unknown>

    if (
      $inc &&
      field.type === 'number' &&
      operation === 'write' &&
      field.name in ref &&
      ref[field.name]
    ) {
      const value = ref[field.name]
      if (value && typeof value === 'object' && '$inc' in value && typeof value.$inc === 'number') {
        $inc[`${parentPath}${field.name}`] = value.$inc
        delete ref[field.name]
      }
    }

    if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
      if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
        const fieldRef = ref[field.name] as Record<string, unknown>
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-postgres",
  "version": "3.47.0",
  "version": "3.51.0",
  "description": "The officially supported Postgres database adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -17,6 +17,7 @@ import {
  deleteVersions,
  destroy,
  find,
  findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,
@@ -120,6 +121,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
      json: true,
    },
    fieldConstraints: {},
    findDistinct,
    generateSchema: createSchemaGenerator({
      columnToCodeConverter,
      corePackageSuffix: 'pg-core',
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-sqlite",
  "version": "3.47.0",
  "version": "3.51.0",
  "description": "The officially supported SQLite database adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js'

export const countDistinct: CountDistinct = async function countDistinct(
  this: SQLiteAdapter,
  { db, joins, tableName, where },
  { column, db, joins, tableName, where },
) {
  // When we don't have any joins - use a simple COUNT(*) query.
  if (joins.length === 0) {
    const countResult = await db
      .select({
        count: count(),
        count: column ? count(sql`DISTINCT ${column}`) : count(),
      })
      .from(this.tables[tableName])
      .where(where)
@@ -25,12 +25,12 @@ export const countDistinct: CountDistinct = async function countDistinct(
      })
      .from(this.tables[tableName])
      .where(where)
      .groupBy(this.tables[tableName].id)
      .groupBy(column ?? this.tables[tableName].id)
      .limit(1)
      .$dynamic()

    joins.forEach(({ condition, table }) => {
      query = query.leftJoin(table, condition)
    joins.forEach(({ type, condition, table }) => {
      query = query[type ?? 'leftJoin'](table, condition)
    })

    // When we have any joins, we need to count each individual ID only once.
@@ -60,6 +60,10 @@ const createConstraint = ({
    formattedOperator = '='
  }

  if (pathSegments.length === 1) {
    return `EXISTS (SELECT 1 FROM json_each("${pathSegments[0]}") AS ${newAlias} WHERE ${newAlias}.value ${formattedOperator} '${formattedValue}')`
  }

  return `EXISTS (
    SELECT 1
    FROM json_each(${alias}.value -> '${pathSegments[0]}') AS ${newAlias}
@@ -68,21 +72,38 @@
}

export const createJSONQuery = ({
  column,
  operator,
  pathSegments,
  rawColumn,
  table,
  treatAsArray,
  treatRootAsArray,
  value,
}: CreateJSONQueryArgs): string => {
  if ((operator === 'in' || operator === 'not_in') && Array.isArray(value)) {
    let sql = ''
    for (const [i, v] of value.entries()) {
      sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, rawColumn, table, treatAsArray, treatRootAsArray, value: v })} ${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
    }
    return sql
  }

  if (treatAsArray?.includes(pathSegments[1]!) && table) {
    return fromArray({
      operator,
      pathSegments,
      table,
      treatAsArray,
      value,
      value: value as CreateConstraintArgs['value'],
    })
  }

  return createConstraint({ alias: table, operator, pathSegments, treatAsArray, value })
  return createConstraint({
    alias: table,
    operator,
    pathSegments,
    treatAsArray,
    value: value as CreateConstraintArgs['value'],
  })
}
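To illustrate the new array branch: `in`/`not_in` values are expanded into a chain of single-value constraints. Roughly, with aliases simplified and a hypothetical field:

// { operator: 'in', value: ['a', 'b'] } produces SQL shaped like:
//   EXISTS (SELECT 1 FROM json_each("field") AS j WHERE j.value = 'a')
//     OR EXISTS (SELECT 1 FROM json_each("field") AS j WHERE j.value = 'b')
// 'not_in' expands the same way with not_equals constraints joined by AND.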
@@ -18,6 +18,7 @@ import {
  deleteVersions,
  destroy,
  find,
  findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,
@@ -101,6 +102,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
      json: true,
    },
    fieldConstraints: {},
    findDistinct,
    generateSchema: createSchemaGenerator({
      columnToCodeConverter,
      corePackageSuffix: 'sqlite-core',
@@ -5,6 +5,7 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm'
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
import type {
  AnySQLiteColumn,
  SQLiteColumn,
  SQLiteInsertOnConflictDoUpdateConfig,
  SQLiteTableWithColumns,
  SQLiteTransactionConfig,
@@ -87,6 +88,7 @@ export type GenericTable = SQLiteTableWithColumns<{
export type GenericRelation = Relations<string, Record<string, Relation<string>>>

export type CountDistinct = (args: {
  column?: SQLiteColumn<any>
  db: LibSQLDatabase
  joins: BuildQueryJoinAliases
  tableName: string
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/db-vercel-postgres",
  "version": "3.47.0",
  "version": "3.51.0",
  "description": "Vercel Postgres adapter for Payload",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -18,6 +18,7 @@ import {
  deleteVersions,
  destroy,
  find,
  findDistinct,
  findGlobal,
  findGlobalVersions,
  findMigrationDir,
@@ -174,6 +175,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
      dropDatabase,
      execute,
      find,
      findDistinct,
      findGlobal,
      findGlobalVersions,
      readReplicaOptions: args.readReplicas,
@@ -1,6 +1,6 @@
{
  "name": "@payloadcms/drizzle",
  "version": "3.47.0",
  "version": "3.51.0",
  "description": "A library of shared functions used by different payload database adapters",
  "homepage": "https://payloadcms.com",
  "repository": {
@@ -6,41 +6,58 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from './types.js'

import { findMany } from './find/findMany.js'
import { buildQuery } from './queries/buildQuery.js'
import { getTransaction } from './utilities/getTransaction.js'

export const deleteMany: DeleteMany = async function deleteMany(
  this: DrizzleAdapter,
  { collection, req, where },
  { collection, req, where: whereArg },
) {
  const db = await getTransaction(this, req)
  const collectionConfig = this.payload.collections[collection].config

  const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))

  const result = await findMany({
  const table = this.tables[tableName]

  const { joins, where } = buildQuery({
    adapter: this,
    fields: collectionConfig.flattenedFields,
    joins: false,
    limit: 0,
    locale: req?.locale,
    page: 1,
    pagination: false,
    req,
    tableName,
    where,
    where: whereArg,
  })

  const ids = []
  let whereToUse = where

  result.docs.forEach((data) => {
    ids.push(data.id)
  })

  if (ids.length > 0) {
    await this.deleteWhere({
      db,
  if (joins?.length) {
    // Difficult to support joins (through where referencing other tables) in deleteMany. => 2 separate queries.
    // We can look into supporting this using one single query (through a subquery) in the future, though that's difficult to do in a generic way.
    const result = await findMany({
      adapter: this,
      fields: collectionConfig.flattenedFields,
      joins: false,
      limit: 0,
      locale: req?.locale,
      page: 1,
      pagination: false,
      req,
      select: {
        id: true,
      },
      tableName,
      where: inArray(this.tables[tableName].id, ids),
      where: whereArg,
    })

    whereToUse = inArray(
      table.id,
      result.docs.map((doc) => doc.id),
    )
  }

  await this.deleteWhere({
    db,
    tableName,
    where: whereToUse,
  })
}
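A sketch of when each path fires (collection and field names are hypothetical; which constraints produce joins depends on the schema):

// No cross-table constraints: a single DELETE with the built WHERE.
await payload.db.deleteMany({
  collection: 'posts',
  where: { status: { equals: 'draft' } },
})

// WHERE reaches into a joined table: matching ids are selected first,
// then the DELETE runs against id IN (...) since it cannot carry joins.
await payload.db.deleteMany({
  collection: 'posts',
  where: { 'author.name': { equals: 'Alice' } },
})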
@@ -44,7 +44,7 @@ export const buildFindManyArgs = ({
  select,
  tableName,
  versions,
}: BuildFindQueryArgs): Record<string, unknown> => {
}: BuildFindQueryArgs): Result => {
  const result: Result = {
    extras: {},
    with: {},
@@ -134,5 +134,12 @@ export const buildFindManyArgs = ({
    result.with._locales = _locales
  }

  // Delete properties that are empty
  for (const key of Object.keys(result)) {
    if (!Object.keys(result[key]).length) {
      delete result[key]
    }
  }

  return result
}
@@ -1,12 +1,14 @@
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
|
||||
import type { SQLiteSelect, SQLiteSelectBase } from 'drizzle-orm/sqlite-core'
|
||||
|
||||
import { and, asc, count, desc, eq, or, sql } from 'drizzle-orm'
|
||||
import { and, asc, count, desc, eq, getTableName, or, sql } from 'drizzle-orm'
|
||||
import {
|
||||
appendVersionToQueryKey,
|
||||
buildVersionCollectionFields,
|
||||
combineQueries,
|
||||
type FlattenedField,
|
||||
+  getFieldByPath,
   getQueryDraftsSort,
   type JoinQuery,
   type SelectMode,
@@ -31,7 +33,7 @@ import {
   resolveBlockTableName,
 } from '../utilities/validateExistingBlockIsIdentical.js'

-const flattenAllWherePaths = (where: Where, paths: string[]) => {
+const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[]) => {
   for (const k in where) {
     if (['AND', 'OR'].includes(k.toUpperCase())) {
       if (Array.isArray(where[k])) {
@@ -41,7 +43,7 @@ const flattenAllWherePaths = (where: Where, paths: string[]) => {
       }
     } else {
       // TODO: explore how to support arrays/relationship querying.
-      paths.push(k.split('.').join('_'))
+      paths.push({ path: k.split('.').join('_'), ref: where })
     }
   }
 }
@@ -59,7 +61,11 @@ const buildSQLWhere = (where: Where, alias: string) => {
       }
     } else {
       const payloadOperator = Object.keys(where[k])[0]

       const value = where[k][payloadOperator]
+      if (payloadOperator === '$raw') {
+        return sql.raw(value)
+      }
+
       return operatorMap[payloadOperator](sql.raw(`"${alias}"."${k.split('.').join('_')}"`), value)
     }
@@ -472,7 +478,7 @@ export const traverseFields = ({

       const sortPath = sanitizedSort.split('.').join('_')

-      const wherePaths: string[] = []
+      const wherePaths: { path: string; ref: any }[] = []

       if (where) {
         flattenAllWherePaths(where, wherePaths)
@@ -492,9 +498,50 @@ export const traverseFields = ({
         sortPath: sql`${sortColumn ? sortColumn : null}`.as('sortPath'),
       }

+      const collectionQueryWhere: any[] = []
       // Select for WHERE and Fallback NULL
-      for (const path of wherePaths) {
-        if (adapter.tables[joinCollectionTableName][path]) {
+      for (const { path, ref } of wherePaths) {
+        const collectionConfig = adapter.payload.collections[collection].config
+        const field = getFieldByPath({ fields: collectionConfig.flattenedFields, path })
+
+        if (field && field.field.type === 'select' && field.field.hasMany) {
+          let tableName = adapter.tableNameMap.get(
+            `${toSnakeCase(collection)}_${toSnakeCase(path)}`,
+          )
+          let parentTable = getTableName(table)
+
+          if (adapter.schemaName) {
+            tableName = `"${adapter.schemaName}"."${tableName}"`
+            parentTable = `"${adapter.schemaName}"."${parentTable}"`
+          }
+
+          if (adapter.name === 'postgres') {
+            selectFields[path] = sql
+              .raw(
+                `(select jsonb_agg(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
+              )
+              .as(path)
+          } else {
+            selectFields[path] = sql
+              .raw(
+                `(select json_group_array(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
+              )
+              .as(path)
+          }
+
+          const constraint = ref[path]
+          const operator = Object.keys(constraint)[0]
+          const value: any = Object.values(constraint)[0]
+
+          const query = adapter.createJSONQuery({
+            column: `"${path}"`,
+            operator,
+            pathSegments: [field.field.name],
+            table: parentTable,
+            value,
+          })
+          ref[path] = { $raw: query }
+        } else if (adapter.tables[joinCollectionTableName][path]) {
           selectFields[path] = sql`${adapter.tables[joinCollectionTableName][path]}`.as(path)
           // Allow to filter by collectionSlug
         } else if (path !== 'relationTo') {
@@ -502,7 +549,10 @@ export const traverseFields = ({
         }
       }

-      const query = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
+      let query: any = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
+      if (collectionQueryWhere.length) {
+        query = query.where(and(...collectionQueryWhere))
+      }
       if (currentQuery === null) {
         currentQuery = query as unknown as SQLSelect
       } else {
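For context on the hunks above: when a join's where clause targets a hasMany select field, the adapter now projects the child value table into a JSON column (jsonb_agg on Postgres, json_group_array on SQLite) and, through the ref back-pointer collected by flattenAllWherePaths, rewrites the original constraint in place into a $raw JSON query that buildSQLWhere emits verbatim. A minimal TypeScript sketch (collection and field names hypothetical):

// Incoming join where clause on a hasMany select field:
const where = { tags: { in: ['featured'] } }

// After the rewrite above, the constraint object has been mutated in place to roughly:
// where.tags === { $raw: `jsonb_path_exists("tags", '$[*] ? (@ == "featured")')` }
// buildSQLWhere then short-circuits on '$raw' and injects the string via sql.raw().
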
108
packages/drizzle/src/findDistinct.ts
Normal file
@@ -0,0 +1,108 @@
+import type { FindDistinct, SanitizedCollectionConfig } from 'payload'
+
+import toSnakeCase from 'to-snake-case'
+
+import type { DrizzleAdapter, GenericColumn } from './types.js'
+
+import { buildQuery } from './queries/buildQuery.js'
+import { selectDistinct } from './queries/selectDistinct.js'
+import { getTransaction } from './utilities/getTransaction.js'
+import { DistinctSymbol } from './utilities/rawConstraint.js'
+
+export const findDistinct: FindDistinct = async function (this: DrizzleAdapter, args) {
+  const db = await getTransaction(this, args.req)
+  const collectionConfig: SanitizedCollectionConfig =
+    this.payload.collections[args.collection].config
+  const page = args.page || 1
+  const offset = args.limit ? (page - 1) * args.limit : undefined
+  const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))
+
+  const { joins, orderBy, selectFields, where } = buildQuery({
+    adapter: this,
+    fields: collectionConfig.flattenedFields,
+    locale: args.locale,
+    sort: args.sort ?? args.field,
+    tableName,
+    where: {
+      and: [
+        args.where ?? {},
+        {
+          [args.field]: {
+            equals: DistinctSymbol,
+          },
+        },
+      ],
+    },
+  })
+
+  orderBy.pop()
+
+  const selectDistinctResult = await selectDistinct({
+    adapter: this,
+    db,
+    forceRun: true,
+    joins,
+    query: ({ query }) => {
+      query = query.orderBy(() => orderBy.map(({ column, order }) => order(column)))
+
+      if (args.limit) {
+        if (offset) {
+          query = query.offset(offset)
+        }
+
+        query = query.limit(args.limit)
+      }
+
+      return query
+    },
+    selectFields: {
+      _selected: selectFields['_selected'],
+      ...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }),
+    } as Record<string, GenericColumn>,
+    tableName,
+    where,
+  })
+
+  const values = selectDistinctResult.map((each) => ({
+    [args.field]: (each as Record<string, any>)._selected,
+  }))
+
+  if (args.limit) {
+    const totalDocs = await this.countDistinct({
+      column: selectFields['_selected'],
+      db,
+      joins,
+      tableName,
+      where,
+    })
+
+    const totalPages = Math.ceil(totalDocs / args.limit)
+    const hasPrevPage = page > 1
+    const hasNextPage = totalPages > page
+    const pagingCounter = (page - 1) * args.limit + 1
+
+    return {
+      hasNextPage,
+      hasPrevPage,
+      limit: args.limit,
+      nextPage: hasNextPage ? page + 1 : null,
+      page,
+      pagingCounter,
+      prevPage: hasPrevPage ? page - 1 : null,
+      totalDocs,
+      totalPages,
+      values,
+    }
+  }
+
+  return {
+    hasNextPage: false,
+    hasPrevPage: false,
+    limit: 0,
+    page: 1,
+    pagingCounter: 1,
+    totalDocs: values.length,
+    totalPages: 1,
+    values,
+  }
+}
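For reference, a usage sketch of the new adapter method (assuming it is exposed as payload.db.findDistinct; 'posts' and 'category' are hypothetical collection/field names):

declare const payload: { db: any } // hypothetical; stands in for an initialized Payload instance

const result = await payload.db.findDistinct({
  collection: 'posts',
  field: 'category',
  limit: 10,
  page: 2,
  where: { status: { equals: 'published' } },
})

// With limit set, the result mirrors find() pagination:
// result.values        -> [{ category: 'news' }, { category: 'sports' }, ...]
// result.totalDocs     -> count of DISTINCT category values matching the where
// result.pagingCounter -> (page - 1) * limit + 1 = 11 for page 2 of 10
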
@@ -12,6 +12,7 @@ export { deleteVersions } from './deleteVersions.js'
 export { destroy } from './destroy.js'
 export { find } from './find.js'
 export { chainMethods } from './find/chainMethods.js'
+export { findDistinct } from './findDistinct.js'
 export { findGlobal } from './findGlobal.js'
 export { findGlobalVersions } from './findGlobalVersions.js'
 export { findMigrationDir } from './findMigrationDir.js'
@@ -6,13 +6,13 @@ import type { BasePostgresAdapter, CountDistinct } from './types.js'

 export const countDistinct: CountDistinct = async function countDistinct(
   this: BasePostgresAdapter,
-  { db, joins, tableName, where },
+  { column, db, joins, tableName, where },
 ) {
   // When we don't have any joins - use a simple COUNT(*) query.
   if (joins.length === 0) {
     const countResult = await db
       .select({
-        count: count(),
+        count: column ? count(sql`DISTINCT ${column}`) : count(),
       })
       .from(this.tables[tableName])
       .where(where)
@@ -26,12 +26,12 @@ export const countDistinct: CountDistinct = async function countDistinct(
       })
       .from(this.tables[tableName])
       .where(where)
-      .groupBy(this.tables[tableName].id)
+      .groupBy(column || this.tables[tableName].id)
       .limit(1)
       .$dynamic()

-    joins.forEach(({ condition, table }) => {
-      query = query.leftJoin(table as PgTableWithColumns<any>, condition)
+    joins.forEach(({ type, condition, table }) => {
+      query = query[type ?? 'leftJoin'](table as PgTableWithColumns<any>, condition)
     })

     // When we have any joins, we need to count each individual ID only once.
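The optional column turns the no-join branch into a COUNT(DISTINCT ...) aggregate, while the join branch swaps its GROUP BY id for the distinct column. A sketch of the simple shape (the table definition is hypothetical, standing in for this.tables[tableName]):

import { count, sql } from 'drizzle-orm'
import { integer, pgTable, text } from 'drizzle-orm/pg-core'

const posts = pgTable('posts', {
  id: integer('id').primaryKey(),
  category: text('category'),
})

// Emits: SELECT count(DISTINCT "category") FROM "posts" WHERE ...
const distinctCount = count(sql`DISTINCT ${posts.category}`)
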
@@ -28,6 +28,8 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: CreateJSONQueryArgs
     })
     .join('.')

+  const fullPath = pathSegments.length === 1 ? '$[*]' : `$.${jsonPaths}`
+
   let sql = ''

   if (['in', 'not_in'].includes(operator) && Array.isArray(value)) {
@@ -35,13 +37,13 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: CreateJSONQueryArgs
       sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, value: item })}${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
     })
   } else if (operator === 'exists') {
-    sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '$.${jsonPaths}')`
+    sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '${fullPath}')`
   } else if (['not_like'].includes(operator)) {
     const mappedOperator = operatorMap[operator]

-    sql = `NOT jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
+    sql = `NOT jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
   } else {
-    sql = `jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
+    sql = `jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
   }

   return sql
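The new fullPath matters for hasMany values stored as a root-level JSON array: with a single path segment, the generated JSONPath now targets the array elements instead of an object key. Illustration only (the '==' mapping for equals is an assumption about operatorMap):

const args = { column: '"tags"', operator: 'equals', pathSegments: ['tags'], value: 'featured' }
// before: jsonb_path_exists("tags", '$.tags ? (@ == "featured")')  -- cannot match a root array
// after:  jsonb_path_exists("tags", '$[*] ? (@ == "featured")')    -- matches any element
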
@@ -20,6 +20,7 @@ import type {
   UniqueConstraintBuilder,
 } from 'drizzle-orm/pg-core'
 import type { PgTableFn } from 'drizzle-orm/pg-core/table'
+import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'
 import type { Payload, PayloadRequest } from 'payload'
 import type { ClientConfig, QueryResult } from 'pg'

@@ -64,6 +65,7 @@ export type GenericRelation = Relations<string, Record<string, Relation<string>>
 export type PostgresDB = NodePgDatabase<Record<string, unknown>>

 export type CountDistinct = (args: {
+  column?: PgColumn<any> | SQLiteColumn<any>
   db: PostgresDB | TransactionPg
   joins: BuildQueryJoinAliases
   tableName: string
@@ -10,6 +10,7 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
 import type { BuildQueryJoinAliases } from './buildQuery.js'

 import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
+import { DistinctSymbol } from '../utilities/rawConstraint.js'
 import { buildAndOrConditions } from './buildAndOrConditions.js'
 import { getTableColumnFromPath } from './getTableColumnFromPath.js'
 import { sanitizeQueryValue } from './sanitizeQueryValue.js'
@@ -108,6 +109,17 @@ export function parseParams({
               value: val,
             })

+            const resolvedColumn =
+              rawColumn ||
+              (aliasTable && tableName === getNameFromDrizzleTable(table)
+                ? aliasTable[columnName]
+                : table[columnName])
+
+            if (val === DistinctSymbol) {
+              selectFields['_selected'] = resolvedColumn
+              break
+            }
+
             queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
               if (typeof value === 'string' && value.indexOf('%') > -1) {
                 constraints.push(adapter.operators.like(constraintTable[col], value))
@@ -207,7 +219,10 @@ export function parseParams({

             if (
               operator === 'like' &&
-              (field.type === 'number' || table[columnName].columnType === 'PgUUID')
+              (field.type === 'number' ||
+                field.type === 'relationship' ||
+                field.type === 'upload' ||
+                table[columnName].columnType === 'PgUUID')
             ) {
               operator = 'equals'
             }
@@ -281,12 +296,6 @@ export function parseParams({
               break
             }

-            const resolvedColumn =
-              rawColumn ||
-              (aliasTable && tableName === getNameFromDrizzleTable(table)
-                ? aliasTable[columnName]
-                : table[columnName])
-
             if (queryOperator === 'not_equals' && queryValue !== null) {
               constraints.push(
                 or(
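The DistinctSymbol branch is the counterpart of findDistinct above: the sentinel rides in through an ordinary equals constraint, and parseParams captures the resolved column instead of emitting SQL. A sketch:

const DistinctSymbol = Symbol('DistinctSymbol') // mirrors utilities/rawConstraint.ts

// What findDistinct hands to buildQuery ('category' is a hypothetical field):
const where = {
  and: [{ status: { equals: 'published' } }, { category: { equals: DistinctSymbol } }],
}
// On val === DistinctSymbol, parseParams stores the column as selectFields['_selected']
// and breaks, so the sentinel itself never reaches the generated SQL.
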
@@ -112,9 +112,14 @@ export const sanitizeQueryValue = ({

   if (field.type === 'date' && operator !== 'exists') {
     if (typeof val === 'string') {
-      formattedValue = new Date(val).toISOString()
-      if (Number.isNaN(Date.parse(formattedValue))) {
-        return { operator, value: undefined }
+      if (val === 'null' || val === '') {
+        formattedValue = null
+      } else {
+        const date = new Date(val)
+        if (Number.isNaN(date.getTime())) {
+          return { operator, value: undefined }
+        }
+        formattedValue = date.toISOString()
       }
     } else if (typeof val === 'number') {
       formattedValue = new Date(val).toISOString()
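The old branch called new Date(val).toISOString() before validating, which throws a RangeError for unparseable strings; the rewrite validates first and maps 'null' and '' to a SQL NULL comparison. Behavior sketch:

Number.isNaN(new Date('not-a-date').getTime()) // true -> returns { operator, value: undefined }
// val === 'null' || val === ''                -> formattedValue = null (compares against NULL)
new Date(1700000000000).toISOString() // numeric timestamps unchanged: '2023-11-14T22:13:20.000Z'
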
@@ -14,6 +14,7 @@ import type { BuildQueryJoinAliases } from './buildQuery.js'
 type Args = {
   adapter: DrizzleAdapter
   db: DrizzleAdapter['drizzle'] | DrizzleTransaction
+  forceRun?: boolean
   joins: BuildQueryJoinAliases
   query?: (args: { query: SQLiteSelect }) => SQLiteSelect
   selectFields: Record<string, GenericColumn>
@@ -27,13 +28,14 @@ type Args = {
 export const selectDistinct = ({
   adapter,
   db,
+  forceRun,
   joins,
   query: queryModifier = ({ query }) => query,
   selectFields,
   tableName,
   where,
 }: Args): QueryPromise<{ id: number | string }[] & Record<string, GenericColumn>> => {
-  if (Object.keys(joins).length > 0) {
+  if (forceRun || Object.keys(joins).length > 0) {
     let query: SQLiteSelect
     const table = adapter.tables[tableName]

@@ -54,8 +56,8 @@ export const selectDistinct = ({
       query = query.where(where)
     }

-    joins.forEach(({ condition, table }) => {
-      query = query.leftJoin(table, condition)
+    joins.forEach(({ type, condition, table }) => {
+      query = query[type ?? 'leftJoin'](table, condition)
     })

     return queryModifier({
@@ -8,6 +8,7 @@ import { traverseFields } from './traverseFields.js'
 type Args = {
   adapter: DrizzleAdapter
   data: Record<string, unknown>
+  enableAtomicWrites?: boolean
   fields: FlattenedField[]
   parentIsLocalized?: boolean
   path?: string
@@ -17,6 +18,7 @@ type Args = {
 export const transformForWrite = ({
   adapter,
   data,
+  enableAtomicWrites,
   fields,
   parentIsLocalized,
   path = '',
@@ -48,6 +50,7 @@ export const transformForWrite = ({
     blocksToDelete: rowToInsert.blocksToDelete,
     columnPrefix: '',
     data,
+    enableAtomicWrites,
     fieldPrefix: '',
     fields,
     locales: rowToInsert.locales,
@@ -1,6 +1,5 @@
-import type { FlattenedField } from 'payload'
-
 import { sql } from 'drizzle-orm'
+import { APIError, type FlattenedField } from 'payload'
 import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
 import toSnakeCase from 'to-snake-case'

@@ -41,6 +40,7 @@ type Args = {
    */
   columnPrefix: string
   data: Record<string, unknown>
+  enableAtomicWrites?: boolean
   existingLocales?: Record<string, unknown>[]
   /**
    * A prefix that will retain camel-case formatting, representing prior fields
@@ -87,6 +87,7 @@ export const traverseFields = ({
   blocksToDelete,
   columnPrefix,
   data,
+  enableAtomicWrites,
   existingLocales,
   fieldPrefix,
   fields,
@@ -268,6 +269,7 @@ export const traverseFields = ({
           blocksToDelete,
           columnPrefix: `${columnName}_`,
           data: localeData as Record<string, unknown>,
+          enableAtomicWrites,
          existingLocales,
          fieldPrefix: `${fieldName}_`,
          fields: field.flattenedFields,
@@ -553,6 +555,22 @@ export const traverseFields = ({
       formattedValue = JSON.stringify(value)
     }

+    if (
+      field.type === 'number' &&
+      value &&
+      typeof value === 'object' &&
+      '$inc' in value &&
+      typeof value.$inc === 'number'
+    ) {
+      if (!enableAtomicWrites) {
+        throw new APIError(
+          'The passed data must not contain any nested fields for atomic writes',
+        )
+      }
+
+      formattedValue = sql.raw(`${columnName} + ${value.$inc}`)
+    }
+
     if (field.type === 'date') {
       if (typeof value === 'number' && !Number.isNaN(value)) {
         formattedValue = new Date(value).toISOString()
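The $inc branch enables atomic increments: instead of writing a literal value, the column is set from its own current value in SQL, avoiding a read-modify-write race. A minimal sketch (field and table names hypothetical):

const data = { clicks: { $inc: 1 } }
// transformForWrite (with enableAtomicWrites) produces roughly: row.clicks = sql.raw('clicks + 1')
// which renders as: UPDATE "pages" SET "clicks" = clicks + 1 WHERE "id" = $1
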
@@ -89,6 +89,7 @@ export type TransactionPg = PgTransaction<
 export type DrizzleTransaction = TransactionPg | TransactionSQLite

 export type CountDistinct = (args: {
+  column?: PgColumn<any> | SQLiteColumn<any>
   db: DrizzleTransaction | LibSQLDatabase | PostgresDB
   joins: BuildQueryJoinAliases
   tableName: string
@@ -160,10 +161,11 @@ export type CreateJSONQueryArgs = {
   column?: Column | string
   operator: string
   pathSegments: string[]
   rawColumn?: SQL<unknown>
+  table?: string
   treatAsArray?: string[]
   treatRootAsArray?: boolean
-  value: boolean | number | string
+  value: boolean | number | number[] | string | string[]
 }

 /**
@@ -6,6 +6,7 @@ import type { DrizzleAdapter } from './types.js'

 import { findMany } from './find/findMany.js'
 import { upsertRow } from './upsertRow/index.js'
+import { shouldUseOptimizedUpsertRow } from './upsertRow/shouldUseOptimizedUpsertRow.js'
 import { getTransaction } from './utilities/getTransaction.js'

 export const updateJobs: UpdateJobs = async function updateMany(
@@ -23,6 +24,27 @@ export const updateJobs: UpdateJobs = async function updateMany(
   const tableName = this.tableNameMap.get(toSnakeCase(collection.slug))
   const sort = sortArg !== undefined && sortArg !== null ? sortArg : collection.defaultSort

+  const useOptimizedUpsertRow = shouldUseOptimizedUpsertRow({
+    data,
+    fields: collection.flattenedFields,
+  })
+
+  if (useOptimizedUpsertRow && id) {
+    const result = await upsertRow({
+      id,
+      adapter: this,
+      data,
+      db,
+      fields: collection.flattenedFields,
+      ignoreResult: returning === false,
+      operation: 'update',
+      req,
+      tableName,
+    })
+
+    return returning === false ? null : [result]
+  }
+
   const jobs = await findMany({
     adapter: this,
     collectionSlug: 'payload-jobs',
@@ -42,10 +64,12 @@ export const updateJobs: UpdateJobs = async function updateMany(

   // TODO: We need to batch this to reduce the amount of db calls. This can get very slow if we are updating a lot of rows.
   for (const job of jobs.docs) {
-    const updateData = {
-      ...job,
-      ...data,
-    }
+    const updateData = useOptimizedUpsertRow
+      ? data
+      : {
+          ...job,
+          ...data,
+        }

     const result = await upsertRow({
       id: job.id,
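Effect of the change: when an id is given and the patch only touches simple top-level columns, updateJobs now issues a single UPDATE instead of reading every matching job first. A hedged sketch (the method exposure and field names are assumptions):

declare const payload: { db: any } // hypothetical initialized Payload instance
declare const jobId: string
declare const req: any

await payload.db.updateJobs({
  id: jobId,
  data: { processing: true }, // simple top-level columns only -> optimized single UPDATE
  req,
  returning: false, // pairs with ignoreResult: the row is never read back
})
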
@@ -1,67 +1,15 @@
-import type { LibSQLDatabase } from 'drizzle-orm/libsql'
-import type { FlattenedField, UpdateOne } from 'payload'
+import type { UpdateOne } from 'payload'

 import { eq } from 'drizzle-orm'
 import toSnakeCase from 'to-snake-case'

 import type { DrizzleAdapter } from './types.js'

-import { buildFindManyArgs } from './find/buildFindManyArgs.js'
 import { buildQuery } from './queries/buildQuery.js'
 import { selectDistinct } from './queries/selectDistinct.js'
-import { transform } from './transform/read/index.js'
-import { transformForWrite } from './transform/write/index.js'
 import { upsertRow } from './upsertRow/index.js'
 import { getTransaction } from './utilities/getTransaction.js'

-/**
- * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call.
- * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
- */
-const shouldUseUpsertRow = ({
-  data,
-  fields,
-}: {
-  data: Record<string, unknown>
-  fields: FlattenedField[]
-}) => {
-  for (const key in data) {
-    const value = data[key]
-    const field = fields.find((each) => each.name === key)
-
-    if (!field) {
-      continue
-    }
-
-    if (
-      field.type === 'array' ||
-      field.type === 'blocks' ||
-      ((field.type === 'text' ||
-        field.type === 'relationship' ||
-        field.type === 'upload' ||
-        field.type === 'select' ||
-        field.type === 'number') &&
-        field.hasMany) ||
-      ((field.type === 'relationship' || field.type === 'upload') &&
-        Array.isArray(field.relationTo)) ||
-      field.localized
-    ) {
-      return true
-    }
-
-    if (
-      (field.type === 'group' || field.type === 'tab') &&
-      value &&
-      typeof value === 'object' &&
-      shouldUseUpsertRow({ data: value as Record<string, unknown>, fields: field.flattenedFields })
-    ) {
-      return true
-    }
-  }
-
-  return false
-}
-
 export const updateOne: UpdateOne = async function updateOne(
   this: DrizzleAdapter,
   {
@@ -126,71 +74,23 @@ export const updateOne: UpdateOne = async function updateOne(
     return null
   }

-  if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) {
-    const result = await upsertRow({
-      id: idToUpdate,
-      adapter: this,
-      data,
-      db,
-      fields: collection.flattenedFields,
-      ignoreResult: returning === false,
-      joinQuery,
-      operation: 'update',
-      req,
-      select,
-      tableName,
-    })
-
-    if (returning === false) {
-      return null
-    }
-
-    return result
-  }
-
-  const { row } = transformForWrite({
-    adapter: this,
-    data,
-    fields: collection.flattenedFields,
-    tableName,
-  })
-
-  const drizzle = db as LibSQLDatabase
-  await drizzle
-    .update(this.tables[tableName])
-    .set(row)
-    // TODO: we can skip fetching idToUpdate here with using the incoming where
-    .where(eq(this.tables[tableName].id, idToUpdate))
-
+  const result = await upsertRow({
+    id: idToUpdate,
+    adapter: this,
+    data,
+    db,
+    fields: collection.flattenedFields,
+    ignoreResult: returning === false,
+    joinQuery,
+    operation: 'update',
+    req,
+    select,
+    tableName,
+  })
+
   if (returning === false) {
     return null
   }

-  const findManyArgs = buildFindManyArgs({
-    adapter: this,
-    depth: 0,
-    fields: collection.flattenedFields,
-    joinQuery: false,
-    select,
-    tableName,
-  })
-
-  findManyArgs.where = eq(this.tables[tableName].id, idToUpdate)
-
-  const doc = await db.query[tableName].findFirst(findManyArgs)
-
-  // //////////////////////////////////
-  // TRANSFORM DATA
-  // //////////////////////////////////
-
-  const result = transform({
-    adapter: this,
-    config: this.payload.config,
-    data: doc,
-    fields: collection.flattenedFields,
-    joinQuery: false,
-    tableName,
-  })
-
   return result
 }
@@ -1,3 +1,5 @@
+import type { LibSQLDatabase } from 'drizzle-orm/libsql'
+import type { SelectedFields } from 'drizzle-orm/sqlite-core'
 import type { TypeWithID } from 'payload'

 import { eq } from 'drizzle-orm'
@@ -12,13 +14,14 @@ import { transformForWrite } from '../transform/write/index.js'
 import { deleteExistingArrayRows } from './deleteExistingArrayRows.js'
 import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js'
 import { insertArrays } from './insertArrays.js'
+import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'

 /**
  * If `id` is provided, it will update the row with that ID.
  * If `where` is provided, it will update the row that matches the `where`
  * If neither `id` nor `where` is provided, it will create a new row.
  *
- * This function replaces the entire row and does not support partial updates.
+ * adapter function replaces the entire row and does not support partial updates.
  */
 export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>({
   id,
@@ -39,19 +42,99 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
   upsertTarget,
   where,
 }: Args): Promise<T> => {
+  let insertedRow: Record<string, unknown> = { id }
+  if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
+    const { row } = transformForWrite({
+      adapter,
+      data,
+      enableAtomicWrites: true,
+      fields,
+      tableName,
+    })
+
+    const drizzle = db as LibSQLDatabase
+
+    if (ignoreResult) {
+      await drizzle
+        .update(adapter.tables[tableName])
+        .set(row)
+        .where(eq(adapter.tables[tableName].id, id))
+      return ignoreResult === 'idOnly' ? ({ id } as T) : null
+    }
+
+    const findManyArgs = buildFindManyArgs({
+      adapter,
+      depth: 0,
+      fields,
+      joinQuery: false,
+      select,
+      tableName,
+    })
+
+    const findManyKeysLength = Object.keys(findManyArgs).length
+    const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0
+
+    if (findManyKeysLength === 0 || hasOnlyColumns) {
+      // Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections
+      // without complex fields that live in separate tables like blocks, arrays, relationships, etc.
+
+      const selectedFields: SelectedFields = {}
+      if (hasOnlyColumns) {
+        for (const [column, enabled] of Object.entries(findManyArgs.columns)) {
+          if (enabled) {
+            selectedFields[column] = adapter.tables[tableName][column]
+          }
+        }
+      }
+
+      const docs = await drizzle
+        .update(adapter.tables[tableName])
+        .set(row)
+        .where(eq(adapter.tables[tableName].id, id))
+        .returning(Object.keys(selectedFields).length ? selectedFields : undefined)
+
+      return transform<T>({
+        adapter,
+        config: adapter.payload.config,
+        data: docs[0],
+        fields,
+        joinQuery: false,
+        tableName,
+      })
+    }
+
+    // DB Update that needs the result, potentially with joins => need to update first, then find. returning() does not work with joins.
+
+    await drizzle
+      .update(adapter.tables[tableName])
+      .set(row)
+      .where(eq(adapter.tables[tableName].id, id))
+
+    findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id)
+
+    const doc = await db.query[tableName].findFirst(findManyArgs)
+
+    return transform<T>({
+      adapter,
+      config: adapter.payload.config,
+      data: doc,
+      fields,
+      joinQuery: false,
+      tableName,
+    })
+  }
+
   // Split out the incoming data into the corresponding:
   // base row, locales, relationships, blocks, and arrays
   const rowToInsert = transformForWrite({
     adapter,
     data,
+    enableAtomicWrites: false,
     fields,
     path,
     tableName,
   })

   // First, we insert the main row
-  let insertedRow: Record<string, unknown>

   try {
     if (operation === 'update') {
       const target = upsertTarget || adapter.tables[tableName].id
@@ -275,7 +358,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
     }
   }

-  // When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
+  // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions.
   const arraysBlocksUUIDMap: Record<string, number | string> = {}

   for (const [tableName, blockRows] of Object.entries(blocksToInsert)) {
@@ -0,0 +1,52 @@
+import type { FlattenedField } from 'payload'
+
+/**
+ * Checks whether the passed data can use the optimized simple SQL SET call instead of the full upsertRow flow.
+ * The optimized path is not used when the data has arrays, blocks, hasMany select/text/number, localized fields, or complex relationships.
+ */
+export const shouldUseOptimizedUpsertRow = ({
+  data,
+  fields,
+}: {
+  data: Record<string, unknown>
+  fields: FlattenedField[]
+}) => {
+  for (const key in data) {
+    const value = data[key]
+    const field = fields.find((each) => each.name === key)
+
+    if (!field) {
+      continue
+    }
+
+    if (
+      field.type === 'array' ||
+      field.type === 'blocks' ||
+      ((field.type === 'text' ||
+        field.type === 'relationship' ||
+        field.type === 'upload' ||
+        field.type === 'select' ||
+        field.type === 'number') &&
+        field.hasMany) ||
+      ((field.type === 'relationship' || field.type === 'upload') &&
+        Array.isArray(field.relationTo)) ||
+      field.localized
+    ) {
+      return false
+    }
+
+    if (
+      (field.type === 'group' || field.type === 'tab') &&
+      value &&
+      typeof value === 'object' &&
+      !shouldUseOptimizedUpsertRow({
+        data: value as Record<string, unknown>,
+        fields: field.flattenedFields,
+      })
+    ) {
+      return false
+    }
+  }
+
+  return true
+}
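A behavior sketch of the new helper (field shapes are hypothetical, trimmed to the properties the check reads; import path as in the new file above):

import type { FlattenedField } from 'payload'

import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js'

const fields = [
  { name: 'title', type: 'text' },                 // simple column on the main table
  { name: 'tags', type: 'select', hasMany: true }, // lives in a separate child table
] as unknown as FlattenedField[]

shouldUseOptimizedUpsertRow({ data: { title: 'Hi' }, fields }) // true  -> plain UPDATE ... SET
shouldUseOptimizedUpsertRow({ data: { tags: ['a'] }, fields }) // false -> full upsertRow flow
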
@@ -296,12 +296,13 @@ declare module '${this.packageName}' {

   if (prettify) {
     try {
-      const prettier = await import('prettier')
+      const prettier = await eval('import("prettier")')
       const configPath = await prettier.resolveConfigFile()
       const config = configPath ? await prettier.resolveConfig(configPath) : {}
       code = await prettier.format(code, { ...config, parser: 'typescript' })
-      // eslint-disable-next-line no-empty
-    } catch {}
+    } catch {
+      /* empty */
+    }
   }

   await writeFile(outputFile, code, 'utf-8')
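Why eval: when this generator is transpiled to CommonJS, a bare dynamic import('prettier') is rewritten to require('prettier'), which fails for ESM-only prettier builds; wrapping the specifier in eval hides it from the transpiler (a common workaround, presumably the intent here). The same trick with types preserved:

const prettier = (await eval('import("prettier")')) as typeof import('prettier')
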
@@ -1,9 +1,7 @@
 import type { Table } from 'drizzle-orm'

+import { getTableName } from 'drizzle-orm'
+
 export const getNameFromDrizzleTable = (table: Table): string => {
-  const symbol = Object.getOwnPropertySymbols(table).find((symb) =>
-    symb.description.includes('Name'),
-  )
-
-  return table[symbol]
+  return getTableName(table)
 }
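getTableName is drizzle-orm's public helper for this, replacing the fragile symbol introspection. For example:

import { getTableName } from 'drizzle-orm'
import { pgTable, serial } from 'drizzle-orm/pg-core'

const users = pgTable('users', { id: serial('id').primaryKey() })
getTableName(users) // 'users'
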
@@ -1,5 +1,7 @@
 const RawConstraintSymbol = Symbol('RawConstraint')

+export const DistinctSymbol = Symbol('DistinctSymbol')
+
 /**
  * You can use this to inject a raw query to where
  */
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/email-nodemailer",
-  "version": "3.47.0",
+  "version": "3.51.0",
   "description": "Payload Nodemailer Email Adapter",
   "homepage": "https://payloadcms.com",
   "repository": {

@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/email-resend",
-  "version": "3.47.0",
+  "version": "3.51.0",
   "description": "Payload Resend Email Adapter",
   "homepage": "https://payloadcms.com",
   "repository": {

@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/graphql",
-  "version": "3.47.0",
+  "version": "3.51.0",
   "homepage": "https://payloadcms.com",
   "repository": {
     "type": "git",
@@ -9,6 +9,7 @@ export type Resolver = (
   args: {
     data: Record<string, unknown>
     locale?: string
+    trash?: boolean
     where?: Where
   },
   context: {
@@ -30,6 +31,7 @@ export function countResolver(collection: Collection): Resolver {
     const options = {
       collection,
       req: isolateObjectProperty(req, 'transactionID'),
+      trash: args.trash,
      where: args.where,
    }

@@ -11,6 +11,7 @@ export type Resolver<TSlug extends CollectionSlug> = (
     fallbackLocale?: string
     id: number | string
     locale?: string
+    trash?: boolean
   },
   context: {
     req: PayloadRequest
@@ -49,6 +50,7 @@ export function getDeleteResolver<TSlug extends CollectionSlug>(
       collection,
       depth: 0,
       req: isolateObjectProperty(req, 'transactionID'),
+      trash: args.trash,
     }

     const result = await deleteByIDOperation(options)

@@ -15,6 +15,7 @@ export type Resolver = (
     page?: number
     pagination?: boolean
     sort?: string
+    trash?: boolean
     where?: Where
   },
   context: {
@@ -57,6 +58,7 @@ export function findResolver(collection: Collection): Resolver {
       pagination: args.pagination,
       req,
       sort: args.sort,
+      trash: args.trash,
       where: args.where,
     }

@@ -11,6 +11,7 @@ export type Resolver<TData> = (
     fallbackLocale?: string
     id: string
     locale?: string
+    trash?: boolean
   },
   context: {
     req: PayloadRequest
@@ -50,6 +51,7 @@ export function findByIDResolver<TSlug extends CollectionSlug>(
       depth: 0,
       draft: args.draft,
       req: isolateObjectProperty(req, 'transactionID'),
+      trash: args.trash,
     }

     const result = await findByIDOperation(options)

@@ -10,6 +10,7 @@ export type Resolver<T extends TypeWithID = any> = (
     fallbackLocale?: string
     id: number | string
     locale?: string
+    trash?: boolean
   },
   context: {
     req: PayloadRequest
@@ -33,6 +34,7 @@ export function findVersionByIDResolver(collection: Collection): Resolver {
       collection,
       depth: 0,
       req: isolateObjectProperty(req, 'transactionID'),
+      trash: args.trash,
     }

     const result = await findVersionByIDOperation(options)

@@ -14,6 +14,7 @@ export type Resolver = (
     page?: number
     pagination?: boolean
     sort?: string
+    trash?: boolean
     where: Where
   },
   context: {
@@ -54,6 +55,7 @@ export function findVersionsResolver(collection: Collection): Resolver {
       pagination: args.pagination,
       req: isolateObjectProperty(req, 'transactionID'),
       sort: args.sort,
+      trash: args.trash,
       where: args.where,
     }
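Taken together, these hunks thread a trash flag (soft-delete support) from the GraphQL schema through each resolver into the corresponding operation. A hedged query sketch (hypothetical 'Posts' collection with trash enabled):

const query = /* GraphQL */ `
  query {
    Posts(trash: true, where: { title: { contains: "draft" } }) {
      docs {
        id
        title
      }
    }
  }
`
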
Some files were not shown because too many files have changed in this diff.