Compare commits
64 Commits
feat/pushj
...
feat/ecomm
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1c516d4410 | ||
|
|
c5a40a597e | ||
|
|
a84aed63eb | ||
|
|
73335c9055 | ||
|
|
eb07a8fc95 | ||
|
|
dd7f60dbda | ||
|
|
8a4c0a1910 | ||
|
|
8a0674f4a4 | ||
|
|
002cdff8f0 | ||
|
|
bf8983a991 | ||
|
|
0f9106d332 | ||
|
|
f99bc602fe | ||
|
|
eafdfe56f6 | ||
|
|
4a32b294d3 | ||
|
|
1f4fc0316d | ||
|
|
7488d4c232 | ||
|
|
387df98c61 | ||
|
|
fa5412b240 | ||
|
|
78578adbfe | ||
|
|
f2b0056f0e | ||
|
|
72c3806f03 | ||
|
|
9d8527a39a | ||
|
|
85923c3373 | ||
|
|
e2c5d82fe4 | ||
|
|
d192118a71 | ||
|
|
bd4b17b266 | ||
|
|
de0ff04803 | ||
|
|
45ceb12283 | ||
|
|
d07e9599b4 | ||
|
|
77136c82d1 | ||
|
|
f5b676e2fd | ||
|
|
b23a60fbf0 | ||
|
|
27d2d10e76 | ||
|
|
f669c5a380 | ||
|
|
7eb0f77eb4 | ||
|
|
31368ce1c0 | ||
|
|
18216f5e85 | ||
|
|
cbe3f25eee | ||
|
|
77239be30c | ||
|
|
eb62c7addd | ||
|
|
c38395bc40 | ||
|
|
a2aa16608a | ||
|
|
58a11fac87 | ||
|
|
995b2bccce | ||
|
|
c1528c51ef | ||
|
|
e4ba0f4b41 | ||
|
|
390870ea67 | ||
|
|
7c44197291 | ||
|
|
fad16e5550 | ||
|
|
8bc16229a8 | ||
|
|
7ce97362b9 | ||
|
|
c1c56ce7d0 | ||
|
|
db15750648 | ||
|
|
489a9cbd96 | ||
|
|
5fe1d3feb0 | ||
|
|
bd55f4c36e | ||
|
|
90d466d560 | ||
|
|
02d86f96a4 | ||
|
|
4d0d415400 | ||
|
|
5fffb5fb4c | ||
|
|
2836a7c10a | ||
|
|
1080782f9e | ||
|
|
8c81c07a35 | ||
|
|
58d89e6d31 |
7
.github/workflows/audit-dependencies.sh
vendored
7
.github/workflows/audit-dependencies.sh
vendored
@@ -13,8 +13,7 @@ echo "${audit_json}" | jq --arg severity "${severity}" '
|
||||
{
|
||||
package: .value.module_name,
|
||||
vulnerable: .value.vulnerable_versions,
|
||||
fixed_in: .value.patched_versions,
|
||||
findings: .value.findings
|
||||
fixed_in: .value.patched_versions
|
||||
}
|
||||
)
|
||||
' >$output_file
|
||||
@@ -24,11 +23,7 @@ audit_length=$(jq 'length' $output_file)
|
||||
if [[ "${audit_length}" -gt "0" ]]; then
|
||||
echo "Actionable vulnerabilities found in the following packages:"
|
||||
jq -r '.[] | "\u001b[1m\(.package)\u001b[0m vulnerable in \u001b[31m\(.vulnerable)\u001b[0m fixed in \u001b[32m\(.fixed_in)\u001b[0m"' $output_file | while read -r line; do echo -e "$line"; done
|
||||
echo ""
|
||||
echo "Output written to ${output_file}"
|
||||
cat $output_file
|
||||
echo ""
|
||||
echo "This script can be rerun with: './.github/workflows/audit-dependencies.sh $severity'"
|
||||
exit 1
|
||||
else
|
||||
echo "No actionable vulnerabilities"
|
||||
|
||||
2
.github/workflows/audit-dependencies.yml
vendored
2
.github/workflows/audit-dependencies.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Script Run Details>"
|
||||
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Details>"
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
3
.github/workflows/post-release.yml
vendored
3
.github/workflows/post-release.yml
vendored
@@ -17,9 +17,6 @@ env:
|
||||
|
||||
jobs:
|
||||
post_release:
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name != 'workflow_dispatch' }}
|
||||
steps:
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -322,6 +322,8 @@ test/admin-root/app/(payload)/admin/importMap.js
|
||||
/test/admin-root/app/(payload)/admin/importMap.js
|
||||
test/app/(payload)/admin/importMap.js
|
||||
/test/app/(payload)/admin/importMap.js
|
||||
test/plugin-ecommerce/app/(payload)/admin/importMap.js
|
||||
/test/plugin-ecommerce/app/(payload)/admin/importMap.js
|
||||
test/pnpm-lock.yaml
|
||||
test/databaseAdapter.js
|
||||
/filename-compound-index
|
||||
|
||||
@@ -739,7 +739,7 @@ The `useDocumentInfo` hook provides information about the current document being
|
||||
| **`lastUpdateTime`** | Timestamp of the last update to the document. |
|
||||
| **`mostRecentVersionIsAutosaved`** | Whether the most recent version is an autosaved version. |
|
||||
| **`preferencesKey`** | The `preferences` key to use when interacting with document-level user preferences. [More details](./preferences). |
|
||||
| **`data`** | The saved data of the document. |
|
||||
| **`savedDocumentData`** | The saved data of the document. |
|
||||
| **`setDocFieldPreferences`** | Method to set preferences for a specific field. [More details](./preferences). |
|
||||
| **`setDocumentTitle`** | Method to set the document title. |
|
||||
| **`setHasPublishedDoc`** | Method to update whether the document has been published. |
|
||||
|
||||
@@ -142,7 +142,7 @@ The following options are available:
|
||||
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
|
||||
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
|
||||
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
|
||||
| `baseFilter` | Defines a default base filter which will be applied to the List View (along with any other filters applied by the user) and internal links in Lexical Editor, |
|
||||
| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
|
||||
|
||||
<Banner type="warning">
|
||||
**Note:** If you set `useAsTitle` to a relationship or join field, it will use
|
||||
|
||||
@@ -296,16 +296,11 @@ query {
|
||||
sort: "createdAt"
|
||||
limit: 5
|
||||
where: { author: { equals: "66e3431a3f23e684075aaeb9" } }
|
||||
"""
|
||||
Optionally pass count: true if you want to retrieve totalDocs
|
||||
"""
|
||||
count: true -- s
|
||||
) {
|
||||
docs {
|
||||
title
|
||||
}
|
||||
hasNextPage
|
||||
totalDocs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,20 +34,20 @@ npm i @payloadcms/plugin-csm
|
||||
Then in the `plugins` array of your Payload Config, call the plugin and enable any collections that require Content Source Maps.
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload/config'
|
||||
import contentSourceMaps from '@payloadcms/plugin-csm'
|
||||
import { buildConfig } from "payload/config"
|
||||
import contentSourceMaps from "@payloadcms/plugin-csm"
|
||||
|
||||
const config = buildConfig({
|
||||
collections: [
|
||||
{
|
||||
slug: 'pages',
|
||||
slug: "pages",
|
||||
fields: [
|
||||
{
|
||||
name: 'slug',
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'title',
|
||||
name: 'title,'
|
||||
type: 'text',
|
||||
},
|
||||
],
|
||||
@@ -55,7 +55,7 @@ const config = buildConfig({
|
||||
],
|
||||
plugins: [
|
||||
contentSourceMaps({
|
||||
collections: ['pages'],
|
||||
collections: ["pages"],
|
||||
}),
|
||||
],
|
||||
})
|
||||
|
||||
@@ -45,11 +45,13 @@ The following options are available:
|
||||
|
||||
| Path | Description |
|
||||
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`url`** | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
|
||||
| **`url`** \* | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
|
||||
| **`breakpoints`** | Array of breakpoints to be used as “device sizes” in the preview window. Each item appears as an option in the toolbar. [More details](#breakpoints). |
|
||||
| **`collections`** | Array of collection slugs to enable Live Preview on. |
|
||||
| **`globals`** | Array of global slugs to enable Live Preview on. |
|
||||
|
||||
_\* An asterisk denotes that a property is required._
|
||||
|
||||
### URL
|
||||
|
||||
The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.
|
||||
@@ -86,16 +88,17 @@ const config = buildConfig({
|
||||
// ...
|
||||
livePreview: {
|
||||
// highlight-start
|
||||
url: ({ data, collectionConfig, locale }) =>
|
||||
`${data.tenant.url}${
|
||||
collectionConfig.slug === 'posts'
|
||||
? `/posts/${data.slug}`
|
||||
: `${data.slug !== 'home' ? `/${data.slug}` : ''}`
|
||||
}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
|
||||
url: ({
|
||||
data,
|
||||
collectionConfig,
|
||||
locale
|
||||
}) => `${data.tenant.url}${ // Multi-tenant top-level domain
|
||||
collectionConfig.slug === 'posts' ? `/posts/${data.slug}` : `${data.slug !== 'home' : `/${data.slug}` : ''}`
|
||||
}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
|
||||
collections: ['pages'],
|
||||
},
|
||||
// highlight-end
|
||||
},
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
914
docs/plugins/ecommerce.mdx
Normal file
914
docs/plugins/ecommerce.mdx
Normal file
@@ -0,0 +1,914 @@
|
||||
---
|
||||
title: Ecommerce Plugin
|
||||
label: Ecommerce
|
||||
order: 120
|
||||
desc: Add ecommerce functionality to your Payload CMS application with this plugin.
|
||||
keywords: plugins, ecommerce, stripe, plugin, payload, cms, shop, payments
|
||||
---
|
||||
|
||||

|
||||
|
||||
<Banner type="warning">
|
||||
This plugin is currently in Alpha and may have breaking changes in future
|
||||
releases.
|
||||
</Banner>
|
||||
|
||||
This plugin allows you to add ecommerce functionality to your Payload app. It provides a set of utilities and collections to manage products, orders, and payments. It also integrates with popular payment gateways like Stripe to handle transactions.
|
||||
|
||||
<Banner type="info">
|
||||
This plugin is completely open-source and the [source code can be found
|
||||
here](https://github.com/payloadcms/payload/tree/main/packages/plugin-ecommerce).
|
||||
If you need help, check out our [Community
|
||||
Help](https://payloadcms.com/community-help). If you think you've found a bug,
|
||||
please [open a new
|
||||
issue](https://github.com/payloadcms/payload/issues/new?assignees=&labels=plugin%3A%redirects&template=bug_report.md&title=plugin-ecommerce%3A)
|
||||
with as much detail as possible.
|
||||
</Banner>
|
||||
|
||||
## Core features
|
||||
|
||||
The plugin ships with a wide range of features to help you get started with ecommerce:
|
||||
|
||||
- Products with Variants are supported by default
|
||||
- Carts are tracked in Payload
|
||||
- Orders and Transactions
|
||||
- Addresses linked to your Customers
|
||||
- Stripe Payments integration
|
||||
- Any number of currencies supported
|
||||
- React UI utilities to help you manage your frontend logic
|
||||
|
||||
## Installation
|
||||
|
||||
Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com):
|
||||
|
||||
```bash
|
||||
pnpm add @payloadcms/plugin-ecommerce
|
||||
```
|
||||
|
||||
## Basic Usage
|
||||
|
||||
In the `plugins` array of your [Payload Config](https://payloadcms.com/docs/configuration/overview), call the plugin with [options](#options):
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import { ecommercePlugin } from '@payloadcms/plugin-ecommerce'
|
||||
|
||||
const config = buildConfig({
|
||||
collections: [
|
||||
{
|
||||
slug: 'pages',
|
||||
fields: [],
|
||||
},
|
||||
],
|
||||
plugins: [
|
||||
ecommercePlugin({
|
||||
customers: { slug: 'users' },
|
||||
}),
|
||||
],
|
||||
})
|
||||
|
||||
export default config
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
| Option | Type | Description |
|
||||
| -------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `access` | `object` | Configuration to override the default access control, use this when checking for roles or multi tenancy. [More](#access) |
|
||||
| `addresses` | `object` | Configuration for addresses collection and supported fields. [More](#addresses) |
|
||||
| `carts` | `object` | Configuration for carts collection. [More](#carts) |
|
||||
| `currencies` | `object` | Supported currencies by the store. [More](#currencies) |
|
||||
| `customers` | `object` | Used to provide the customers slug. [More](#customers) |
|
||||
| `inventory` | `boolean` `object` | Enable inventory tracking within Payload. Defaults to `true`. [More](#inventory) |
|
||||
| `payments` | `object` | Configuring payments and supported payment methods. [More](#payments) |
|
||||
| `products` | `object` | Configuration for products, variants collections and more. [More](#products) |
|
||||
| `orders` | `object` | Configuration for orders collection. [More](#orders) |
|
||||
| `transactions` | `boolean` `object` | Configuration for transactions collection. [More](#transactions) |
|
||||
|
||||
Note that the fields in overrides take a function that receives the default fields and returns an array of fields. This allows you to add fields to the collection.
|
||||
|
||||
```ts
|
||||
ecommercePlugin({
|
||||
access: {
|
||||
isAdmin: isAdmin,
|
||||
isAdminField: isAdminField,
|
||||
isAdminOrOwner: isAdminOrOwner,
|
||||
isAdminOrPublished: isAdminOrPublished,
|
||||
isCustomerField: isCustomerField,
|
||||
},
|
||||
customers: {
|
||||
slug: 'users',
|
||||
},
|
||||
payments: {
|
||||
paymentMethods: [
|
||||
stripeAdapter({
|
||||
secretKey: process.env.STRIPE_SECRET_KEY!,
|
||||
publishableKey: process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY!,
|
||||
webhookSecret: process.env.STRIPE_WEBHOOKS_SIGNING_SECRET!,
|
||||
}),
|
||||
],
|
||||
},
|
||||
products: {
|
||||
variants: {
|
||||
variantsCollection: VariantsCollection,
|
||||
},
|
||||
productsCollection: ProductsCollection,
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Access
|
||||
|
||||
The plugin requires access control functions in order to restrict permissions to certain collections or fields. You can override these functions by providing your own in the `access` option.
|
||||
|
||||
| Option | Type | Description |
|
||||
| -------------------- | ------------- | ------------------------------------------------------------------------- |
|
||||
| `isAuthenticated` | `Access` | Authenticated access only, provided by default. |
|
||||
| `isPublic` | `Access` | Public access, provided by default. |
|
||||
| `isAdmin` | `Access` | Limited to only admin users. |
|
||||
| `isAdminField` | `FieldAccess` | Limited to only admin users, specifically for Field level access control. |
|
||||
| `isAdminOrOwner` | `Access` | Is the owner of the document via the `customer` field or is an admin. |
|
||||
| `isAdminOrPublished` | `Access` | The document is published or user is admin. |
|
||||
| `isCustomerField` | `FieldAccess` | Limited to customers only, specifically for Field level access control. |
|
||||
|
||||
The default access control functions are:
|
||||
|
||||
```ts
|
||||
access: {
|
||||
isAuthenticated: ({ req: { user } }) => Boolean(user),
|
||||
isPublic: () => true,
|
||||
}
|
||||
```
|
||||
|
||||
### `isAuthenticated`
|
||||
|
||||
Access control to check if the user is authenticated. By default the following is provided:
|
||||
|
||||
```ts
|
||||
isAuthenticated: ({ req: { user } }) => Boolean(user)
|
||||
```
|
||||
|
||||
### `isPublic`
|
||||
|
||||
Access control to allow public access. By default the following is provided:
|
||||
|
||||
```ts
|
||||
isPublic: () => true
|
||||
```
|
||||
|
||||
### `isAdmin`
|
||||
|
||||
Access control to check if the user has `admin` permissions.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
isAdmin: ({ req: { user } }) => Boolean(user?.roles?.includes('admin'))
|
||||
```
|
||||
|
||||
### `isAdminField`
|
||||
|
||||
Field level access control to check if the user has `admin` permissions.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
isAdminField: ({ req: { user } }) => Boolean(user?.roles?.includes('admin'))
|
||||
```
|
||||
|
||||
### `isAdminOrOwner`
|
||||
|
||||
Access control to check if the user has `admin` permissions or is the owner of the document via the `customer` field.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
isAdminOrOwner: ({ req: { user } }) => {
|
||||
if (user && Boolean(user?.roles?.includes('admin'))) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (user?.id) {
|
||||
return {
|
||||
customer: {
|
||||
equals: user.id,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
```
|
||||
|
||||
### `isAdminOrPublished`
|
||||
|
||||
Access control to check if the user has `admin` permissions or if the document is published.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
isAdminOrPublished: ({ req: { user } }) => {
|
||||
if (user && Boolean(user?.roles?.includes('admin'))) {
|
||||
return true
|
||||
}
|
||||
return {
|
||||
_status: {
|
||||
equals: 'published',
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### `isCustomerField`
|
||||
|
||||
Field level access control to check if the user has `customer` permissions.
|
||||
|
||||
Example:
|
||||
|
||||
```ts
|
||||
isCustomerField: ({ req: { user } }) =>
|
||||
Boolean(user?.roles?.includes('customer'))
|
||||
```
|
||||
|
||||
## Carts
|
||||
|
||||
The `carts` option is used to configure the carts collection. Defaults to `true` which will create a `carts` collection with default fields. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| ----------------- | -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `cartsCollection` | `CollectionOverride` | Allows you to override the collection for `carts` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collection. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
carts: {
|
||||
cartsCollection: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'notes',
|
||||
label: 'Notes',
|
||||
type: 'textarea',
|
||||
},
|
||||
]
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Carts are created when a customer adds their first item to the cart. The cart is then updated as they add or remove items. The cart is linked to a _Customer_ via the `customer` field. If the user is authenticated, this will be set to their user ID. If the user is not authenticated, this will be `null`.
|
||||
If the user is not authenticated, the cart ID is stored in local storage and used to fetch the cart on subsequent requests. Access control by default works so that if the user is not authenticated then they can only access carts that have no customer linked to them.
|
||||
|
||||
## Customers
|
||||
|
||||
The `customers` option is required and is used to provide the customers collection slug. This collection is used to link orders, carts, and addresses to a customer.
|
||||
|
||||
| Option | Type | Description |
|
||||
| ------ | ------ | ------------------------------------- |
|
||||
| `slug` | string | The slug of the customers collection. |
|
||||
|
||||
While it's recommended to use just one collection for customers and your editors, you can use any collection you want for your customers. Just make sure that your access control is checking for the correct collections as well.
|
||||
|
||||
## Inventory
|
||||
|
||||
The `inventory` option is used to enable or disable inventory tracking within Payload. It defaults to `true`. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| ----------- | -------- | ------------------------------------------------------------------------- |
|
||||
| `fieldName` | `string` | Override the field name used to track inventory. Defaults to `inventory`. |
|
||||
|
||||
For now it's quite rudimentary tracking with no integrations to 3rd party services. It will simply add an `inventory` field to the `variants` collection and decrement the inventory when an order is placed.
|
||||
|
||||
## Payments
|
||||
|
||||
The `payments` option is used to configure payments and supported payment methods.
|
||||
|
||||
| Option | Type | Description |
|
||||
| ---------------- | ------- | ------------------------------------------------------------------------------------------------- |
|
||||
| `paymentMethods` | `array` | An array of payment method adapters. Currently, only Stripe is supported. [More](#stripe-adapter) |
|
||||
|
||||
### Payment adapters
|
||||
|
||||
The plugin supports payment adapters to integrate with different payment gateways. Currently, only the [Stripe adapter](#stripe-adapter) is available. Adapters will provide a client side version as well with slightly different arguments.
|
||||
|
||||
Every adapter supports the following arguments in addition to their own:
|
||||
|
||||
| Argument | Type | Description |
|
||||
| ---------------- | ---------------------------------- | ----------------------------------------------------------------------- |
|
||||
| `label` | `string` | Human readabale label for this payment adapter. |
|
||||
| `groupOverrides` | `GroupField` with `FieldsOverride` | Use this to override the available fields for the payment adapter type. |
|
||||
|
||||
Client side base arguments are the following:
|
||||
|
||||
| Argument | Type | Description |
|
||||
| -------- | -------- | ----------------------------------------------- |
|
||||
| `label` | `string` | Human readabale label for this payment adapter. |
|
||||
|
||||
See the [Stripe adapter](#stripe-adapter) for an example of client side arguments and the [React section](#react) for usage.
|
||||
|
||||
#### `groupOverrides`
|
||||
|
||||
The `groupOverrides` option allows you to customize the fields that are available for a specific payment adapter. It takes a `GroupField` object with a `fields` function that receives the default fields and returns an array of fields.
|
||||
These fields are stored in transactions and can be used to collect additional information for the payment method. Stripe, for example, will track the `paymentIntentID`.
|
||||
|
||||
Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
payments: {
|
||||
paymentMethods: [
|
||||
stripeAdapter({
|
||||
secretKey: process.env.STRIPE_SECRET_KEY,
|
||||
publishableKey: process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY,
|
||||
webhookSecret: process.env.STRIPE_WEBHOOKS_SIGNING_SECRET,
|
||||
groupOverrides: {
|
||||
fields: ({ defaultFields }) => {
|
||||
return [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'customField',
|
||||
label: 'Custom Field',
|
||||
type: 'text',
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
}),
|
||||
],
|
||||
},
|
||||
```
|
||||
|
||||
### Stripe Adapter
|
||||
|
||||
The Stripe adapter is used to integrate with the Stripe payment gateway. It requires a secret key, publishable key, and optionally webhook secret.
|
||||
|
||||
<Banner type="info">
|
||||
Note that Payload will not install the Stripe SDK package for you
|
||||
automatically, so you will need to install it yourself:
|
||||
|
||||
```
|
||||
pnpm add stripe
|
||||
```
|
||||
|
||||
</Banner>
|
||||
|
||||
| Argument | Type | Description |
|
||||
| ---------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `secretKey` | `string` | Required for communicating with the Stripe API in the backend. |
|
||||
| `publishableKey` | `string` | Required for communicating with the Stripe API in the client side. |
|
||||
| `webhookSecret` | `string` | The webhook secret used to verify incoming webhook requests from Stripe. |
|
||||
| `webhooks` | `WebhookHandler[]` | An array of webhook handlers to register within Payload's REST API for Stripe to callback. |
|
||||
| `apiVersion` | `string` | The Stripe API version to use. See [docs](https://stripe.com/docs/api/versioning). This will be deprecated soon by Stripe's SDK, configure the API version in your Stripe Dashboard. |
|
||||
| `appInfo` | `object` | The application info to pass to Stripe. See [docs](https://stripe.com/docs/api/app_info). |
|
||||
|
||||
```ts
|
||||
import { stripeAdapter } from '@payloadcms/plugin-ecommerce/payments/stripe'
|
||||
|
||||
stripeAdapter({
|
||||
secretKey: process.env.STRIPE_SECRET_KEY!,
|
||||
publishableKey: process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY!,
|
||||
webhookSecret: process.env.STRIPE_WEBHOOKS_SIGNING_SECRET!,
|
||||
})
|
||||
```
|
||||
|
||||
#### Stripe `webhooks`
|
||||
|
||||
The `webhooks` option allows you to register custom webhook handlers for [Stripe events](https://docs.stripe.com/api/events). This is useful if you want to handle specific events that are not covered by the default handlers provided by the plugin.
|
||||
|
||||
```ts
|
||||
stripeAdapter({
|
||||
webhooks: {
|
||||
'payment_intent.succeeded': ({ event, req }) => {
|
||||
// Access to Payload's req object and event data
|
||||
},
|
||||
},
|
||||
}),
|
||||
```
|
||||
|
||||
#### Stripe client side
|
||||
|
||||
On the client side, you can use the `publishableKey` to initialize Stripe and handle payments. The client side version of the adapter only requires the `label` and `publishableKey` arguments. Never expose the `secretKey` or `webhookSecret` keys on the client side.
|
||||
|
||||
```ts
|
||||
import { stripeAdapterClient } from '@payloadcms/plugin-ecommerce/payments/stripe'
|
||||
|
||||
<EcommerceProvider
|
||||
paymentMethods={[
|
||||
stripeAdapterClient({
|
||||
publishableKey: process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY,
|
||||
}),
|
||||
]}
|
||||
>
|
||||
{children}
|
||||
</EcommerceProvider>
|
||||
```
|
||||
|
||||
## Addresses
|
||||
|
||||
The `addresses` option is used to configure the addresses collection and supported fields. Defaults to `true` which will create an `addresses` collection with default fields. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| -------------------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `addressFields` | `FieldsOverride` | A function that is given the `defaultFields` as an argument and returns an array of fields. Use this to customise the supported fields for stored addresses. |
|
||||
| `collectionOverride` | `CollectionOverride` | Allows you to override the collection for `addresses` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
| `supportedCountries` | `CountryType[]` | An array of supported countries in [ISO 3166-1 alpha-2 format](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2). Defaults to all countries. |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collection. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
addresses: {
|
||||
collectionOverride: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'googleMapLocation',
|
||||
label: 'Google Map Location',
|
||||
type: 'text',
|
||||
},
|
||||
]
|
||||
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### `supportedCountries`
|
||||
|
||||
The `supportedCountries` option is an array of country codes in [ISO 3166-1 alpha-2 format](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2). This is used to limit the countries that can be selected when creating or updating an address. If not provided, all countries will be supported. Currently used for storing addresses only.
|
||||
|
||||
You can import the default list of countries from the plugin:
|
||||
|
||||
```ts
|
||||
import { defaultCountries } from '@payloadcms/plugin-ecommerce/addresses'
|
||||
```
|
||||
|
||||
## Currencies
|
||||
|
||||
The `currencies` option is used to configure the supported currencies by the store. Defaults to `true` which will support `USD`. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| --------------------- | ------------ | ------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `supportedCurrencies` | `Currency[]` | An array of supported currencies by the store. Defaults to `USD`. See [Currencies](#currencies-list) for available currencies. |
|
||||
| `defaultCurrency` | `string` | The default currency code to use for the store. Defaults to the first currency. Must be one of the `supportedCurrencies` codes. |
|
||||
|
||||
The `Currency` type is as follows:
|
||||
|
||||
```ts
|
||||
type Currency = {
|
||||
code: string // The currency code in ISO 4217 format, e.g. 'USD'
|
||||
decimals: number // The number of decimal places for the currency, e.g. 2 for USD
|
||||
label: string // A human-readable label for the currency, e.g. 'US Dollar'
|
||||
symbol: string // The currency symbol, e.g. '$'
|
||||
}
|
||||
```
|
||||
|
||||
For example, to support JYP in addition to USD:
|
||||
|
||||
```ts
|
||||
import { ecommercePlugin } from '@payloadcms/plugin-ecommerce'
|
||||
import { USD } from '@payloadcms/plugin-ecommerce/currencies'
|
||||
|
||||
ecommercePlugin({
|
||||
currencies: {
|
||||
supportedCurrencies: [
|
||||
USD,
|
||||
{
|
||||
code: 'JPY',
|
||||
decimals: 0,
|
||||
label: 'Japanese Yen',
|
||||
symbol: '¥',
|
||||
},
|
||||
],
|
||||
defaultCurrency: 'USD',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
Note that adding a new currency could generate a new schema migration as it adds new prices fields in your products.
|
||||
|
||||
We currently support the following currencies out of the box:
|
||||
|
||||
- `USD`
|
||||
- `EUR`
|
||||
- `GBP`
|
||||
|
||||
You can import these from the plugin:
|
||||
|
||||
```ts
|
||||
import { EUR } from '@payloadcms/plugin-ecommerce/currencies'
|
||||
```
|
||||
|
||||
<Banner type="info">
|
||||
Note that adding new currencies here does not automatically enable them in
|
||||
your payment gateway. Make sure to enable the currencies in your payment
|
||||
gateway dashboard as well.
|
||||
</Banner>
|
||||
|
||||
## Products
|
||||
|
||||
The `products` option is used to configure the products and variants collections. Defaults to `true` which will create `products` and `variants` collections with default fields. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| -------------------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `productsCollection` | `CollectionOverride` | Allows you to override the collection for `products` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
| `variants` | `object` | Configuration for the variants collection. [More](#variants) |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collections. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
products: {
|
||||
productsCollection: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'customField',
|
||||
label: 'Custom Field',
|
||||
type: 'text',
|
||||
},
|
||||
]
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Variants
|
||||
|
||||
The `variants` option is used to configure the variants collection. It takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| -------------------------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `variantsCollection` | `CollectionOverride` | Allows you to override the collection for `variants` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
| `variantTypesCollection` | `CollectionOverride` | Allows you to override the collection for `variantTypes` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
| `variantOptionsCollection` | `CollectionOverride` | Allows you to override the collection for `variantOptions` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collection. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
variants: {
|
||||
variantsCollection: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'customField',
|
||||
label: 'Custom Field',
|
||||
type: 'text',
|
||||
},
|
||||
]
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The key differences between these collections:
|
||||
|
||||
- `variantTypes` are the types of variants that a product can have, e.g. Size, Color.
|
||||
- `variantOptions` are the options for each variant type, e.g. Small, Medium, Large for Size.
|
||||
- `variants` are the actual variants of a product, e.g. a T-Shirt in Size Small and Color Red.
|
||||
|
||||
## Orders
|
||||
|
||||
The `orders` option is used to configure the orders collection. Defaults to `true` which will create an `orders` collection with default fields. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| ------------------ | -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `ordersCollection` | `CollectionOverride` | Allows you to override the collection for `orders` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collection. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
orders: {
|
||||
ordersCollection: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'notes',
|
||||
label: 'Notes',
|
||||
type: 'textarea',
|
||||
},
|
||||
]
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Transactions
|
||||
|
||||
The `transactions` option is used to configure the transactions collection. Defaults to `true` which will create a `transactions` collection with default fields. It also takes an object:
|
||||
|
||||
| Option | Type | Description |
|
||||
| ------------------------ | -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `transactionsCollection` | `CollectionOverride` | Allows you to override the collection for `transactions` where the `fields` is a function of type `FieldsOverride` and expects a return of a `Field` array. |
|
||||
|
||||
You can add your own fields or modify the structure of the existing on in the collection. Example for overriding the default fields:
|
||||
|
||||
```ts
|
||||
transactions: {
|
||||
transactionsCollection: {
|
||||
fields: ({ defaultFields }) => {
|
||||
const fields = [
|
||||
...defaultFields,
|
||||
{
|
||||
name: 'notes',
|
||||
label: 'Notes',
|
||||
type: 'textarea',
|
||||
},
|
||||
]
|
||||
return fields
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## React
|
||||
|
||||
The plugin provides a set of React utilities to help you manage your ecommerce frontend. These include context providers, hooks, and components to handle carts, products, and payments.
|
||||
|
||||
The following hooks and components are available:
|
||||
|
||||
| Hook / Component | Description |
|
||||
| ------------------- | ------------------------------------------------------------------------------ |
|
||||
| `EcommerceProvider` | A context provider to wrap your application and provide the ecommerce context. |
|
||||
| `useCart` | A hook to manage the cart state and actions. |
|
||||
| `useAddresses` | A hook to fetch and manage products. |
|
||||
| `usePayments` | A hook to manage the checkout process. |
|
||||
| `useCurrency` | A hook to format prices based on the selected currency. |
|
||||
| `useEcommerce` | A hook that encompasses all of the above in one. |
|
||||
|
||||
### `EcommerceProvider`
|
||||
|
||||
The `EcommerceProvider` component is used to wrap your application and provide the ecommerce context. It takes the following props:
|
||||
|
||||
| Prop | Type | Description |
|
||||
| ------------------ | ------------------ | --------------------------------------------------------------------------------------------------- |
|
||||
| `addressesSlug` | `string` | The slug of the addresses collection. Defaults to `addresses`. |
|
||||
| `api` | `object` | API configuration for the internal fetches of the provider. [More](#api) |
|
||||
| `cartsSlug` | `string` | The slug of the carts collection. Defaults to `carts`. |
|
||||
| `children` | `ReactNode` | The child components that will have access to the ecommerce context. |
|
||||
| `currenciesConfig` | `CurrenciesConfig` | Configuration for supported currencies. See [Currencies](#currencies). |
|
||||
| `customersSlug` | `string` | The slug of the customers collection. Defaults to `users`. |
|
||||
| `debug` | `boolean` | Enable or disable debug mode. This will send more information to the console. |
|
||||
| `enableVariants` | `boolean` | Enable or disable product variants support. Defaults to `true`. |
|
||||
| `paymentMethods` | `PaymentMethod[]` | An array of payment method adapters for the client side. See [Payment adapters](#payment-adapters). |
|
||||
| `syncLocalStorage` | `boolean` `object` | Whether to sync the cart ID to local storage. Defaults to `true`. Takes an object for configuration |
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
import { EcommerceProvider } from '@payloadcms/plugin-ecommerce/react'
|
||||
// Import any payment adapters you want to use on the client side
|
||||
import { stripeAdapterClient } from '@payloadcms/plugin-ecommerce/payments/stripe'
|
||||
import { USD, EUR } from '@payloadcms/plugin-ecommerce/currencies'
|
||||
|
||||
export const Providers = () => (
|
||||
<EcommerceProvider
|
||||
enableVariants={true}
|
||||
currenciesConfig={{
|
||||
supportedCurrencies: [USD, EUR],
|
||||
defaultCurrency: 'USD',
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
</EcommerceProvider>
|
||||
)
|
||||
```
|
||||
|
||||
#### `api`
|
||||
|
||||
The `api` prop is used to configure the API settings for the internal fetches of the provider. It takes an object with the following properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| ----------------- | -------- | ----------------------------------------------------------------- |
|
||||
| `apiRoute` | `string` | The base route for accessing the Payload API. Defaults to `/api`. |
|
||||
| `serverURL` | `string` | The full URL of your Payload server. |
|
||||
| `cartsFetchQuery` | `object` | Additional query parameters to include when fetching the cart. |
|
||||
|
||||
#### `cartsFetchQuery`
|
||||
|
||||
The `cartsFetchQuery` property allows you to specify additional query parameters to include when fetching the cart. This can be useful for including related data or customizing the response. This accepts:
|
||||
|
||||
| Property | Type | Description |
|
||||
| ---------- | -------------- | --------------------------------------------------------------- |
|
||||
| `depth` | `string` | Defaults to 0. [See Depth](../queries/depth) |
|
||||
| `select` | `SelectType` | Select parameters. [See Select](../queries/select) |
|
||||
| `populate` | `PopulateType` | Populate parameters. [See Populate](../queries/select#populate) |
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
<EcommerceProvider
|
||||
api={{
|
||||
cartsFetchQuery: {
|
||||
depth: 2, // Include related data up to 2 levels deep
|
||||
},
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
</EcommerceProvider>
|
||||
```
|
||||
|
||||
#### `syncLocalStorage`
|
||||
|
||||
The `syncLocalStorage` prop is used to enable or disable syncing the cart ID to local storage. This allows the cart to persist across page reloads and sessions. It defaults to `true`.
|
||||
|
||||
You can also provide an object with the following properties for more configuration:
|
||||
|
||||
| Property | Type | Description |
|
||||
| -------- | -------- | ---------------------------------------------------------------------------- |
|
||||
| `key` | `string` | The key to use for storing the cart ID in local storage. Defaults to `cart`. |
|
||||
|
||||
### `useCart`
|
||||
|
||||
The `useCart` hook is used to manage the cart state and actions. It provides methods to add, remove, and update items in the cart, as well as to fetch the current cart state. It has the following properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| --------------- | -------------------------------------------------- | ----------------------------------------------------------------------------------------- |
|
||||
| `addItem` | `(item: CartItemInput, quantity?: number) => void` | Method to add an item to the cart, optionally accepts a quantity to add multiple at once. |
|
||||
| `cart` | `Cart` `null` | The current cart state. Null or undefined if it doesn't exist. |
|
||||
| `clearCart` | `() => void` | Method to clear the cart. |
|
||||
| `decrementItem` | `(item: IDType) => void` | Method to decrement the quantity of an item. Will remove it entirely if it reaches 0. |
|
||||
| `incrementItem` | `(item: IDType) => void` | Method to increment the quantity of an item. |
|
||||
| `removeItem` | `(item: IDType) => void` | Method to remove an item from the cart. |
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
import { useCart } from '@payloadcms/plugin-ecommerce/react'
|
||||
|
||||
const CartComponent = () => {
|
||||
const { addItem, cart, clearCart, decrementItem, incrementItem, removeItem } =
|
||||
useCart()
|
||||
|
||||
// Your component logic here
|
||||
}
|
||||
```
|
||||
|
||||
### `useAddresses`
|
||||
|
||||
The `useAddresses` hook is used to fetch and manage addresses. It provides methods to create, update, and delete addresses, as well as to fetch the list of addresses. It has the following properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| --------------- | ----------------------------------------------------------------- | ----------------------------------------------------------------- |
|
||||
| `addresses` | `Address[]` | The list of addresses, if any are available for the current user. |
|
||||
| `createAddress` | `(data: Address) => Promise<Address>` | Method to create a new address. |
|
||||
| `updateAddress` | `(addressID: IDType, data: Partial<Address>) => Promise<Address>` | Method to update an existing address by ID. |
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
import { useAddresses } from '@payloadcms/plugin-ecommerce/react'
|
||||
|
||||
const AddressesComponent = () => {
|
||||
const { addresses, createAddress, updateAddress } = useAddresses()
|
||||
|
||||
// Your component logic here
|
||||
}
|
||||
```
|
||||
|
||||
### `usePayments`
|
||||
|
||||
The `usePayments` hook is used to manage the checkout process. It provides methods to initiate payments, confirm orders, and handle payment status. It has the following properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| ----------------------- | -------------------------- | ------------------------------------------------------------------- |
|
||||
| `confirmOrder` | `(args) => Promise<Order>` | Method to confirm an order by ID. [More](#confirmOrder) |
|
||||
| `initiatePayment` | `(args) => Promise<void>` | Method to initiate a payment for an order. [More](#initiatePayment) |
|
||||
| `paymentMethods` | `PaymentMethod[]` | The list of available payment methods. |
|
||||
| `selectedPaymentMethod` | `PaymentMethod` | The currently selected payment method, if any. |
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
import { usePayments } from '@payloadcms/plugin-ecommerce/react'
|
||||
|
||||
const CheckoutComponent = () => {
|
||||
const {
|
||||
confirmOrder,
|
||||
initiatePayment,
|
||||
paymentMethods,
|
||||
selectedPaymentMethod,
|
||||
} = usePayments()
|
||||
|
||||
// Your component logic here
|
||||
}
|
||||
```
|
||||
|
||||
#### `confirmOrder`
|
||||
|
||||
Use this method to confirm an order by its ID. It requires the payment method ID and will return the order ID.
|
||||
|
||||
```ts
|
||||
try {
|
||||
const data = await confirmOrder('stripe', {
|
||||
additionalData: {
|
||||
paymentIntentID: paymentIntent.id,
|
||||
customerEmail,
|
||||
},
|
||||
})
|
||||
// Return type will contain `orderID`
|
||||
// use data to redirect to your order page
|
||||
} catch (error) {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
If the payment gateway requires additional confirmations offsite then you will need another landing page to handle that. For example with Stripe you may need to use a callback URL, just make sure the relevant information is routed back.
|
||||
|
||||
<Banner type="info">
|
||||
This will mark the transaction as complete in the backend and create the order
|
||||
for the user.
|
||||
</Banner>
|
||||
|
||||
#### `initiatePayment`
|
||||
|
||||
Use this method to initiate a payment for an order. It requires the cart ID and the payment method ID. Depending on the payment method, additional data may be required. Depending on the payment method used you may need to provide additional data, for example with Stripe:
|
||||
|
||||
```ts
|
||||
try {
|
||||
const data = await initiatePayment('stripe', {
|
||||
additionalData: {
|
||||
customerEmail,
|
||||
billingAddress,
|
||||
shippingAddress,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
This function will hit the Payload API endpoint for `/stripe/initiate` and return the payment data required to complete the payment on the client side, which by default will include a `client_secret` to complete the payment with Stripe.js. The next step is to call the `confirmOrder` once payment is confirmed on the client side by Stripe.
|
||||
|
||||
<Banner type="info">
|
||||
At this step the cart is verified and a transaction is created in the backend
|
||||
with the address details provided. No order is created yet until you call
|
||||
`confirmOrder`, which should be done after payment is confirmed on the client
|
||||
side or via webhooks if you opt for that approach instead.
|
||||
</Banner>
|
||||
|
||||
### `useCurrency`
|
||||
|
||||
The `useCurrency` hook is used to format prices based on the selected currency. It provides methods to format prices and to get the current currency. It has the following properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| ------------------ | -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `currenciesConfig` | `CurrenciesConfig` | The configuration for supported currencies. Directly matching the config provided to the Context Provider. [More](#ecommerceprovider) |
|
||||
| `currency` | `Currency` | The currently selected currency. |
|
||||
| `formatPrice` | `(amount: number) => string` | Method to format a price based on the selected currency. |
|
||||
| `setCurrency` | `(currencyCode: string) => void` | Method to set the current currency by code. It will update all price formats when used in conjunction with the `formatPrice` utility. |
|
||||
|
||||
`formatPrice` in particular is very helpful as all prices are stored as integers to avoid any potential issues with decimal calculations, therefore on the frontend you can use this utility to format your price accounting for the currency and decimals. Example usage:
|
||||
|
||||
```tsx
|
||||
import { useCurrency } from '@payloadcms/plugin-ecommerce/react'
|
||||
|
||||
const PriceComponent = ({ amount }) => {
|
||||
const { currenciesConfig, currency, setCurrency } = useCurrency()
|
||||
|
||||
return <div>{formatPrice(amount)}</div>
|
||||
}
|
||||
```
|
||||
|
||||
### `useEcommerce`
|
||||
|
||||
The `useEcommerce` hook encompasses all of the above hooks in one. It provides access to the cart, addresses, and payments hooks.
|
||||
|
||||
Example usage:
|
||||
|
||||
```tsx
|
||||
import { useEcommerce } from '@payloadcms/plugin-ecommerce/react'
|
||||
|
||||
const EcommerceComponent = () => {
|
||||
const { cart, addresses, selectedPaymentMethod } = useEcommerce()
|
||||
|
||||
// Your component logic here
|
||||
}
|
||||
```
|
||||
|
||||
## TypeScript
|
||||
|
||||
The plugin will inherit the types from your generated Payload types where possible. We also export the following types:
|
||||
|
||||
- `Cart` - The cart type as stored in the React state and local storage and on the client side.
|
||||
- `CollectionOverride` - Type for overriding collections.
|
||||
- `CurrenciesConfig` - Type for the currencies configuration.
|
||||
- `EcommercePluginConfig` - The configuration object for the ecommerce plugin.
|
||||
- `FieldsOverride` - Type for overriding fields in collections.
|
||||
|
||||
All types can be directly imported:
|
||||
|
||||
```ts
|
||||
import { EcommercePluginConfig } from '@payloadcms/plugin-ecommerce/types'
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
The [Templates Directory](https://github.com/payloadcms/payload/tree/main/templates) also contains an official [E-commerce Template](https://github.com/payloadcms/payload/tree/main/templates/ecommerce), which uses this plugin.
|
||||
@@ -54,15 +54,8 @@ The plugin accepts an object with the following properties:
|
||||
```ts
|
||||
type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
/**
|
||||
* Base path for your application
|
||||
*
|
||||
* https://nextjs.org/docs/app/api-reference/config/next-config-js/basePath
|
||||
*
|
||||
* @default undefined
|
||||
*/
|
||||
basePath?: string
|
||||
/**
|
||||
* After a tenant is deleted, the plugin will attempt to clean up related documents
|
||||
* After a tenant is deleted, the plugin will attempt
|
||||
* to clean up related documents
|
||||
* - removing documents with the tenant ID
|
||||
* - removing the tenant from users
|
||||
*
|
||||
@@ -75,36 +68,22 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
collections: {
|
||||
[key in CollectionSlug]?: {
|
||||
/**
|
||||
* Set to `true` if you want the collection to behave as a global
|
||||
* Set to `true` if you want the collection to
|
||||
* behave as a global
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
isGlobal?: boolean
|
||||
/**
|
||||
* Overrides for the tenant field, will override the entire tenantField configuration
|
||||
*/
|
||||
tenantFieldOverrides?: CollectionTenantFieldConfigOverrides
|
||||
/**
|
||||
* Set to `false` if you want to manually apply the baseListFilter
|
||||
* Set to `false` if you want to manually apply the baseFilter
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
useBaseFilter?: boolean
|
||||
/**
|
||||
* @deprecated Use `useBaseFilter` instead. If both are defined,
|
||||
* `useBaseFilter` will take precedence. This property remains only
|
||||
* for backward compatibility and may be removed in a future version.
|
||||
*
|
||||
* Originally, `baseListFilter` was intended to filter only the List View
|
||||
* in the admin panel. However, base filtering is often required in other areas
|
||||
* such as internal link relationships in the Lexical editor.
|
||||
* Set to `false` if you want to manually apply
|
||||
* the baseListFilter
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
useBaseListFilter?: boolean
|
||||
/**
|
||||
* Set to `false` if you want to handle collection access manually without the multi-tenant constraints applied
|
||||
* Set to `false` if you want to handle collection access
|
||||
* manually without the multi-tenant constraints applied
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
@@ -113,7 +92,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
}
|
||||
/**
|
||||
* Enables debug mode
|
||||
* - Makes the tenant field visible in the admin UI within applicable collections
|
||||
* - Makes the tenant field visible in the
|
||||
* admin UI within applicable collections
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
@@ -125,41 +105,27 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   */
  enabled?: boolean
  /**
   * Localization for the plugin
   * Field configuration for the field added
   * to all tenant enabled collections
   */
  i18n?: {
    translations: {
      [key in AcceptedLanguages]?: {
        /**
         * @default 'You are about to change ownership from <0>{{fromTenant}}</0> to <0>{{toTenant}}</0>'
         */
        'confirm-modal-tenant-switch--body'?: string
        /**
         * `tenantLabel` defaults to the value of the `nav-tenantSelector-label` translation
         *
         * @default 'Confirm {{tenantLabel}} change'
         */
        'confirm-modal-tenant-switch--heading'?: string
        /**
         * @default 'Assigned Tenant'
         */
        'field-assignedTenant-label'?: string
        /**
         * @default 'Tenant'
         */
        'nav-tenantSelector-label'?: string
      }
    }
  tenantField?: {
    access?: RelationshipField['access']
    /**
     * The name of the field added to all tenant
     * enabled collections
     *
     * @default 'tenant'
     */
    name?: string
  }
  /**
   * Field configuration for the field added to all tenant enabled collections
   */
  tenantField?: RootTenantFieldConfigOverrides
  /**
   * Field configuration for the field added to the users collection
   * Field configuration for the field added
   * to the users collection
   *
   * If `includeDefaultField` is `false`, you must include the field on your users collection manually
   * This is useful if you want to customize the field or place the field in a specific location
   * If `includeDefaultField` is `false`, you must
   * include the field on your users collection manually
   * This is useful if you want to customize the field
   * or place the field in a specific location
   */
  tenantsArrayField?:
    | {
@@ -180,7 +146,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
       */
      arrayTenantFieldName?: string
      /**
       * When `includeDefaultField` is `true`, the field will be added to the users collection automatically
       * When `includeDefaultField` is `true`, the field will
       * be added to the users collection automatically
       */
      includeDefaultField?: true
      /**
@@ -197,7 +164,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
      arrayFieldName?: string
      arrayTenantFieldName?: string
      /**
       * When `includeDefaultField` is `false`, you must include the field on your users collection manually
       * When `includeDefaultField` is `false`, you must
       * include the field on your users collection manually
       */
      includeDefaultField?: false
      rowFields?: never
@@ -206,9 +174,8 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
  /**
   * Customize tenant selector label
   *
   * Either a string or an object where the keys are i18n codes and the values are the string labels
   *
   * @deprecated Use `i18n.translations` instead.
   * Either a string or an object where the keys are i18n
   * codes and the values are the string labels
   */
  tenantSelectorLabel?:
    | Partial<{
@@ -222,25 +189,27 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
   */
  tenantsSlug?: string
  /**
   * Function that determines if a user has access to _all_ tenants
   * Function that determines if a user has access
   * to _all_ tenants
   *
   * Useful for super-admin type users
   */
  userHasAccessToAllTenants?: (
    user: ConfigTypes extends { user: unknown }
      ? ConfigTypes['user']
      : TypedUser,
    user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User,
  ) => boolean
  /**
   * Opt out of adding access constraints to the tenants collection
   * Opt out of adding access constraints to
   * the tenants collection
   */
  useTenantsCollectionAccess?: boolean
  /**
   * Opt out including the baseListFilter to filter tenants by selected tenant
   * Opt out including the baseListFilter to filter
   * tenants by selected tenant
   */
  useTenantsListFilter?: boolean
  /**
   * Opt out including the baseListFilter to filter users by selected tenant
   * Opt out including the baseListFilter to filter
   * users by selected tenant
   */
  useUsersTenantFilter?: boolean
}
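To make the shape above concrete, here is a minimal options object for the new version of the type, using only options visible in this hunk. The import path, the user shape, and the exact `RootTenantFieldConfigOverrides` layout are assumptions; treat this as a sketch, not canonical plugin usage.

```ts
// Sketch of a MultiTenantPluginConfig options object. Option names come from
// the hunk above; the import path and user shape are assumptions, and other
// required plugin options (e.g. which collections are tenant-enabled) are omitted.
import type { MultiTenantPluginConfig } from '@payloadcms/plugin-multi-tenant/types'

type ConfigTypes = { user: { email: string; roles?: string[] } } // hypothetical user shape

const multiTenantOptions: MultiTenantPluginConfig<ConfigTypes> = {
  enabled: true,
  tenantsSlug: 'tenants',
  i18n: {
    translations: {
      en: {
        'field-assignedTenant-label': 'Workspace',
        'nav-tenantSelector-label': 'Workspace',
      },
    },
  },
  // The exact override shape (RootTenantFieldConfigOverrides) is not shown in this hunk
  tenantField: { name: 'tenant' },
  // Grant super-admin type users access to every tenant
  userHasAccessToAllTenants: (user) => Boolean(user.roles?.includes('super-admin')),
  useTenantsListFilter: true,
}
```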
@@ -6,112 +6,9 @@ desc: Troubleshooting Common Issues in Payload
keywords: admin, components, custom, customize, documentation, Content Management System, cms, headless, javascript, node, react, nextjs, troubleshooting
---

## Dependency mismatches
## Common Issues

All `payload` and `@payloadcms/*` packages must be on exactly the same version and installed only once.

When two copies—or two different versions—of any of these packages (or of `react` / `react-dom`) appear in your dependency graph, you can see puzzling runtime errors. The most frequent is a broken React context:

```bash
TypeError: Cannot destructure property 'config' of...
```

This happens because one package imports a hook (most commonly `useConfig`) from _version A_ while the context provider comes from _version B_. The fix is always the same: make sure every Payload-related and React package resolves to the same module.

### Confirm whether duplicates exist

The first thing to do is confirm whether duplicate dependencies actually exist.

There are two ways to do this:

1. Using pnpm's built-in inspection tool

```bash
pnpm why @payloadcms/ui
```

This prints the dependency tree and shows which versions are being installed. If you see more than one distinct version—or the same version listed under different paths—you have duplication.

2. Manual check (works with any package manager)

```bash
find node_modules -name package.json \
  -exec grep -H '"name": "@payloadcms/ui"' {} \;
```

Most of these hits are likely symlinks created by pnpm. Edit the matching package.json files (temporarily add a comment or change a description) to confirm whether they point to the same physical folder or to multiple copies.

Perform the same two checks for `react` and `react-dom`; a second copy of React can cause identical symptoms.

#### If no duplicates are found

`@payloadcms/ui` intentionally contains two bundles of itself, so you may see dual paths even when everything is correct. Inside the Payload Admin UI you must import only:

- `@payloadcms/ui`
- `@payloadcms/ui/rsc`
- `@payloadcms/ui/shared`

Any other deep import such as `@payloadcms/ui/elements/Button` should **only** be used in your own frontend, outside of the Payload Admin Panel. Those deep entries are published un-bundled to help you tree-shake and ship a smaller client bundle if you only need a few components from `@payloadcms/ui`.

### Fixing dependency issues

These steps assume `pnpm`, which the Payload team recommends and uses internally. The principles apply to other package managers like npm and yarn as well. Do note that yarn 1.x is not supported by Payload.

1. Pin every critical package to an exact version

In package.json, remove `^` or `~` from all versions of:

- `payload`
- `@payloadcms/*`
- `react`
- `react-dom`

Version range prefixes allow your package manager to float to a newer minor or patch release, which causes mismatches.

2. Delete node_modules

Old packages often linger even after you change versions or remove them from your package.json. Deleting node_modules ensures a clean slate.

3. Re-install dependencies

```bash
pnpm install
```
#### If the error persists

1. Clean the global store (pnpm only)

```bash
pnpm store prune
```

2. Delete the lockfile

Depending on your package manager, this could be `pnpm-lock.yaml`, `package-lock.json`, or `yarn.lock`.

Make sure you delete the lockfile **and** the node_modules folder at the same time, then run `pnpm install`. This forces a fresh, consistent resolution for all packages. It will also update all packages with version ranges to the latest matching version.

While it's best practice to manage dependencies in such a way that the lockfile can easily be re-generated (often this is the easiest way to resolve dependency issues), this may break your project if you have not tested the latest versions of your dependencies.

If you are using a version control system, make sure to commit your lockfile after this step.

3. Deduplicate anything that slipped through

```bash
pnpm dedupe
```

**Still stuck?**

- Switch to `pnpm` if you are on npm. Its symlinked store helps reduce accidental duplication.
- Inspect the lockfile directly for peer-dependency violations.
- Check project-level `.npmrc` / `.pnpmfile.cjs` overrides.
- Run [Syncpack](https://www.npmjs.com/package/syncpack) to enforce identical versions of every `@payloadcms/*`, `react`, and `react-dom` reference.

Absolute last resort: add Webpack aliases so that all imports of a given package resolve to the same path (e.g. `resolve.alias['react'] = path.resolve('./node_modules/react')`). Keep this only until you can fix the underlying version skew.
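If you do resort to aliasing, a minimal sketch of the approach in a Next.js config looks roughly like the following (the `next.config.mjs` placement and the specific packages aliased are assumptions; adapt them to your own setup):

```ts
// next.config.mjs — temporary workaround only; fix the underlying version skew instead.
import path from 'path'

/** @type {import('next').NextConfig} */
const nextConfig = {
  webpack: (config) => {
    // Force every import of these packages to resolve to a single physical copy
    config.resolve.alias['react'] = path.resolve('./node_modules/react')
    config.resolve.alias['react-dom'] = path.resolve('./node_modules/react-dom')
    config.resolve.alias['@payloadcms/ui'] = path.resolve('./node_modules/@payloadcms/ui')
    return config
  },
}

export default nextConfig
```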
## "Unauthorized, you must be logged in to make this request" when attempting to log in
### "Unauthorized, you must be logged in to make this request" when attempting to log in

This means that your auth cookie is not being set or accepted correctly upon logging in. To resolve this, check the following settings in your Payload Config:
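As a rough illustration, the settings usually involved look like the following sketch. Values are placeholders, and other required config options (such as `db`, `secret`, and `editor`) are omitted.

```ts
// Cookie-related fragments of a Payload config. The `serverURL`, `cors`,
// `csrf`, and collection-level `auth.cookies` options are the usual suspects;
// which ones matter depends on your deployment.
import type { Config } from 'payload'

const cookieRelatedSettings: Partial<Config> = {
  serverURL: 'https://cms.example.com', // must match the URL the browser actually uses
  cors: ['https://www.example.com'], // frontend origins allowed to send credentials
  csrf: ['https://www.example.com'], // origins accepted by the CSRF check
  collections: [
    {
      slug: 'users',
      auth: {
        cookies: {
          domain: '.example.com', // share the cookie across subdomains
          sameSite: 'None', // cross-domain setups typically need None + secure
          secure: true,
        },
      },
      fields: [],
    },
  ],
}
```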
@@ -90,33 +90,33 @@ export const Media: CollectionConfig = {

_An asterisk denotes that an option is required._

| Option | Description |
| --- | --- |
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
| **`constructorOptions`** | An object passed to the Sharp image library that accepts any Constructor options and applies them to the upload file. [More](https://sharp.pixelplumbing.com/api-constructor/) |
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. If using this option, you should handle the removal of any sensitive cookies (like payload-prefixed cookies) to prevent leaking session information to external services. By default, Payload automatically filters out payload-prefixed cookies when this option is not defined. |
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. |
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |
| **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) |
| **`handlers`** | Array of Request handlers to execute when fetching a file. If a handler returns a Response, it will be sent to the client; otherwise Payload will retrieve and send back the file. |
| **`imageSizes`** | If specified, image uploads will be automatically resized in accordance with these image sizes. [More](#image-sizes) |
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards. [More](#mimetypes) |
| **`pasteURL`** | Controls whether files can be uploaded from remote URLs by pasting them into the Upload field. **Enabled by default.** Accepts `false` to disable or an object with an `allowList` of valid remote URLs. [More](#uploading-files-from-remote-urls) |
| **`resizeOptions`** | An object passed to the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
| **`skipSafeFetch`** | Set to an `allowList` to skip the safe fetch check when fetching external files. Set to `true` to skip the safe fetch for all documents in this collection. Defaults to `false`. |
| **`allowRestrictedFileTypes`** | Set to `true` to allow restricted file types. If your Collection has defined [mimeTypes](#mimetypes), restricted file verification will be skipped. Defaults to `false`. [More](#restricted-file-types) |
| **`staticDir`** | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. Defaults to your collection slug. |
| **`trimOptions`** | An object passed to the Sharp image library to trim the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize#trim) |
| **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. |
| **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. |
| **`hideRemoveFile`** | Set to `true` to prevent the admin UI from offering a way to remove an existing file while editing. |
| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) |
| Option | Description |
| --- | --- |
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. |
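A short example tying several of these options together on the `Media` collection from the hunk header. Slugs, sizes, and the `allowList` entry are illustrative.

```ts
import type { CollectionConfig } from 'payload'

export const Media: CollectionConfig = {
  slug: 'media',
  upload: {
    adminThumbnail: 'thumbnail', // show the generated `thumbnail` size in the list view
    crop: true,
    focalPoint: true,
    imageSizes: [
      { name: 'thumbnail', width: 400, height: 300 },
      { name: 'hero', width: 1600, height: 900 },
    ],
    mimeTypes: ['image/*'],
    pasteURL: {
      allowList: [{ hostname: 'images.example.com', protocol: 'https' }],
    },
    staticDir: 'media',
  },
  fields: [],
}
```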
### Payload-wide Upload Options
16
package.json
16
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload-monorepo",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"workspaces": [
|
||||
@@ -132,12 +132,12 @@
|
||||
"devDependencies": {
|
||||
"@jest/globals": "29.7.0",
|
||||
"@libsql/client": "0.14.0",
|
||||
"@next/bundle-analyzer": "15.4.4",
|
||||
"@next/bundle-analyzer": "15.3.2",
|
||||
"@payloadcms/db-postgres": "workspace:*",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@payloadcms/eslint-plugin": "workspace:*",
|
||||
"@payloadcms/live-preview-react": "workspace:*",
|
||||
"@playwright/test": "1.54.1",
|
||||
"@playwright/test": "1.50.0",
|
||||
"@sentry/nextjs": "^8.33.1",
|
||||
"@sentry/node": "^8.33.1",
|
||||
"@swc-node/register": "1.10.10",
|
||||
@@ -147,8 +147,8 @@
|
||||
"@types/jest": "29.5.12",
|
||||
"@types/minimist": "1.2.5",
|
||||
"@types/node": "22.15.30",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/shelljs": "0.8.15",
|
||||
"chalk": "^4.1.2",
|
||||
"comment-json": "^4.2.3",
|
||||
@@ -168,12 +168,12 @@
|
||||
"lint-staged": "15.2.7",
|
||||
"minimist": "1.2.8",
|
||||
"mongodb-memory-server": "10.1.4",
|
||||
"next": "15.4.4",
|
||||
"next": "15.3.2",
|
||||
"open": "^10.1.0",
|
||||
"p-limit": "^5.0.0",
|
||||
"pg": "8.16.3",
|
||||
"playwright": "1.54.1",
|
||||
"playwright-core": "1.54.1",
|
||||
"playwright": "1.50.0",
|
||||
"playwright-core": "1.50.0",
|
||||
"prettier": "3.5.3",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/admin-bar",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "An admin bar for React apps using Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -42,8 +42,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "create-payload-app",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -50,18 +50,12 @@ export const updateOne: UpdateOne = async function updateOne(

  let result

  let updateData: UpdateQuery<any> = data

  const $inc: Record<string, number> = {}
  const $push: Record<string, { $each: any[] } | any> = {}

  transform({ $inc, $push, adapter: this, data, fields, operation: 'write' })
  let updateData: UpdateQuery<any> = data
  transform({ $inc, adapter: this, data, fields, operation: 'write' })

  if (Object.keys($inc).length) {
    updateData = { $inc, $set: updateData }
  }
  if (Object.keys($push).length) {
    updateData = { $push, $set: updateData }
  }

  try {
    if (returning === false) {
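With the new `$push` handling, `updateData` can carry an atomic array append alongside the regular `$set`. Roughly, when only `$push` entries are produced, the update document passed to MongoDB looks like this (field names are illustrative):

```ts
// Hypothetical shape of `updateData` after transform() collects a $push entry
// for an array field such as a job log.
const updateData = {
  $push: { log: { $each: [{ executedAt: '2025-01-01T00:00:00.000Z', state: 'succeeded' }] } },
  $set: { updatedAt: '2025-01-01T00:00:00.000Z' },
}
```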
@@ -209,7 +209,6 @@ const sanitizeDate = ({
|
||||
|
||||
type Args = {
|
||||
$inc?: Record<string, number>
|
||||
$push?: Record<string, { $each: any[] } | any>
|
||||
/** instance of the adapter */
|
||||
adapter: MongooseAdapter
|
||||
/** data to transform, can be an array of documents or a single document */
|
||||
@@ -399,7 +398,6 @@ const stripFields = ({
|
||||
|
||||
export const transform = ({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data,
|
||||
fields,
|
||||
@@ -414,16 +412,7 @@ export const transform = ({
|
||||
|
||||
if (Array.isArray(data)) {
|
||||
for (const item of data) {
|
||||
transform({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data: item,
|
||||
fields,
|
||||
globalSlug,
|
||||
operation,
|
||||
validateRelationships,
|
||||
})
|
||||
transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -481,26 +470,6 @@ export const transform = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
$push &&
|
||||
field.type === 'array' &&
|
||||
operation === 'write' &&
|
||||
field.name in ref &&
|
||||
ref[field.name]
|
||||
) {
|
||||
const value = ref[field.name]
|
||||
if (value && typeof value === 'object' && '$push' in value) {
|
||||
const push = value.$push
|
||||
|
||||
if (Array.isArray(push)) {
|
||||
$push[`${parentPath}${field.name}`] = { $each: push }
|
||||
} else if (typeof push === 'object') {
|
||||
$push[`${parentPath}${field.name}`] = push
|
||||
}
|
||||
delete ref[field.name]
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
|
||||
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
const fieldRef = ref[field.name] as Record<string, unknown>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -60,10 +60,6 @@ const createConstraint = ({
|
||||
formattedOperator = '='
|
||||
}
|
||||
|
||||
if (pathSegments.length === 1) {
|
||||
return `EXISTS (SELECT 1 FROM json_each("${pathSegments[0]}") AS ${newAlias} WHERE ${newAlias}.value ${formattedOperator} '${formattedValue}')`
|
||||
}
|
||||
|
||||
return `EXISTS (
|
||||
SELECT 1
|
||||
FROM json_each(${alias}.value -> '${pathSegments[0]}') AS ${newAlias}
|
||||
@@ -72,38 +68,21 @@ const createConstraint = ({
|
||||
}
|
||||
|
||||
export const createJSONQuery = ({
|
||||
column,
|
||||
operator,
|
||||
pathSegments,
|
||||
rawColumn,
|
||||
table,
|
||||
treatAsArray,
|
||||
treatRootAsArray,
|
||||
value,
|
||||
}: CreateJSONQueryArgs): string => {
|
||||
if ((operator === 'in' || operator === 'not_in') && Array.isArray(value)) {
|
||||
let sql = ''
|
||||
for (const [i, v] of value.entries()) {
|
||||
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, rawColumn, table, treatAsArray, treatRootAsArray, value: v })} ${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
|
||||
}
|
||||
return sql
|
||||
}
|
||||
|
||||
if (treatAsArray?.includes(pathSegments[1]!) && table) {
|
||||
return fromArray({
|
||||
operator,
|
||||
pathSegments,
|
||||
table,
|
||||
treatAsArray,
|
||||
value: value as CreateConstraintArgs['value'],
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
return createConstraint({
|
||||
alias: table,
|
||||
operator,
|
||||
pathSegments,
|
||||
treatAsArray,
|
||||
value: value as CreateConstraintArgs['value'],
|
||||
})
|
||||
return createConstraint({ alias: table, operator, pathSegments, treatAsArray, value })
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/drizzle",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "A library of shared functions used by different payload database adapters",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
|
||||
import type { SQLiteSelect, SQLiteSelectBase } from 'drizzle-orm/sqlite-core'
|
||||
|
||||
import { and, asc, count, desc, eq, getTableName, or, sql } from 'drizzle-orm'
|
||||
import { and, asc, count, desc, eq, or, sql } from 'drizzle-orm'
|
||||
import {
|
||||
appendVersionToQueryKey,
|
||||
buildVersionCollectionFields,
|
||||
combineQueries,
|
||||
type FlattenedField,
|
||||
getFieldByPath,
|
||||
getQueryDraftsSort,
|
||||
type JoinQuery,
|
||||
type SelectMode,
|
||||
@@ -33,7 +31,7 @@ import {
|
||||
resolveBlockTableName,
|
||||
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
|
||||
const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[]) => {
|
||||
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||
for (const k in where) {
|
||||
if (['AND', 'OR'].includes(k.toUpperCase())) {
|
||||
if (Array.isArray(where[k])) {
|
||||
@@ -43,7 +41,7 @@ const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[])
|
||||
}
|
||||
} else {
|
||||
// TODO: explore how to support arrays/relationship querying.
|
||||
paths.push({ path: k.split('.').join('_'), ref: where })
|
||||
paths.push(k.split('.').join('_'))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -61,11 +59,7 @@ const buildSQLWhere = (where: Where, alias: string) => {
|
||||
}
|
||||
} else {
|
||||
const payloadOperator = Object.keys(where[k])[0]
|
||||
|
||||
const value = where[k][payloadOperator]
|
||||
if (payloadOperator === '$raw') {
|
||||
return sql.raw(value)
|
||||
}
|
||||
|
||||
return operatorMap[payloadOperator](sql.raw(`"${alias}"."${k.split('.').join('_')}"`), value)
|
||||
}
|
||||
@@ -478,7 +472,7 @@ export const traverseFields = ({
|
||||
|
||||
const sortPath = sanitizedSort.split('.').join('_')
|
||||
|
||||
const wherePaths: { path: string; ref: any }[] = []
|
||||
const wherePaths: string[] = []
|
||||
|
||||
if (where) {
|
||||
flattenAllWherePaths(where, wherePaths)
|
||||
@@ -498,50 +492,9 @@ export const traverseFields = ({
|
||||
sortPath: sql`${sortColumn ? sortColumn : null}`.as('sortPath'),
|
||||
}
|
||||
|
||||
const collectionQueryWhere: any[] = []
|
||||
// Select for WHERE and Fallback NULL
|
||||
for (const { path, ref } of wherePaths) {
|
||||
const collectioConfig = adapter.payload.collections[collection].config
|
||||
const field = getFieldByPath({ fields: collectioConfig.flattenedFields, path })
|
||||
|
||||
if (field && field.field.type === 'select' && field.field.hasMany) {
|
||||
let tableName = adapter.tableNameMap.get(
|
||||
`${toSnakeCase(collection)}_${toSnakeCase(path)}`,
|
||||
)
|
||||
let parentTable = getTableName(table)
|
||||
|
||||
if (adapter.schemaName) {
|
||||
tableName = `"${adapter.schemaName}"."${tableName}"`
|
||||
parentTable = `"${adapter.schemaName}"."${parentTable}"`
|
||||
}
|
||||
|
||||
if (adapter.name === 'postgres') {
|
||||
selectFields[path] = sql
|
||||
.raw(
|
||||
`(select jsonb_agg(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
|
||||
)
|
||||
.as(path)
|
||||
} else {
|
||||
selectFields[path] = sql
|
||||
.raw(
|
||||
`(select json_group_array(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
|
||||
)
|
||||
.as(path)
|
||||
}
|
||||
|
||||
const constraint = ref[path]
|
||||
const operator = Object.keys(constraint)[0]
|
||||
const value: any = Object.values(constraint)[0]
|
||||
|
||||
const query = adapter.createJSONQuery({
|
||||
column: `"${path}"`,
|
||||
operator,
|
||||
pathSegments: [field.field.name],
|
||||
table: parentTable,
|
||||
value,
|
||||
})
|
||||
ref[path] = { $raw: query }
|
||||
} else if (adapter.tables[joinCollectionTableName][path]) {
|
||||
for (const path of wherePaths) {
|
||||
if (adapter.tables[joinCollectionTableName][path]) {
|
||||
selectFields[path] = sql`${adapter.tables[joinCollectionTableName][path]}`.as(path)
|
||||
// Allow to filter by collectionSlug
|
||||
} else if (path !== 'relationTo') {
|
||||
@@ -549,10 +502,7 @@ export const traverseFields = ({
|
||||
}
|
||||
}
|
||||
|
||||
let query: any = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
|
||||
if (collectionQueryWhere.length) {
|
||||
query = query.where(and(...collectionQueryWhere))
|
||||
}
|
||||
const query = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
|
||||
if (currentQuery === null) {
|
||||
currentQuery = query as unknown as SQLSelect
|
||||
} else {
|
||||
|
||||
@@ -28,8 +28,6 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: Creat
|
||||
})
|
||||
.join('.')
|
||||
|
||||
const fullPath = pathSegments.length === 1 ? '$[*]' : `$.${jsonPaths}`
|
||||
|
||||
let sql = ''
|
||||
|
||||
if (['in', 'not_in'].includes(operator) && Array.isArray(value)) {
|
||||
@@ -37,13 +35,13 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: Creat
|
||||
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, value: item })}${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
|
||||
})
|
||||
} else if (operator === 'exists') {
|
||||
sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '${fullPath}')`
|
||||
sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '$.${jsonPaths}')`
|
||||
} else if (['not_like'].includes(operator)) {
|
||||
const mappedOperator = operatorMap[operator]
|
||||
|
||||
sql = `NOT jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
|
||||
sql = `NOT jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
|
||||
} else {
|
||||
sql = `jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
|
||||
sql = `jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
|
||||
}
|
||||
|
||||
return sql
|
||||
|
||||
@@ -71,7 +71,6 @@ export const transformArray = ({
|
||||
data.forEach((arrayRow, i) => {
|
||||
const newRow: ArrayRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -105,7 +104,6 @@ export const transformArray = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -78,7 +78,6 @@ export const transformBlocks = ({
|
||||
|
||||
const newRow: BlockRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -117,7 +116,6 @@ export const transformBlocks = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -27,7 +27,6 @@ export const transformForWrite = ({
|
||||
// Split out the incoming data into rows to insert / delete
|
||||
const rowToInsert: RowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
blocks: {},
|
||||
blocksToDelete: new Set(),
|
||||
locales: {},
|
||||
@@ -46,7 +45,6 @@ export const transformForWrite = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: rowToInsert.arrays,
|
||||
arraysToPush: rowToInsert.arraysToPush,
|
||||
baseTableName: tableName,
|
||||
blocks: rowToInsert.blocks,
|
||||
blocksToDelete: rowToInsert.blocksToDelete,
|
||||
|
||||
@@ -4,7 +4,13 @@ import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from '../../types.js'
|
||||
import type { NumberToDelete, RelationshipToDelete, RowToInsert, TextToDelete } from './types.js'
|
||||
import type {
|
||||
ArrayRowToInsert,
|
||||
BlockRowToInsert,
|
||||
NumberToDelete,
|
||||
RelationshipToDelete,
|
||||
TextToDelete,
|
||||
} from './types.js'
|
||||
|
||||
import { isArrayOfRows } from '../../utilities/isArrayOfRows.js'
|
||||
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
|
||||
@@ -17,20 +23,16 @@ import { transformTexts } from './texts.js'
|
||||
|
||||
type Args = {
|
||||
adapter: DrizzleAdapter
|
||||
/**
|
||||
* This will delete the array table and then re-insert all the new array rows.
|
||||
*/
|
||||
arrays: RowToInsert['arrays']
|
||||
/**
|
||||
* Array rows to push to the existing array. This will simply create
|
||||
* a new row in the array table.
|
||||
*/
|
||||
arraysToPush: RowToInsert['arraysToPush']
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
/**
|
||||
* This is the name of the base table
|
||||
*/
|
||||
baseTableName: string
|
||||
blocks: RowToInsert['blocks']
|
||||
blocks: {
|
||||
[blockType: string]: BlockRowToInsert[]
|
||||
}
|
||||
blocksToDelete: Set<string>
|
||||
/**
|
||||
* A snake-case field prefix, representing prior fields
|
||||
@@ -80,7 +82,6 @@ type Args = {
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -128,6 +129,10 @@ export const traverseFields = ({
|
||||
if (field.type === 'array') {
|
||||
const arrayTableName = adapter.tableNameMap.get(`${parentTableName}_${columnName}`)
|
||||
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
|
||||
if (isLocalized) {
|
||||
if (typeof data[field.name] === 'object' && data[field.name] !== null) {
|
||||
Object.entries(data[field.name]).forEach(([localeKey, localeData]) => {
|
||||
@@ -152,33 +157,19 @@ export const traverseFields = ({
|
||||
textsToDelete,
|
||||
withinArrayOrBlockLocale: localeKey,
|
||||
})
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let value = data[field.name]
|
||||
let push = false
|
||||
if (
|
||||
// TODO do this for localized as well in DRY way
|
||||
|
||||
typeof value === 'object' &&
|
||||
'$push' in value
|
||||
) {
|
||||
value = Array.isArray(value.$push) ? value.$push : [value.$push]
|
||||
push = true
|
||||
}
|
||||
|
||||
const newRows = transformArray({
|
||||
adapter,
|
||||
arrayTableName,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
data: value,
|
||||
data: data[field.name],
|
||||
field,
|
||||
numbers,
|
||||
numbersToDelete,
|
||||
@@ -192,17 +183,7 @@ export const traverseFields = ({
|
||||
withinArrayOrBlockLocale,
|
||||
})
|
||||
|
||||
if (push) {
|
||||
if (!arraysToPush[arrayTableName]) {
|
||||
arraysToPush[arrayTableName] = []
|
||||
}
|
||||
arraysToPush[arrayTableName] = arraysToPush[arrayTableName].concat(newRows)
|
||||
} else {
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
|
||||
return
|
||||
@@ -283,7 +264,6 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -318,7 +298,6 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -2,9 +2,6 @@ export type ArrayRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -15,9 +12,6 @@ export type BlockRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -43,9 +37,6 @@ export type RowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
blocks: {
|
||||
[tableName: string]: BlockRowToInsert[]
|
||||
}
|
||||
|
||||
@@ -161,11 +161,10 @@ export type CreateJSONQueryArgs = {
|
||||
column?: Column | string
|
||||
operator: string
|
||||
pathSegments: string[]
|
||||
rawColumn?: SQL<unknown>
|
||||
table?: string
|
||||
treatAsArray?: string[]
|
||||
treatRootAsArray?: boolean
|
||||
value: boolean | number | number[] | string | string[]
|
||||
value: boolean | number | string
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -6,20 +6,15 @@ import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { findMany } from './find/findMany.js'
|
||||
import { upsertRow } from './upsertRow/index.js'
|
||||
import { shouldUseOptimizedUpsertRow } from './upsertRow/shouldUseOptimizedUpsertRow.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const updateJobs: UpdateJobs = async function updateMany(
|
||||
this: DrizzleAdapter,
|
||||
{ id, data, limit: limitArg, req, returning, sort: sortArg, where: whereArg },
|
||||
) {
|
||||
if (
|
||||
!(data?.log as object[])?.length &&
|
||||
!(data.log && typeof data.log === 'object' && '$push' in data.log)
|
||||
) {
|
||||
if (!(data?.log as object[])?.length) {
|
||||
delete data.log
|
||||
}
|
||||
|
||||
const whereToUse: Where = id ? { id: { equals: id } } : whereArg
|
||||
const limit = id ? 1 : limitArg
|
||||
|
||||
@@ -28,27 +23,6 @@ export const updateJobs: UpdateJobs = async function updateMany(
|
||||
const tableName = this.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const sort = sortArg !== undefined && sortArg !== null ? sortArg : collection.defaultSort
|
||||
|
||||
const useOptimizedUpsertRow = shouldUseOptimizedUpsertRow({
|
||||
data,
|
||||
fields: collection.flattenedFields,
|
||||
})
|
||||
|
||||
if (useOptimizedUpsertRow && id) {
|
||||
const result = await upsertRow({
|
||||
id,
|
||||
adapter: this,
|
||||
data,
|
||||
db,
|
||||
fields: collection.flattenedFields,
|
||||
ignoreResult: returning === false,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
return returning === false ? null : [result]
|
||||
}
|
||||
|
||||
const jobs = await findMany({
|
||||
adapter: this,
|
||||
collectionSlug: 'payload-jobs',
|
||||
@@ -68,12 +42,10 @@ export const updateJobs: UpdateJobs = async function updateMany(
|
||||
|
||||
// TODO: We need to batch this to reduce the amount of db calls. This can get very slow if we are updating a lot of rows.
|
||||
for (const job of jobs.docs) {
|
||||
const updateData = useOptimizedUpsertRow
|
||||
? data
|
||||
: {
|
||||
...job,
|
||||
...data,
|
||||
}
|
||||
const updateData = {
|
||||
...job,
|
||||
...data,
|
||||
}
|
||||
|
||||
const result = await upsertRow({
|
||||
id: job.id,
|
||||
|
||||
@@ -44,7 +44,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
}: Args): Promise<T> => {
|
||||
let insertedRow: Record<string, unknown> = { id }
|
||||
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
|
||||
const { arraysToPush, row } = transformForWrite({
|
||||
const { row } = transformForWrite({
|
||||
adapter,
|
||||
data,
|
||||
enableAtomicWrites: true,
|
||||
@@ -54,27 +54,11 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
const drizzle = db as LibSQLDatabase
|
||||
|
||||
// First, handle $push arrays
|
||||
|
||||
if (arraysToPush && Object.keys(arraysToPush)?.length) {
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [arraysToPush],
|
||||
db,
|
||||
parentRows: [insertedRow],
|
||||
uuidMap: {},
|
||||
})
|
||||
}
|
||||
|
||||
// Then, handle regular row update
|
||||
|
||||
if (ignoreResult) {
|
||||
if (row && Object.keys(row).length) {
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
.where(eq(adapter.tables[tableName].id, id))
|
||||
}
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
.where(eq(adapter.tables[tableName].id, id))
|
||||
return ignoreResult === 'idOnly' ? ({ id } as T) : null
|
||||
}
|
||||
|
||||
@@ -90,22 +74,6 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
const findManyKeysLength = Object.keys(findManyArgs).length
|
||||
const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0
|
||||
|
||||
if (!row || !Object.keys(row).length) {
|
||||
// Nothing to update => just fetch current row and return
|
||||
findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id)
|
||||
|
||||
const doc = await db.query[tableName].findFirst(findManyArgs)
|
||||
|
||||
return transform<T>({
|
||||
adapter,
|
||||
config: adapter.payload.config,
|
||||
data: doc,
|
||||
fields,
|
||||
joinQuery: false,
|
||||
tableName,
|
||||
})
|
||||
}
|
||||
|
||||
if (findManyKeysLength === 0 || hasOnlyColumns) {
|
||||
// Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections
|
||||
// without complex fields that live in separate tables like blocks, arrays, relationships, etc.
|
||||
@@ -461,9 +429,9 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [rowToInsert.arrays, rowToInsert.arraysToPush],
|
||||
arrays: [rowToInsert.arrays],
|
||||
db,
|
||||
parentRows: [insertedRow, insertedRow],
|
||||
parentRows: [insertedRow],
|
||||
uuidMap: arraysBlocksUUIDMap,
|
||||
})
|
||||
|
||||
|
||||
@@ -32,9 +32,6 @@ export const insertArrays = async ({
|
||||
const rowsByTable: RowsByTable = {}
|
||||
|
||||
arrays.forEach((arraysByTable, parentRowIndex) => {
|
||||
if (!arraysByTable || Object.keys(arraysByTable).length === 0) {
|
||||
return
|
||||
}
|
||||
Object.entries(arraysByTable).forEach(([tableName, arrayRows]) => {
|
||||
// If the table doesn't exist in map, initialize it
|
||||
if (!rowsByTable[tableName]) {
|
||||
|
||||
@@ -20,6 +20,7 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
}
|
||||
|
||||
if (
|
||||
field.type === 'array' ||
|
||||
field.type === 'blocks' ||
|
||||
((field.type === 'text' ||
|
||||
field.type === 'relationship' ||
|
||||
@@ -34,17 +35,6 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
return false
|
||||
}
|
||||
|
||||
if (field.type === 'array') {
|
||||
if (typeof value === 'object' && '$push' in value && value.$push) {
|
||||
return shouldUseOptimizedUpsertRow({
|
||||
// Only check first row - this function cares about field definitions. Each array row will have the same field definitions.
|
||||
data: Array.isArray(value.$push) ? value.$push?.[0] : value.$push,
|
||||
fields: field.flattenedFields,
|
||||
})
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if (
|
||||
(field.type === 'group' || field.type === 'tab') &&
|
||||
value &&
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-nodemailer",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "Payload Nodemailer Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-resend",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "Payload Resend Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/graphql",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -9,7 +9,6 @@ export type Resolver = (
|
||||
args: {
|
||||
data: Record<string, unknown>
|
||||
locale?: string
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
},
|
||||
context: {
|
||||
@@ -31,7 +30,6 @@ export function countResolver(collection: Collection): Resolver {
|
||||
const options = {
|
||||
collection,
|
||||
req: isolateObjectProperty(req, 'transactionID'),
|
||||
trash: args.trash,
|
||||
where: args.where,
|
||||
}
|
||||
|
||||
|
||||
@@ -379,11 +379,9 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
            ),
          },
          hasNextPage: { type: new GraphQLNonNull(GraphQLBoolean) },
          totalDocs: { type: GraphQLInt },
        },
      }),
      args: {
        count: { type: GraphQLBoolean },
        limit: {
          type: GraphQLInt,
        },
@@ -404,7 +402,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
      },
      async resolve(parent, args, context: Context) {
        const { collection } = field
        const { count = false, limit, page, sort, where } = args
        const { limit, page, sort, where } = args
        const { req } = context

        const draft = Boolean(args.draft ?? context.req.query?.draft)
@@ -431,7 +429,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
          throw new Error('GraphQL with array of join.field.collection is not implemented')
        }

        const { docs, totalDocs } = await req.payload.find({
        const { docs } = await req.payload.find({
          collection,
          depth: 0,
          draft,
@@ -441,7 +439,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
          locale: req.locale,
          overrideAccess: false,
          page,
          pagination: count ? true : false,
          pagination: false,
          req,
          sort,
          where: fullWhere,
@@ -456,7 +454,6 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
        return {
          docs: shouldSlice ? docs.slice(0, -1) : docs,
          hasNextPage: limit === 0 ? false : limit < docs.length,
          ...(count ? { totalDocs } : {}),
        }
      },
    }
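Per the hunks above, the join field resolver only runs a counted (paginated) query when `count: true` is passed, and then includes `totalDocs` in the result. A hedged sketch of what such a query might look like; the collection and field names are hypothetical, and only `count`, `docs`, `hasNextPage`, and `totalDocs` come from the diff itself:

```ts
// Hypothetical GraphQL query against a join field, requesting a total count.
const query = /* GraphQL */ `
  query {
    Category(id: "665f1c0c8b3f2a0012ab34cd") {
      relatedPosts(limit: 10, count: true) {
        docs {
          id
        }
        hasNextPage
        totalDocs
      }
    }
  }
`
```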
@@ -239,7 +239,6 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
|
||||
}),
|
||||
args: {
|
||||
draft: { type: GraphQLBoolean },
|
||||
trash: { type: GraphQLBoolean },
|
||||
where: { type: collection.graphQL.whereInputType },
|
||||
...(config.localization
|
||||
? {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-react",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The official React SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -46,8 +46,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-vue",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The official Vue SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"description": "The official live preview JavaScript SDK for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/next",
|
||||
"version": "3.50.0",
|
||||
"version": "3.49.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -117,11 +117,11 @@
|
||||
"@babel/preset-env": "7.27.2",
|
||||
"@babel/preset-react": "7.27.1",
|
||||
"@babel/preset-typescript": "7.27.1",
|
||||
"@next/eslint-plugin-next": "15.4.4",
|
||||
"@next/eslint-plugin-next": "15.3.2",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/busboy": "1.5.4",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/uuid": "10.0.0",
|
||||
"babel-plugin-react-compiler": "19.1.0-rc.2",
|
||||
"esbuild": "0.25.5",
|
||||
|
||||
@@ -1,11 +1,4 @@
|
||||
import type {
|
||||
DocumentTabConfig,
|
||||
DocumentTabServerPropsOnly,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
} from 'payload'
|
||||
import type { DocumentTabConfig, DocumentTabServerProps, ServerProps } from 'payload'
|
||||
import type React from 'react'
|
||||
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
@@ -16,24 +9,27 @@ import './index.scss'
|
||||
|
||||
export const baseClass = 'doc-tab'
|
||||
|
||||
export const DefaultDocumentTab: React.FC<{
|
||||
apiURL?: string
|
||||
collectionConfig?: SanitizedCollectionConfig
|
||||
globalConfig?: SanitizedGlobalConfig
|
||||
path?: string
|
||||
permissions?: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
tabConfig: { readonly Pill_Component?: React.FC } & DocumentTabConfig
|
||||
}> = (props) => {
|
||||
export const DocumentTab: React.FC<
|
||||
{ readonly Pill_Component?: React.FC } & DocumentTabConfig & DocumentTabServerProps
|
||||
> = (props) => {
|
||||
const {
|
||||
apiURL,
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
href: tabHref,
|
||||
i18n,
|
||||
isActive: tabIsActive,
|
||||
label,
|
||||
newTab,
|
||||
payload,
|
||||
permissions,
|
||||
req,
|
||||
tabConfig: { href: tabHref, isActive: tabIsActive, label, newTab, Pill, Pill_Component },
|
||||
Pill,
|
||||
Pill_Component,
|
||||
} = props
|
||||
|
||||
const { config } = payload
|
||||
const { routes } = config
|
||||
|
||||
let href = typeof tabHref === 'string' ? tabHref : ''
|
||||
let isActive = typeof tabIsActive === 'boolean' ? tabIsActive : false
|
||||
|
||||
@@ -42,7 +38,7 @@ export const DefaultDocumentTab: React.FC<{
|
||||
apiURL,
|
||||
collection: collectionConfig,
|
||||
global: globalConfig,
|
||||
routes: req.payload.config.routes,
|
||||
routes,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -55,13 +51,13 @@ export const DefaultDocumentTab: React.FC<{
|
||||
const labelToRender =
|
||||
typeof label === 'function'
|
||||
? label({
|
||||
t: req.i18n.t,
|
||||
t: i18n.t,
|
||||
})
|
||||
: label
|
||||
|
||||
return (
|
||||
<DocumentTabLink
|
||||
adminRoute={req.payload.config.routes.admin}
|
||||
adminRoute={routes.admin}
|
||||
ariaLabel={labelToRender}
|
||||
baseClass={baseClass}
|
||||
href={href}
|
||||
@@ -76,14 +72,12 @@ export const DefaultDocumentTab: React.FC<{
|
||||
{RenderServerComponent({
|
||||
Component: Pill,
|
||||
Fallback: Pill_Component,
|
||||
importMap: req.payload.importMap,
|
||||
importMap: payload.importMap,
|
||||
serverProps: {
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
i18n,
|
||||
payload,
|
||||
permissions,
|
||||
req,
|
||||
user: req.user,
|
||||
} satisfies DocumentTabServerPropsOnly,
|
||||
} satisfies ServerProps,
|
||||
})}
|
||||
</Fragment>
|
||||
) : null}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import type { I18n } from '@payloadcms/translations'
|
||||
import type {
|
||||
DocumentTabClientProps,
|
||||
DocumentTabServerPropsOnly,
|
||||
PayloadRequest,
|
||||
Payload,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
@@ -11,7 +12,7 @@ import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerCompo
|
||||
import React from 'react'
|
||||
|
||||
import { ShouldRenderTabs } from './ShouldRenderTabs.js'
|
||||
import { DefaultDocumentTab } from './Tab/index.js'
|
||||
import { DocumentTab } from './Tab/index.js'
|
||||
import { getTabs } from './tabs/index.js'
|
||||
import './index.scss'
|
||||
|
||||
@@ -20,10 +21,12 @@ const baseClass = 'doc-tabs'
|
||||
export const DocumentTabs: React.FC<{
|
||||
collectionConfig: SanitizedCollectionConfig
|
||||
globalConfig: SanitizedGlobalConfig
|
||||
i18n: I18n
|
||||
payload: Payload
|
||||
permissions: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
}> = ({ collectionConfig, globalConfig, permissions, req }) => {
|
||||
const { config } = req.payload
|
||||
}> = (props) => {
|
||||
const { collectionConfig, globalConfig, i18n, payload, permissions } = props
|
||||
const { config } = payload
|
||||
|
||||
const tabs = getTabs({
|
||||
collectionConfig,
|
||||
@@ -35,46 +38,42 @@ export const DocumentTabs: React.FC<{
|
||||
<div className={baseClass}>
|
||||
<div className={`${baseClass}__tabs-container`}>
|
||||
<ul className={`${baseClass}__tabs`}>
|
||||
{tabs?.map(({ tab: tabConfig, viewPath }, index) => {
|
||||
const { condition } = tabConfig || {}
|
||||
{tabs?.map(({ tab, viewPath }, index) => {
|
||||
const { condition } = tab || {}
|
||||
|
||||
const meetsCondition =
|
||||
!condition ||
|
||||
condition({ collectionConfig, config, globalConfig, permissions, req })
|
||||
!condition || condition({ collectionConfig, config, globalConfig, permissions })
|
||||
|
||||
if (!meetsCondition) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (tabConfig?.Component) {
|
||||
if (tab?.Component) {
|
||||
return RenderServerComponent({
|
||||
clientProps: {
|
||||
path: viewPath,
|
||||
} satisfies DocumentTabClientProps,
|
||||
Component: tabConfig.Component,
|
||||
importMap: req.payload.importMap,
|
||||
Component: tab.Component,
|
||||
importMap: payload.importMap,
|
||||
key: `tab-${index}`,
|
||||
serverProps: {
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
i18n,
|
||||
payload,
|
||||
permissions,
|
||||
req,
|
||||
user: req.user,
|
||||
} satisfies DocumentTabServerPropsOnly,
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
<DefaultDocumentTab
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
<DocumentTab
|
||||
key={`tab-${index}`}
|
||||
path={viewPath}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
tabConfig={tabConfig}
|
||||
{...{
|
||||
...props,
|
||||
...tab,
|
||||
}}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { I18n } from '@payloadcms/translations'
|
||||
import type {
|
||||
PayloadRequest,
|
||||
Payload,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
@@ -18,10 +18,11 @@ export const DocumentHeader: React.FC<{
|
||||
collectionConfig?: SanitizedCollectionConfig
|
||||
globalConfig?: SanitizedGlobalConfig
|
||||
hideTabs?: boolean
|
||||
i18n: I18n
|
||||
payload: Payload
|
||||
permissions: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
}> = (props) => {
|
||||
const { collectionConfig, globalConfig, hideTabs, permissions, req } = props
|
||||
const { collectionConfig, globalConfig, hideTabs, i18n, payload, permissions } = props
|
||||
|
||||
return (
|
||||
<Gutter className={baseClass}>
|
||||
@@ -30,8 +31,9 @@ export const DocumentHeader: React.FC<{
|
||||
<DocumentTabs
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
i18n={i18n}
|
||||
payload={payload}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
/>
|
||||
)}
|
||||
</Gutter>
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
@import '~@payloadcms/ui/scss';
|
||||
|
||||
$tab-width: 24px;
|
||||
$tab-width: 16px;
|
||||
|
||||
@layer payload-default {
|
||||
.query-inspector {
|
||||
--tab-width: 24px;
|
||||
|
||||
&__json-children {
|
||||
position: relative;
|
||||
|
||||
&--nested {
|
||||
& li {
|
||||
padding-left: 8px;
|
||||
padding-left: $tab-width;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,14 +23,6 @@ $tab-width: 24px;
|
||||
}
|
||||
}
|
||||
|
||||
&__row-line {
|
||||
&--nested {
|
||||
.query-inspector__json-children {
|
||||
padding-left: var(--tab-width);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&__list-wrap {
|
||||
position: relative;
|
||||
}
|
||||
@@ -47,16 +37,10 @@ $tab-width: 24px;
|
||||
border-bottom-right-radius: 0;
|
||||
position: relative;
|
||||
display: flex;
|
||||
column-gap: 14px;
|
||||
row-gap: 10px;
|
||||
gap: 10px;
|
||||
align-items: center;
|
||||
left: 0;
|
||||
left: -3px;
|
||||
width: calc(100% + 3px);
|
||||
background-color: var(--theme-elevation-50);
|
||||
|
||||
&:not(.query-inspector__list-toggle--empty) {
|
||||
margin-left: calc(var(--tab-width) * -1 - 10px);
|
||||
}
|
||||
|
||||
svg .stroke {
|
||||
stroke: var(--theme-elevation-400);
|
||||
@@ -98,31 +82,13 @@ $tab-width: 24px;
|
||||
&__bracket {
|
||||
position: relative;
|
||||
|
||||
&--position-end {
|
||||
left: 2px;
|
||||
width: calc(100% - 5px);
|
||||
&--nested {
|
||||
margin-left: $tab-width;
|
||||
}
|
||||
}
|
||||
|
||||
// Some specific rules targetting the very top of the nested JSON structure or very first items since they need slightly different styling
|
||||
&__results {
|
||||
& > .query-inspector__row-line--nested {
|
||||
& > .query-inspector__list-toggle {
|
||||
margin-left: 0;
|
||||
column-gap: 6px;
|
||||
|
||||
.query-inspector__toggle-row-icon {
|
||||
margin-left: -4px;
|
||||
}
|
||||
}
|
||||
|
||||
& > .query-inspector__json-children {
|
||||
padding-left: calc(var(--base) * 1);
|
||||
}
|
||||
|
||||
& > .query-inspector__bracket--nested > .query-inspector__bracket--position-end {
|
||||
padding-left: 16px;
|
||||
}
|
||||
&--position-end {
|
||||
left: 1px;
|
||||
width: calc(100% - 5px);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,8 +137,9 @@ export async function Account({ initPageResult, params, searchParams }: AdminVie
<DocumentHeader
collectionConfig={collectionConfig}
hideTabs
i18n={i18n}
payload={payload}
permissions={permissions}
req={req}
/>
<HydrateAuthProvider permissions={permissions} />
{RenderServerComponent({

@@ -19,14 +19,17 @@ type RenderTrashViewArgs = {
redirectAfterRestore?: boolean
} & AdminViewServerProps

export const TrashView: React.FC<Omit<RenderTrashViewArgs, 'enableRowSelections'>> = async (
args,
) => {
export const TrashView: React.FC<
{ query?: any } & Omit<RenderTrashViewArgs, 'enableRowSelections'>
> = async (args) => {
try {
const { List: TrashList } = await renderListView({
...args,
enableRowSelections: true,
trash: true,
query: {
...(args.query || {}),
trash: true, // force trash view
},
viewType: 'trash',
})


@@ -110,18 +110,17 @@ export const renderDocument = async ({
|
||||
|
||||
// Fetch the doc required for the view
|
||||
let doc =
|
||||
!idFromArgs && !globalSlug
|
||||
? initialData || null
|
||||
: await getDocumentData({
|
||||
id: idFromArgs,
|
||||
collectionSlug,
|
||||
globalSlug,
|
||||
locale,
|
||||
payload,
|
||||
req,
|
||||
segments,
|
||||
user,
|
||||
})
|
||||
initialData ||
|
||||
(await getDocumentData({
|
||||
id: idFromArgs,
|
||||
collectionSlug,
|
||||
globalSlug,
|
||||
locale,
|
||||
payload,
|
||||
req,
|
||||
segments,
|
||||
user,
|
||||
}))
|
||||
|
||||
if (isEditing && !doc) {
|
||||
// If it's a collection document that doesn't exist, redirect to collection list
|
||||
@@ -417,8 +416,9 @@ export const renderDocument = async ({
|
||||
<DocumentHeader
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
i18n={i18n}
|
||||
payload={payload}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
/>
|
||||
)}
|
||||
<HydrateAuthProvider permissions={permissions} />
|
||||
|
||||
@@ -5,7 +5,6 @@ import type {
|
||||
PaginatedDocs,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
ViewTypes,
|
||||
Where,
|
||||
} from 'payload'
|
||||
|
||||
@@ -23,9 +22,7 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
trash = false,
|
||||
user,
|
||||
viewType,
|
||||
where: whereWithMergedSearch,
|
||||
}: {
|
||||
clientConfig: ClientConfig
|
||||
@@ -37,9 +34,7 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections?: boolean
|
||||
query?: ListQuery
|
||||
req: PayloadRequest
|
||||
trash?: boolean
|
||||
user: any
|
||||
viewType?: ViewTypes
|
||||
where: Where
|
||||
}): Promise<{
|
||||
columnState: Column[]
|
||||
@@ -93,7 +88,6 @@ export const handleGroupBy = async ({
|
||||
populate,
|
||||
req,
|
||||
sort: query?.groupBy,
|
||||
trash,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
|
||||
@@ -133,7 +127,6 @@ export const handleGroupBy = async ({
|
||||
// Note: if we wanted to enable table-by-table sorting, we could use this:
|
||||
// sort: query?.queryByGroup?.[valueOrRelationshipID]?.sort,
|
||||
sort: query?.sort,
|
||||
trash,
|
||||
user,
|
||||
where: {
|
||||
...(whereWithMergedSearch || {}),
|
||||
@@ -143,11 +136,10 @@ export const handleGroupBy = async ({
|
||||
},
|
||||
})
|
||||
|
||||
let heading = valueOrRelationshipID
|
||||
let heading = valueOrRelationshipID || req.i18n.t('general:noValue')
|
||||
|
||||
if (
|
||||
groupByField?.type === 'relationship' &&
|
||||
potentiallyPopulatedRelationship &&
|
||||
typeof potentiallyPopulatedRelationship === 'object'
|
||||
) {
|
||||
heading =
|
||||
@@ -155,24 +147,14 @@ export const handleGroupBy = async ({
|
||||
valueOrRelationshipID
|
||||
}
|
||||
|
||||
if (groupByField.type === 'date' && valueOrRelationshipID) {
|
||||
if (groupByField.type === 'date') {
|
||||
heading = formatDate({
|
||||
date: String(valueOrRelationshipID),
|
||||
date: String(heading),
|
||||
i18n: req.i18n,
|
||||
pattern: clientConfig.admin.dateFormat,
|
||||
})
|
||||
}
|
||||
|
||||
if (groupByField.type === 'checkbox') {
|
||||
if (valueOrRelationshipID === true) {
|
||||
heading = req.i18n.t('general:true')
|
||||
}
|
||||
|
||||
if (valueOrRelationshipID === false) {
|
||||
heading = req.i18n.t('general:false')
|
||||
}
|
||||
}
|
||||
|
||||
if (groupData.docs && groupData.docs.length > 0) {
|
||||
const { columnState: newColumnState, Table: NewTable } = renderTable({
|
||||
clientCollectionConfig,
|
||||
@@ -184,14 +166,13 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections,
|
||||
groupByFieldPath,
|
||||
groupByValue: valueOrRelationshipID,
|
||||
heading: heading || req.i18n.t('general:noValue'),
|
||||
heading,
|
||||
i18n: req.i18n,
|
||||
key: `table-${valueOrRelationshipID}`,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
})
|
||||
|
||||
// Only need to set `columnState` once, using the first table's column state
|
||||
|
||||
@@ -1,19 +1,20 @@
|
||||
import type {
|
||||
AdminViewServerProps,
|
||||
CollectionPreferences,
|
||||
Column,
|
||||
ColumnPreference,
|
||||
ListQuery,
|
||||
ListViewClientProps,
|
||||
ListViewServerPropsOnly,
|
||||
PaginatedDocs,
|
||||
QueryPreset,
|
||||
SanitizedCollectionPermission,
|
||||
} from 'payload'
|
||||
|
||||
import { DefaultListView, HydrateAuthProvider, ListQueryProvider } from '@payloadcms/ui'
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
import { renderFilters, renderTable, upsertPreferences } from '@payloadcms/ui/rsc'
|
||||
import { notFound } from 'next/navigation.js'
|
||||
import {
|
||||
type AdminViewServerProps,
|
||||
type CollectionPreferences,
|
||||
type Column,
|
||||
type ColumnPreference,
|
||||
type ListQuery,
|
||||
type ListViewClientProps,
|
||||
type ListViewServerPropsOnly,
|
||||
type PaginatedDocs,
|
||||
type QueryPreset,
|
||||
type SanitizedCollectionPermission,
|
||||
} from 'payload'
|
||||
import {
|
||||
combineWhereConstraints,
|
||||
formatAdminURL,
|
||||
@@ -40,10 +41,6 @@ type RenderListViewArgs = {
|
||||
query: ListQuery
|
||||
redirectAfterDelete?: boolean
|
||||
redirectAfterDuplicate?: boolean
|
||||
/**
|
||||
* @experimental This prop is subject to change in future releases.
|
||||
*/
|
||||
trash?: boolean
|
||||
} & AdminViewServerProps
|
||||
|
||||
/**
|
||||
@@ -70,7 +67,6 @@ export const renderListView = async (
|
||||
params,
|
||||
query: queryFromArgs,
|
||||
searchParams,
|
||||
trash,
|
||||
viewType,
|
||||
} = args
|
||||
|
||||
@@ -138,14 +134,16 @@ export const renderListView = async (
|
||||
throw new Error('not-found')
|
||||
}
|
||||
|
||||
const baseFilterConstraint = await (
|
||||
collectionConfig.admin?.baseFilter ?? collectionConfig.admin?.baseListFilter
|
||||
)?.({
|
||||
limit: query.limit,
|
||||
page: query.page,
|
||||
req,
|
||||
sort: query.sort,
|
||||
})
|
||||
let baseListFilter = undefined
|
||||
|
||||
if (typeof collectionConfig.admin?.baseListFilter === 'function') {
|
||||
baseListFilter = await collectionConfig.admin.baseListFilter({
|
||||
limit: query.limit,
|
||||
page: query.page,
|
||||
req,
|
||||
sort: query.sort,
|
||||
})
|
||||
}
|
||||
|
||||
let queryPreset: QueryPreset | undefined
|
||||
let queryPresetPermissions: SanitizedCollectionPermission | undefined
|
||||
@@ -153,10 +151,10 @@ export const renderListView = async (
|
||||
let whereWithMergedSearch = mergeListSearchAndWhere({
|
||||
collectionConfig,
|
||||
search: typeof query?.search === 'string' ? query.search : undefined,
|
||||
where: combineWhereConstraints([query?.where, baseFilterConstraint]),
|
||||
where: combineWhereConstraints([query?.where, baseListFilter]),
|
||||
})
|
||||
|
||||
if (trash === true) {
|
||||
if (query?.trash === true) {
|
||||
whereWithMergedSearch = {
|
||||
and: [
|
||||
whereWithMergedSearch,
|
||||
@@ -192,81 +190,56 @@ export const renderListView = async (
|
||||
}
|
||||
}
|
||||
|
||||
let data: PaginatedDocs | undefined
|
||||
let Table: React.ReactNode | React.ReactNode[] = null
|
||||
let columnState: Column[] = []
|
||||
let data: PaginatedDocs = {
|
||||
// no results default
|
||||
docs: [],
|
||||
hasNextPage: false,
|
||||
hasPrevPage: false,
|
||||
limit: query.limit,
|
||||
nextPage: null,
|
||||
page: 1,
|
||||
pagingCounter: 0,
|
||||
prevPage: null,
|
||||
totalDocs: 0,
|
||||
totalPages: 0,
|
||||
}
|
||||
|
||||
try {
|
||||
if (collectionConfig.admin.groupBy && query.groupBy) {
|
||||
;({ columnState, data, Table } = await handleGroupBy({
|
||||
clientConfig,
|
||||
collectionConfig,
|
||||
collectionSlug,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
trash,
|
||||
user,
|
||||
viewType,
|
||||
where: whereWithMergedSearch,
|
||||
}))
|
||||
} else {
|
||||
data = await req.payload.find({
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
draft: true,
|
||||
fallbackLocale: false,
|
||||
includeLockStatus: true,
|
||||
limit: query?.limit ? Number(query.limit) : undefined,
|
||||
locale: req.locale,
|
||||
overrideAccess: false,
|
||||
page: query?.page ? Number(query.page) : undefined,
|
||||
req,
|
||||
sort: query?.sort,
|
||||
trash,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
;({ columnState, Table } = renderTable({
|
||||
clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug),
|
||||
collectionConfig,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
data,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
i18n: req.i18n,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
}))
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.name !== 'QueryError') {
|
||||
// QueryErrors are expected when a user filters by a field they do not have access to
|
||||
req.payload.logger.error({
|
||||
err,
|
||||
msg: `There was an error fetching the list view data for collection ${collectionSlug}`,
|
||||
})
|
||||
throw err
|
||||
}
|
||||
if (collectionConfig.admin.groupBy && query.groupBy) {
|
||||
;({ columnState, data, Table } = await handleGroupBy({
|
||||
clientConfig,
|
||||
collectionConfig,
|
||||
collectionSlug,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
}))
|
||||
} else {
|
||||
data = await req.payload.find({
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
draft: true,
|
||||
fallbackLocale: false,
|
||||
includeLockStatus: true,
|
||||
limit: query?.limit ? Number(query.limit) : undefined,
|
||||
locale: req.locale,
|
||||
overrideAccess: false,
|
||||
page: query?.page ? Number(query.page) : undefined,
|
||||
req,
|
||||
sort: query?.sort,
|
||||
trash: query?.trash === true,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
;({ columnState, Table } = renderTable({
|
||||
clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug),
|
||||
collectionConfig,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
data,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
i18n: req.i18n,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
}))
|
||||
}
|
||||
|
||||
const renderedFilters = renderFilters(collectionConfig.fields, req.payload.importMap)
|
||||
|
||||
@@ -15,7 +15,7 @@ export const SetStepNav: React.FC<{
readonly isTrashed?: boolean
versionToCreatedAtFormatted?: string
versionToID?: string
versionToUseAsTitle?: Record<string, string> | string
versionToUseAsTitle?: string
}> = ({
id,
collectionConfig,
@@ -54,7 +54,7 @@ export const SetStepNav: React.FC<{
? versionToUseAsTitle?.[locale.code] || docLabel
: versionToUseAsTitle
} else if (useAsTitle === 'id') {
docLabel = String(id)
docLabel = versionToID
}

const docBasePath: `/${string}` = isTrashed

@@ -17,13 +17,7 @@ import {
|
||||
type SanitizedFieldPermissions,
|
||||
type VersionField,
|
||||
} from 'payload'
|
||||
import {
|
||||
fieldIsID,
|
||||
fieldShouldBeLocalized,
|
||||
getFieldPermissions,
|
||||
getUniqueListBy,
|
||||
tabHasName,
|
||||
} from 'payload/shared'
|
||||
import { fieldIsID, fieldShouldBeLocalized, getUniqueListBy, tabHasName } from 'payload/shared'
|
||||
|
||||
import { diffComponents } from './fields/index.js'
|
||||
import { getFieldPathsModified } from './utilities/getFieldPathsModified.js'
|
||||
@@ -229,16 +223,21 @@ const buildVersionField = ({
|
||||
BuildVersionFieldsArgs,
|
||||
'fields' | 'parentIndexPath' | 'versionFromSiblingData' | 'versionToSiblingData'
|
||||
>): BaseVersionField | null => {
|
||||
const { permissions, read: hasReadPermission } = getFieldPermissions({
|
||||
field,
|
||||
operation: 'read',
|
||||
parentName: parentPath?.includes('.')
|
||||
? parentPath.split('.')[parentPath.split('.').length - 1]
|
||||
: parentPath,
|
||||
permissions: fieldPermissions,
|
||||
})
|
||||
const fieldName: null | string = 'name' in field ? field.name : null
|
||||
|
||||
if (!hasReadPermission) {
|
||||
const hasPermission =
|
||||
fieldPermissions === true ||
|
||||
!fieldName ||
|
||||
fieldPermissions?.[fieldName] === true ||
|
||||
fieldPermissions?.[fieldName]?.read
|
||||
|
||||
const subFieldPermissions =
|
||||
fieldPermissions === true ||
|
||||
!fieldName ||
|
||||
fieldPermissions?.[fieldName] === true ||
|
||||
fieldPermissions?.[fieldName]?.fields
|
||||
|
||||
if (!hasPermission) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -293,29 +292,13 @@ const buildVersionField = ({
|
||||
parentPath,
|
||||
parentSchemaPath,
|
||||
})
|
||||
|
||||
let tabPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (typeof permissions === 'boolean') {
|
||||
tabPermissions = permissions
|
||||
} else if (permissions && typeof permissions === 'object') {
|
||||
if ('name' in tab) {
|
||||
tabPermissions =
|
||||
typeof permissions.fields?.[tab.name] === 'object'
|
||||
? permissions.fields?.[tab.name].fields
|
||||
: permissions.fields?.[tab.name]
|
||||
} else {
|
||||
tabPermissions = permissions.fields
|
||||
}
|
||||
}
|
||||
|
||||
const tabVersion = {
|
||||
name: 'name' in tab ? tab.name : null,
|
||||
fields: buildVersionFields({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions: tabPermissions,
|
||||
fieldPermissions,
|
||||
fields: tab.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -341,13 +324,6 @@ const buildVersionField = ({
|
||||
}
|
||||
} // At this point, we are dealing with a `row`, `collapsible`, etc
|
||||
else if ('fields' in field) {
|
||||
let subfieldPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (typeof permissions === 'boolean') {
|
||||
subfieldPermissions = permissions
|
||||
} else if (permissions && typeof permissions === 'object') {
|
||||
subfieldPermissions = permissions.fields
|
||||
}
|
||||
if (field.type === 'array' && (valueTo || valueFrom)) {
|
||||
const maxLength = Math.max(
|
||||
Array.isArray(valueTo) ? valueTo.length : 0,
|
||||
@@ -363,7 +339,7 @@ const buildVersionField = ({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions: subfieldPermissions,
|
||||
fieldPermissions,
|
||||
fields: field.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -387,7 +363,7 @@ const buildVersionField = ({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions: subfieldPermissions,
|
||||
fieldPermissions,
|
||||
fields: field.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -445,24 +421,11 @@ const buildVersionField = ({
|
||||
}
|
||||
}
|
||||
|
||||
let blockPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (permissions === true) {
|
||||
blockPermissions = true
|
||||
} else {
|
||||
const permissionsBlockSpecific = permissions?.blocks?.[blockSlugToMatch]
|
||||
if (permissionsBlockSpecific === true) {
|
||||
blockPermissions = true
|
||||
} else {
|
||||
blockPermissions = permissionsBlockSpecific?.fields
|
||||
}
|
||||
}
|
||||
|
||||
baseVersionField.rows[i] = buildVersionFields({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions: blockPermissions,
|
||||
fieldPermissions,
|
||||
fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -496,7 +459,7 @@ const buildVersionField = ({
|
||||
*/
|
||||
diffMethod: 'diffWordsWithSpace',
|
||||
field: clientField,
|
||||
fieldPermissions: typeof permissions === 'object' ? permissions.fields : permissions,
|
||||
fieldPermissions: subFieldPermissions,
|
||||
parentIsLocalized,
|
||||
|
||||
nestingLevel: nestingLevel ? nestingLevel : undefined,
|
||||
|
||||
@@ -18,12 +18,12 @@ export const generateLabelFromValue({
value: PopulatedRelationshipValue
}): string => {
let relatedDoc: TypeWithID
let relationTo: string = field.relationTo as string
let valueToReturn: string = ''

const relationTo: string = 'relationTo' in value ? value.relationTo : (field.relationTo as string)

if (typeof value === 'object' && 'relationTo' in value) {
relatedDoc = value.value
relationTo = value.relationTo
} else {
// Non-polymorphic relationship
relatedDoc = value

@@ -411,11 +411,6 @@ export async function VersionView(props: DocumentViewServerProps) {
|
||||
})
|
||||
}
|
||||
|
||||
const useAsTitleFieldName = collectionConfig?.admin?.useAsTitle || 'id'
|
||||
const versionToUseAsTitle =
|
||||
useAsTitleFieldName === 'id'
|
||||
? String(versionTo.parent)
|
||||
: versionTo.version?.[useAsTitleFieldName]
|
||||
return (
|
||||
<DefaultVersionView
|
||||
canUpdate={docPermissions?.update}
|
||||
@@ -430,7 +425,7 @@ export async function VersionView(props: DocumentViewServerProps) {
|
||||
VersionToCreatedAtLabel={formatPill({ doc: versionTo, labelStyle: 'pill' })}
|
||||
versionToID={versionTo.id}
|
||||
versionToStatus={versionTo.version?._status}
|
||||
versionToUseAsTitle={versionToUseAsTitle}
|
||||
versionToUseAsTitle={versionTo[collectionConfig?.admin?.useAsTitle || 'id']}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/payload-cloud",
"version": "3.50.0",
"version": "3.49.0",
"description": "The official Payload Cloud plugin",
"homepage": "https://payloadcms.com",
"repository": {

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "3.50.0",
"version": "3.49.0",
"description": "Node, React, Headless CMS and Application Framework built on Next.js",
"keywords": [
"admin panel",

@@ -68,9 +68,6 @@ export type FieldPaths = {
path: string
}

/**
* TODO: This should be renamed to `FieldComponentServerProps` or similar
*/
export type ServerComponentProps = {
clientField: ClientFieldWithOptionalType
clientFieldSchemaMap: ClientFieldSchemaMap

@@ -2,7 +2,6 @@ import type { SanitizedPermissions } from '../../auth/types.js'
import type { SanitizedCollectionConfig } from '../../collections/config/types.js'
import type { PayloadComponent, SanitizedConfig, ServerProps } from '../../config/types.js'
import type { SanitizedGlobalConfig } from '../../globals/config/types.js'
import type { PayloadRequest } from '../../types/index.js'
import type { Data, DocumentSlots, FormState } from '../types.js'
import type { InitPageResult, ViewTypes } from './index.js'

@@ -51,7 +50,6 @@ export type DocumentTabServerPropsOnly = {
readonly collectionConfig?: SanitizedCollectionConfig
readonly globalConfig?: SanitizedGlobalConfig
readonly permissions: SanitizedPermissions
readonly req: PayloadRequest
} & ServerProps

export type DocumentTabClientProps = {
@@ -62,13 +60,9 @@ export type DocumentTabServerProps = DocumentTabClientProps & DocumentTabServerP

export type DocumentTabCondition = (args: {
collectionConfig: SanitizedCollectionConfig
/**
* @deprecated: Use `req.payload.config` instead. This will be removed in v4.
*/
config: SanitizedConfig
globalConfig: SanitizedGlobalConfig
permissions: SanitizedPermissions
req: PayloadRequest
}) => boolean

// Everything is optional because we merge in the defaults

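For orientation, a custom tab condition written against the `DocumentTabCondition` shape above could look like the sketch below. It is illustrative only: the `orders` slug is an assumption, the export from `payload` is assumed to match the type file above, and the sanitized config is read from `req.payload.config` since the `config` argument is marked deprecated.

import type { DocumentTabCondition } from 'payload'

// Hypothetical condition: only show the tab on an assumed 'orders' collection.
// Reads the sanitized config from req.payload.config rather than the deprecated `config` arg.
export const onlyForOrders: DocumentTabCondition = ({ collectionConfig, req }) => {
  const hasOrders = req.payload.config.collections.some((c) => c.slug === 'orders')
  return hasOrders && collectionConfig?.slug === 'orders'
}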
@@ -1,6 +1,6 @@
export const isUserLocked = (date: Date): boolean => {
export const isUserLocked = (date: number): boolean => {
if (!date) {
return false
}
return date.getTime() > Date.now()
return date > Date.now()
}

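To illustrate the changed signature above (a millisecond timestamp instead of a `Date`), a caller now converts the stored lock value before checking, mirroring the `checkLoginPermission` call site later in this diff. The `user.lockUntil` ISO string and the plain `Error` are placeholders for this sketch.

// `user.lockUntil` is assumed to be an ISO date string on the auth doc.
const lockedOut = isUserLocked(new Date(user.lockUntil).getTime())

if (lockedOut) {
  // checkLoginPermission throws LockedAuth here; a plain error stands in for the sketch.
  throw new Error('Account is temporarily locked')
}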
@@ -1,3 +1,5 @@
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type {
|
||||
AuthOperationsFromCollectionSlug,
|
||||
Collection,
|
||||
@@ -22,7 +24,7 @@ import { getFieldsToSign } from '../getFieldsToSign.js'
|
||||
import { getLoginOptions } from '../getLoginOptions.js'
|
||||
import { isUserLocked } from '../isUserLocked.js'
|
||||
import { jwtSign } from '../jwt.js'
|
||||
import { addSessionToUser } from '../sessions.js'
|
||||
import { removeExpiredSessions } from '../removeExpiredSessions.js'
|
||||
import { authenticateLocalStrategy } from '../strategies/local/authenticate.js'
|
||||
import { incrementLoginAttempts } from '../strategies/local/incrementLoginAttempts.js'
|
||||
import { resetLoginAttempts } from '../strategies/local/resetLoginAttempts.js'
|
||||
@@ -48,11 +50,6 @@ type CheckLoginPermissionArgs = {
|
||||
user: any
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error if the user is locked or does not exist.
|
||||
* This does not check the login attempts, only the lock status. Whoever increments login attempts
|
||||
* is responsible for locking the user properly, not whoever checks the login permission.
|
||||
*/
|
||||
export const checkLoginPermission = ({
|
||||
loggingInWithUsername,
|
||||
req,
|
||||
@@ -62,7 +59,7 @@ export const checkLoginPermission = ({
|
||||
throw new AuthenticationError(req.t, Boolean(loggingInWithUsername))
|
||||
}
|
||||
|
||||
if (isUserLocked(new Date(user.lockUntil))) {
|
||||
if (isUserLocked(new Date(user.lockUntil).getTime())) {
|
||||
throw new LockedAuth(req.t)
|
||||
}
|
||||
}
|
||||
@@ -209,11 +206,11 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
where: whereConstraint,
|
||||
})
|
||||
|
||||
let user = (await payload.db.findOne<TypedUser>({
|
||||
let user = await payload.db.findOne<any>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: whereConstraint,
|
||||
})) as TypedUser
|
||||
})
|
||||
|
||||
checkLoginPermission({
|
||||
loggingInWithUsername: Boolean(canLoginWithUsername && sanitizedUsername),
|
||||
@@ -233,16 +230,9 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
if (maxLoginAttemptsEnabled) {
|
||||
await incrementLoginAttempts({
|
||||
collection: collectionConfig,
|
||||
doc: user,
|
||||
payload: req.payload,
|
||||
req,
|
||||
user,
|
||||
})
|
||||
|
||||
// Re-check login permissions and max attempts after incrementing attempts, in case parallel updates occurred
|
||||
checkLoginPermission({
|
||||
loggingInWithUsername: Boolean(canLoginWithUsername && sanitizedUsername),
|
||||
req,
|
||||
user,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -253,45 +243,40 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
throw new UnverifiedEmail({ t: req.t })
|
||||
}
|
||||
|
||||
/*
|
||||
* Correct password accepted - re‑check that the account didn't
|
||||
* get locked by parallel bad attempts in the meantime.
|
||||
*/
|
||||
if (maxLoginAttemptsEnabled) {
|
||||
const { lockUntil, loginAttempts } = (await payload.db.findOne<TypedUser>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
where: { id: { equals: user.id } },
|
||||
}))!
|
||||
|
||||
user.lockUntil = lockUntil
|
||||
user.loginAttempts = loginAttempts
|
||||
|
||||
checkLoginPermission({
|
||||
req,
|
||||
user,
|
||||
})
|
||||
}
|
||||
|
||||
const fieldsToSignArgs: Parameters<typeof getFieldsToSign>[0] = {
|
||||
collectionConfig,
|
||||
email: sanitizedEmail!,
|
||||
user,
|
||||
}
|
||||
|
||||
const { sid } = await addSessionToUser({
|
||||
collectionConfig,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
})
|
||||
if (collectionConfig.auth.useSessions) {
|
||||
// Add session to user
|
||||
const newSessionID = uuid()
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
const expiresAt = new Date(now.getTime() + tokenExpInMs)
|
||||
|
||||
if (sid) {
|
||||
fieldsToSignArgs.sid = sid
|
||||
const session = { id: newSessionID, createdAt: now, expiresAt }
|
||||
|
||||
if (!user.sessions?.length) {
|
||||
user.sessions = [session]
|
||||
} else {
|
||||
user.sessions = removeExpiredSessions(user.sessions)
|
||||
user.sessions.push(session)
|
||||
}
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
data: user,
|
||||
req,
|
||||
returning: false,
|
||||
})
|
||||
|
||||
user.collection = collectionConfig.slug
|
||||
user._strategy = 'local-jwt'
|
||||
|
||||
fieldsToSignArgs.sid = newSessionID
|
||||
}
|
||||
|
||||
const fieldsToSign = getFieldsToSign(fieldsToSignArgs)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import url from 'url'
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type { Collection } from '../../collections/config/types.js'
|
||||
import type { Document, PayloadRequest } from '../../types/index.js'
|
||||
@@ -10,7 +11,7 @@ import { initTransaction } from '../../utilities/initTransaction.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { getFieldsToSign } from '../getFieldsToSign.js'
|
||||
import { jwtSign } from '../jwt.js'
|
||||
import { removeExpiredSessions } from '../sessions.js'
|
||||
import { removeExpiredSessions } from '../removeExpiredSessions.js'
|
||||
|
||||
export type Result = {
|
||||
exp: number
|
||||
@@ -73,10 +74,11 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
const parsedURL = url.parse(args.req.url!)
|
||||
const isGraphQL = parsedURL.pathname === config.routes.graphQL
|
||||
|
||||
let user = await req.payload.db.findOne<any>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: { id: { equals: args.req.user.id } },
|
||||
const user = await args.req.payload.findByID({
|
||||
id: args.req.user.id,
|
||||
collection: args.req.user.collection,
|
||||
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
|
||||
req: args.req,
|
||||
})
|
||||
|
||||
const sid = args.req.user._sid
|
||||
@@ -86,7 +88,7 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
throw new Forbidden(args.req.t)
|
||||
}
|
||||
|
||||
const existingSession = user.sessions.find(({ id }: { id: number }) => id === sid)
|
||||
const existingSession = user.sessions.find(({ id }) => id === sid)
|
||||
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
@@ -104,13 +106,6 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
})
|
||||
}
|
||||
|
||||
user = await req.payload.findByID({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
|
||||
req: args.req,
|
||||
})
|
||||
|
||||
if (user) {
|
||||
user.collection = args.req.user.collection
|
||||
user._strategy = args.req.user._strategy
|
||||
|
||||
10
packages/payload/src/auth/removeExpiredSessions.ts
Normal file
@@ -0,0 +1,10 @@
import type { UserSession } from './types.js'

export const removeExpiredSessions = (sessions: UserSession[]) => {
const now = new Date()

return sessions.filter(({ expiresAt }) => {
const expiry = expiresAt instanceof Date ? expiresAt : new Date(expiresAt)
return expiry > now
})
}
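A minimal usage sketch of the helper above, with made-up session data; the login and refresh operations in this diff call it the same way before appending a new session.

import { removeExpiredSessions } from './removeExpiredSessions.js'

// Illustrative data: one expired session, one still valid.
const sessions = [
  { id: 'expired', createdAt: new Date('2024-01-01'), expiresAt: new Date('2024-01-02') },
  { id: 'active', createdAt: new Date(), expiresAt: new Date(Date.now() + 60 * 60 * 1000) },
]

// String `expiresAt` values are normalized with `new Date(...)`, so only 'active' survives.
const pruned = removeExpiredSessions(sessions)
console.log(pruned.map((s) => s.id)) // ['active']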
@@ -1,67 +0,0 @@
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type { SanitizedCollectionConfig } from '../collections/config/types.js'
|
||||
import type { TypedUser } from '../index.js'
|
||||
import type { Payload, PayloadRequest } from '../types/index.js'
|
||||
import type { UserSession } from './types.js'
|
||||
|
||||
/**
|
||||
* Removes expired sessions from an array of sessions
|
||||
*/
|
||||
export const removeExpiredSessions = (sessions: UserSession[]) => {
|
||||
const now = new Date()
|
||||
|
||||
return sessions.filter(({ expiresAt }) => {
|
||||
const expiry = expiresAt instanceof Date ? expiresAt : new Date(expiresAt)
|
||||
return expiry > now
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a session to the user and removes expired sessions
|
||||
* @returns The session ID (sid) if sessions are used
|
||||
*/
|
||||
export const addSessionToUser = async ({
|
||||
collectionConfig,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
}: {
|
||||
collectionConfig: SanitizedCollectionConfig
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
user: TypedUser
|
||||
}): Promise<{ sid?: string }> => {
|
||||
let sid: string | undefined
|
||||
if (collectionConfig.auth.useSessions) {
|
||||
// Add session to user
|
||||
sid = uuid()
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
const expiresAt = new Date(now.getTime() + tokenExpInMs)
|
||||
|
||||
const session = { id: sid, createdAt: now, expiresAt }
|
||||
|
||||
if (!user.sessions?.length) {
|
||||
user.sessions = [session]
|
||||
} else {
|
||||
user.sessions = removeExpiredSessions(user.sessions)
|
||||
user.sessions.push(session)
|
||||
}
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
data: user,
|
||||
req,
|
||||
returning: false,
|
||||
})
|
||||
|
||||
user.collection = collectionConfig.slug
|
||||
user._strategy = 'local-jwt'
|
||||
}
|
||||
|
||||
return {
|
||||
sid,
|
||||
}
|
||||
}
|
||||
@@ -1,154 +1,59 @@
|
||||
import type { SanitizedCollectionConfig } from '../../../collections/config/types.js'
|
||||
import type { SanitizedCollectionConfig, TypeWithID } from '../../../collections/config/types.js'
|
||||
import type { JsonObject, Payload } from '../../../index.js'
|
||||
import type { PayloadRequest } from '../../../types/index.js'
|
||||
|
||||
import { type JsonObject, type Payload, type TypedUser } from '../../../index.js'
|
||||
import { isUserLocked } from '../../isUserLocked.js'
|
||||
|
||||
type Args = {
|
||||
collection: SanitizedCollectionConfig
|
||||
doc: Record<string, unknown> & TypeWithID
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
user: TypedUser
|
||||
}
|
||||
|
||||
// Note: this function does not use req in most updates, as we want those to be visible in parallel requests that are on a different
|
||||
// transaction. At the same time, we want updates from parallel requests to be visible here.
|
||||
export const incrementLoginAttempts = async ({
|
||||
collection,
|
||||
doc,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
const {
|
||||
auth: { lockTime, maxLoginAttempts },
|
||||
} = collection
|
||||
|
||||
const currentTime = Date.now()
|
||||
if ('lockUntil' in doc && typeof doc.lockUntil === 'string') {
|
||||
const lockUntil = new Date(doc.lockUntil).getTime()
|
||||
|
||||
let updatedLockUntil: null | string = null
|
||||
let updatedLoginAttempts: null | number = null
|
||||
|
||||
if (user.lockUntil && !isUserLocked(new Date(user.lockUntil))) {
|
||||
// Expired lock, restart count at 1
|
||||
const updatedUser = await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: null,
|
||||
loginAttempts: 1,
|
||||
},
|
||||
req,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
})
|
||||
updatedLockUntil = updatedUser.lockUntil
|
||||
updatedLoginAttempts = updatedUser.loginAttempts
|
||||
user.lockUntil = updatedLockUntil
|
||||
} else {
|
||||
const data: JsonObject = {
|
||||
loginAttempts: {
|
||||
$inc: 1,
|
||||
},
|
||||
}
|
||||
|
||||
const willReachMaxAttempts =
|
||||
typeof user.loginAttempts === 'number' && user.loginAttempts + 1 >= maxLoginAttempts
|
||||
// Lock the account if at max attempts and not already locked
|
||||
if (willReachMaxAttempts) {
|
||||
const lockUntil = new Date(currentTime + lockTime).toISOString()
|
||||
data.lockUntil = lockUntil
|
||||
}
|
||||
|
||||
const updatedUser = await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
})
|
||||
|
||||
updatedLockUntil = updatedUser.lockUntil
|
||||
updatedLoginAttempts = updatedUser.loginAttempts
|
||||
}
|
||||
|
||||
if (updatedLoginAttempts === null) {
|
||||
throw new Error('Failed to update login attempts or lockUntil for user')
|
||||
}
|
||||
|
||||
// Check updated latest lockUntil and loginAttempts in case there were parallel updates
|
||||
const reachedMaxAttemptsForCurrentUser =
|
||||
typeof updatedLoginAttempts === 'number' && updatedLoginAttempts - 1 >= maxLoginAttempts
|
||||
|
||||
const reachedMaxAttemptsForNextUser =
|
||||
typeof updatedLoginAttempts === 'number' && updatedLoginAttempts >= maxLoginAttempts
|
||||
|
||||
if (reachedMaxAttemptsForCurrentUser) {
|
||||
user.lockUntil = updatedLockUntil
|
||||
}
|
||||
user.loginAttempts = updatedLoginAttempts - 1 // -1, as the updated increment is applied for the *next* login attempt, not the current one
|
||||
|
||||
if (
|
||||
reachedMaxAttemptsForNextUser &&
|
||||
(!updatedLockUntil || !isUserLocked(new Date(updatedLockUntil)))
|
||||
) {
|
||||
// If lockUntil reached max login attempts due to multiple parallel attempts but user was not locked yet,
|
||||
const newLockUntil = new Date(currentTime + lockTime).toISOString()
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: newLockUntil,
|
||||
},
|
||||
returning: false,
|
||||
})
|
||||
|
||||
if (reachedMaxAttemptsForCurrentUser) {
|
||||
user.lockUntil = newLockUntil
|
||||
}
|
||||
|
||||
if (collection.auth.useSessions) {
|
||||
// Remove all active sessions that have been created in a 20 second window. This protects
|
||||
// against brute force attacks - example: 99 incorrect, 1 correct parallel login attempts.
|
||||
// The correct login attempt will be finished first, as it's faster due to not having to perform
|
||||
// an additional db update here.
|
||||
// However, this request (the incorrect login attempt request) can kill the successful login attempt here.
|
||||
|
||||
// Fetch user sessions separately (do not do this in the updateOne select in order to preserve the returning: true db call optimization)
|
||||
const currentUser = await payload.db.findOne<TypedUser>({
|
||||
if (lockUntil < Date.now()) {
|
||||
await payload.update({
|
||||
id: doc.id,
|
||||
collection: collection.slug,
|
||||
select: {
|
||||
sessions: true,
|
||||
},
|
||||
where: {
|
||||
id: {
|
||||
equals: user.id,
|
||||
},
|
||||
data: {
|
||||
lockUntil: null,
|
||||
loginAttempts: 1,
|
||||
},
|
||||
depth: 0,
|
||||
req,
|
||||
})
|
||||
if (currentUser?.sessions?.length) {
|
||||
// Does not hurt also removing expired sessions
|
||||
currentUser.sessions = currentUser.sessions.filter((session) => {
|
||||
const sessionCreatedAt = new Date(session.createdAt)
|
||||
const twentySecondsAgo = new Date(currentTime - 20000)
|
||||
|
||||
// Remove sessions created within the last 20 seconds
|
||||
return sessionCreatedAt <= twentySecondsAgo
|
||||
})
|
||||
|
||||
user.sessions = currentUser.sessions
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: user,
|
||||
returning: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const data: JsonObject = {
|
||||
loginAttempts: Number(doc.loginAttempts) + 1,
|
||||
}
|
||||
|
||||
// Lock the account if at max attempts and not already locked
|
||||
if (typeof doc.loginAttempts === 'number' && doc.loginAttempts + 1 >= maxLoginAttempts) {
|
||||
const lockUntil = new Date(Date.now() + lockTime).toISOString()
|
||||
data.lockUntil = lockUntil
|
||||
}
|
||||
|
||||
await payload.update({
|
||||
id: doc.id,
|
||||
collection: collection.slug,
|
||||
data,
|
||||
depth: 0,
|
||||
req,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -21,14 +21,15 @@ export const resetLoginAttempts = async ({
) {
return
}
await payload.db.updateOne({
await payload.update({
id: doc.id,
collection: collection.slug,
data: {
lockUntil: null,
loginAttempts: 0,
},
depth: 0,
overrideAccess: true,
req,
returning: false,
})
}

@@ -29,7 +29,7 @@ export type ServerOnlyCollectionProperties = keyof Pick<

export type ServerOnlyCollectionAdminProperties = keyof Pick<
SanitizedCollectionConfig['admin'],
'baseFilter' | 'baseListFilter' | 'components' | 'hidden'
'baseListFilter' | 'components' | 'hidden'
>

export type ServerOnlyUploadProperties = keyof Pick<
@@ -94,7 +94,6 @@ const serverOnlyUploadProperties: Partial<ServerOnlyUploadProperties>[] = [

const serverOnlyCollectionAdminProperties: Partial<ServerOnlyCollectionAdminProperties>[] = [
'hidden',
'baseFilter',
'baseListFilter',
'components',
// 'preview' is handled separately

@@ -85,7 +85,6 @@ export type HookOperationType =
| 'readDistinct'
| 'refresh'
| 'resetPassword'
| 'restoreVersion'
| 'update'

type CreateOrUpdateOperation = Extract<HookOperationType, 'create' | 'update'>
@@ -270,7 +269,7 @@ export type EnableFoldersOptions = {
debug?: boolean
}

export type BaseFilter = (args: {
export type BaseListFilter = (args: {
limit: number
locale?: TypedLocale
page: number
@@ -278,31 +277,7 @@ export type BaseFilter = (args: {
sort: string
}) => null | Promise<null | Where> | Where

/**
* @deprecated Use `BaseFilter` instead.
*/
export type BaseListFilter = BaseFilter

export type CollectionAdminOptions = {
/**
* Defines a default base filter which will be applied in the following parts of the admin panel:
* - List View
* - Relationship fields for internal links within the Lexical editor
*
* This is especially useful for plugins like multi-tenant. For example,
* a user may have access to multiple tenants, but should only see content
* related to the currently active or selected tenant in those places.
*/
baseFilter?: BaseFilter
/**
* @deprecated Use `baseFilter` instead. If both are defined,
* `baseFilter` will take precedence. This property remains only
* for backward compatibility and may be removed in a future version.
*
* Originally, `baseListFilter` was intended to filter only the List View
* in the admin panel. However, base filtering is often required in other areas
* such as internal link relationships in the Lexical editor.
*/
baseListFilter?: BaseListFilter
/**
* Custom admin components
@@ -712,7 +687,7 @@ export type AuthCollection = {
}

export type TypeWithID = {
deletedAt?: null | string
deletedAt?: string
docId?: any
id: number | string
}
@@ -720,7 +695,7 @@ export type TypeWithTimestamps = {
export type TypeWithTimestamps = {
[key: string]: unknown
createdAt: string
deletedAt?: null | string
deletedAt?: string
id: number | string
updatedAt: string
}

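To make the `baseFilter` option above concrete, a multi-tenant style collection might scope the admin panel like the sketch below. The `tenant` field and the `tenant` value on `req.user` are assumptions for the example; `baseListFilter` remains only as the deprecated alias described above.

import type { CollectionConfig } from 'payload'

export const Pages: CollectionConfig = {
  slug: 'pages',
  admin: {
    // Returning null applies no constraint; returning a Where narrows the List View
    // and internal-link relationship results to the current tenant.
    baseFilter: ({ req }) => {
      const tenant = (req.user as { tenant?: string } | null)?.tenant
      return tenant ? { tenant: { equals: tenant } } : null
    },
  },
  fields: [{ name: 'tenant', type: 'text' }],
}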
@@ -8,15 +8,13 @@ import { countOperation } from '../operations/count.js'

export const countHandler: PayloadHandler = async (req) => {
const collection = getRequestCollection(req)
const { trash, where } = req.query as {
trash?: string
const { where } = req.query as {
where?: Where
}

const result = await countOperation({
collection,
req,
trash: trash === 'true',
where,
})


@@ -11,14 +11,13 @@ import { findDistinctOperation } from '../operations/findDistinct.js'
|
||||
|
||||
export const findDistinctHandler: PayloadHandler = async (req) => {
|
||||
const collection = getRequestCollection(req)
|
||||
const { depth, field, limit, page, sort, trash, where } = req.query as {
|
||||
const { depth, field, limit, page, sort, where } = req.query as {
|
||||
depth?: string
|
||||
field?: string
|
||||
limit?: string
|
||||
page?: string
|
||||
sort?: string
|
||||
sortOrder?: string
|
||||
trash?: string
|
||||
where?: Where
|
||||
}
|
||||
|
||||
@@ -34,7 +33,6 @@ export const findDistinctHandler: PayloadHandler = async (req) => {
|
||||
page: isNumber(page) ? Number(page) : undefined,
|
||||
req,
|
||||
sort: typeof sort === 'string' ? sort.split(',') : undefined,
|
||||
trash: trash === 'true',
|
||||
where,
|
||||
})
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import { docAccessHandler } from './docAccess.js'
|
||||
import { duplicateHandler } from './duplicate.js'
|
||||
import { findHandler } from './find.js'
|
||||
import { findByIDHandler } from './findByID.js'
|
||||
// import { findDistinctHandler } from './findDistinct.js'
|
||||
import { findDistinctHandler } from './findDistinct.js'
|
||||
import { findVersionByIDHandler } from './findVersionByID.js'
|
||||
import { findVersionsHandler } from './findVersions.js'
|
||||
import { previewHandler } from './preview.js'
|
||||
|
||||
@@ -7,7 +7,6 @@ import { executeAccess } from '../../auth/executeAccess.js'
|
||||
import { combineQueries } from '../../database/combineQueries.js'
|
||||
import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js'
|
||||
import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
|
||||
import { appendNonTrashedFilter } from '../../utilities/appendNonTrashedFilter.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
|
||||
@@ -16,7 +15,6 @@ export type Arguments = {
|
||||
disableErrors?: boolean
|
||||
overrideAccess?: boolean
|
||||
req?: PayloadRequest
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
}
|
||||
|
||||
@@ -49,7 +47,6 @@ export const countOperation = async <TSlug extends CollectionSlug>(
|
||||
disableErrors,
|
||||
overrideAccess,
|
||||
req,
|
||||
trash = false,
|
||||
where,
|
||||
} = args
|
||||
|
||||
@@ -74,16 +71,9 @@ export const countOperation = async <TSlug extends CollectionSlug>(
|
||||
|
||||
let result: { totalDocs: number }
|
||||
|
||||
let fullWhere = combineQueries(where!, accessResult!)
|
||||
const fullWhere = combineQueries(where!, accessResult!)
|
||||
sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })
|
||||
|
||||
// Exclude trashed documents when trash: false
|
||||
fullWhere = appendNonTrashedFilter({
|
||||
enableTrash: collectionConfig.trash,
|
||||
trash,
|
||||
where: fullWhere,
|
||||
})
|
||||
|
||||
await validateQueryPaths({
|
||||
collectionConfig,
|
||||
overrideAccess: overrideAccess!,
|
||||
|
||||
@@ -291,7 +291,6 @@ export const createOperation = async <
|
||||
autosave,
|
||||
collection: collectionConfig,
|
||||
docWithLocales: result,
|
||||
operation: 'create',
|
||||
payload,
|
||||
publishSpecificLocale,
|
||||
req,
|
||||
|
||||
@@ -12,7 +12,6 @@ import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
|
||||
import { APIError } from '../../errors/APIError.js'
|
||||
import { Forbidden } from '../../errors/Forbidden.js'
|
||||
import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js'
|
||||
import { appendNonTrashedFilter } from '../../utilities/appendNonTrashedFilter.js'
|
||||
import { getFieldByPath } from '../../utilities/getFieldByPath.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
@@ -30,7 +29,6 @@ export type Arguments = {
|
||||
req?: PayloadRequest
|
||||
showHiddenFields?: boolean
|
||||
sort?: Sort
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
}
|
||||
export const findDistinctOperation = async (
|
||||
@@ -62,7 +60,6 @@ export const findDistinctOperation = async (
|
||||
overrideAccess,
|
||||
populate,
|
||||
showHiddenFields = false,
|
||||
trash = false,
|
||||
where,
|
||||
} = args
|
||||
|
||||
@@ -99,16 +96,9 @@ export const findDistinctOperation = async (
|
||||
// Find Distinct
|
||||
// /////////////////////////////////////
|
||||
|
||||
let fullWhere = combineQueries(where!, accessResult!)
|
||||
const fullWhere = combineQueries(where!, accessResult!)
|
||||
sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })
|
||||
|
||||
// Exclude trashed documents when trash: false
|
||||
fullWhere = appendNonTrashedFilter({
|
||||
enableTrash: collectionConfig.trash,
|
||||
trash,
|
||||
where: fullWhere,
|
||||
})
|
||||
|
||||
await validateQueryPaths({
|
||||
collectionConfig,
|
||||
overrideAccess: overrideAccess!,
|
||||
|
||||
@@ -41,15 +41,6 @@ export type Options<TSlug extends CollectionSlug> = {
* Recommended to pass when using the Local API from hooks, as usually you want to execute the operation within the current transaction.
*/
req?: Partial<PayloadRequest>
/**
* When set to `true`, the query will include both normal and trashed documents.
* To query only trashed documents, pass `trash: true` and combine with a `where` clause filtering by `deletedAt`.
* By default (`false`), the query will only include normal documents and exclude those with a `deletedAt` field.
*
* This argument has no effect unless `trash` is enabled on the collection.
* @default false
*/
trash?: boolean
/**
* If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
*/
@@ -64,13 +55,7 @@ export async function countLocal<TSlug extends CollectionSlug>(
payload: Payload,
options: Options<TSlug>,
): Promise<{ totalDocs: number }> {
const {
collection: collectionSlug,
disableErrors,
overrideAccess = true,
trash = false,
where,
} = options
const { collection: collectionSlug, disableErrors, overrideAccess = true, where } = options

const collection = payload.collections[collectionSlug]

@@ -85,7 +70,6 @@ export async function countLocal<TSlug extends CollectionSlug>(
disableErrors,
overrideAccess,
req: await createLocalReq(options as CreateLocalReqOptions, payload),
trash,
where,
})
}

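As a usage sketch of the `trash` option documented above, on the branch where that option exists, a Local API count can include or isolate soft-deleted documents. The `posts` slug and an already initialized `payload` instance are assumptions for the example.

// Default behaviour: documents carrying a `deletedAt` value are excluded.
const live = await payload.count({ collection: 'posts' })

// With trash: true both normal and trashed docs are counted; filtering on `deletedAt`
// isolates the trashed ones, as the JSDoc above describes.
const trashed = await payload.count({
  collection: 'posts',
  trash: true,
  where: { deletedAt: { exists: true } },
})

console.log(live.totalDocs, trashed.totalDocs)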
@@ -83,15 +83,6 @@ export type Options<
|
||||
* @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt
|
||||
*/
|
||||
sort?: Sort
|
||||
/**
|
||||
* When set to `true`, the query will include both normal and trashed documents.
|
||||
* To query only trashed documents, pass `trash: true` and combine with a `where` clause filtering by `deletedAt`.
|
||||
* By default (`false`), the query will only include normal documents and exclude those with a `deletedAt` field.
|
||||
*
|
||||
* This argument has no effect unless `trash` is enabled on the collection.
|
||||
* @default false
|
||||
*/
|
||||
trash?: boolean
|
||||
/**
|
||||
* If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
|
||||
*/
|
||||
@@ -120,7 +111,6 @@ export async function findDistinct<
|
||||
populate,
|
||||
showHiddenFields,
|
||||
sort,
|
||||
trash = false,
|
||||
where,
|
||||
} = options
|
||||
const collection = payload.collections[collectionSlug]
|
||||
@@ -143,7 +133,6 @@ export async function findDistinct<
|
||||
req: await createLocalReq(options as CreateLocalReqOptions, payload),
|
||||
showHiddenFields,
|
||||
sort,
|
||||
trash,
|
||||
where,
|
||||
}) as Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>>
|
||||
}
|
||||
|
||||
@@ -10,23 +10,15 @@ import { combineQueries } from '../../database/combineQueries.js'
|
||||
import { APIError, Forbidden, NotFound } from '../../errors/index.js'
|
||||
import { afterChange } from '../../fields/hooks/afterChange/index.js'
|
||||
import { afterRead } from '../../fields/hooks/afterRead/index.js'
|
||||
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
|
||||
import { beforeValidate } from '../../fields/hooks/beforeValidate/index.js'
|
||||
import { commitTransaction } from '../../utilities/commitTransaction.js'
|
||||
import { deepCopyObjectSimple } from '../../utilities/deepCopyObject.js'
|
||||
import { initTransaction } from '../../utilities/initTransaction.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { sanitizeSelect } from '../../utilities/sanitizeSelect.js'
|
||||
import { getLatestCollectionVersion } from '../../versions/getLatestCollectionVersion.js'
|
||||
import { saveVersion } from '../../versions/saveVersion.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
|
||||
export type Arguments = {
|
||||
collection: Collection
|
||||
currentDepth?: number
|
||||
depth?: number
|
||||
disableErrors?: boolean
|
||||
disableTransaction?: boolean
|
||||
draft?: boolean
|
||||
id: number | string
|
||||
overrideAccess?: boolean
|
||||
@@ -43,7 +35,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
id,
|
||||
collection: { config: collectionConfig },
|
||||
depth,
|
draft: draftArg = false,
draft,
overrideAccess = false,
populate,
req,
@@ -53,25 +45,6 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
} = args

try {
const shouldCommit = !args.disableTransaction && (await initTransaction(args.req))

// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////

if (args.collection.config.hooks?.beforeOperation?.length) {
for (const hook of args.collection.config.hooks.beforeOperation) {
args =
(await hook({
args,
collection: args.collection.config,
context: args.req.context,
operation: 'restoreVersion',
req: args.req,
})) || args
}
}

if (!id) {
throw new APIError('Missing ID of version to restore.', httpStatus.BAD_REQUEST)
}
@@ -95,7 +68,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
throw new NotFound(req.t)
}

const { parent: parentDocID, version: versionToRestoreWithLocales } = rawVersion
const parentDocID = rawVersion.parent

// /////////////////////////////////////
// Access
@@ -117,7 +90,6 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
where: combineQueries({ id: { equals: parentDocID } }, accessResults),
}

// Get the document from the non versioned collection
const doc = await req.payload.db.findOne(findOneArgs)

if (!doc && !hasWherePolicy) {
@@ -137,6 +109,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
// /////////////////////////////////////
// fetch previousDoc
// /////////////////////////////////////

const prevDocWithLocales = await getLatestCollectionVersion({
id: parentDocID,
config: collectionConfig,
@@ -145,109 +118,6 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
req,
})

// originalDoc with hoisted localized data
const originalDoc = await afterRead({
collection: collectionConfig,
context: req.context,
depth: 0,
doc: deepCopyObjectSimple(prevDocWithLocales),
draft: draftArg,
fallbackLocale: null,
global: null,
locale: locale!,
overrideAccess: true,
req,
showHiddenFields: true,
})

// version data with hoisted localized data
const prevVersionDoc = await afterRead({
collection: collectionConfig,
context: req.context,
depth: 0,
doc: deepCopyObjectSimple(versionToRestoreWithLocales),
draft: draftArg,
fallbackLocale: null,
global: null,
locale: locale!,
overrideAccess: true,
req,
showHiddenFields: true,
})

let data = deepCopyObjectSimple(prevVersionDoc)

// /////////////////////////////////////
// beforeValidate - Fields
// /////////////////////////////////////

data = await beforeValidate({
id: parentDocID,
collection: collectionConfig,
context: req.context,
data,
doc: originalDoc,
global: null,
operation: 'update',
overrideAccess,
req,
})

// /////////////////////////////////////
// beforeValidate - Collection
// /////////////////////////////////////

if (collectionConfig.hooks?.beforeValidate?.length) {
for (const hook of collectionConfig.hooks.beforeValidate) {
data =
(await hook({
collection: collectionConfig,
context: req.context,
data,
operation: 'update',
originalDoc,
req,
})) || data
}
}

// /////////////////////////////////////
// beforeChange - Collection
// /////////////////////////////////////

if (collectionConfig.hooks?.beforeChange?.length) {
for (const hook of collectionConfig.hooks.beforeChange) {
data =
(await hook({
collection: collectionConfig,
context: req.context,
data,
operation: 'update',
originalDoc,
req,
})) || data
}
}

// /////////////////////////////////////
// beforeChange - Fields
// /////////////////////////////////////

let result = await beforeChange({
id: parentDocID,
collection: collectionConfig,
context: req.context,
data: { ...data, id: parentDocID },
doc: originalDoc,
docWithLocales: versionToRestoreWithLocales,
global: null,
operation: 'update',
overrideAccess,
req,
skipValidation:
draftArg && collectionConfig.versions.drafts && !collectionConfig.versions.drafts.validate,
})

// /////////////////////////////////////
// Update
// /////////////////////////////////////
@@ -258,10 +128,10 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
select: incomingSelect,
})

result = await req.payload.db.updateOne({
let result = await req.payload.db.updateOne({
id: parentDocID,
collection: collectionConfig.slug,
data: result,
data: rawVersion.version,
req,
select,
})
@@ -270,16 +140,18 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
// Save `previousDoc` as a version after restoring
// /////////////////////////////////////

result = await saveVersion({
id: parentDocID,
const prevVersion = { ...prevDocWithLocales }

delete prevVersion.id

await payload.db.createVersion({
autosave: false,
collection: collectionConfig,
docWithLocales: result,
draft: draftArg,
operation: 'restoreVersion',
payload,
collectionSlug: collectionConfig.slug,
createdAt: prevVersion.createdAt,
parent: parentDocID,
req,
select,
updatedAt: new Date().toISOString(),
versionData: draft ? { ...rawVersion.version, _status: 'draft' } : rawVersion.version,
})

// /////////////////////////////////////
@@ -353,21 +225,6 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
}
}

// /////////////////////////////////////
// afterOperation - Collection
// /////////////////////////////////////

result = await buildAfterOperation({
args,
collection: collectionConfig,
operation: 'restoreVersion',
result,
})

if (shouldCommit) {
await commitTransaction(req)
}

return result
} catch (error: unknown) {
await killTransaction(req)
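For context on how this simplified operation is reached, a minimal Local API call is sketched below. It is a sketch only: the collection slug, version ID, and config import path are placeholder assumptions, not values taken from this diff.

import { getPayload } from 'payload'
// Assumed config path for illustration only
import config from './payload.config.js'

const payload = await getPayload({ config })

// Restore a document to a previously saved version; this routes into
// restoreVersionOperation. 'posts' and the version ID are placeholders.
const restored = await payload.restoreVersion({
  collection: 'posts',
  id: '662a1a0c9c2b8e001f3d4b1a', // ID of the version document to restore
  depth: 0,
})

console.log(restored.id)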
@@ -314,7 +314,6 @@ export const updateDocument = async <
collection: collectionConfig,
docWithLocales: result,
draft: shouldSaveDraft,
operation: 'update',
payload,
publishSpecificLocale,
req,
@@ -2,7 +2,7 @@ import type { forgotPasswordOperation } from '../../auth/operations/forgotPasswo
import type { loginOperation } from '../../auth/operations/login.js'
import type { refreshOperation } from '../../auth/operations/refresh.js'
import type { resetPasswordOperation } from '../../auth/operations/resetPassword.js'
import type { CollectionSlug, restoreVersionOperation } from '../../index.js'
import type { CollectionSlug } from '../../index.js'
import type { PayloadRequest } from '../../types/index.js'
import type { SanitizedCollectionConfig, SelectFromCollectionSlug } from '../config/types.js'
import type { countOperation } from './count.js'
@@ -36,7 +36,6 @@ export type AfterOperationMap<TOperationGeneric extends CollectionSlug> = {
login: typeof loginOperation<TOperationGeneric>
refresh: typeof refreshOperation
resetPassword: typeof resetPasswordOperation<TOperationGeneric>
restoreVersion: typeof restoreVersionOperation
update: typeof updateOperation<TOperationGeneric, SelectFromCollectionSlug<TOperationGeneric>>
updateByID: typeof updateByIDOperation<
TOperationGeneric,
@@ -109,11 +108,6 @@ export type AfterOperationArg<TOperationGeneric extends CollectionSlug> = {
operation: 'resetPassword'
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['resetPassword']>>
}
| {
args: Parameters<AfterOperationMap<TOperationGeneric>['restoreVersion']>[0]
operation: 'restoreVersion'
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['restoreVersion']>>
}
| {
args: Parameters<AfterOperationMap<TOperationGeneric>['update']>[0]
operation: 'update'
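As a reminder of how this discriminated union is consumed, here is a minimal afterOperation hook that narrows on `operation`. The `CollectionAfterOperationHook` type name is assumed from Payload's exported hook types, and the logging body is illustrative; with this change, 'restoreVersion' is no longer a member of the union, so hooks narrow only on the remaining operations such as 'update'.

import type { CollectionAfterOperationHook } from 'payload'

// Sketch only: narrow the AfterOperationArg union by `operation`.
export const logAfterOperation: CollectionAfterOperationHook = ({ operation, result }) => {
  if (operation === 'update') {
    // `result` is typed via AfterOperationMap['update'] on this branch
    console.log('bulk update completed')
  }
  return result
}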
@@ -160,29 +160,32 @@ export async function validateSearchParam({
let fieldAccess: any

if (versionFields) {
fieldAccess = policies[entityType]![entitySlug]!.fields
fieldAccess = policies[entityType]![entitySlug]!

if (segments[0] === 'parent' || segments[0] === 'version' || segments[0] === 'snapshot') {
if (segments[0] === 'parent' || segments[0] === 'version') {
segments.shift()
}
} else {
fieldAccess = policies[entityType]![entitySlug]!.fields
}

if (segments.length) {
segments.forEach((segment) => {
if (fieldAccess[segment]) {
if ('fields' in fieldAccess[segment]) {
fieldAccess = fieldAccess[segment].fields
} else {
fieldAccess = fieldAccess[segment]
}
segments.forEach((segment) => {
if (fieldAccess[segment]) {
if ('fields' in fieldAccess[segment]) {
fieldAccess = fieldAccess[segment].fields
} else if (
'blocks' in fieldAccess[segment] ||
'blockReferences' in fieldAccess[segment]
) {
fieldAccess = fieldAccess[segment]
} else {
fieldAccess = fieldAccess[segment]
}
})

if (!fieldAccess?.read?.permission) {
errors.push({ path: fieldPath })
}
})

if (!fieldAccess?.read?.permission) {
errors.push({ path: fieldPath })
}
}
@@ -1 +0,0 @@
export { id } from '@payloadcms/translations/languages/id'
@@ -6,7 +6,6 @@ export {
parseCookies,
} from '../auth/cookies.js'
export { getLoginOptions } from '../auth/getLoginOptions.js'
export { addSessionToUser, removeExpiredSessions } from '../auth/sessions.js'
export { getFromImportMap } from '../bin/generateImportMap/utilities/getFromImportMap.js'
export { parsePayloadComponent } from '../bin/generateImportMap/utilities/parsePayloadComponent.js'
export { defaults as collectionDefaults } from '../collections/config/defaults.js'

@@ -14,7 +14,6 @@ export const baseIDField: TextField = {
defaultValue: () => new ObjectId().toHexString(),
hooks: {
beforeChange: [({ value }) => value || new ObjectId().toHexString()],
// ID field values for arrays and blocks need to be unique when duplicating, as on postgres they are stored on the same table as primary keys.
beforeDuplicate: [() => new ObjectId().toHexString()],
},
label: 'ID',
@@ -111,14 +111,13 @@ export const promise = async ({
parentSchemaPath,
})

const fieldAffectsDataResult = fieldAffectsData(field)
const pathSegments = path ? path.split('.') : []
const schemaPathSegments = schemaPath ? schemaPath.split('.') : []
const indexPathSegments = indexPath ? indexPath.split('-').filter(Boolean)?.map(Number) : []
let removedFieldValue = false

if (
fieldAffectsDataResult &&
fieldAffectsData(field) &&
field.hidden &&
typeof siblingDoc[field.name!] !== 'undefined' &&
!showHiddenFields
@@ -140,17 +139,16 @@ export const promise = async ({
}
}

const shouldHoistLocalizedValue: boolean = Boolean(
const shouldHoistLocalizedValue =
flattenLocales &&
fieldAffectsDataResult &&
typeof siblingDoc[field.name!] === 'object' &&
siblingDoc[field.name!] !== null &&
fieldShouldBeLocalized({ field, parentIsLocalized: parentIsLocalized! }) &&
locale !== 'all' &&
req.payload.config.localization,
)
fieldAffectsData(field) &&
typeof siblingDoc[field.name!] === 'object' &&
siblingDoc[field.name!] !== null &&
fieldShouldBeLocalized({ field, parentIsLocalized: parentIsLocalized! }) &&
locale !== 'all' &&
req.payload.config.localization

if (fieldAffectsDataResult && shouldHoistLocalizedValue) {
if (shouldHoistLocalizedValue) {
// replace actual value with localized value before sanitizing
// { [locale]: fields } -> fields
const value = siblingDoc[field.name!][locale!]
@@ -189,7 +187,7 @@ export const promise = async ({
case 'group': {
// Fill groups with empty objects so fields with hooks within groups can populate
// themselves virtually as necessary
if (fieldAffectsDataResult && typeof siblingDoc[field.name] === 'undefined') {
if (fieldAffectsData(field) && typeof siblingDoc[field.name] === 'undefined') {
siblingDoc[field.name] = {}
}

@@ -236,7 +234,7 @@ export const promise = async ({
}
}

if (fieldAffectsDataResult) {
if (fieldAffectsData(field)) {
// Execute hooks
if (triggerHooks && field.hooks?.afterRead) {
for (const hook of field.hooks.afterRead) {
@@ -402,7 +400,7 @@ export const promise = async ({
}
}

if (Array.isArray(rows) && rows.length > 0) {
if (Array.isArray(rows)) {
rows.forEach((row, rowIndex) => {
traverseFields({
blockData,
@@ -470,8 +468,6 @@ export const promise = async ({
})
}
})
} else if (shouldHoistLocalizedValue && (!rows || rows.length === 0)) {
siblingDoc[field.name] = null
} else if (field.hidden !== true || showHiddenFields === true) {
siblingDoc[field.name] = []
}
@@ -481,7 +477,7 @@ export const promise = async ({
case 'blocks': {
const rows = siblingDoc[field.name]

if (Array.isArray(rows) && rows.length > 0) {
if (Array.isArray(rows)) {
rows.forEach((row, rowIndex) => {
const blockTypeToMatch = (row as JsonObject).blockType

@@ -577,8 +573,6 @@ export const promise = async ({
})
}
})
} else if (shouldHoistLocalizedValue && (!rows || rows.length === 0)) {
siblingDoc[field.name] = null
} else if (field.hidden !== true || showHiddenFields === true) {
siblingDoc[field.name] = []
}
@@ -623,7 +617,7 @@ export const promise = async ({
}

case 'group': {
if (fieldAffectsDataResult) {
if (fieldAffectsData(field)) {
let groupDoc = siblingDoc[field.name] as JsonObject

if (typeof siblingDoc[field.name] !== 'object') {
@@ -63,8 +63,7 @@ export const promise = async <T>({
let fieldData = siblingDoc?.[field.name!]
const fieldIsLocalized = localization && fieldShouldBeLocalized({ field, parentIsLocalized })

// Run field beforeDuplicate hooks.
// These hooks are responsible for resetting the `id` field values of array and block rows. See `baseIDField`.
// Run field beforeDuplicate hooks
if (Array.isArray(field.hooks?.beforeDuplicate)) {
if (fieldIsLocalized) {
const localeData: JsonObject = {}
@@ -28,20 +28,20 @@ export async function buildFolderWhereConstraints({
}),
]

const baseFilterConstraint = await (
collectionConfig.admin?.baseFilter ?? collectionConfig.admin?.baseListFilter
)?.({
limit: 0,
locale: localeCode,
page: 1,
req,
sort:
sort ||
(typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : 'id'),
})
if (typeof collectionConfig.admin?.baseListFilter === 'function') {
const baseListFilterConstraint = await collectionConfig.admin.baseListFilter({
limit: 0,
locale: localeCode,
page: 1,
req,
sort:
sort ||
(typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : 'id'),
})

if (baseFilterConstraint) {
constraints.push(baseFilterConstraint)
if (baseListFilterConstraint) {
constraints.push(baseListFilterConstraint)
}
}

if (folderID) {
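This side of the compare reads `admin.baseListFilter` directly instead of falling back from `baseFilter`. A minimal, assumed collection config showing how such a filter is typically declared follows; the 'projects' slug and 'tenant' field are illustrative placeholders, not part of this diff.

import type { CollectionConfig } from 'payload'

// Sketch only: a baseListFilter that constrains the list/folder view.
export const Projects: CollectionConfig = {
  slug: 'projects',
  admin: {
    baseListFilter: ({ req }) => {
      // Return a Where constraint, or null for no constraint
      if (!req.user) return null
      return { tenant: { equals: req.user.id } }
    },
  },
  fields: [{ name: 'tenant', type: 'text' }],
}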
@@ -30,17 +30,15 @@ export function formatFolderOrDocumentItem({
if (isUpload) {
itemValue.filename = value.filename
itemValue.mimeType = value.mimeType
itemValue.url =
value.thumbnailURL ||
(isImage(value.mimeType)
? getBestFitFromSizes({
sizes: value.sizes,
targetSizeMax: 520,
targetSizeMin: 300,
url: value.url,
width: value.width,
})
: undefined)
itemValue.url = isImage(value.mimeType)
? getBestFitFromSizes({
sizes: value.sizes,
targetSizeMax: 520,
targetSizeMin: 300,
url: value.url,
width: value.width,
})
: undefined
}

return {
@@ -282,7 +282,6 @@ export const updateOperation = async <
docWithLocales: result,
draft: shouldSaveDraft,
global: globalConfig,
operation: 'update',
payload,
publishSpecificLocale,
req,

@@ -873,7 +873,6 @@ export class BasePayload {
this.config.jobs.scheduling
) {
await this.jobs.handleSchedules({
allQueues: cronConfig.allQueues,
queue: cronConfig.queue,
})
}
@@ -892,7 +891,6 @@ export class BasePayload {
}

await this.jobs.run({
allQueues: cronConfig.allQueues,
limit: cronConfig.limit ?? DEFAULT_LIMIT,
queue: cronConfig.queue,
silent: cronConfig.silent,
@@ -1170,7 +1168,6 @@ export type {
AfterRefreshHook as CollectionAfterRefreshHook,
AuthCollection,
AuthOperationsFromCollectionSlug,
BaseFilter,
BaseListFilter,
BeforeChangeHook as CollectionBeforeChangeHook,
BeforeDeleteHook as CollectionBeforeDeleteHook,
@@ -7,13 +7,6 @@ import type { TaskConfig } from './taskTypes.js'
import type { WorkflowConfig } from './workflowTypes.js'

export type AutorunCronConfig = {
/**
* If you want to autorun jobs from all queues, set this to true.
* If you set this to true, the `queue` property will be ignored.
*
* @default false
*/
allQueues?: boolean
/**
* The cron schedule for the job.
* @default '* * * * *' (every minute).
@@ -50,8 +43,6 @@ export type AutorunCronConfig = {
limit?: number
/**
* The queue name for the job.
*
* @default 'default'
*/
queue?: string
/**
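For orientation, a minimal jobs configuration entry matching the `AutorunCronConfig` shape above is sketched here. It is an assumption-labeled sketch: the cron expression, queue name, and limit are placeholder values, and `allQueues` is the option being removed on this branch, so it is not used.

import type { Config } from 'payload'

// Sketch under assumptions: an autoRun entry following AutorunCronConfig.
export const jobs: Config['jobs'] = {
  tasks: [],
  autoRun: [
    {
      cron: '*/5 * * * *', // every five minutes (placeholder schedule)
      queue: 'nightly',    // placeholder queue name
      limit: 10,           // max jobs picked up per run
    },
  ],
}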
@@ -45,18 +45,11 @@ export const handleSchedulesJobsEndpoint: Endpoint = {
)
}

const { allQueues, queue } = req.query as {
allQueues?: 'false' | 'true'
const { queue } = req.query as {
queue?: string
}

const runAllQueues = allQueues && !(typeof allQueues === 'string' && allQueues === 'false')

const { errored, queued, skipped } = await handleSchedules({
allQueues: runAllQueues,
queue,
req,
})
const { errored, queued, skipped } = await handleSchedules({ queue, req })

return Response.json(
{
@@ -56,7 +56,7 @@ export const runJobsEndpoint: Endpoint = {

if (shouldHandleSchedules && jobsConfig.scheduling) {
// If should handle schedules and schedules are defined
await req.payload.jobs.handleSchedules({ allQueues: runAllQueues, queue, req })
await req.payload.jobs.handleSchedules({ queue: runAllQueues ? undefined : queue, req })
}

const runJobsArgs: RunJobsArgs = {
@@ -1,6 +1,6 @@
import ObjectIdImport from 'bson-objectid'

import type { JobLog, PayloadRequest } from '../../index.js'
import type { PayloadRequest } from '../../index.js'
import type { RunJobsSilent } from '../localAPI.js'
import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js'
import type { TaskError } from './index.js'
@@ -60,6 +60,19 @@ export async function handleTaskError({

const currentDate = getCurrentDate()

;(job.log ??= []).push({
id: new ObjectId().toHexString(),
completedAt: currentDate.toISOString(),
error: errorJSON,
executedAt: executedAt.toISOString(),
input,
output: output ?? {},
parent: req.payload.config.jobs.addParentToTaskLog ? parent : undefined,
state: 'failed',
taskID,
taskSlug,
})

if (job.waitUntil) {
// Check if waitUntil is in the past
const waitUntil = new Date(job.waitUntil)
@@ -87,19 +100,6 @@ export async function handleTaskError({
maxRetries = retriesConfig.attempts
}

const taskLogToPush: JobLog = {
id: new ObjectId().toHexString(),
completedAt: currentDate.toISOString(),
error: errorJSON,
executedAt: executedAt.toISOString(),
input,
output: output ?? {},
parent: req.payload.config.jobs.addParentToTaskLog ? parent : undefined,
state: 'failed',
taskID,
taskSlug,
}

if (!taskStatus?.complete && (taskStatus?.totalTried ?? 0) >= maxRetries) {
/**
* Task reached max retries => workflow will not retry
@@ -108,9 +108,7 @@ export async function handleTaskError({
await updateJob({
error: errorJSON,
hasError: true,
log: {
$push: taskLogToPush,
} as any,
log: job.log,
processing: false,
totalTried: (job.totalTried ?? 0) + 1,
waitUntil: job.waitUntil,
@@ -170,9 +168,7 @@ export async function handleTaskError({
await updateJob({
error: hasFinalError ? errorJSON : undefined,
hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried
log: {
$push: taskLogToPush,
} as any,
log: job.log,
processing: false,
totalTried: (job.totalTried ?? 0) + 1,
waitUntil: job.waitUntil,
@@ -79,6 +79,7 @@ export async function handleWorkflowError({
await updateJob({
error: errorJSON,
hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried
log: job.log,
processing: false,
totalTried: (job.totalTried ?? 0) + 1,
waitUntil: job.waitUntil,
@@ -22,20 +22,13 @@ export type RunJobsSilent =
| boolean
export const getJobsLocalAPI = (payload: Payload) => ({
handleSchedules: async (args?: {
/**
* If you want to schedule jobs from all queues, set this to true.
* If you set this to true, the `queue` property will be ignored.
*
* @default false
*/
allQueues?: boolean
// By default, schedule all queues - only scheduling jobs scheduled to be added to the `default` queue would not make sense
// here, as you'd usually specify a different queue than `default` here, especially if this is used in combination with autorun.
// The `queue` property for setting up schedules is required, and not optional.
/**
* If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name.
*
* @default jobs from the `default` queue will be executed.
* @default all jobs for all queues will be scheduled.
*/
queue?: string
req?: PayloadRequest
@@ -43,7 +36,6 @@ export const getJobsLocalAPI = (payload: Payload) => ({
const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload))

return await handleSchedules({
allQueues: args?.allQueues,
queue: args?.queue,
req: newReq,
})
@@ -23,26 +23,17 @@ export type HandleSchedulesResult = {
* after they are scheduled
*/
export async function handleSchedules({
allQueues = false,
queue: _queue,
queue,
req,
}: {
/**
* If you want to schedule jobs from all queues, set this to true.
* If you set this to true, the `queue` property will be ignored.
*
* @default false
*/
allQueues?: boolean
/**
* If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name.
*
* @default jobs from the `default` queue will be executed.
* @default all jobs for all queues will be scheduled.
*/
queue?: string
req: PayloadRequest
}): Promise<HandleSchedulesResult> {
const queue = _queue ?? 'default'
const jobsConfig = req.payload.config.jobs
const queuesWithSchedules = getQueuesWithSchedules({
jobsConfig,
@@ -62,7 +53,7 @@ export async function handleSchedules({
// Need to know when that particular job was last scheduled in that particular queue

for (const [queueName, { schedules }] of Object.entries(queuesWithSchedules)) {
if (!allQueues && queueName !== queue) {
if (queue && queueName !== queue) {
// If a queue is specified, only schedule jobs for that queue
continue
}
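Taken together, these scheduling changes drop `allQueues` and make `queue` fully optional: omitting it now schedules every queue, while passing a name restricts scheduling to that queue. A minimal usage sketch follows; `payload` is assumed to be an initialized Payload instance and the queue name is a placeholder.

// Sketch only: new handleSchedules semantics on this branch.
const allQueuesResult = await payload.jobs.handleSchedules()
const nightlyResult = await payload.jobs.handleSchedules({ queue: 'nightly' })

// Both calls resolve to a HandleSchedulesResult, matching the endpoint's destructure.
const { errored, queued, skipped } = nightlyResult
console.log({ errored, queued, skipped })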
Some files were not shown because too many files have changed in this diff