Compare commits
77 Commits
v3.49.0
...
feat/pushj
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a8e2a835cc | ||
|
|
b194ffc504 | ||
|
|
d2ca782a3d | ||
|
|
f61e7d06b0 | ||
|
|
255bba9606 | ||
|
|
8173180d1d | ||
|
|
3258e78596 | ||
|
|
ad2564e5fa | ||
|
|
995f96bc70 | ||
|
|
306b7f6943 | ||
|
|
72f5763c25 | ||
|
|
a374aabd8d | ||
|
|
e84f43fca1 | ||
|
|
2bc9a2def4 | ||
|
|
1d81b0c6dd | ||
|
|
9c8f3202e4 | ||
|
|
161769e50c | ||
|
|
c9a1590fc4 | ||
|
|
e870be094e | ||
|
|
d4f198651c | ||
|
|
5d8f8dc0a5 | ||
|
|
7344d64be3 | ||
|
|
2211f3dd1c | ||
|
|
ac40185158 | ||
|
|
d622d3c5e7 | ||
|
|
b74f4fb9b2 | ||
|
|
8401b2166d | ||
|
|
20b4de94ee | ||
|
|
43b4b22af9 | ||
|
|
3b9dba8641 | ||
|
|
1d70d4d36c | ||
|
|
1b31c74d32 | ||
|
|
f432cc1956 | ||
|
|
2903486974 | ||
|
|
b965db881e | ||
|
|
1b93c4becc | ||
|
|
9031f3bf23 | ||
|
|
df91321f4a | ||
|
|
11755089f8 | ||
|
|
a8b6983ab5 | ||
|
|
f2d4004237 | ||
|
|
8a489410ad | ||
|
|
095e7d904f | ||
|
|
c48b57fdbf | ||
|
|
b26a73be4a | ||
|
|
3114b89d4c | ||
|
|
227a20e94b | ||
|
|
a22f27de1c | ||
|
|
e7124f6176 | ||
|
|
183f313387 | ||
|
|
b1fa76e397 | ||
|
|
08942494e3 | ||
|
|
da8bf69054 | ||
|
|
26d9daeccf | ||
|
|
fc5944840e | ||
|
|
9e04dbb1ca | ||
|
|
72954ce9f2 | ||
|
|
e50220374e | ||
|
|
61ee8fadca | ||
|
|
8d84352ee9 | ||
|
|
4beb27b9ad | ||
|
|
c5c8c13057 | ||
|
|
a888d5cc53 | ||
|
|
72349245ca | ||
|
|
4fde0f23ce | ||
|
|
aff2ce1b9b | ||
|
|
5c94d2dc71 | ||
|
|
b1aac19668 | ||
|
|
d093bb1f00 | ||
|
|
2e9ba10fb5 | ||
|
|
8518141a5e | ||
|
|
6d6c9ebc56 | ||
|
|
7cd4a8a602 | ||
|
|
bc802846c5 | ||
|
|
e8f6cb5ed1 | ||
|
|
23bd67515c | ||
|
|
e29d1d98d4 |
7
.github/workflows/audit-dependencies.sh
vendored
7
.github/workflows/audit-dependencies.sh
vendored
@@ -13,7 +13,8 @@ echo "${audit_json}" | jq --arg severity "${severity}" '
|
||||
{
|
||||
package: .value.module_name,
|
||||
vulnerable: .value.vulnerable_versions,
|
||||
fixed_in: .value.patched_versions
|
||||
fixed_in: .value.patched_versions,
|
||||
findings: .value.findings
|
||||
}
|
||||
)
|
||||
' >$output_file
|
||||
@@ -23,7 +24,11 @@ audit_length=$(jq 'length' $output_file)
|
||||
if [[ "${audit_length}" -gt "0" ]]; then
|
||||
echo "Actionable vulnerabilities found in the following packages:"
|
||||
jq -r '.[] | "\u001b[1m\(.package)\u001b[0m vulnerable in \u001b[31m\(.vulnerable)\u001b[0m fixed in \u001b[32m\(.fixed_in)\u001b[0m"' $output_file | while read -r line; do echo -e "$line"; done
|
||||
echo ""
|
||||
echo "Output written to ${output_file}"
|
||||
cat $output_file
|
||||
echo ""
|
||||
echo "This script can be rerun with: './.github/workflows/audit-dependencies.sh $severity'"
|
||||
exit 1
|
||||
else
|
||||
echo "No actionable vulnerabilities"
|
||||
|
||||
2
.github/workflows/audit-dependencies.yml
vendored
2
.github/workflows/audit-dependencies.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Details>"
|
||||
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Script Run Details>"
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
3
.github/workflows/post-release.yml
vendored
3
.github/workflows/post-release.yml
vendored
@@ -17,6 +17,9 @@ env:
|
||||
|
||||
jobs:
|
||||
post_release:
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-24.04
|
||||
if: ${{ github.event_name != 'workflow_dispatch' }}
|
||||
steps:
|
||||
|
||||
@@ -739,7 +739,7 @@ The `useDocumentInfo` hook provides information about the current document being
|
||||
| **`lastUpdateTime`** | Timestamp of the last update to the document. |
|
||||
| **`mostRecentVersionIsAutosaved`** | Whether the most recent version is an autosaved version. |
|
||||
| **`preferencesKey`** | The `preferences` key to use when interacting with document-level user preferences. [More details](./preferences). |
|
||||
| **`savedDocumentData`** | The saved data of the document. |
|
||||
| **`data`** | The saved data of the document. |
|
||||
| **`setDocFieldPreferences`** | Method to set preferences for a specific field. [More details](./preferences). |
|
||||
| **`setDocumentTitle`** | Method to set the document title. |
|
||||
| **`setHasPublishedDoc`** | Method to update whether the document has been published. |
|
||||
|
||||
@@ -142,7 +142,7 @@ The following options are available:
|
||||
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
|
||||
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
|
||||
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
|
||||
| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
|
||||
| `baseFilter` | Defines a default base filter which will be applied to the List View (along with any other filters applied by the user) and internal links in Lexical Editor, |
|
||||
|
||||
<Banner type="warning">
|
||||
**Note:** If you set `useAsTitle` to a relationship or join field, it will use
|
||||
|
||||
@@ -296,11 +296,16 @@ query {
|
||||
sort: "createdAt"
|
||||
limit: 5
|
||||
where: { author: { equals: "66e3431a3f23e684075aaeb9" } }
|
||||
"""
|
||||
Optionally pass count: true if you want to retrieve totalDocs
|
||||
"""
|
||||
count: true -- s
|
||||
) {
|
||||
docs {
|
||||
title
|
||||
}
|
||||
hasNextPage
|
||||
totalDocs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,20 +34,20 @@ npm i @payloadcms/plugin-csm
|
||||
Then in the `plugins` array of your Payload Config, call the plugin and enable any collections that require Content Source Maps.
|
||||
|
||||
```ts
|
||||
import { buildConfig } from "payload/config"
|
||||
import contentSourceMaps from "@payloadcms/plugin-csm"
|
||||
import { buildConfig } from 'payload/config'
|
||||
import contentSourceMaps from '@payloadcms/plugin-csm'
|
||||
|
||||
const config = buildConfig({
|
||||
collections: [
|
||||
{
|
||||
slug: "pages",
|
||||
slug: 'pages',
|
||||
fields: [
|
||||
{
|
||||
name: 'slug',
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'title,'
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
},
|
||||
],
|
||||
@@ -55,7 +55,7 @@ const config = buildConfig({
|
||||
],
|
||||
plugins: [
|
||||
contentSourceMaps({
|
||||
collections: ["pages"],
|
||||
collections: ['pages'],
|
||||
}),
|
||||
],
|
||||
})
|
||||
|
||||
@@ -77,7 +77,6 @@ This configuration only queues the Job - it does not execute it immediately. To
|
||||
```ts
|
||||
export default buildConfig({
|
||||
jobs: {
|
||||
scheduler: 'cron',
|
||||
autoRun: [
|
||||
{
|
||||
cron: '* * * * *', // Runs every minute
|
||||
|
||||
@@ -45,13 +45,11 @@ The following options are available:
|
||||
|
||||
| Path | Description |
|
||||
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`url`** \* | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
|
||||
| **`url`** | String, or function that returns a string, pointing to your front-end application. This value is used as the iframe `src`. [More details](#url). |
|
||||
| **`breakpoints`** | Array of breakpoints to be used as “device sizes” in the preview window. Each item appears as an option in the toolbar. [More details](#breakpoints). |
|
||||
| **`collections`** | Array of collection slugs to enable Live Preview on. |
|
||||
| **`globals`** | Array of global slugs to enable Live Preview on. |
|
||||
|
||||
_\* An asterisk denotes that a property is required._
|
||||
|
||||
### URL
|
||||
|
||||
The `url` property resolves to a string that points to your front-end application. This value is used as the `src` attribute of the iframe rendering your front-end. Once loaded, the Admin Panel will communicate directly with your app through `window.postMessage` events.
|
||||
@@ -88,17 +86,16 @@ const config = buildConfig({
|
||||
// ...
|
||||
livePreview: {
|
||||
// highlight-start
|
||||
url: ({
|
||||
data,
|
||||
collectionConfig,
|
||||
locale
|
||||
}) => `${data.tenant.url}${ // Multi-tenant top-level domain
|
||||
collectionConfig.slug === 'posts' ? `/posts/${data.slug}` : `${data.slug !== 'home' : `/${data.slug}` : ''}`
|
||||
}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
|
||||
url: ({ data, collectionConfig, locale }) =>
|
||||
`${data.tenant.url}${
|
||||
collectionConfig.slug === 'posts'
|
||||
? `/posts/${data.slug}`
|
||||
: `${data.slug !== 'home' ? `/${data.slug}` : ''}`
|
||||
}${locale ? `?locale=${locale?.code}` : ''}`, // Localization query param
|
||||
collections: ['pages'],
|
||||
},
|
||||
// highlight-end
|
||||
}
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
@@ -54,8 +54,15 @@ The plugin accepts an object with the following properties:
|
||||
```ts
|
||||
type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
/**
|
||||
* After a tenant is deleted, the plugin will attempt
|
||||
* to clean up related documents
|
||||
* Base path for your application
|
||||
*
|
||||
* https://nextjs.org/docs/app/api-reference/config/next-config-js/basePath
|
||||
*
|
||||
* @default undefined
|
||||
*/
|
||||
basePath?: string
|
||||
/**
|
||||
* After a tenant is deleted, the plugin will attempt to clean up related documents
|
||||
* - removing documents with the tenant ID
|
||||
* - removing the tenant from users
|
||||
*
|
||||
@@ -68,22 +75,36 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
collections: {
|
||||
[key in CollectionSlug]?: {
|
||||
/**
|
||||
* Set to `true` if you want the collection to
|
||||
* behave as a global
|
||||
* Set to `true` if you want the collection to behave as a global
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
isGlobal?: boolean
|
||||
/**
|
||||
* Set to `false` if you want to manually apply
|
||||
* the baseListFilter
|
||||
* Overrides for the tenant field, will override the entire tenantField configuration
|
||||
*/
|
||||
tenantFieldOverrides?: CollectionTenantFieldConfigOverrides
|
||||
/**
|
||||
* Set to `false` if you want to manually apply the baseListFilter
|
||||
* Set to `false` if you want to manually apply the baseFilter
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
useBaseFilter?: boolean
|
||||
/**
|
||||
* @deprecated Use `useBaseFilter` instead. If both are defined,
|
||||
* `useBaseFilter` will take precedence. This property remains only
|
||||
* for backward compatibility and may be removed in a future version.
|
||||
*
|
||||
* Originally, `baseListFilter` was intended to filter only the List View
|
||||
* in the admin panel. However, base filtering is often required in other areas
|
||||
* such as internal link relationships in the Lexical editor.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
useBaseListFilter?: boolean
|
||||
/**
|
||||
* Set to `false` if you want to handle collection access
|
||||
* manually without the multi-tenant constraints applied
|
||||
* Set to `false` if you want to handle collection access manually without the multi-tenant constraints applied
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
@@ -92,8 +113,7 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
}
|
||||
/**
|
||||
* Enables debug mode
|
||||
* - Makes the tenant field visible in the
|
||||
* admin UI within applicable collections
|
||||
* - Makes the tenant field visible in the admin UI within applicable collections
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
@@ -105,27 +125,41 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
*/
|
||||
enabled?: boolean
|
||||
/**
|
||||
* Field configuration for the field added
|
||||
* to all tenant enabled collections
|
||||
* Localization for the plugin
|
||||
*/
|
||||
tenantField?: {
|
||||
access?: RelationshipField['access']
|
||||
/**
|
||||
* The name of the field added to all tenant
|
||||
* enabled collections
|
||||
*
|
||||
* @default 'tenant'
|
||||
*/
|
||||
name?: string
|
||||
i18n?: {
|
||||
translations: {
|
||||
[key in AcceptedLanguages]?: {
|
||||
/**
|
||||
* @default 'You are about to change ownership from <0>{{fromTenant}}</0> to <0>{{toTenant}}</0>'
|
||||
*/
|
||||
'confirm-modal-tenant-switch--body'?: string
|
||||
/**
|
||||
* `tenantLabel` defaults to the value of the `nav-tenantSelector-label` translation
|
||||
*
|
||||
* @default 'Confirm {{tenantLabel}} change'
|
||||
*/
|
||||
'confirm-modal-tenant-switch--heading'?: string
|
||||
/**
|
||||
* @default 'Assigned Tenant'
|
||||
*/
|
||||
'field-assignedTenant-label'?: string
|
||||
/**
|
||||
* @default 'Tenant'
|
||||
*/
|
||||
'nav-tenantSelector-label'?: string
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Field configuration for the field added
|
||||
* to the users collection
|
||||
* Field configuration for the field added to all tenant enabled collections
|
||||
*/
|
||||
tenantField?: RootTenantFieldConfigOverrides
|
||||
/**
|
||||
* Field configuration for the field added to the users collection
|
||||
*
|
||||
* If `includeDefaultField` is `false`, you must
|
||||
* include the field on your users collection manually
|
||||
* This is useful if you want to customize the field
|
||||
* or place the field in a specific location
|
||||
* If `includeDefaultField` is `false`, you must include the field on your users collection manually
|
||||
* This is useful if you want to customize the field or place the field in a specific location
|
||||
*/
|
||||
tenantsArrayField?:
|
||||
| {
|
||||
@@ -146,8 +180,7 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
*/
|
||||
arrayTenantFieldName?: string
|
||||
/**
|
||||
* When `includeDefaultField` is `true`, the field will
|
||||
* be added to the users collection automatically
|
||||
* When `includeDefaultField` is `true`, the field will be added to the users collection automatically
|
||||
*/
|
||||
includeDefaultField?: true
|
||||
/**
|
||||
@@ -164,8 +197,7 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
arrayFieldName?: string
|
||||
arrayTenantFieldName?: string
|
||||
/**
|
||||
* When `includeDefaultField` is `false`, you must
|
||||
* include the field on your users collection manually
|
||||
* When `includeDefaultField` is `false`, you must include the field on your users collection manually
|
||||
*/
|
||||
includeDefaultField?: false
|
||||
rowFields?: never
|
||||
@@ -174,8 +206,9 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
/**
|
||||
* Customize tenant selector label
|
||||
*
|
||||
* Either a string or an object where the keys are i18n
|
||||
* codes and the values are the string labels
|
||||
* Either a string or an object where the keys are i18n codes and the values are the string labels
|
||||
*
|
||||
* @deprecated Use `i18n.translations` instead.
|
||||
*/
|
||||
tenantSelectorLabel?:
|
||||
| Partial<{
|
||||
@@ -189,27 +222,25 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
|
||||
*/
|
||||
tenantsSlug?: string
|
||||
/**
|
||||
* Function that determines if a user has access
|
||||
* to _all_ tenants
|
||||
* Function that determines if a user has access to _all_ tenants
|
||||
*
|
||||
* Useful for super-admin type users
|
||||
*/
|
||||
userHasAccessToAllTenants?: (
|
||||
user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User,
|
||||
user: ConfigTypes extends { user: unknown }
|
||||
? ConfigTypes['user']
|
||||
: TypedUser,
|
||||
) => boolean
|
||||
/**
|
||||
* Opt out of adding access constraints to
|
||||
* the tenants collection
|
||||
* Opt out of adding access constraints to the tenants collection
|
||||
*/
|
||||
useTenantsCollectionAccess?: boolean
|
||||
/**
|
||||
* Opt out including the baseListFilter to filter
|
||||
* tenants by selected tenant
|
||||
* Opt out including the baseListFilter to filter tenants by selected tenant
|
||||
*/
|
||||
useTenantsListFilter?: boolean
|
||||
/**
|
||||
* Opt out including the baseListFilter to filter
|
||||
* users by selected tenant
|
||||
* Opt out including the baseListFilter to filter users by selected tenant
|
||||
*/
|
||||
useUsersTenantFilter?: boolean
|
||||
}
|
||||
|
||||
@@ -6,9 +6,112 @@ desc: Troubleshooting Common Issues in Payload
|
||||
keywords: admin, components, custom, customize, documentation, Content Management System, cms, headless, javascript, node, react, nextjs, troubleshooting
|
||||
---
|
||||
|
||||
## Common Issues
|
||||
## Dependency mismatches
|
||||
|
||||
### "Unauthorized, you must be logged in to make this request" when attempting to log in
|
||||
All `payload` and `@payloadcms/*` packages must be on exactly the same version and installed only once.
|
||||
|
||||
When two copies—or two different versions—of any of these packages (or of `react` / `react-dom`) appear in your dependency graph, you can see puzzling runtime errors. The most frequent is a broken React context:
|
||||
|
||||
```bash
|
||||
TypeError: Cannot destructure property 'config' of...
|
||||
```
|
||||
|
||||
This happens because one package imports a hook (most commonly `useConfig`) from _version A_ while the context provider comes from _version B_. The fix is always the same: make sure every Payload-related and React package resolves to the same module.
|
||||
|
||||
### Confirm whether duplicates exist
|
||||
|
||||
The first thing to do is to confirm whether duplicative dependencies do in fact exist.
|
||||
|
||||
There are two ways to do this:
|
||||
|
||||
1. Using pnpm's built-in inspection tool
|
||||
|
||||
```bash
|
||||
pnpm why @payloadcms/ui
|
||||
```
|
||||
|
||||
This prints the dependency tree and shows which versions are being installed. If you see more than one distinct version—or the same version listed under different paths—you have duplication.
|
||||
|
||||
2. Manual check (works with any package manager)
|
||||
|
||||
```bash
|
||||
find node_modules -name package.json \
|
||||
-exec grep -H '"name": "@payloadcms/ui"' {} \;
|
||||
```
|
||||
|
||||
Most of these hits are likely symlinks created by pnpm. Edit the matching package.json files (temporarily add a comment or change a description) to confirm whether they point to the same physical folder or to multiple copies.
|
||||
|
||||
Perform the same two checks for react and react-dom; a second copy of React can cause identical symptoms.
|
||||
|
||||
#### If no duplicates are found
|
||||
|
||||
`@payloadcms/ui` intentionally contains two bundles of itself, so you may see dual paths even when everything is correct. Inside the Payload Admin UI you must import only:
|
||||
|
||||
- `@payloadcms/ui`
|
||||
- `@payloadcms/ui/rsc`
|
||||
- `@payloadcms/ui/shared`
|
||||
|
||||
Any other deep import such as `@payloadcms/ui/elements/Button` should **only** be used in your own frontend, outside of the Payload Admin Panel. Those deep entries are published un-bundled to help you tree-shake and ship a smaller client bundle if you only need a few components from `@payloadcms/ui`.
|
||||
|
||||
### Fixing dependency issues
|
||||
|
||||
These steps assume `pnpm`, which the Payload team recommends and uses internally. The principles apply to other package managers like npm and yarn as well. Do note that yarn 1.x is not supported by Payload.
|
||||
|
||||
1. Pin every critical package to an exact version
|
||||
|
||||
In package.json remove `^` or `~` from all versions of:
|
||||
|
||||
- `payload`
|
||||
- `@payloadcms/*`
|
||||
- `react`
|
||||
- `react-dom`
|
||||
|
||||
Prefixes allow your package manager to float to a newer minor/patch release, causing mismatches.
|
||||
|
||||
2. Delete node_modules
|
||||
|
||||
Old packages often linger even after you change versions or remove them from your package.json. Deleting node_modules ensures a clean slate.
|
||||
|
||||
3. Re-install dependencies
|
||||
|
||||
```bash
|
||||
pnpm install
|
||||
```
|
||||
|
||||
#### If the error persists
|
||||
|
||||
1. Clean the global store (pnpm only)
|
||||
|
||||
```bash
|
||||
pnpm store prune
|
||||
```
|
||||
|
||||
2. Delete the lockfile
|
||||
|
||||
Depending on your package manager, this could be `pnpm-lock.yaml`, `package-lock.json`, or `yarn.lock`.
|
||||
|
||||
Make sure you delete the lockfile **and** the node_modules folder at the same time, then run `pnpm install`. This forces a fresh, consistent resolution for all packages. It will also update all packages with dynamic versions to the latest version.
|
||||
|
||||
While it's best practice to manage dependencies in such a way where the lockfile can easily be re-generated (often this is the easiest way to resolve dependency issues), this may break your project if you have not tested the latest versions of your dependencies.
|
||||
|
||||
If you are using a version control system, make sure to commit your lockfile after this step.
|
||||
|
||||
3. Deduplicate anything that slipped through
|
||||
|
||||
```bash
|
||||
pnpm dedupe
|
||||
```
|
||||
|
||||
**Still stuck?**
|
||||
|
||||
- Switch to `pnpm` if you are on npm. Its symlinked store helps reduce accidental duplication.
|
||||
- Inspect the lockfile directly for peer-dependency violations.
|
||||
- Check project-level .npmrc / .pnpmfile.cjs overrides.
|
||||
- Run [Syncpack](https://www.npmjs.com/package/syncpack) to enforce identical versions of every `@payloadcms/*`, `react`, and `react-dom` reference.
|
||||
|
||||
Absolute last resort: add Webpack aliases so that all imports of a given package resolve to the same path (e.g. `resolve.alias['react'] = path.resolve('./node_modules/react')`). Keep this only until you can fix the underlying version skew.
|
||||
|
||||
## "Unauthorized, you must be logged in to make this request" when attempting to log in
|
||||
|
||||
This means that your auth cookie is not being set or accepted correctly upon logging in. To resolve check the following settings in your Payload Config:
|
||||
|
||||
|
||||
@@ -90,33 +90,33 @@ export const Media: CollectionConfig = {
|
||||
|
||||
_An asterisk denotes that an option is required._
|
||||
|
||||
| Option | Description |
|
||||
| ------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
|
||||
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
|
||||
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
|
||||
| **`constructorOptions`** | An object passed to the the Sharp image library that accepts any Constructor options and applies them to the upload file. [More](https://sharp.pixelplumbing.com/api-constructor/) |
|
||||
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
|
||||
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
|
||||
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
|
||||
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. |
|
||||
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
|
||||
| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. |
|
||||
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |
|
||||
| **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) |
|
||||
| **`handlers`** | Array of Request handlers to execute when fetching a file, if a handler returns a Response it will be sent to the client. Otherwise Payload will retrieve and send back the file. |
|
||||
| **`imageSizes`** | If specified, image uploads will be automatically resized in accordance to these image sizes. [More](#image-sizes) |
|
||||
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards [More](#mimetypes) |
|
||||
| **`pasteURL`** | Controls whether files can be uploaded from remote URLs by pasting them into the Upload field. **Enabled by default.** Accepts `false` to disable or an object with an `allowList` of valid remote URLs. [More](#uploading-files-from-remote-urls) |
|
||||
| **`resizeOptions`** | An object passed to the the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
|
||||
| **`skipSafeFetch`** | Set to an `allowList` to skip the safe fetch check when fetching external files. Set to `true` to skip the safe fetch for all documents in this collection. Defaults to `false`. |
|
||||
| **`allowRestrictedFileTypes`** | Set to `true` to allow restricted file types. If your Collection has defined [mimeTypes](#mimetypes), restricted file verification will be skipped. Defaults to `false`. [More](#restricted-file-types) |
|
||||
| **`staticDir`** | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. Defaults to your collection slug |
|
||||
| **`trimOptions`** | An object passed to the the Sharp image library to trim the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize#trim) |
|
||||
| **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. |
|
||||
| **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. |
|
||||
| **`hideRemoveFile`** | Set to `true` to prevent the admin UI having a way to remove an existing file while editing. |
|
||||
| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) |
|
||||
| Option | Description |
|
||||
| ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
|
||||
| **`bulkUpload`** | Allow users to upload in bulk from the list view, default is true |
|
||||
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
|
||||
| **`constructorOptions`** | An object passed to the the Sharp image library that accepts any Constructor options and applies them to the upload file. [More](https://sharp.pixelplumbing.com/api-constructor/) |
|
||||
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
|
||||
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
|
||||
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
|
||||
| **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. If using this option, you should handle the removal of any sensitive cookies (like payload-prefixed cookies) to prevent leaking session information to external services. By default, Payload automatically filters out payload-prefixed cookies when this option is not defined. |
|
||||
| **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. |
|
||||
| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. |
|
||||
| **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) |
|
||||
| **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) |
|
||||
| **`handlers`** | Array of Request handlers to execute when fetching a file, if a handler returns a Response it will be sent to the client. Otherwise Payload will retrieve and send back the file. |
|
||||
| **`imageSizes`** | If specified, image uploads will be automatically resized in accordance to these image sizes. [More](#image-sizes) |
|
||||
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards [More](#mimetypes) |
|
||||
| **`pasteURL`** | Controls whether files can be uploaded from remote URLs by pasting them into the Upload field. **Enabled by default.** Accepts `false` to disable or an object with an `allowList` of valid remote URLs. [More](#uploading-files-from-remote-urls) |
|
||||
| **`resizeOptions`** | An object passed to the the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
|
||||
| **`skipSafeFetch`** | Set to an `allowList` to skip the safe fetch check when fetching external files. Set to `true` to skip the safe fetch for all documents in this collection. Defaults to `false`. |
|
||||
| **`allowRestrictedFileTypes`** | Set to `true` to allow restricted file types. If your Collection has defined [mimeTypes](#mimetypes), restricted file verification will be skipped. Defaults to `false`. [More](#restricted-file-types) |
|
||||
| **`staticDir`** | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. Defaults to your collection slug |
|
||||
| **`trimOptions`** | An object passed to the the Sharp image library to trim the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize#trim) |
|
||||
| **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. |
|
||||
| **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. |
|
||||
| **`hideRemoveFile`** | Set to `true` to prevent the admin UI having a way to remove an existing file while editing. |
|
||||
| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) |
|
||||
|
||||
### Payload-wide Upload Options
|
||||
|
||||
|
||||
16
package.json
16
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload-monorepo",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"workspaces": [
|
||||
@@ -132,12 +132,12 @@
|
||||
"devDependencies": {
|
||||
"@jest/globals": "29.7.0",
|
||||
"@libsql/client": "0.14.0",
|
||||
"@next/bundle-analyzer": "15.3.2",
|
||||
"@next/bundle-analyzer": "15.4.4",
|
||||
"@payloadcms/db-postgres": "workspace:*",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@payloadcms/eslint-plugin": "workspace:*",
|
||||
"@payloadcms/live-preview-react": "workspace:*",
|
||||
"@playwright/test": "1.50.0",
|
||||
"@playwright/test": "1.54.1",
|
||||
"@sentry/nextjs": "^8.33.1",
|
||||
"@sentry/node": "^8.33.1",
|
||||
"@swc-node/register": "1.10.10",
|
||||
@@ -147,8 +147,8 @@
|
||||
"@types/jest": "29.5.12",
|
||||
"@types/minimist": "1.2.5",
|
||||
"@types/node": "22.15.30",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/shelljs": "0.8.15",
|
||||
"chalk": "^4.1.2",
|
||||
"comment-json": "^4.2.3",
|
||||
@@ -168,12 +168,12 @@
|
||||
"lint-staged": "15.2.7",
|
||||
"minimist": "1.2.8",
|
||||
"mongodb-memory-server": "10.1.4",
|
||||
"next": "15.3.2",
|
||||
"next": "15.4.4",
|
||||
"open": "^10.1.0",
|
||||
"p-limit": "^5.0.0",
|
||||
"pg": "8.16.3",
|
||||
"playwright": "1.50.0",
|
||||
"playwright-core": "1.50.0",
|
||||
"playwright": "1.54.1",
|
||||
"playwright-core": "1.54.1",
|
||||
"prettier": "3.5.3",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/admin-bar",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "An admin bar for React apps using Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -42,8 +42,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "create-payload-app",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -50,12 +50,18 @@ export const updateOne: UpdateOne = async function updateOne(
|
||||
|
||||
let result
|
||||
|
||||
const $inc: Record<string, number> = {}
|
||||
let updateData: UpdateQuery<any> = data
|
||||
transform({ $inc, adapter: this, data, fields, operation: 'write' })
|
||||
|
||||
const $inc: Record<string, number> = {}
|
||||
const $push: Record<string, { $each: any[] } | any> = {}
|
||||
|
||||
transform({ $inc, $push, adapter: this, data, fields, operation: 'write' })
|
||||
if (Object.keys($inc).length) {
|
||||
updateData = { $inc, $set: updateData }
|
||||
}
|
||||
if (Object.keys($push).length) {
|
||||
updateData = { $push, $set: updateData }
|
||||
}
|
||||
|
||||
try {
|
||||
if (returning === false) {
|
||||
|
||||
@@ -209,6 +209,7 @@ const sanitizeDate = ({
|
||||
|
||||
type Args = {
|
||||
$inc?: Record<string, number>
|
||||
$push?: Record<string, { $each: any[] } | any>
|
||||
/** instance of the adapter */
|
||||
adapter: MongooseAdapter
|
||||
/** data to transform, can be an array of documents or a single document */
|
||||
@@ -398,6 +399,7 @@ const stripFields = ({
|
||||
|
||||
export const transform = ({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data,
|
||||
fields,
|
||||
@@ -412,7 +414,16 @@ export const transform = ({
|
||||
|
||||
if (Array.isArray(data)) {
|
||||
for (const item of data) {
|
||||
transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships })
|
||||
transform({
|
||||
$inc,
|
||||
$push,
|
||||
adapter,
|
||||
data: item,
|
||||
fields,
|
||||
globalSlug,
|
||||
operation,
|
||||
validateRelationships,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -470,6 +481,26 @@ export const transform = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
$push &&
|
||||
field.type === 'array' &&
|
||||
operation === 'write' &&
|
||||
field.name in ref &&
|
||||
ref[field.name]
|
||||
) {
|
||||
const value = ref[field.name]
|
||||
if (value && typeof value === 'object' && '$push' in value) {
|
||||
const push = value.$push
|
||||
|
||||
if (Array.isArray(push)) {
|
||||
$push[`${parentPath}${field.name}`] = { $each: push }
|
||||
} else if (typeof push === 'object') {
|
||||
$push[`${parentPath}${field.name}`] = push
|
||||
}
|
||||
delete ref[field.name]
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) {
|
||||
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
const fieldRef = ref[field.name] as Record<string, unknown>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -29,8 +29,8 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
||||
.limit(1)
|
||||
.$dynamic()
|
||||
|
||||
joins.forEach(({ condition, table }) => {
|
||||
query = query.leftJoin(table, condition)
|
||||
joins.forEach(({ type, condition, table }) => {
|
||||
query = query[type ?? 'leftJoin'](table, condition)
|
||||
})
|
||||
|
||||
// When we have any joins, we need to count each individual ID only once.
|
||||
|
||||
@@ -60,6 +60,10 @@ const createConstraint = ({
|
||||
formattedOperator = '='
|
||||
}
|
||||
|
||||
if (pathSegments.length === 1) {
|
||||
return `EXISTS (SELECT 1 FROM json_each("${pathSegments[0]}") AS ${newAlias} WHERE ${newAlias}.value ${formattedOperator} '${formattedValue}')`
|
||||
}
|
||||
|
||||
return `EXISTS (
|
||||
SELECT 1
|
||||
FROM json_each(${alias}.value -> '${pathSegments[0]}') AS ${newAlias}
|
||||
@@ -68,21 +72,38 @@ const createConstraint = ({
|
||||
}
|
||||
|
||||
export const createJSONQuery = ({
|
||||
column,
|
||||
operator,
|
||||
pathSegments,
|
||||
rawColumn,
|
||||
table,
|
||||
treatAsArray,
|
||||
treatRootAsArray,
|
||||
value,
|
||||
}: CreateJSONQueryArgs): string => {
|
||||
if ((operator === 'in' || operator === 'not_in') && Array.isArray(value)) {
|
||||
let sql = ''
|
||||
for (const [i, v] of value.entries()) {
|
||||
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, rawColumn, table, treatAsArray, treatRootAsArray, value: v })} ${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
|
||||
}
|
||||
return sql
|
||||
}
|
||||
|
||||
if (treatAsArray?.includes(pathSegments[1]!) && table) {
|
||||
return fromArray({
|
||||
operator,
|
||||
pathSegments,
|
||||
table,
|
||||
treatAsArray,
|
||||
value,
|
||||
value: value as CreateConstraintArgs['value'],
|
||||
})
|
||||
}
|
||||
|
||||
return createConstraint({ alias: table, operator, pathSegments, treatAsArray, value })
|
||||
return createConstraint({
|
||||
alias: table,
|
||||
operator,
|
||||
pathSegments,
|
||||
treatAsArray,
|
||||
value: value as CreateConstraintArgs['value'],
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/drizzle",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "A library of shared functions used by different payload database adapters",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -6,41 +6,58 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { findMany } from './find/findMany.js'
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const deleteMany: DeleteMany = async function deleteMany(
|
||||
this: DrizzleAdapter,
|
||||
{ collection, req, where },
|
||||
{ collection, req, where: whereArg },
|
||||
) {
|
||||
const db = await getTransaction(this, req)
|
||||
const collectionConfig = this.payload.collections[collection].config
|
||||
|
||||
const tableName = this.tableNameMap.get(toSnakeCase(collectionConfig.slug))
|
||||
|
||||
const result = await findMany({
|
||||
const table = this.tables[tableName]
|
||||
|
||||
const { joins, where } = buildQuery({
|
||||
adapter: this,
|
||||
fields: collectionConfig.flattenedFields,
|
||||
joins: false,
|
||||
limit: 0,
|
||||
locale: req?.locale,
|
||||
page: 1,
|
||||
pagination: false,
|
||||
req,
|
||||
tableName,
|
||||
where,
|
||||
where: whereArg,
|
||||
})
|
||||
|
||||
const ids = []
|
||||
let whereToUse = where
|
||||
|
||||
result.docs.forEach((data) => {
|
||||
ids.push(data.id)
|
||||
})
|
||||
|
||||
if (ids.length > 0) {
|
||||
await this.deleteWhere({
|
||||
db,
|
||||
if (joins?.length) {
|
||||
// Difficult to support joins (through where referencing other tables) in deleteMany. => 2 separate queries.
|
||||
// We can look into supporting this using one single query (through a subquery) in the future, though that's difficult to do in a generic way.
|
||||
const result = await findMany({
|
||||
adapter: this,
|
||||
fields: collectionConfig.flattenedFields,
|
||||
joins: false,
|
||||
limit: 0,
|
||||
locale: req?.locale,
|
||||
page: 1,
|
||||
pagination: false,
|
||||
req,
|
||||
select: {
|
||||
id: true,
|
||||
},
|
||||
tableName,
|
||||
where: inArray(this.tables[tableName].id, ids),
|
||||
where: whereArg,
|
||||
})
|
||||
|
||||
whereToUse = inArray(
|
||||
table.id,
|
||||
result.docs.map((doc) => doc.id),
|
||||
)
|
||||
}
|
||||
|
||||
await this.deleteWhere({
|
||||
db,
|
||||
tableName,
|
||||
where: whereToUse,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { LibSQLDatabase } from 'drizzle-orm/libsql'
|
||||
import type { SQLiteSelect, SQLiteSelectBase } from 'drizzle-orm/sqlite-core'
|
||||
|
||||
import { and, asc, count, desc, eq, or, sql } from 'drizzle-orm'
|
||||
import { and, asc, count, desc, eq, getTableName, or, sql } from 'drizzle-orm'
|
||||
import {
|
||||
appendVersionToQueryKey,
|
||||
buildVersionCollectionFields,
|
||||
combineQueries,
|
||||
type FlattenedField,
|
||||
getFieldByPath,
|
||||
getQueryDraftsSort,
|
||||
type JoinQuery,
|
||||
type SelectMode,
|
||||
@@ -31,7 +33,7 @@ import {
|
||||
resolveBlockTableName,
|
||||
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
|
||||
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||
const flattenAllWherePaths = (where: Where, paths: { path: string; ref: any }[]) => {
|
||||
for (const k in where) {
|
||||
if (['AND', 'OR'].includes(k.toUpperCase())) {
|
||||
if (Array.isArray(where[k])) {
|
||||
@@ -41,7 +43,7 @@ const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||
}
|
||||
} else {
|
||||
// TODO: explore how to support arrays/relationship querying.
|
||||
paths.push(k.split('.').join('_'))
|
||||
paths.push({ path: k.split('.').join('_'), ref: where })
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -59,7 +61,11 @@ const buildSQLWhere = (where: Where, alias: string) => {
|
||||
}
|
||||
} else {
|
||||
const payloadOperator = Object.keys(where[k])[0]
|
||||
|
||||
const value = where[k][payloadOperator]
|
||||
if (payloadOperator === '$raw') {
|
||||
return sql.raw(value)
|
||||
}
|
||||
|
||||
return operatorMap[payloadOperator](sql.raw(`"${alias}"."${k.split('.').join('_')}"`), value)
|
||||
}
|
||||
@@ -472,7 +478,7 @@ export const traverseFields = ({
|
||||
|
||||
const sortPath = sanitizedSort.split('.').join('_')
|
||||
|
||||
const wherePaths: string[] = []
|
||||
const wherePaths: { path: string; ref: any }[] = []
|
||||
|
||||
if (where) {
|
||||
flattenAllWherePaths(where, wherePaths)
|
||||
@@ -492,9 +498,50 @@ export const traverseFields = ({
|
||||
sortPath: sql`${sortColumn ? sortColumn : null}`.as('sortPath'),
|
||||
}
|
||||
|
||||
const collectionQueryWhere: any[] = []
|
||||
// Select for WHERE and Fallback NULL
|
||||
for (const path of wherePaths) {
|
||||
if (adapter.tables[joinCollectionTableName][path]) {
|
||||
for (const { path, ref } of wherePaths) {
|
||||
const collectioConfig = adapter.payload.collections[collection].config
|
||||
const field = getFieldByPath({ fields: collectioConfig.flattenedFields, path })
|
||||
|
||||
if (field && field.field.type === 'select' && field.field.hasMany) {
|
||||
let tableName = adapter.tableNameMap.get(
|
||||
`${toSnakeCase(collection)}_${toSnakeCase(path)}`,
|
||||
)
|
||||
let parentTable = getTableName(table)
|
||||
|
||||
if (adapter.schemaName) {
|
||||
tableName = `"${adapter.schemaName}"."${tableName}"`
|
||||
parentTable = `"${adapter.schemaName}"."${parentTable}"`
|
||||
}
|
||||
|
||||
if (adapter.name === 'postgres') {
|
||||
selectFields[path] = sql
|
||||
.raw(
|
||||
`(select jsonb_agg(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
|
||||
)
|
||||
.as(path)
|
||||
} else {
|
||||
selectFields[path] = sql
|
||||
.raw(
|
||||
`(select json_group_array(${tableName}.value) from ${tableName} where ${tableName}.parent_id = ${parentTable}.id)`,
|
||||
)
|
||||
.as(path)
|
||||
}
|
||||
|
||||
const constraint = ref[path]
|
||||
const operator = Object.keys(constraint)[0]
|
||||
const value: any = Object.values(constraint)[0]
|
||||
|
||||
const query = adapter.createJSONQuery({
|
||||
column: `"${path}"`,
|
||||
operator,
|
||||
pathSegments: [field.field.name],
|
||||
table: parentTable,
|
||||
value,
|
||||
})
|
||||
ref[path] = { $raw: query }
|
||||
} else if (adapter.tables[joinCollectionTableName][path]) {
|
||||
selectFields[path] = sql`${adapter.tables[joinCollectionTableName][path]}`.as(path)
|
||||
// Allow to filter by collectionSlug
|
||||
} else if (path !== 'relationTo') {
|
||||
@@ -502,7 +549,10 @@ export const traverseFields = ({
|
||||
}
|
||||
}
|
||||
|
||||
const query = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
|
||||
let query: any = db.select(selectFields).from(adapter.tables[joinCollectionTableName])
|
||||
if (collectionQueryWhere.length) {
|
||||
query = query.where(and(...collectionQueryWhere))
|
||||
}
|
||||
if (currentQuery === null) {
|
||||
currentQuery = query as unknown as SQLSelect
|
||||
} else {
|
||||
|
||||
@@ -30,8 +30,8 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
||||
.limit(1)
|
||||
.$dynamic()
|
||||
|
||||
joins.forEach(({ condition, table }) => {
|
||||
query = query.leftJoin(table as PgTableWithColumns<any>, condition)
|
||||
joins.forEach(({ type, condition, table }) => {
|
||||
query = query[type ?? 'leftJoin'](table as PgTableWithColumns<any>, condition)
|
||||
})
|
||||
|
||||
// When we have any joins, we need to count each individual ID only once.
|
||||
|
||||
@@ -28,6 +28,8 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: Creat
|
||||
})
|
||||
.join('.')
|
||||
|
||||
const fullPath = pathSegments.length === 1 ? '$[*]' : `$.${jsonPaths}`
|
||||
|
||||
let sql = ''
|
||||
|
||||
if (['in', 'not_in'].includes(operator) && Array.isArray(value)) {
|
||||
@@ -35,13 +37,13 @@ export const createJSONQuery = ({ column, operator, pathSegments, value }: Creat
|
||||
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, value: item })}${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
|
||||
})
|
||||
} else if (operator === 'exists') {
|
||||
sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '$.${jsonPaths}')`
|
||||
sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '${fullPath}')`
|
||||
} else if (['not_like'].includes(operator)) {
|
||||
const mappedOperator = operatorMap[operator]
|
||||
|
||||
sql = `NOT jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
|
||||
sql = `NOT jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${mappedOperator.substring(1)} ${sanitizeValue(value, operator)})')`
|
||||
} else {
|
||||
sql = `jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
|
||||
sql = `jsonb_path_exists(${columnName}, '${fullPath} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
|
||||
}
|
||||
|
||||
return sql
|
||||
|
||||
@@ -56,8 +56,8 @@ export const selectDistinct = ({
|
||||
query = query.where(where)
|
||||
}
|
||||
|
||||
joins.forEach(({ condition, table }) => {
|
||||
query = query.leftJoin(table, condition)
|
||||
joins.forEach(({ type, condition, table }) => {
|
||||
query = query[type ?? 'leftJoin'](table, condition)
|
||||
})
|
||||
|
||||
return queryModifier({
|
||||
|
||||
@@ -71,6 +71,7 @@ export const transformArray = ({
|
||||
data.forEach((arrayRow, i) => {
|
||||
const newRow: ArrayRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -104,6 +105,7 @@ export const transformArray = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -78,6 +78,7 @@ export const transformBlocks = ({
|
||||
|
||||
const newRow: BlockRowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
locales: {},
|
||||
row: {
|
||||
_order: i + 1,
|
||||
@@ -116,6 +117,7 @@ export const transformBlocks = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: newRow.arrays,
|
||||
arraysToPush: newRow.arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -27,6 +27,7 @@ export const transformForWrite = ({
|
||||
// Split out the incoming data into rows to insert / delete
|
||||
const rowToInsert: RowToInsert = {
|
||||
arrays: {},
|
||||
arraysToPush: {},
|
||||
blocks: {},
|
||||
blocksToDelete: new Set(),
|
||||
locales: {},
|
||||
@@ -45,6 +46,7 @@ export const transformForWrite = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays: rowToInsert.arrays,
|
||||
arraysToPush: rowToInsert.arraysToPush,
|
||||
baseTableName: tableName,
|
||||
blocks: rowToInsert.blocks,
|
||||
blocksToDelete: rowToInsert.blocksToDelete,
|
||||
|
||||
@@ -4,13 +4,7 @@ import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from '../../types.js'
|
||||
import type {
|
||||
ArrayRowToInsert,
|
||||
BlockRowToInsert,
|
||||
NumberToDelete,
|
||||
RelationshipToDelete,
|
||||
TextToDelete,
|
||||
} from './types.js'
|
||||
import type { NumberToDelete, RelationshipToDelete, RowToInsert, TextToDelete } from './types.js'
|
||||
|
||||
import { isArrayOfRows } from '../../utilities/isArrayOfRows.js'
|
||||
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
|
||||
@@ -23,16 +17,20 @@ import { transformTexts } from './texts.js'
|
||||
|
||||
type Args = {
|
||||
adapter: DrizzleAdapter
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
/**
|
||||
* This will delete the array table and then re-insert all the new array rows.
|
||||
*/
|
||||
arrays: RowToInsert['arrays']
|
||||
/**
|
||||
* Array rows to push to the existing array. This will simply create
|
||||
* a new row in the array table.
|
||||
*/
|
||||
arraysToPush: RowToInsert['arraysToPush']
|
||||
/**
|
||||
* This is the name of the base table
|
||||
*/
|
||||
baseTableName: string
|
||||
blocks: {
|
||||
[blockType: string]: BlockRowToInsert[]
|
||||
}
|
||||
blocks: RowToInsert['blocks']
|
||||
blocksToDelete: Set<string>
|
||||
/**
|
||||
* A snake-case field prefix, representing prior fields
|
||||
@@ -82,6 +80,7 @@ type Args = {
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -129,10 +128,6 @@ export const traverseFields = ({
|
||||
if (field.type === 'array') {
|
||||
const arrayTableName = adapter.tableNameMap.get(`${parentTableName}_${columnName}`)
|
||||
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
|
||||
if (isLocalized) {
|
||||
if (typeof data[field.name] === 'object' && data[field.name] !== null) {
|
||||
Object.entries(data[field.name]).forEach(([localeKey, localeData]) => {
|
||||
@@ -157,19 +152,33 @@ export const traverseFields = ({
|
||||
textsToDelete,
|
||||
withinArrayOrBlockLocale: localeKey,
|
||||
})
|
||||
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let value = data[field.name]
|
||||
let push = false
|
||||
if (
|
||||
// TODO do this for localized as well in DRY way
|
||||
|
||||
typeof value === 'object' &&
|
||||
'$push' in value
|
||||
) {
|
||||
value = Array.isArray(value.$push) ? value.$push : [value.$push]
|
||||
push = true
|
||||
}
|
||||
|
||||
const newRows = transformArray({
|
||||
adapter,
|
||||
arrayTableName,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
data: data[field.name],
|
||||
data: value,
|
||||
field,
|
||||
numbers,
|
||||
numbersToDelete,
|
||||
@@ -183,7 +192,17 @@ export const traverseFields = ({
|
||||
withinArrayOrBlockLocale,
|
||||
})
|
||||
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
if (push) {
|
||||
if (!arraysToPush[arrayTableName]) {
|
||||
arraysToPush[arrayTableName] = []
|
||||
}
|
||||
arraysToPush[arrayTableName] = arraysToPush[arrayTableName].concat(newRows)
|
||||
} else {
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
@@ -264,6 +283,7 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
@@ -298,6 +318,7 @@ export const traverseFields = ({
|
||||
traverseFields({
|
||||
adapter,
|
||||
arrays,
|
||||
arraysToPush,
|
||||
baseTableName,
|
||||
blocks,
|
||||
blocksToDelete,
|
||||
|
||||
@@ -2,6 +2,9 @@ export type ArrayRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -12,6 +15,9 @@ export type BlockRowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
locales: {
|
||||
[locale: string]: Record<string, unknown>
|
||||
}
|
||||
@@ -37,6 +43,9 @@ export type RowToInsert = {
|
||||
arrays: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
arraysToPush: {
|
||||
[tableName: string]: ArrayRowToInsert[]
|
||||
}
|
||||
blocks: {
|
||||
[tableName: string]: BlockRowToInsert[]
|
||||
}
|
||||
|
||||
@@ -161,10 +161,11 @@ export type CreateJSONQueryArgs = {
|
||||
column?: Column | string
|
||||
operator: string
|
||||
pathSegments: string[]
|
||||
rawColumn?: SQL<unknown>
|
||||
table?: string
|
||||
treatAsArray?: string[]
|
||||
treatRootAsArray?: boolean
|
||||
value: boolean | number | string
|
||||
value: boolean | number | number[] | string | string[]
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -6,15 +6,20 @@ import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { findMany } from './find/findMany.js'
|
||||
import { upsertRow } from './upsertRow/index.js'
|
||||
import { shouldUseOptimizedUpsertRow } from './upsertRow/shouldUseOptimizedUpsertRow.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const updateJobs: UpdateJobs = async function updateMany(
|
||||
this: DrizzleAdapter,
|
||||
{ id, data, limit: limitArg, req, returning, sort: sortArg, where: whereArg },
|
||||
) {
|
||||
if (!(data?.log as object[])?.length) {
|
||||
if (
|
||||
!(data?.log as object[])?.length &&
|
||||
!(data.log && typeof data.log === 'object' && '$push' in data.log)
|
||||
) {
|
||||
delete data.log
|
||||
}
|
||||
|
||||
const whereToUse: Where = id ? { id: { equals: id } } : whereArg
|
||||
const limit = id ? 1 : limitArg
|
||||
|
||||
@@ -23,6 +28,27 @@ export const updateJobs: UpdateJobs = async function updateMany(
|
||||
const tableName = this.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const sort = sortArg !== undefined && sortArg !== null ? sortArg : collection.defaultSort
|
||||
|
||||
const useOptimizedUpsertRow = shouldUseOptimizedUpsertRow({
|
||||
data,
|
||||
fields: collection.flattenedFields,
|
||||
})
|
||||
|
||||
if (useOptimizedUpsertRow && id) {
|
||||
const result = await upsertRow({
|
||||
id,
|
||||
adapter: this,
|
||||
data,
|
||||
db,
|
||||
fields: collection.flattenedFields,
|
||||
ignoreResult: returning === false,
|
||||
operation: 'update',
|
||||
req,
|
||||
tableName,
|
||||
})
|
||||
|
||||
return returning === false ? null : [result]
|
||||
}
|
||||
|
||||
const jobs = await findMany({
|
||||
adapter: this,
|
||||
collectionSlug: 'payload-jobs',
|
||||
@@ -42,10 +68,12 @@ export const updateJobs: UpdateJobs = async function updateMany(
|
||||
|
||||
// TODO: We need to batch this to reduce the amount of db calls. This can get very slow if we are updating a lot of rows.
|
||||
for (const job of jobs.docs) {
|
||||
const updateData = {
|
||||
...job,
|
||||
...data,
|
||||
}
|
||||
const updateData = useOptimizedUpsertRow
|
||||
? data
|
||||
: {
|
||||
...job,
|
||||
...data,
|
||||
}
|
||||
|
||||
const result = await upsertRow({
|
||||
id: job.id,
|
||||
|
||||
@@ -44,7 +44,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
}: Args): Promise<T> => {
|
||||
let insertedRow: Record<string, unknown> = { id }
|
||||
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
|
||||
const { row } = transformForWrite({
|
||||
const { arraysToPush, row } = transformForWrite({
|
||||
adapter,
|
||||
data,
|
||||
enableAtomicWrites: true,
|
||||
@@ -54,11 +54,27 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
const drizzle = db as LibSQLDatabase
|
||||
|
||||
// First, handle $push arrays
|
||||
|
||||
if (arraysToPush && Object.keys(arraysToPush)?.length) {
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [arraysToPush],
|
||||
db,
|
||||
parentRows: [insertedRow],
|
||||
uuidMap: {},
|
||||
})
|
||||
}
|
||||
|
||||
// Then, handle regular row update
|
||||
|
||||
if (ignoreResult) {
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
.where(eq(adapter.tables[tableName].id, id))
|
||||
if (row && Object.keys(row).length) {
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
.where(eq(adapter.tables[tableName].id, id))
|
||||
}
|
||||
return ignoreResult === 'idOnly' ? ({ id } as T) : null
|
||||
}
|
||||
|
||||
@@ -74,6 +90,22 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
const findManyKeysLength = Object.keys(findManyArgs).length
|
||||
const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0
|
||||
|
||||
if (!row || !Object.keys(row).length) {
|
||||
// Nothing to update => just fetch current row and return
|
||||
findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id)
|
||||
|
||||
const doc = await db.query[tableName].findFirst(findManyArgs)
|
||||
|
||||
return transform<T>({
|
||||
adapter,
|
||||
config: adapter.payload.config,
|
||||
data: doc,
|
||||
fields,
|
||||
joinQuery: false,
|
||||
tableName,
|
||||
})
|
||||
}
|
||||
|
||||
if (findManyKeysLength === 0 || hasOnlyColumns) {
|
||||
// Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections
|
||||
// without complex fields that live in separate tables like blocks, arrays, relationships, etc.
|
||||
@@ -429,9 +461,9 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
|
||||
await insertArrays({
|
||||
adapter,
|
||||
arrays: [rowToInsert.arrays],
|
||||
arrays: [rowToInsert.arrays, rowToInsert.arraysToPush],
|
||||
db,
|
||||
parentRows: [insertedRow],
|
||||
parentRows: [insertedRow, insertedRow],
|
||||
uuidMap: arraysBlocksUUIDMap,
|
||||
})
|
||||
|
||||
|
||||
@@ -32,6 +32,9 @@ export const insertArrays = async ({
|
||||
const rowsByTable: RowsByTable = {}
|
||||
|
||||
arrays.forEach((arraysByTable, parentRowIndex) => {
|
||||
if (!arraysByTable || Object.keys(arraysByTable).length === 0) {
|
||||
return
|
||||
}
|
||||
Object.entries(arraysByTable).forEach(([tableName, arrayRows]) => {
|
||||
// If the table doesn't exist in map, initialize it
|
||||
if (!rowsByTable[tableName]) {
|
||||
|
||||
@@ -20,7 +20,6 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
}
|
||||
|
||||
if (
|
||||
field.type === 'array' ||
|
||||
field.type === 'blocks' ||
|
||||
((field.type === 'text' ||
|
||||
field.type === 'relationship' ||
|
||||
@@ -35,6 +34,17 @@ export const shouldUseOptimizedUpsertRow = ({
|
||||
return false
|
||||
}
|
||||
|
||||
if (field.type === 'array') {
|
||||
if (typeof value === 'object' && '$push' in value && value.$push) {
|
||||
return shouldUseOptimizedUpsertRow({
|
||||
// Only check first row - this function cares about field definitions. Each array row will have the same field definitions.
|
||||
data: Array.isArray(value.$push) ? value.$push?.[0] : value.$push,
|
||||
fields: field.flattenedFields,
|
||||
})
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if (
|
||||
(field.type === 'group' || field.type === 'tab') &&
|
||||
value &&
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-nodemailer",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Payload Nodemailer Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-resend",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Payload Resend Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/graphql",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -9,6 +9,7 @@ export type Resolver = (
|
||||
args: {
|
||||
data: Record<string, unknown>
|
||||
locale?: string
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
},
|
||||
context: {
|
||||
@@ -30,6 +31,7 @@ export function countResolver(collection: Collection): Resolver {
|
||||
const options = {
|
||||
collection,
|
||||
req: isolateObjectProperty(req, 'transactionID'),
|
||||
trash: args.trash,
|
||||
where: args.where,
|
||||
}
|
||||
|
||||
|
||||
@@ -379,9 +379,11 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
|
||||
),
|
||||
},
|
||||
hasNextPage: { type: new GraphQLNonNull(GraphQLBoolean) },
|
||||
totalDocs: { type: GraphQLInt },
|
||||
},
|
||||
}),
|
||||
args: {
|
||||
count: { type: GraphQLBoolean },
|
||||
limit: {
|
||||
type: GraphQLInt,
|
||||
},
|
||||
@@ -402,7 +404,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
|
||||
},
|
||||
async resolve(parent, args, context: Context) {
|
||||
const { collection } = field
|
||||
const { limit, page, sort, where } = args
|
||||
const { count = false, limit, page, sort, where } = args
|
||||
const { req } = context
|
||||
|
||||
const draft = Boolean(args.draft ?? context.req.query?.draft)
|
||||
@@ -429,7 +431,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
|
||||
throw new Error('GraphQL with array of join.field.collection is not implemented')
|
||||
}
|
||||
|
||||
const { docs } = await req.payload.find({
|
||||
const { docs, totalDocs } = await req.payload.find({
|
||||
collection,
|
||||
depth: 0,
|
||||
draft,
|
||||
@@ -439,7 +441,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
|
||||
locale: req.locale,
|
||||
overrideAccess: false,
|
||||
page,
|
||||
pagination: false,
|
||||
pagination: count ? true : false,
|
||||
req,
|
||||
sort,
|
||||
where: fullWhere,
|
||||
@@ -454,6 +456,7 @@ export const fieldToSchemaMap: FieldToSchemaMap = {
|
||||
return {
|
||||
docs: shouldSlice ? docs.slice(0, -1) : docs,
|
||||
hasNextPage: limit === 0 ? false : limit < docs.length,
|
||||
...(count ? { totalDocs } : {}),
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -239,6 +239,7 @@ export function initCollections({ config, graphqlResult }: InitCollectionsGraphQ
|
||||
}),
|
||||
args: {
|
||||
draft: { type: GraphQLBoolean },
|
||||
trash: { type: GraphQLBoolean },
|
||||
where: { type: collection.graphQL.whereInputType },
|
||||
...(config.localization
|
||||
? {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-react",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official React SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -46,8 +46,8 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"payload": "workspace:*"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-vue",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official Vue SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official live preview JavaScript SDK for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/next",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -117,11 +117,11 @@
|
||||
"@babel/preset-env": "7.27.2",
|
||||
"@babel/preset-react": "7.27.1",
|
||||
"@babel/preset-typescript": "7.27.1",
|
||||
"@next/eslint-plugin-next": "15.3.2",
|
||||
"@next/eslint-plugin-next": "15.4.4",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@types/busboy": "1.5.4",
|
||||
"@types/react": "19.1.0",
|
||||
"@types/react-dom": "19.1.2",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@types/uuid": "10.0.0",
|
||||
"babel-plugin-react-compiler": "19.1.0-rc.2",
|
||||
"esbuild": "0.25.5",
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
import type { DocumentTabConfig, DocumentTabServerProps, ServerProps } from 'payload'
|
||||
import type {
|
||||
DocumentTabConfig,
|
||||
DocumentTabServerPropsOnly,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
} from 'payload'
|
||||
import type React from 'react'
|
||||
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
@@ -9,27 +16,24 @@ import './index.scss'
|
||||
|
||||
export const baseClass = 'doc-tab'
|
||||
|
||||
export const DocumentTab: React.FC<
|
||||
{ readonly Pill_Component?: React.FC } & DocumentTabConfig & DocumentTabServerProps
|
||||
> = (props) => {
|
||||
export const DefaultDocumentTab: React.FC<{
|
||||
apiURL?: string
|
||||
collectionConfig?: SanitizedCollectionConfig
|
||||
globalConfig?: SanitizedGlobalConfig
|
||||
path?: string
|
||||
permissions?: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
tabConfig: { readonly Pill_Component?: React.FC } & DocumentTabConfig
|
||||
}> = (props) => {
|
||||
const {
|
||||
apiURL,
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
href: tabHref,
|
||||
i18n,
|
||||
isActive: tabIsActive,
|
||||
label,
|
||||
newTab,
|
||||
payload,
|
||||
permissions,
|
||||
Pill,
|
||||
Pill_Component,
|
||||
req,
|
||||
tabConfig: { href: tabHref, isActive: tabIsActive, label, newTab, Pill, Pill_Component },
|
||||
} = props
|
||||
|
||||
const { config } = payload
|
||||
const { routes } = config
|
||||
|
||||
let href = typeof tabHref === 'string' ? tabHref : ''
|
||||
let isActive = typeof tabIsActive === 'boolean' ? tabIsActive : false
|
||||
|
||||
@@ -38,7 +42,7 @@ export const DocumentTab: React.FC<
|
||||
apiURL,
|
||||
collection: collectionConfig,
|
||||
global: globalConfig,
|
||||
routes,
|
||||
routes: req.payload.config.routes,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -51,13 +55,13 @@ export const DocumentTab: React.FC<
|
||||
const labelToRender =
|
||||
typeof label === 'function'
|
||||
? label({
|
||||
t: i18n.t,
|
||||
t: req.i18n.t,
|
||||
})
|
||||
: label
|
||||
|
||||
return (
|
||||
<DocumentTabLink
|
||||
adminRoute={routes.admin}
|
||||
adminRoute={req.payload.config.routes.admin}
|
||||
ariaLabel={labelToRender}
|
||||
baseClass={baseClass}
|
||||
href={href}
|
||||
@@ -72,12 +76,14 @@ export const DocumentTab: React.FC<
|
||||
{RenderServerComponent({
|
||||
Component: Pill,
|
||||
Fallback: Pill_Component,
|
||||
importMap: payload.importMap,
|
||||
importMap: req.payload.importMap,
|
||||
serverProps: {
|
||||
i18n,
|
||||
payload,
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
permissions,
|
||||
} satisfies ServerProps,
|
||||
req,
|
||||
user: req.user,
|
||||
} satisfies DocumentTabServerPropsOnly,
|
||||
})}
|
||||
</Fragment>
|
||||
) : null}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import type { I18n } from '@payloadcms/translations'
|
||||
import type {
|
||||
DocumentTabClientProps,
|
||||
DocumentTabServerPropsOnly,
|
||||
Payload,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
@@ -12,7 +11,7 @@ import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerCompo
|
||||
import React from 'react'
|
||||
|
||||
import { ShouldRenderTabs } from './ShouldRenderTabs.js'
|
||||
import { DocumentTab } from './Tab/index.js'
|
||||
import { DefaultDocumentTab } from './Tab/index.js'
|
||||
import { getTabs } from './tabs/index.js'
|
||||
import './index.scss'
|
||||
|
||||
@@ -21,12 +20,10 @@ const baseClass = 'doc-tabs'
|
||||
export const DocumentTabs: React.FC<{
|
||||
collectionConfig: SanitizedCollectionConfig
|
||||
globalConfig: SanitizedGlobalConfig
|
||||
i18n: I18n
|
||||
payload: Payload
|
||||
permissions: SanitizedPermissions
|
||||
}> = (props) => {
|
||||
const { collectionConfig, globalConfig, i18n, payload, permissions } = props
|
||||
const { config } = payload
|
||||
req: PayloadRequest
|
||||
}> = ({ collectionConfig, globalConfig, permissions, req }) => {
|
||||
const { config } = req.payload
|
||||
|
||||
const tabs = getTabs({
|
||||
collectionConfig,
|
||||
@@ -38,42 +35,46 @@ export const DocumentTabs: React.FC<{
|
||||
<div className={baseClass}>
|
||||
<div className={`${baseClass}__tabs-container`}>
|
||||
<ul className={`${baseClass}__tabs`}>
|
||||
{tabs?.map(({ tab, viewPath }, index) => {
|
||||
const { condition } = tab || {}
|
||||
{tabs?.map(({ tab: tabConfig, viewPath }, index) => {
|
||||
const { condition } = tabConfig || {}
|
||||
|
||||
const meetsCondition =
|
||||
!condition || condition({ collectionConfig, config, globalConfig, permissions })
|
||||
!condition ||
|
||||
condition({ collectionConfig, config, globalConfig, permissions, req })
|
||||
|
||||
if (!meetsCondition) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (tab?.Component) {
|
||||
if (tabConfig?.Component) {
|
||||
return RenderServerComponent({
|
||||
clientProps: {
|
||||
path: viewPath,
|
||||
} satisfies DocumentTabClientProps,
|
||||
Component: tab.Component,
|
||||
importMap: payload.importMap,
|
||||
Component: tabConfig.Component,
|
||||
importMap: req.payload.importMap,
|
||||
key: `tab-${index}`,
|
||||
serverProps: {
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
i18n,
|
||||
payload,
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
permissions,
|
||||
req,
|
||||
user: req.user,
|
||||
} satisfies DocumentTabServerPropsOnly,
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
<DocumentTab
|
||||
<DefaultDocumentTab
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
key={`tab-${index}`}
|
||||
path={viewPath}
|
||||
{...{
|
||||
...props,
|
||||
...tab,
|
||||
}}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
tabConfig={tabConfig}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { I18n } from '@payloadcms/translations'
|
||||
import type {
|
||||
Payload,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
SanitizedGlobalConfig,
|
||||
SanitizedPermissions,
|
||||
@@ -18,11 +18,10 @@ export const DocumentHeader: React.FC<{
|
||||
collectionConfig?: SanitizedCollectionConfig
|
||||
globalConfig?: SanitizedGlobalConfig
|
||||
hideTabs?: boolean
|
||||
i18n: I18n
|
||||
payload: Payload
|
||||
permissions: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
}> = (props) => {
|
||||
const { collectionConfig, globalConfig, hideTabs, i18n, payload, permissions } = props
|
||||
const { collectionConfig, globalConfig, hideTabs, permissions, req } = props
|
||||
|
||||
return (
|
||||
<Gutter className={baseClass}>
|
||||
@@ -31,9 +30,8 @@ export const DocumentHeader: React.FC<{
|
||||
<DocumentTabs
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
i18n={i18n}
|
||||
payload={payload}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
/>
|
||||
)}
|
||||
</Gutter>
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
@import '~@payloadcms/ui/scss';
|
||||
|
||||
$tab-width: 16px;
|
||||
$tab-width: 24px;
|
||||
|
||||
@layer payload-default {
|
||||
.query-inspector {
|
||||
--tab-width: 24px;
|
||||
|
||||
&__json-children {
|
||||
position: relative;
|
||||
|
||||
&--nested {
|
||||
& li {
|
||||
padding-left: $tab-width;
|
||||
padding-left: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +25,14 @@ $tab-width: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
&__row-line {
|
||||
&--nested {
|
||||
.query-inspector__json-children {
|
||||
padding-left: var(--tab-width);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&__list-wrap {
|
||||
position: relative;
|
||||
}
|
||||
@@ -37,10 +47,16 @@ $tab-width: 16px;
|
||||
border-bottom-right-radius: 0;
|
||||
position: relative;
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
column-gap: 14px;
|
||||
row-gap: 10px;
|
||||
align-items: center;
|
||||
left: -3px;
|
||||
left: 0;
|
||||
width: calc(100% + 3px);
|
||||
background-color: var(--theme-elevation-50);
|
||||
|
||||
&:not(.query-inspector__list-toggle--empty) {
|
||||
margin-left: calc(var(--tab-width) * -1 - 10px);
|
||||
}
|
||||
|
||||
svg .stroke {
|
||||
stroke: var(--theme-elevation-400);
|
||||
@@ -82,14 +98,32 @@ $tab-width: 16px;
|
||||
&__bracket {
|
||||
position: relative;
|
||||
|
||||
&--nested {
|
||||
margin-left: $tab-width;
|
||||
}
|
||||
|
||||
&--position-end {
|
||||
left: 1px;
|
||||
left: 2px;
|
||||
width: calc(100% - 5px);
|
||||
}
|
||||
}
|
||||
|
||||
// Some specific rules targetting the very top of the nested JSON structure or very first items since they need slightly different styling
|
||||
&__results {
|
||||
& > .query-inspector__row-line--nested {
|
||||
& > .query-inspector__list-toggle {
|
||||
margin-left: 0;
|
||||
column-gap: 6px;
|
||||
|
||||
.query-inspector__toggle-row-icon {
|
||||
margin-left: -4px;
|
||||
}
|
||||
}
|
||||
|
||||
& > .query-inspector__json-children {
|
||||
padding-left: calc(var(--base) * 1);
|
||||
}
|
||||
|
||||
& > .query-inspector__bracket--nested > .query-inspector__bracket--position-end {
|
||||
padding-left: 16px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,9 +137,8 @@ export async function Account({ initPageResult, params, searchParams }: AdminVie
|
||||
<DocumentHeader
|
||||
collectionConfig={collectionConfig}
|
||||
hideTabs
|
||||
i18n={i18n}
|
||||
payload={payload}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
/>
|
||||
<HydrateAuthProvider permissions={permissions} />
|
||||
{RenderServerComponent({
|
||||
|
||||
@@ -19,17 +19,14 @@ type RenderTrashViewArgs = {
|
||||
redirectAfterRestore?: boolean
|
||||
} & AdminViewServerProps
|
||||
|
||||
export const TrashView: React.FC<
|
||||
{ query?: any } & Omit<RenderTrashViewArgs, 'enableRowSelections'>
|
||||
> = async (args) => {
|
||||
export const TrashView: React.FC<Omit<RenderTrashViewArgs, 'enableRowSelections'>> = async (
|
||||
args,
|
||||
) => {
|
||||
try {
|
||||
const { List: TrashList } = await renderListView({
|
||||
...args,
|
||||
enableRowSelections: true,
|
||||
query: {
|
||||
...(args.query || {}),
|
||||
trash: true, // force trash view
|
||||
},
|
||||
trash: true,
|
||||
viewType: 'trash',
|
||||
})
|
||||
|
||||
|
||||
@@ -110,17 +110,18 @@ export const renderDocument = async ({
|
||||
|
||||
// Fetch the doc required for the view
|
||||
let doc =
|
||||
initialData ||
|
||||
(await getDocumentData({
|
||||
id: idFromArgs,
|
||||
collectionSlug,
|
||||
globalSlug,
|
||||
locale,
|
||||
payload,
|
||||
req,
|
||||
segments,
|
||||
user,
|
||||
}))
|
||||
!idFromArgs && !globalSlug
|
||||
? initialData || null
|
||||
: await getDocumentData({
|
||||
id: idFromArgs,
|
||||
collectionSlug,
|
||||
globalSlug,
|
||||
locale,
|
||||
payload,
|
||||
req,
|
||||
segments,
|
||||
user,
|
||||
})
|
||||
|
||||
if (isEditing && !doc) {
|
||||
// If it's a collection document that doesn't exist, redirect to collection list
|
||||
@@ -416,9 +417,8 @@ export const renderDocument = async ({
|
||||
<DocumentHeader
|
||||
collectionConfig={collectionConfig}
|
||||
globalConfig={globalConfig}
|
||||
i18n={i18n}
|
||||
payload={payload}
|
||||
permissions={permissions}
|
||||
req={req}
|
||||
/>
|
||||
)}
|
||||
<HydrateAuthProvider permissions={permissions} />
|
||||
|
||||
@@ -5,6 +5,7 @@ import type {
|
||||
PaginatedDocs,
|
||||
PayloadRequest,
|
||||
SanitizedCollectionConfig,
|
||||
ViewTypes,
|
||||
Where,
|
||||
} from 'payload'
|
||||
|
||||
@@ -22,7 +23,9 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
trash = false,
|
||||
user,
|
||||
viewType,
|
||||
where: whereWithMergedSearch,
|
||||
}: {
|
||||
clientConfig: ClientConfig
|
||||
@@ -34,7 +37,9 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections?: boolean
|
||||
query?: ListQuery
|
||||
req: PayloadRequest
|
||||
trash?: boolean
|
||||
user: any
|
||||
viewType?: ViewTypes
|
||||
where: Where
|
||||
}): Promise<{
|
||||
columnState: Column[]
|
||||
@@ -88,6 +93,7 @@ export const handleGroupBy = async ({
|
||||
populate,
|
||||
req,
|
||||
sort: query?.groupBy,
|
||||
trash,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
|
||||
@@ -127,6 +133,7 @@ export const handleGroupBy = async ({
|
||||
// Note: if we wanted to enable table-by-table sorting, we could use this:
|
||||
// sort: query?.queryByGroup?.[valueOrRelationshipID]?.sort,
|
||||
sort: query?.sort,
|
||||
trash,
|
||||
user,
|
||||
where: {
|
||||
...(whereWithMergedSearch || {}),
|
||||
@@ -136,10 +143,11 @@ export const handleGroupBy = async ({
|
||||
},
|
||||
})
|
||||
|
||||
let heading = valueOrRelationshipID || req.i18n.t('general:noValue')
|
||||
let heading = valueOrRelationshipID
|
||||
|
||||
if (
|
||||
groupByField?.type === 'relationship' &&
|
||||
potentiallyPopulatedRelationship &&
|
||||
typeof potentiallyPopulatedRelationship === 'object'
|
||||
) {
|
||||
heading =
|
||||
@@ -147,14 +155,24 @@ export const handleGroupBy = async ({
|
||||
valueOrRelationshipID
|
||||
}
|
||||
|
||||
if (groupByField.type === 'date') {
|
||||
if (groupByField.type === 'date' && valueOrRelationshipID) {
|
||||
heading = formatDate({
|
||||
date: String(heading),
|
||||
date: String(valueOrRelationshipID),
|
||||
i18n: req.i18n,
|
||||
pattern: clientConfig.admin.dateFormat,
|
||||
})
|
||||
}
|
||||
|
||||
if (groupByField.type === 'checkbox') {
|
||||
if (valueOrRelationshipID === true) {
|
||||
heading = req.i18n.t('general:true')
|
||||
}
|
||||
|
||||
if (valueOrRelationshipID === false) {
|
||||
heading = req.i18n.t('general:false')
|
||||
}
|
||||
}
|
||||
|
||||
if (groupData.docs && groupData.docs.length > 0) {
|
||||
const { columnState: newColumnState, Table: NewTable } = renderTable({
|
||||
clientCollectionConfig,
|
||||
@@ -166,13 +184,14 @@ export const handleGroupBy = async ({
|
||||
enableRowSelections,
|
||||
groupByFieldPath,
|
||||
groupByValue: valueOrRelationshipID,
|
||||
heading,
|
||||
heading: heading || req.i18n.t('general:noValue'),
|
||||
i18n: req.i18n,
|
||||
key: `table-${valueOrRelationshipID}`,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
})
|
||||
|
||||
// Only need to set `columnState` once, using the first table's column state
|
||||
|
||||
@@ -1,20 +1,19 @@
|
||||
import type {
|
||||
AdminViewServerProps,
|
||||
CollectionPreferences,
|
||||
Column,
|
||||
ColumnPreference,
|
||||
ListQuery,
|
||||
ListViewClientProps,
|
||||
ListViewServerPropsOnly,
|
||||
PaginatedDocs,
|
||||
QueryPreset,
|
||||
SanitizedCollectionPermission,
|
||||
} from 'payload'
|
||||
|
||||
import { DefaultListView, HydrateAuthProvider, ListQueryProvider } from '@payloadcms/ui'
|
||||
import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent'
|
||||
import { renderFilters, renderTable, upsertPreferences } from '@payloadcms/ui/rsc'
|
||||
import { notFound } from 'next/navigation.js'
|
||||
import {
|
||||
type AdminViewServerProps,
|
||||
type CollectionPreferences,
|
||||
type Column,
|
||||
type ColumnPreference,
|
||||
type ListQuery,
|
||||
type ListViewClientProps,
|
||||
type ListViewServerPropsOnly,
|
||||
type PaginatedDocs,
|
||||
type QueryPreset,
|
||||
type SanitizedCollectionPermission,
|
||||
} from 'payload'
|
||||
import {
|
||||
combineWhereConstraints,
|
||||
formatAdminURL,
|
||||
@@ -41,6 +40,10 @@ type RenderListViewArgs = {
|
||||
query: ListQuery
|
||||
redirectAfterDelete?: boolean
|
||||
redirectAfterDuplicate?: boolean
|
||||
/**
|
||||
* @experimental This prop is subject to change in future releases.
|
||||
*/
|
||||
trash?: boolean
|
||||
} & AdminViewServerProps
|
||||
|
||||
/**
|
||||
@@ -67,6 +70,7 @@ export const renderListView = async (
|
||||
params,
|
||||
query: queryFromArgs,
|
||||
searchParams,
|
||||
trash,
|
||||
viewType,
|
||||
} = args
|
||||
|
||||
@@ -134,46 +138,25 @@ export const renderListView = async (
|
||||
throw new Error('not-found')
|
||||
}
|
||||
|
||||
let baseListFilter = undefined
|
||||
|
||||
if (typeof collectionConfig.admin?.baseListFilter === 'function') {
|
||||
baseListFilter = await collectionConfig.admin.baseListFilter({
|
||||
limit: query.limit,
|
||||
page: query.page,
|
||||
req,
|
||||
sort: query.sort,
|
||||
})
|
||||
}
|
||||
|
||||
let whereCondition = mergeListSearchAndWhere({
|
||||
collectionConfig,
|
||||
search: typeof query?.search === 'string' ? query.search : undefined,
|
||||
where: combineWhereConstraints([query?.where, baseListFilter]),
|
||||
const baseFilterConstraint = await (
|
||||
collectionConfig.admin?.baseFilter ?? collectionConfig.admin?.baseListFilter
|
||||
)?.({
|
||||
limit: query.limit,
|
||||
page: query.page,
|
||||
req,
|
||||
sort: query.sort,
|
||||
})
|
||||
|
||||
if (query?.trash === true) {
|
||||
whereCondition = {
|
||||
and: [
|
||||
whereCondition,
|
||||
{
|
||||
deletedAt: {
|
||||
exists: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
let queryPreset: QueryPreset | undefined
|
||||
let queryPresetPermissions: SanitizedCollectionPermission | undefined
|
||||
|
||||
let whereWithMergedSearch = mergeListSearchAndWhere({
|
||||
collectionConfig,
|
||||
search: typeof query?.search === 'string' ? query.search : undefined,
|
||||
where: combineWhereConstraints([query?.where, baseListFilter]),
|
||||
where: combineWhereConstraints([query?.where, baseFilterConstraint]),
|
||||
})
|
||||
|
||||
if (query?.trash === true) {
|
||||
if (trash === true) {
|
||||
whereWithMergedSearch = {
|
||||
and: [
|
||||
whereWithMergedSearch,
|
||||
@@ -209,56 +192,81 @@ export const renderListView = async (
|
||||
}
|
||||
}
|
||||
|
||||
let data: PaginatedDocs | undefined
|
||||
let Table: React.ReactNode | React.ReactNode[] = null
|
||||
let columnState: Column[] = []
|
||||
let data: PaginatedDocs = {
|
||||
// no results default
|
||||
docs: [],
|
||||
hasNextPage: false,
|
||||
hasPrevPage: false,
|
||||
limit: query.limit,
|
||||
nextPage: null,
|
||||
page: 1,
|
||||
pagingCounter: 0,
|
||||
prevPage: null,
|
||||
totalDocs: 0,
|
||||
totalPages: 0,
|
||||
}
|
||||
|
||||
if (collectionConfig.admin.groupBy && query.groupBy) {
|
||||
;({ columnState, data, Table } = await handleGroupBy({
|
||||
clientConfig,
|
||||
collectionConfig,
|
||||
collectionSlug,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
}))
|
||||
} else {
|
||||
data = await req.payload.find({
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
draft: true,
|
||||
fallbackLocale: false,
|
||||
includeLockStatus: true,
|
||||
limit: query?.limit ? Number(query.limit) : undefined,
|
||||
locale: req.locale,
|
||||
overrideAccess: false,
|
||||
page: query?.page ? Number(query.page) : undefined,
|
||||
req,
|
||||
sort: query?.sort,
|
||||
trash: query?.trash === true,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
;({ columnState, Table } = renderTable({
|
||||
clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug),
|
||||
collectionConfig,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
data,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
i18n: req.i18n,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
}))
|
||||
try {
|
||||
if (collectionConfig.admin.groupBy && query.groupBy) {
|
||||
;({ columnState, data, Table } = await handleGroupBy({
|
||||
clientConfig,
|
||||
collectionConfig,
|
||||
collectionSlug,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
query,
|
||||
req,
|
||||
trash,
|
||||
user,
|
||||
viewType,
|
||||
where: whereWithMergedSearch,
|
||||
}))
|
||||
} else {
|
||||
data = await req.payload.find({
|
||||
collection: collectionSlug,
|
||||
depth: 0,
|
||||
draft: true,
|
||||
fallbackLocale: false,
|
||||
includeLockStatus: true,
|
||||
limit: query?.limit ? Number(query.limit) : undefined,
|
||||
locale: req.locale,
|
||||
overrideAccess: false,
|
||||
page: query?.page ? Number(query.page) : undefined,
|
||||
req,
|
||||
sort: query?.sort,
|
||||
trash,
|
||||
user,
|
||||
where: whereWithMergedSearch,
|
||||
})
|
||||
;({ columnState, Table } = renderTable({
|
||||
clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug),
|
||||
collectionConfig,
|
||||
columns: collectionPreferences?.columns,
|
||||
customCellProps,
|
||||
data,
|
||||
drawerSlug,
|
||||
enableRowSelections,
|
||||
i18n: req.i18n,
|
||||
orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined,
|
||||
payload: req.payload,
|
||||
query,
|
||||
useAsTitle: collectionConfig.admin.useAsTitle,
|
||||
viewType,
|
||||
}))
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.name !== 'QueryError') {
|
||||
// QueryErrors are expected when a user filters by a field they do not have access to
|
||||
req.payload.logger.error({
|
||||
err,
|
||||
msg: `There was an error fetching the list view data for collection ${collectionSlug}`,
|
||||
})
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
const renderedFilters = renderFilters(collectionConfig.fields, req.payload.importMap)
|
||||
|
||||
@@ -15,7 +15,7 @@ export const SetStepNav: React.FC<{
|
||||
readonly isTrashed?: boolean
|
||||
versionToCreatedAtFormatted?: string
|
||||
versionToID?: string
|
||||
versionToUseAsTitle?: string
|
||||
versionToUseAsTitle?: Record<string, string> | string
|
||||
}> = ({
|
||||
id,
|
||||
collectionConfig,
|
||||
@@ -54,7 +54,7 @@ export const SetStepNav: React.FC<{
|
||||
? versionToUseAsTitle?.[locale.code] || docLabel
|
||||
: versionToUseAsTitle
|
||||
} else if (useAsTitle === 'id') {
|
||||
docLabel = versionToID
|
||||
docLabel = String(id)
|
||||
}
|
||||
|
||||
const docBasePath: `/${string}` = isTrashed
|
||||
|
||||
@@ -17,7 +17,13 @@ import {
|
||||
type SanitizedFieldPermissions,
|
||||
type VersionField,
|
||||
} from 'payload'
|
||||
import { fieldIsID, fieldShouldBeLocalized, getUniqueListBy, tabHasName } from 'payload/shared'
|
||||
import {
|
||||
fieldIsID,
|
||||
fieldShouldBeLocalized,
|
||||
getFieldPermissions,
|
||||
getUniqueListBy,
|
||||
tabHasName,
|
||||
} from 'payload/shared'
|
||||
|
||||
import { diffComponents } from './fields/index.js'
|
||||
import { getFieldPathsModified } from './utilities/getFieldPathsModified.js'
|
||||
@@ -223,21 +229,16 @@ const buildVersionField = ({
|
||||
BuildVersionFieldsArgs,
|
||||
'fields' | 'parentIndexPath' | 'versionFromSiblingData' | 'versionToSiblingData'
|
||||
>): BaseVersionField | null => {
|
||||
const fieldName: null | string = 'name' in field ? field.name : null
|
||||
const { permissions, read: hasReadPermission } = getFieldPermissions({
|
||||
field,
|
||||
operation: 'read',
|
||||
parentName: parentPath?.includes('.')
|
||||
? parentPath.split('.')[parentPath.split('.').length - 1]
|
||||
: parentPath,
|
||||
permissions: fieldPermissions,
|
||||
})
|
||||
|
||||
const hasPermission =
|
||||
fieldPermissions === true ||
|
||||
!fieldName ||
|
||||
fieldPermissions?.[fieldName] === true ||
|
||||
fieldPermissions?.[fieldName]?.read
|
||||
|
||||
const subFieldPermissions =
|
||||
fieldPermissions === true ||
|
||||
!fieldName ||
|
||||
fieldPermissions?.[fieldName] === true ||
|
||||
fieldPermissions?.[fieldName]?.fields
|
||||
|
||||
if (!hasPermission) {
|
||||
if (!hasReadPermission) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -292,13 +293,29 @@ const buildVersionField = ({
|
||||
parentPath,
|
||||
parentSchemaPath,
|
||||
})
|
||||
|
||||
let tabPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (typeof permissions === 'boolean') {
|
||||
tabPermissions = permissions
|
||||
} else if (permissions && typeof permissions === 'object') {
|
||||
if ('name' in tab) {
|
||||
tabPermissions =
|
||||
typeof permissions.fields?.[tab.name] === 'object'
|
||||
? permissions.fields?.[tab.name].fields
|
||||
: permissions.fields?.[tab.name]
|
||||
} else {
|
||||
tabPermissions = permissions.fields
|
||||
}
|
||||
}
|
||||
|
||||
const tabVersion = {
|
||||
name: 'name' in tab ? tab.name : null,
|
||||
fields: buildVersionFields({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions,
|
||||
fieldPermissions: tabPermissions,
|
||||
fields: tab.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -324,6 +341,13 @@ const buildVersionField = ({
|
||||
}
|
||||
} // At this point, we are dealing with a `row`, `collapsible`, etc
|
||||
else if ('fields' in field) {
|
||||
let subfieldPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (typeof permissions === 'boolean') {
|
||||
subfieldPermissions = permissions
|
||||
} else if (permissions && typeof permissions === 'object') {
|
||||
subfieldPermissions = permissions.fields
|
||||
}
|
||||
if (field.type === 'array' && (valueTo || valueFrom)) {
|
||||
const maxLength = Math.max(
|
||||
Array.isArray(valueTo) ? valueTo.length : 0,
|
||||
@@ -339,7 +363,7 @@ const buildVersionField = ({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions,
|
||||
fieldPermissions: subfieldPermissions,
|
||||
fields: field.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -363,7 +387,7 @@ const buildVersionField = ({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions,
|
||||
fieldPermissions: subfieldPermissions,
|
||||
fields: field.fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -421,11 +445,24 @@ const buildVersionField = ({
|
||||
}
|
||||
}
|
||||
|
||||
let blockPermissions: typeof fieldPermissions = undefined
|
||||
|
||||
if (permissions === true) {
|
||||
blockPermissions = true
|
||||
} else {
|
||||
const permissionsBlockSpecific = permissions?.blocks?.[blockSlugToMatch]
|
||||
if (permissionsBlockSpecific === true) {
|
||||
blockPermissions = true
|
||||
} else {
|
||||
blockPermissions = permissionsBlockSpecific?.fields
|
||||
}
|
||||
}
|
||||
|
||||
baseVersionField.rows[i] = buildVersionFields({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions,
|
||||
fieldPermissions: blockPermissions,
|
||||
fields,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
@@ -459,7 +496,7 @@ const buildVersionField = ({
|
||||
*/
|
||||
diffMethod: 'diffWordsWithSpace',
|
||||
field: clientField,
|
||||
fieldPermissions: subFieldPermissions,
|
||||
fieldPermissions: typeof permissions === 'object' ? permissions.fields : permissions,
|
||||
parentIsLocalized,
|
||||
|
||||
nestingLevel: nestingLevel ? nestingLevel : undefined,
|
||||
|
||||
@@ -18,12 +18,12 @@ export const generateLabelFromValue = ({
|
||||
value: PopulatedRelationshipValue
|
||||
}): string => {
|
||||
let relatedDoc: TypeWithID
|
||||
let relationTo: string = field.relationTo as string
|
||||
let valueToReturn: string = ''
|
||||
|
||||
const relationTo: string = 'relationTo' in value ? value.relationTo : (field.relationTo as string)
|
||||
|
||||
if (typeof value === 'object' && 'relationTo' in value) {
|
||||
relatedDoc = value.value
|
||||
relationTo = value.relationTo
|
||||
} else {
|
||||
// Non-polymorphic relationship
|
||||
relatedDoc = value
|
||||
|
||||
@@ -411,6 +411,11 @@ export async function VersionView(props: DocumentViewServerProps) {
|
||||
})
|
||||
}
|
||||
|
||||
const useAsTitleFieldName = collectionConfig?.admin?.useAsTitle || 'id'
|
||||
const versionToUseAsTitle =
|
||||
useAsTitleFieldName === 'id'
|
||||
? String(versionTo.parent)
|
||||
: versionTo.version?.[useAsTitleFieldName]
|
||||
return (
|
||||
<DefaultVersionView
|
||||
canUpdate={docPermissions?.update}
|
||||
@@ -425,7 +430,7 @@ export async function VersionView(props: DocumentViewServerProps) {
|
||||
VersionToCreatedAtLabel={formatPill({ doc: versionTo, labelStyle: 'pill' })}
|
||||
versionToID={versionTo.id}
|
||||
versionToStatus={versionTo.version?._status}
|
||||
versionToUseAsTitle={versionTo[collectionConfig?.admin?.useAsTitle || 'id']}
|
||||
versionToUseAsTitle={versionToUseAsTitle}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/payload-cloud",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official Payload Cloud plugin",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload",
|
||||
"version": "3.49.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Node, React, Headless CMS and Application Framework built on Next.js",
|
||||
"keywords": [
|
||||
"admin panel",
|
||||
|
||||
@@ -68,6 +68,9 @@ export type FieldPaths = {
|
||||
path: string
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: This should be renamed to `FieldComponentServerProps` or similar
|
||||
*/
|
||||
export type ServerComponentProps = {
|
||||
clientField: ClientFieldWithOptionalType
|
||||
clientFieldSchemaMap: ClientFieldSchemaMap
|
||||
|
||||
@@ -2,6 +2,7 @@ import type { SanitizedPermissions } from '../../auth/types.js'
|
||||
import type { SanitizedCollectionConfig } from '../../collections/config/types.js'
|
||||
import type { PayloadComponent, SanitizedConfig, ServerProps } from '../../config/types.js'
|
||||
import type { SanitizedGlobalConfig } from '../../globals/config/types.js'
|
||||
import type { PayloadRequest } from '../../types/index.js'
|
||||
import type { Data, DocumentSlots, FormState } from '../types.js'
|
||||
import type { InitPageResult, ViewTypes } from './index.js'
|
||||
|
||||
@@ -50,6 +51,7 @@ export type DocumentTabServerPropsOnly = {
|
||||
readonly collectionConfig?: SanitizedCollectionConfig
|
||||
readonly globalConfig?: SanitizedGlobalConfig
|
||||
readonly permissions: SanitizedPermissions
|
||||
readonly req: PayloadRequest
|
||||
} & ServerProps
|
||||
|
||||
export type DocumentTabClientProps = {
|
||||
@@ -60,9 +62,13 @@ export type DocumentTabServerProps = DocumentTabClientProps & DocumentTabServerP
|
||||
|
||||
export type DocumentTabCondition = (args: {
|
||||
collectionConfig: SanitizedCollectionConfig
|
||||
/**
|
||||
* @deprecated: Use `req.payload.config` instead. This will be removed in v4.
|
||||
*/
|
||||
config: SanitizedConfig
|
||||
globalConfig: SanitizedGlobalConfig
|
||||
permissions: SanitizedPermissions
|
||||
req: PayloadRequest
|
||||
}) => boolean
|
||||
|
||||
// Everything is optional because we merge in the defaults
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
export const isUserLocked = (date: number): boolean => {
|
||||
export const isUserLocked = (date: Date): boolean => {
|
||||
if (!date) {
|
||||
return false
|
||||
}
|
||||
return date > Date.now()
|
||||
return date.getTime() > Date.now()
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type {
|
||||
AuthOperationsFromCollectionSlug,
|
||||
Collection,
|
||||
@@ -24,7 +22,7 @@ import { getFieldsToSign } from '../getFieldsToSign.js'
|
||||
import { getLoginOptions } from '../getLoginOptions.js'
|
||||
import { isUserLocked } from '../isUserLocked.js'
|
||||
import { jwtSign } from '../jwt.js'
|
||||
import { removeExpiredSessions } from '../removeExpiredSessions.js'
|
||||
import { addSessionToUser } from '../sessions.js'
|
||||
import { authenticateLocalStrategy } from '../strategies/local/authenticate.js'
|
||||
import { incrementLoginAttempts } from '../strategies/local/incrementLoginAttempts.js'
|
||||
import { resetLoginAttempts } from '../strategies/local/resetLoginAttempts.js'
|
||||
@@ -50,6 +48,11 @@ type CheckLoginPermissionArgs = {
|
||||
user: any
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error if the user is locked or does not exist.
|
||||
* This does not check the login attempts, only the lock status. Whoever increments login attempts
|
||||
* is responsible for locking the user properly, not whoever checks the login permission.
|
||||
*/
|
||||
export const checkLoginPermission = ({
|
||||
loggingInWithUsername,
|
||||
req,
|
||||
@@ -59,7 +62,7 @@ export const checkLoginPermission = ({
|
||||
throw new AuthenticationError(req.t, Boolean(loggingInWithUsername))
|
||||
}
|
||||
|
||||
if (isUserLocked(new Date(user.lockUntil).getTime())) {
|
||||
if (isUserLocked(new Date(user.lockUntil))) {
|
||||
throw new LockedAuth(req.t)
|
||||
}
|
||||
}
|
||||
@@ -206,11 +209,11 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
where: whereConstraint,
|
||||
})
|
||||
|
||||
let user = await payload.db.findOne<any>({
|
||||
let user = (await payload.db.findOne<TypedUser>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: whereConstraint,
|
||||
})
|
||||
})) as TypedUser
|
||||
|
||||
checkLoginPermission({
|
||||
loggingInWithUsername: Boolean(canLoginWithUsername && sanitizedUsername),
|
||||
@@ -230,9 +233,16 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
if (maxLoginAttemptsEnabled) {
|
||||
await incrementLoginAttempts({
|
||||
collection: collectionConfig,
|
||||
doc: user,
|
||||
payload: req.payload,
|
||||
req,
|
||||
user,
|
||||
})
|
||||
|
||||
// Re-check login permissions and max attempts after incrementing attempts, in case parallel updates occurred
|
||||
checkLoginPermission({
|
||||
loggingInWithUsername: Boolean(canLoginWithUsername && sanitizedUsername),
|
||||
req,
|
||||
user,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -243,40 +253,45 @@ export const loginOperation = async <TSlug extends CollectionSlug>(
|
||||
throw new UnverifiedEmail({ t: req.t })
|
||||
}
|
||||
|
||||
/*
|
||||
* Correct password accepted - re‑check that the account didn't
|
||||
* get locked by parallel bad attempts in the meantime.
|
||||
*/
|
||||
if (maxLoginAttemptsEnabled) {
|
||||
const { lockUntil, loginAttempts } = (await payload.db.findOne<TypedUser>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
where: { id: { equals: user.id } },
|
||||
}))!
|
||||
|
||||
user.lockUntil = lockUntil
|
||||
user.loginAttempts = loginAttempts
|
||||
|
||||
checkLoginPermission({
|
||||
req,
|
||||
user,
|
||||
})
|
||||
}
|
||||
|
||||
const fieldsToSignArgs: Parameters<typeof getFieldsToSign>[0] = {
|
||||
collectionConfig,
|
||||
email: sanitizedEmail!,
|
||||
user,
|
||||
}
|
||||
|
||||
if (collectionConfig.auth.useSessions) {
|
||||
// Add session to user
|
||||
const newSessionID = uuid()
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
const expiresAt = new Date(now.getTime() + tokenExpInMs)
|
||||
const { sid } = await addSessionToUser({
|
||||
collectionConfig,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
})
|
||||
|
||||
const session = { id: newSessionID, createdAt: now, expiresAt }
|
||||
|
||||
if (!user.sessions?.length) {
|
||||
user.sessions = [session]
|
||||
} else {
|
||||
user.sessions = removeExpiredSessions(user.sessions)
|
||||
user.sessions.push(session)
|
||||
}
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
data: user,
|
||||
req,
|
||||
returning: false,
|
||||
})
|
||||
|
||||
user.collection = collectionConfig.slug
|
||||
user._strategy = 'local-jwt'
|
||||
|
||||
fieldsToSignArgs.sid = newSessionID
|
||||
if (sid) {
|
||||
fieldsToSignArgs.sid = sid
|
||||
}
|
||||
|
||||
const fieldsToSign = getFieldsToSign(fieldsToSignArgs)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import url from 'url'
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type { Collection } from '../../collections/config/types.js'
|
||||
import type { Document, PayloadRequest } from '../../types/index.js'
|
||||
@@ -11,7 +10,7 @@ import { initTransaction } from '../../utilities/initTransaction.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { getFieldsToSign } from '../getFieldsToSign.js'
|
||||
import { jwtSign } from '../jwt.js'
|
||||
import { removeExpiredSessions } from '../removeExpiredSessions.js'
|
||||
import { removeExpiredSessions } from '../sessions.js'
|
||||
|
||||
export type Result = {
|
||||
exp: number
|
||||
@@ -74,11 +73,10 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
const parsedURL = url.parse(args.req.url!)
|
||||
const isGraphQL = parsedURL.pathname === config.routes.graphQL
|
||||
|
||||
const user = await args.req.payload.findByID({
|
||||
id: args.req.user.id,
|
||||
collection: args.req.user.collection,
|
||||
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
|
||||
req: args.req,
|
||||
let user = await req.payload.db.findOne<any>({
|
||||
collection: collectionConfig.slug,
|
||||
req,
|
||||
where: { id: { equals: args.req.user.id } },
|
||||
})
|
||||
|
||||
const sid = args.req.user._sid
|
||||
@@ -88,7 +86,7 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
throw new Forbidden(args.req.t)
|
||||
}
|
||||
|
||||
const existingSession = user.sessions.find(({ id }) => id === sid)
|
||||
const existingSession = user.sessions.find(({ id }: { id: number }) => id === sid)
|
||||
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
@@ -106,6 +104,13 @@ export const refreshOperation = async (incomingArgs: Arguments): Promise<Result>
|
||||
})
|
||||
}
|
||||
|
||||
user = await req.payload.findByID({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
|
||||
req: args.req,
|
||||
})
|
||||
|
||||
if (user) {
|
||||
user.collection = args.req.user.collection
|
||||
user._strategy = args.req.user._strategy
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
import type { UserSession } from './types.js'
|
||||
|
||||
export const removeExpiredSessions = (sessions: UserSession[]) => {
|
||||
const now = new Date()
|
||||
|
||||
return sessions.filter(({ expiresAt }) => {
|
||||
const expiry = expiresAt instanceof Date ? expiresAt : new Date(expiresAt)
|
||||
return expiry > now
|
||||
})
|
||||
}
|
||||
67
packages/payload/src/auth/sessions.ts
Normal file
67
packages/payload/src/auth/sessions.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { v4 as uuid } from 'uuid'
|
||||
|
||||
import type { SanitizedCollectionConfig } from '../collections/config/types.js'
|
||||
import type { TypedUser } from '../index.js'
|
||||
import type { Payload, PayloadRequest } from '../types/index.js'
|
||||
import type { UserSession } from './types.js'
|
||||
|
||||
/**
|
||||
* Removes expired sessions from an array of sessions
|
||||
*/
|
||||
export const removeExpiredSessions = (sessions: UserSession[]) => {
|
||||
const now = new Date()
|
||||
|
||||
return sessions.filter(({ expiresAt }) => {
|
||||
const expiry = expiresAt instanceof Date ? expiresAt : new Date(expiresAt)
|
||||
return expiry > now
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a session to the user and removes expired sessions
|
||||
* @returns The session ID (sid) if sessions are used
|
||||
*/
|
||||
export const addSessionToUser = async ({
|
||||
collectionConfig,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
}: {
|
||||
collectionConfig: SanitizedCollectionConfig
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
user: TypedUser
|
||||
}): Promise<{ sid?: string }> => {
|
||||
let sid: string | undefined
|
||||
if (collectionConfig.auth.useSessions) {
|
||||
// Add session to user
|
||||
sid = uuid()
|
||||
const now = new Date()
|
||||
const tokenExpInMs = collectionConfig.auth.tokenExpiration * 1000
|
||||
const expiresAt = new Date(now.getTime() + tokenExpInMs)
|
||||
|
||||
const session = { id: sid, createdAt: now, expiresAt }
|
||||
|
||||
if (!user.sessions?.length) {
|
||||
user.sessions = [session]
|
||||
} else {
|
||||
user.sessions = removeExpiredSessions(user.sessions)
|
||||
user.sessions.push(session)
|
||||
}
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collectionConfig.slug,
|
||||
data: user,
|
||||
req,
|
||||
returning: false,
|
||||
})
|
||||
|
||||
user.collection = collectionConfig.slug
|
||||
user._strategy = 'local-jwt'
|
||||
}
|
||||
|
||||
return {
|
||||
sid,
|
||||
}
|
||||
}
|
||||
@@ -1,59 +1,154 @@
|
||||
import type { SanitizedCollectionConfig, TypeWithID } from '../../../collections/config/types.js'
|
||||
import type { JsonObject, Payload } from '../../../index.js'
|
||||
import type { SanitizedCollectionConfig } from '../../../collections/config/types.js'
|
||||
import type { PayloadRequest } from '../../../types/index.js'
|
||||
|
||||
import { type JsonObject, type Payload, type TypedUser } from '../../../index.js'
|
||||
import { isUserLocked } from '../../isUserLocked.js'
|
||||
|
||||
type Args = {
|
||||
collection: SanitizedCollectionConfig
|
||||
doc: Record<string, unknown> & TypeWithID
|
||||
payload: Payload
|
||||
req: PayloadRequest
|
||||
user: TypedUser
|
||||
}
|
||||
|
||||
// Note: this function does not use req in most updates, as we want those to be visible in parallel requests that are on a different
|
||||
// transaction. At the same time, we want updates from parallel requests to be visible here.
|
||||
export const incrementLoginAttempts = async ({
|
||||
collection,
|
||||
doc,
|
||||
payload,
|
||||
req,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
const {
|
||||
auth: { lockTime, maxLoginAttempts },
|
||||
} = collection
|
||||
|
||||
if ('lockUntil' in doc && typeof doc.lockUntil === 'string') {
|
||||
const lockUntil = new Date(doc.lockUntil).getTime()
|
||||
const currentTime = Date.now()
|
||||
|
||||
let updatedLockUntil: null | string = null
|
||||
let updatedLoginAttempts: null | number = null
|
||||
|
||||
if (user.lockUntil && !isUserLocked(new Date(user.lockUntil))) {
|
||||
// Expired lock, restart count at 1
|
||||
if (lockUntil < Date.now()) {
|
||||
await payload.update({
|
||||
id: doc.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: null,
|
||||
loginAttempts: 1,
|
||||
},
|
||||
depth: 0,
|
||||
req,
|
||||
})
|
||||
const updatedUser = await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: null,
|
||||
loginAttempts: 1,
|
||||
},
|
||||
req,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
})
|
||||
updatedLockUntil = updatedUser.lockUntil
|
||||
updatedLoginAttempts = updatedUser.loginAttempts
|
||||
user.lockUntil = updatedLockUntil
|
||||
} else {
|
||||
const data: JsonObject = {
|
||||
loginAttempts: {
|
||||
$inc: 1,
|
||||
},
|
||||
}
|
||||
|
||||
return
|
||||
const willReachMaxAttempts =
|
||||
typeof user.loginAttempts === 'number' && user.loginAttempts + 1 >= maxLoginAttempts
|
||||
// Lock the account if at max attempts and not already locked
|
||||
if (willReachMaxAttempts) {
|
||||
const lockUntil = new Date(currentTime + lockTime).toISOString()
|
||||
data.lockUntil = lockUntil
|
||||
}
|
||||
|
||||
const updatedUser = await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data,
|
||||
select: {
|
||||
lockUntil: true,
|
||||
loginAttempts: true,
|
||||
},
|
||||
})
|
||||
|
||||
updatedLockUntil = updatedUser.lockUntil
|
||||
updatedLoginAttempts = updatedUser.loginAttempts
|
||||
}
|
||||
|
||||
const data: JsonObject = {
|
||||
loginAttempts: Number(doc.loginAttempts) + 1,
|
||||
if (updatedLoginAttempts === null) {
|
||||
throw new Error('Failed to update login attempts or lockUntil for user')
|
||||
}
|
||||
|
||||
// Lock the account if at max attempts and not already locked
|
||||
if (typeof doc.loginAttempts === 'number' && doc.loginAttempts + 1 >= maxLoginAttempts) {
|
||||
const lockUntil = new Date(Date.now() + lockTime).toISOString()
|
||||
data.lockUntil = lockUntil
|
||||
}
|
||||
// Check updated latest lockUntil and loginAttempts in case there were parallel updates
|
||||
const reachedMaxAttemptsForCurrentUser =
|
||||
typeof updatedLoginAttempts === 'number' && updatedLoginAttempts - 1 >= maxLoginAttempts
|
||||
|
||||
await payload.update({
|
||||
id: doc.id,
|
||||
collection: collection.slug,
|
||||
data,
|
||||
depth: 0,
|
||||
req,
|
||||
})
|
||||
const reachedMaxAttemptsForNextUser =
|
||||
typeof updatedLoginAttempts === 'number' && updatedLoginAttempts >= maxLoginAttempts
|
||||
|
||||
if (reachedMaxAttemptsForCurrentUser) {
|
||||
user.lockUntil = updatedLockUntil
|
||||
}
|
||||
user.loginAttempts = updatedLoginAttempts - 1 // -1, as the updated increment is applied for the *next* login attempt, not the current one
|
||||
|
||||
if (
|
||||
reachedMaxAttemptsForNextUser &&
|
||||
(!updatedLockUntil || !isUserLocked(new Date(updatedLockUntil)))
|
||||
) {
|
||||
// If lockUntil reached max login attempts due to multiple parallel attempts but user was not locked yet,
|
||||
const newLockUntil = new Date(currentTime + lockTime).toISOString()
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: newLockUntil,
|
||||
},
|
||||
returning: false,
|
||||
})
|
||||
|
||||
if (reachedMaxAttemptsForCurrentUser) {
|
||||
user.lockUntil = newLockUntil
|
||||
}
|
||||
|
||||
if (collection.auth.useSessions) {
|
||||
// Remove all active sessions that have been created in a 20 second window. This protects
|
||||
// against brute force attacks - example: 99 incorrect, 1 correct parallel login attempts.
|
||||
// The correct login attempt will be finished first, as it's faster due to not having to perform
|
||||
// an additional db update here.
|
||||
// However, this request (the incorrect login attempt request) can kill the successful login attempt here.
|
||||
|
||||
// Fetch user sessions separately (do not do this in the updateOne select in order to preserve the returning: true db call optimization)
|
||||
const currentUser = await payload.db.findOne<TypedUser>({
|
||||
collection: collection.slug,
|
||||
select: {
|
||||
sessions: true,
|
||||
},
|
||||
where: {
|
||||
id: {
|
||||
equals: user.id,
|
||||
},
|
||||
},
|
||||
})
|
||||
if (currentUser?.sessions?.length) {
|
||||
// Does not hurt also removing expired sessions
|
||||
currentUser.sessions = currentUser.sessions.filter((session) => {
|
||||
const sessionCreatedAt = new Date(session.createdAt)
|
||||
const twentySecondsAgo = new Date(currentTime - 20000)
|
||||
|
||||
// Remove sessions created within the last 20 seconds
|
||||
return sessionCreatedAt <= twentySecondsAgo
|
||||
})
|
||||
|
||||
user.sessions = currentUser.sessions
|
||||
|
||||
await payload.db.updateOne({
|
||||
id: user.id,
|
||||
collection: collection.slug,
|
||||
data: user,
|
||||
returning: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,15 +21,14 @@ export const resetLoginAttempts = async ({
|
||||
) {
|
||||
return
|
||||
}
|
||||
await payload.update({
|
||||
await payload.db.updateOne({
|
||||
id: doc.id,
|
||||
collection: collection.slug,
|
||||
data: {
|
||||
lockUntil: null,
|
||||
loginAttempts: 0,
|
||||
},
|
||||
depth: 0,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
returning: false,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ export type ServerOnlyCollectionProperties = keyof Pick<
|
||||
|
||||
export type ServerOnlyCollectionAdminProperties = keyof Pick<
|
||||
SanitizedCollectionConfig['admin'],
|
||||
'baseListFilter' | 'components' | 'hidden'
|
||||
'baseFilter' | 'baseListFilter' | 'components' | 'hidden'
|
||||
>
|
||||
|
||||
export type ServerOnlyUploadProperties = keyof Pick<
|
||||
@@ -94,6 +94,7 @@ const serverOnlyUploadProperties: Partial<ServerOnlyUploadProperties>[] = [
|
||||
|
||||
const serverOnlyCollectionAdminProperties: Partial<ServerOnlyCollectionAdminProperties>[] = [
|
||||
'hidden',
|
||||
'baseFilter',
|
||||
'baseListFilter',
|
||||
'components',
|
||||
// 'preview' is handled separately
|
||||
|
||||
@@ -85,6 +85,7 @@ export type HookOperationType =
|
||||
| 'readDistinct'
|
||||
| 'refresh'
|
||||
| 'resetPassword'
|
||||
| 'restoreVersion'
|
||||
| 'update'
|
||||
|
||||
type CreateOrUpdateOperation = Extract<HookOperationType, 'create' | 'update'>
|
||||
@@ -269,7 +270,7 @@ export type EnableFoldersOptions = {
|
||||
debug?: boolean
|
||||
}
|
||||
|
||||
export type BaseListFilter = (args: {
|
||||
export type BaseFilter = (args: {
|
||||
limit: number
|
||||
locale?: TypedLocale
|
||||
page: number
|
||||
@@ -277,7 +278,31 @@ export type BaseListFilter = (args: {
|
||||
sort: string
|
||||
}) => null | Promise<null | Where> | Where
|
||||
|
||||
/**
|
||||
* @deprecated Use `BaseFilter` instead.
|
||||
*/
|
||||
export type BaseListFilter = BaseFilter
|
||||
|
||||
export type CollectionAdminOptions = {
|
||||
/**
|
||||
* Defines a default base filter which will be applied in the following parts of the admin panel:
|
||||
* - List View
|
||||
* - Relationship fields for internal links within the Lexical editor
|
||||
*
|
||||
* This is especially useful for plugins like multi-tenant. For example,
|
||||
* a user may have access to multiple tenants, but should only see content
|
||||
* related to the currently active or selected tenant in those places.
|
||||
*/
|
||||
baseFilter?: BaseFilter
|
||||
/**
|
||||
* @deprecated Use `baseFilter` instead. If both are defined,
|
||||
* `baseFilter` will take precedence. This property remains only
|
||||
* for backward compatibility and may be removed in a future version.
|
||||
*
|
||||
* Originally, `baseListFilter` was intended to filter only the List View
|
||||
* in the admin panel. However, base filtering is often required in other areas
|
||||
* such as internal link relationships in the Lexical editor.
|
||||
*/
|
||||
baseListFilter?: BaseListFilter
|
||||
/**
|
||||
* Custom admin components
|
||||
@@ -687,7 +712,7 @@ export type AuthCollection = {
|
||||
}
|
||||
|
||||
export type TypeWithID = {
|
||||
deletedAt?: string
|
||||
deletedAt?: null | string
|
||||
docId?: any
|
||||
id: number | string
|
||||
}
|
||||
@@ -695,7 +720,7 @@ export type TypeWithID = {
|
||||
export type TypeWithTimestamps = {
|
||||
[key: string]: unknown
|
||||
createdAt: string
|
||||
deletedAt?: string
|
||||
deletedAt?: null | string
|
||||
id: number | string
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
@@ -8,13 +8,15 @@ import { countOperation } from '../operations/count.js'
|
||||
|
||||
export const countHandler: PayloadHandler = async (req) => {
|
||||
const collection = getRequestCollection(req)
|
||||
const { where } = req.query as {
|
||||
const { trash, where } = req.query as {
|
||||
trash?: string
|
||||
where?: Where
|
||||
}
|
||||
|
||||
const result = await countOperation({
|
||||
collection,
|
||||
req,
|
||||
trash: trash === 'true',
|
||||
where,
|
||||
})
|
||||
|
||||
|
||||
@@ -11,13 +11,14 @@ import { findDistinctOperation } from '../operations/findDistinct.js'
|
||||
|
||||
export const findDistinctHandler: PayloadHandler = async (req) => {
|
||||
const collection = getRequestCollection(req)
|
||||
const { depth, field, limit, page, sort, where } = req.query as {
|
||||
const { depth, field, limit, page, sort, trash, where } = req.query as {
|
||||
depth?: string
|
||||
field?: string
|
||||
limit?: string
|
||||
page?: string
|
||||
sort?: string
|
||||
sortOrder?: string
|
||||
trash?: string
|
||||
where?: Where
|
||||
}
|
||||
|
||||
@@ -33,6 +34,7 @@ export const findDistinctHandler: PayloadHandler = async (req) => {
|
||||
page: isNumber(page) ? Number(page) : undefined,
|
||||
req,
|
||||
sort: typeof sort === 'string' ? sort.split(',') : undefined,
|
||||
trash: trash === 'true',
|
||||
where,
|
||||
})
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import { docAccessHandler } from './docAccess.js'
|
||||
import { duplicateHandler } from './duplicate.js'
|
||||
import { findHandler } from './find.js'
|
||||
import { findByIDHandler } from './findByID.js'
|
||||
import { findDistinctHandler } from './findDistinct.js'
|
||||
// import { findDistinctHandler } from './findDistinct.js'
|
||||
import { findVersionByIDHandler } from './findVersionByID.js'
|
||||
import { findVersionsHandler } from './findVersions.js'
|
||||
import { previewHandler } from './preview.js'
|
||||
|
||||
@@ -7,6 +7,7 @@ import { executeAccess } from '../../auth/executeAccess.js'
|
||||
import { combineQueries } from '../../database/combineQueries.js'
|
||||
import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js'
|
||||
import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
|
||||
import { appendNonTrashedFilter } from '../../utilities/appendNonTrashedFilter.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
|
||||
@@ -15,6 +16,7 @@ export type Arguments = {
|
||||
disableErrors?: boolean
|
||||
overrideAccess?: boolean
|
||||
req?: PayloadRequest
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
}
|
||||
|
||||
@@ -47,6 +49,7 @@ export const countOperation = async <TSlug extends CollectionSlug>(
|
||||
disableErrors,
|
||||
overrideAccess,
|
||||
req,
|
||||
trash = false,
|
||||
where,
|
||||
} = args
|
||||
|
||||
@@ -71,9 +74,16 @@ export const countOperation = async <TSlug extends CollectionSlug>(
|
||||
|
||||
let result: { totalDocs: number }
|
||||
|
||||
const fullWhere = combineQueries(where!, accessResult!)
|
||||
let fullWhere = combineQueries(where!, accessResult!)
|
||||
sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })
|
||||
|
||||
// Exclude trashed documents when trash: false
|
||||
fullWhere = appendNonTrashedFilter({
|
||||
enableTrash: collectionConfig.trash,
|
||||
trash,
|
||||
where: fullWhere,
|
||||
})
|
||||
|
||||
await validateQueryPaths({
|
||||
collectionConfig,
|
||||
overrideAccess: overrideAccess!,
|
||||
|
||||
@@ -291,6 +291,7 @@ export const createOperation = async <
|
||||
autosave,
|
||||
collection: collectionConfig,
|
||||
docWithLocales: result,
|
||||
operation: 'create',
|
||||
payload,
|
||||
publishSpecificLocale,
|
||||
req,
|
||||
|
||||
@@ -12,6 +12,7 @@ import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js'
|
||||
import { APIError } from '../../errors/APIError.js'
|
||||
import { Forbidden } from '../../errors/Forbidden.js'
|
||||
import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js'
|
||||
import { appendNonTrashedFilter } from '../../utilities/appendNonTrashedFilter.js'
|
||||
import { getFieldByPath } from '../../utilities/getFieldByPath.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
@@ -29,6 +30,7 @@ export type Arguments = {
|
||||
req?: PayloadRequest
|
||||
showHiddenFields?: boolean
|
||||
sort?: Sort
|
||||
trash?: boolean
|
||||
where?: Where
|
||||
}
|
||||
export const findDistinctOperation = async (
|
||||
@@ -60,6 +62,7 @@ export const findDistinctOperation = async (
|
||||
overrideAccess,
|
||||
populate,
|
||||
showHiddenFields = false,
|
||||
trash = false,
|
||||
where,
|
||||
} = args
|
||||
|
||||
@@ -96,9 +99,16 @@ export const findDistinctOperation = async (
|
||||
// Find Distinct
|
||||
// /////////////////////////////////////
|
||||
|
||||
const fullWhere = combineQueries(where!, accessResult!)
|
||||
let fullWhere = combineQueries(where!, accessResult!)
|
||||
sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere })
|
||||
|
||||
// Exclude trashed documents when trash: false
|
||||
fullWhere = appendNonTrashedFilter({
|
||||
enableTrash: collectionConfig.trash,
|
||||
trash,
|
||||
where: fullWhere,
|
||||
})
|
||||
|
||||
await validateQueryPaths({
|
||||
collectionConfig,
|
||||
overrideAccess: overrideAccess!,
|
||||
|
||||
@@ -41,6 +41,15 @@ export type Options<TSlug extends CollectionSlug> = {
|
||||
* Recommended to pass when using the Local API from hooks, as usually you want to execute the operation within the current transaction.
|
||||
*/
|
||||
req?: Partial<PayloadRequest>
|
||||
/**
|
||||
* When set to `true`, the query will include both normal and trashed documents.
|
||||
* To query only trashed documents, pass `trash: true` and combine with a `where` clause filtering by `deletedAt`.
|
||||
* By default (`false`), the query will only include normal documents and exclude those with a `deletedAt` field.
|
||||
*
|
||||
* This argument has no effect unless `trash` is enabled on the collection.
|
||||
* @default false
|
||||
*/
|
||||
trash?: boolean
|
||||
/**
|
||||
* If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
|
||||
*/
|
||||
@@ -55,7 +64,13 @@ export async function countLocal<TSlug extends CollectionSlug>(
|
||||
payload: Payload,
|
||||
options: Options<TSlug>,
|
||||
): Promise<{ totalDocs: number }> {
|
||||
const { collection: collectionSlug, disableErrors, overrideAccess = true, where } = options
|
||||
const {
|
||||
collection: collectionSlug,
|
||||
disableErrors,
|
||||
overrideAccess = true,
|
||||
trash = false,
|
||||
where,
|
||||
} = options
|
||||
|
||||
const collection = payload.collections[collectionSlug]
|
||||
|
||||
@@ -70,6 +85,7 @@ export async function countLocal<TSlug extends CollectionSlug>(
|
||||
disableErrors,
|
||||
overrideAccess,
|
||||
req: await createLocalReq(options as CreateLocalReqOptions, payload),
|
||||
trash,
|
||||
where,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -83,6 +83,15 @@ export type Options<
|
||||
* @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt
|
||||
*/
|
||||
sort?: Sort
|
||||
/**
|
||||
* When set to `true`, the query will include both normal and trashed documents.
|
||||
* To query only trashed documents, pass `trash: true` and combine with a `where` clause filtering by `deletedAt`.
|
||||
* By default (`false`), the query will only include normal documents and exclude those with a `deletedAt` field.
|
||||
*
|
||||
* This argument has no effect unless `trash` is enabled on the collection.
|
||||
* @default false
|
||||
*/
|
||||
trash?: boolean
|
||||
/**
|
||||
* If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks.
|
||||
*/
|
||||
@@ -111,6 +120,7 @@ export async function findDistinct<
|
||||
populate,
|
||||
showHiddenFields,
|
||||
sort,
|
||||
trash = false,
|
||||
where,
|
||||
} = options
|
||||
const collection = payload.collections[collectionSlug]
|
||||
@@ -133,6 +143,7 @@ export async function findDistinct<
|
||||
req: await createLocalReq(options as CreateLocalReqOptions, payload),
|
||||
showHiddenFields,
|
||||
sort,
|
||||
trash,
|
||||
where,
|
||||
}) as Promise<PaginatedDistinctDocs<Record<TField, DataFromCollectionSlug<TSlug>[TField]>>>
|
||||
}
|
||||
|
||||
@@ -10,15 +10,23 @@ import { combineQueries } from '../../database/combineQueries.js'
|
||||
import { APIError, Forbidden, NotFound } from '../../errors/index.js'
|
||||
import { afterChange } from '../../fields/hooks/afterChange/index.js'
|
||||
import { afterRead } from '../../fields/hooks/afterRead/index.js'
|
||||
import { beforeChange } from '../../fields/hooks/beforeChange/index.js'
|
||||
import { beforeValidate } from '../../fields/hooks/beforeValidate/index.js'
|
||||
import { commitTransaction } from '../../utilities/commitTransaction.js'
|
||||
import { deepCopyObjectSimple } from '../../utilities/deepCopyObject.js'
|
||||
import { initTransaction } from '../../utilities/initTransaction.js'
|
||||
import { killTransaction } from '../../utilities/killTransaction.js'
|
||||
import { sanitizeSelect } from '../../utilities/sanitizeSelect.js'
|
||||
import { getLatestCollectionVersion } from '../../versions/getLatestCollectionVersion.js'
|
||||
import { saveVersion } from '../../versions/saveVersion.js'
|
||||
import { buildAfterOperation } from './utils.js'
|
||||
|
||||
export type Arguments = {
|
||||
collection: Collection
|
||||
currentDepth?: number
|
||||
depth?: number
|
||||
disableErrors?: boolean
|
||||
disableTransaction?: boolean
|
||||
draft?: boolean
|
||||
id: number | string
|
||||
overrideAccess?: boolean
|
||||
@@ -35,7 +43,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
id,
|
||||
collection: { config: collectionConfig },
|
||||
depth,
|
||||
draft,
|
||||
draft: draftArg = false,
|
||||
overrideAccess = false,
|
||||
populate,
|
||||
req,
|
||||
@@ -45,6 +53,25 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
} = args
|
||||
|
||||
try {
|
||||
const shouldCommit = !args.disableTransaction && (await initTransaction(args.req))
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (args.collection.config.hooks?.beforeOperation?.length) {
|
||||
for (const hook of args.collection.config.hooks.beforeOperation) {
|
||||
args =
|
||||
(await hook({
|
||||
args,
|
||||
collection: args.collection.config,
|
||||
context: args.req.context,
|
||||
operation: 'restoreVersion',
|
||||
req: args.req,
|
||||
})) || args
|
||||
}
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
throw new APIError('Missing ID of version to restore.', httpStatus.BAD_REQUEST)
|
||||
}
|
||||
@@ -68,7 +95,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
throw new NotFound(req.t)
|
||||
}
|
||||
|
||||
const parentDocID = rawVersion.parent
|
||||
const { parent: parentDocID, version: versionToRestoreWithLocales } = rawVersion
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Access
|
||||
@@ -90,6 +117,7 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
where: combineQueries({ id: { equals: parentDocID } }, accessResults),
|
||||
}
|
||||
|
||||
// Get the document from the non versioned collection
|
||||
const doc = await req.payload.db.findOne(findOneArgs)
|
||||
|
||||
if (!doc && !hasWherePolicy) {
|
||||
@@ -109,7 +137,6 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
// /////////////////////////////////////
|
||||
// fetch previousDoc
|
||||
// /////////////////////////////////////
|
||||
|
||||
const prevDocWithLocales = await getLatestCollectionVersion({
|
||||
id: parentDocID,
|
||||
config: collectionConfig,
|
||||
@@ -118,6 +145,109 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
req,
|
||||
})
|
||||
|
||||
// originalDoc with hoisted localized data
|
||||
const originalDoc = await afterRead({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
depth: 0,
|
||||
doc: deepCopyObjectSimple(prevDocWithLocales),
|
||||
draft: draftArg,
|
||||
fallbackLocale: null,
|
||||
global: null,
|
||||
locale: locale!,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
// version data with hoisted localized data
|
||||
const prevVersionDoc = await afterRead({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
depth: 0,
|
||||
doc: deepCopyObjectSimple(versionToRestoreWithLocales),
|
||||
draft: draftArg,
|
||||
fallbackLocale: null,
|
||||
global: null,
|
||||
locale: locale!,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
})
|
||||
|
||||
let data = deepCopyObjectSimple(prevVersionDoc)
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeValidate - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
data = await beforeValidate({
|
||||
id: parentDocID,
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
doc: originalDoc,
|
||||
global: null,
|
||||
operation: 'update',
|
||||
overrideAccess,
|
||||
req,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeValidate - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (collectionConfig.hooks?.beforeValidate?.length) {
|
||||
for (const hook of collectionConfig.hooks.beforeValidate) {
|
||||
data =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
operation: 'update',
|
||||
originalDoc,
|
||||
req,
|
||||
})) || data
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeChange - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
if (collectionConfig.hooks?.beforeChange?.length) {
|
||||
for (const hook of collectionConfig.hooks.beforeChange) {
|
||||
data =
|
||||
(await hook({
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data,
|
||||
operation: 'update',
|
||||
originalDoc,
|
||||
req,
|
||||
})) || data
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeChange - Fields
|
||||
// /////////////////////////////////////
|
||||
|
||||
let result = await beforeChange({
|
||||
id: parentDocID,
|
||||
collection: collectionConfig,
|
||||
context: req.context,
|
||||
data: { ...data, id: parentDocID },
|
||||
doc: originalDoc,
|
||||
docWithLocales: versionToRestoreWithLocales,
|
||||
global: null,
|
||||
operation: 'update',
|
||||
overrideAccess,
|
||||
req,
|
||||
skipValidation:
|
||||
draftArg && collectionConfig.versions.drafts && !collectionConfig.versions.drafts.validate,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Update
|
||||
// /////////////////////////////////////
|
||||
@@ -128,10 +258,10 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
select: incomingSelect,
|
||||
})
|
||||
|
||||
let result = await req.payload.db.updateOne({
|
||||
result = await req.payload.db.updateOne({
|
||||
id: parentDocID,
|
||||
collection: collectionConfig.slug,
|
||||
data: rawVersion.version,
|
||||
data: result,
|
||||
req,
|
||||
select,
|
||||
})
|
||||
@@ -140,18 +270,16 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
// Save `previousDoc` as a version after restoring
|
||||
// /////////////////////////////////////
|
||||
|
||||
const prevVersion = { ...prevDocWithLocales }
|
||||
|
||||
delete prevVersion.id
|
||||
|
||||
await payload.db.createVersion({
|
||||
result = await saveVersion({
|
||||
id: parentDocID,
|
||||
autosave: false,
|
||||
collectionSlug: collectionConfig.slug,
|
||||
createdAt: prevVersion.createdAt,
|
||||
parent: parentDocID,
|
||||
collection: collectionConfig,
|
||||
docWithLocales: result,
|
||||
draft: draftArg,
|
||||
operation: 'restoreVersion',
|
||||
payload,
|
||||
req,
|
||||
updatedAt: new Date().toISOString(),
|
||||
versionData: draft ? { ...rawVersion.version, _status: 'draft' } : rawVersion.version,
|
||||
select,
|
||||
})
|
||||
|
||||
// /////////////////////////////////////
|
||||
@@ -225,6 +353,21 @@ export const restoreVersionOperation = async <TData extends TypeWithID = any>(
|
||||
}
|
||||
}
|
||||
|
||||
// /////////////////////////////////////
|
||||
// afterOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
result = await buildAfterOperation({
|
||||
args,
|
||||
collection: collectionConfig,
|
||||
operation: 'restoreVersion',
|
||||
result,
|
||||
})
|
||||
|
||||
if (shouldCommit) {
|
||||
await commitTransaction(req)
|
||||
}
|
||||
|
||||
return result
|
||||
} catch (error: unknown) {
|
||||
await killTransaction(req)
|
||||
|
||||
@@ -314,6 +314,7 @@ export const updateDocument = async <
|
||||
collection: collectionConfig,
|
||||
docWithLocales: result,
|
||||
draft: shouldSaveDraft,
|
||||
operation: 'update',
|
||||
payload,
|
||||
publishSpecificLocale,
|
||||
req,
|
||||
|
||||
@@ -2,7 +2,7 @@ import type { forgotPasswordOperation } from '../../auth/operations/forgotPasswo
|
||||
import type { loginOperation } from '../../auth/operations/login.js'
|
||||
import type { refreshOperation } from '../../auth/operations/refresh.js'
|
||||
import type { resetPasswordOperation } from '../../auth/operations/resetPassword.js'
|
||||
import type { CollectionSlug } from '../../index.js'
|
||||
import type { CollectionSlug, restoreVersionOperation } from '../../index.js'
|
||||
import type { PayloadRequest } from '../../types/index.js'
|
||||
import type { SanitizedCollectionConfig, SelectFromCollectionSlug } from '../config/types.js'
|
||||
import type { countOperation } from './count.js'
|
||||
@@ -36,6 +36,7 @@ export type AfterOperationMap<TOperationGeneric extends CollectionSlug> = {
|
||||
login: typeof loginOperation<TOperationGeneric>
|
||||
refresh: typeof refreshOperation
|
||||
resetPassword: typeof resetPasswordOperation<TOperationGeneric>
|
||||
restoreVersion: typeof restoreVersionOperation
|
||||
update: typeof updateOperation<TOperationGeneric, SelectFromCollectionSlug<TOperationGeneric>>
|
||||
updateByID: typeof updateByIDOperation<
|
||||
TOperationGeneric,
|
||||
@@ -108,6 +109,11 @@ export type AfterOperationArg<TOperationGeneric extends CollectionSlug> = {
|
||||
operation: 'resetPassword'
|
||||
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['resetPassword']>>
|
||||
}
|
||||
| {
|
||||
args: Parameters<AfterOperationMap<TOperationGeneric>['restoreVersion']>[0]
|
||||
operation: 'restoreVersion'
|
||||
result: Awaited<ReturnType<AfterOperationMap<TOperationGeneric>['restoreVersion']>>
|
||||
}
|
||||
| {
|
||||
args: Parameters<AfterOperationMap<TOperationGeneric>['update']>[0]
|
||||
operation: 'update'
|
||||
|
||||
@@ -160,32 +160,29 @@ export async function validateSearchParam({
|
||||
let fieldAccess: any
|
||||
|
||||
if (versionFields) {
|
||||
fieldAccess = policies[entityType]![entitySlug]!
|
||||
fieldAccess = policies[entityType]![entitySlug]!.fields
|
||||
|
||||
if (segments[0] === 'parent' || segments[0] === 'version') {
|
||||
if (segments[0] === 'parent' || segments[0] === 'version' || segments[0] === 'snapshot') {
|
||||
segments.shift()
|
||||
}
|
||||
} else {
|
||||
fieldAccess = policies[entityType]![entitySlug]!.fields
|
||||
}
|
||||
|
||||
segments.forEach((segment) => {
|
||||
if (fieldAccess[segment]) {
|
||||
if ('fields' in fieldAccess[segment]) {
|
||||
fieldAccess = fieldAccess[segment].fields
|
||||
} else if (
|
||||
'blocks' in fieldAccess[segment] ||
|
||||
'blockReferences' in fieldAccess[segment]
|
||||
) {
|
||||
fieldAccess = fieldAccess[segment]
|
||||
} else {
|
||||
fieldAccess = fieldAccess[segment]
|
||||
if (segments.length) {
|
||||
segments.forEach((segment) => {
|
||||
if (fieldAccess[segment]) {
|
||||
if ('fields' in fieldAccess[segment]) {
|
||||
fieldAccess = fieldAccess[segment].fields
|
||||
} else {
|
||||
fieldAccess = fieldAccess[segment]
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if (!fieldAccess?.read?.permission) {
|
||||
errors.push({ path: fieldPath })
|
||||
if (!fieldAccess?.read?.permission) {
|
||||
errors.push({ path: fieldPath })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
1
packages/payload/src/exports/i18n/id.ts
Normal file
1
packages/payload/src/exports/i18n/id.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { id } from '@payloadcms/translations/languages/id'
|
||||
@@ -6,6 +6,7 @@ export {
|
||||
parseCookies,
|
||||
} from '../auth/cookies.js'
|
||||
export { getLoginOptions } from '../auth/getLoginOptions.js'
|
||||
export { addSessionToUser, removeExpiredSessions } from '../auth/sessions.js'
|
||||
export { getFromImportMap } from '../bin/generateImportMap/utilities/getFromImportMap.js'
|
||||
export { parsePayloadComponent } from '../bin/generateImportMap/utilities/parsePayloadComponent.js'
|
||||
export { defaults as collectionDefaults } from '../collections/config/defaults.js'
|
||||
|
||||
@@ -14,6 +14,7 @@ export const baseIDField: TextField = {
|
||||
defaultValue: () => new ObjectId().toHexString(),
|
||||
hooks: {
|
||||
beforeChange: [({ value }) => value || new ObjectId().toHexString()],
|
||||
// ID field values for arrays and blocks need to be unique when duplicating, as on postgres they are stored on the same table as primary keys.
|
||||
beforeDuplicate: [() => new ObjectId().toHexString()],
|
||||
},
|
||||
label: 'ID',
|
||||
|
||||
@@ -111,13 +111,14 @@ export const promise = async ({
|
||||
parentSchemaPath,
|
||||
})
|
||||
|
||||
const fieldAffectsDataResult = fieldAffectsData(field)
|
||||
const pathSegments = path ? path.split('.') : []
|
||||
const schemaPathSegments = schemaPath ? schemaPath.split('.') : []
|
||||
const indexPathSegments = indexPath ? indexPath.split('-').filter(Boolean)?.map(Number) : []
|
||||
let removedFieldValue = false
|
||||
|
||||
if (
|
||||
fieldAffectsData(field) &&
|
||||
fieldAffectsDataResult &&
|
||||
field.hidden &&
|
||||
typeof siblingDoc[field.name!] !== 'undefined' &&
|
||||
!showHiddenFields
|
||||
@@ -139,16 +140,17 @@ export const promise = async ({
|
||||
}
|
||||
}
|
||||
|
||||
const shouldHoistLocalizedValue =
|
||||
const shouldHoistLocalizedValue: boolean = Boolean(
|
||||
flattenLocales &&
|
||||
fieldAffectsData(field) &&
|
||||
typeof siblingDoc[field.name!] === 'object' &&
|
||||
siblingDoc[field.name!] !== null &&
|
||||
fieldShouldBeLocalized({ field, parentIsLocalized: parentIsLocalized! }) &&
|
||||
locale !== 'all' &&
|
||||
req.payload.config.localization
|
||||
fieldAffectsDataResult &&
|
||||
typeof siblingDoc[field.name!] === 'object' &&
|
||||
siblingDoc[field.name!] !== null &&
|
||||
fieldShouldBeLocalized({ field, parentIsLocalized: parentIsLocalized! }) &&
|
||||
locale !== 'all' &&
|
||||
req.payload.config.localization,
|
||||
)
|
||||
|
||||
if (shouldHoistLocalizedValue) {
|
||||
if (fieldAffectsDataResult && shouldHoistLocalizedValue) {
|
||||
// replace actual value with localized value before sanitizing
|
||||
// { [locale]: fields } -> fields
|
||||
const value = siblingDoc[field.name!][locale!]
|
||||
@@ -187,7 +189,7 @@ export const promise = async ({
|
||||
case 'group': {
|
||||
// Fill groups with empty objects so fields with hooks within groups can populate
|
||||
// themselves virtually as necessary
|
||||
if (fieldAffectsData(field) && typeof siblingDoc[field.name] === 'undefined') {
|
||||
if (fieldAffectsDataResult && typeof siblingDoc[field.name] === 'undefined') {
|
||||
siblingDoc[field.name] = {}
|
||||
}
|
||||
|
||||
@@ -234,7 +236,7 @@ export const promise = async ({
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldAffectsData(field)) {
|
||||
if (fieldAffectsDataResult) {
|
||||
// Execute hooks
|
||||
if (triggerHooks && field.hooks?.afterRead) {
|
||||
for (const hook of field.hooks.afterRead) {
|
||||
@@ -400,7 +402,7 @@ export const promise = async ({
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(rows)) {
|
||||
if (Array.isArray(rows) && rows.length > 0) {
|
||||
rows.forEach((row, rowIndex) => {
|
||||
traverseFields({
|
||||
blockData,
|
||||
@@ -468,6 +470,8 @@ export const promise = async ({
|
||||
})
|
||||
}
|
||||
})
|
||||
} else if (shouldHoistLocalizedValue && (!rows || rows.length === 0)) {
|
||||
siblingDoc[field.name] = null
|
||||
} else if (field.hidden !== true || showHiddenFields === true) {
|
||||
siblingDoc[field.name] = []
|
||||
}
|
||||
@@ -477,7 +481,7 @@ export const promise = async ({
|
||||
case 'blocks': {
|
||||
const rows = siblingDoc[field.name]
|
||||
|
||||
if (Array.isArray(rows)) {
|
||||
if (Array.isArray(rows) && rows.length > 0) {
|
||||
rows.forEach((row, rowIndex) => {
|
||||
const blockTypeToMatch = (row as JsonObject).blockType
|
||||
|
||||
@@ -573,6 +577,8 @@ export const promise = async ({
|
||||
})
|
||||
}
|
||||
})
|
||||
} else if (shouldHoistLocalizedValue && (!rows || rows.length === 0)) {
|
||||
siblingDoc[field.name] = null
|
||||
} else if (field.hidden !== true || showHiddenFields === true) {
|
||||
siblingDoc[field.name] = []
|
||||
}
|
||||
@@ -617,7 +623,7 @@ export const promise = async ({
|
||||
}
|
||||
|
||||
case 'group': {
|
||||
if (fieldAffectsData(field)) {
|
||||
if (fieldAffectsDataResult) {
|
||||
let groupDoc = siblingDoc[field.name] as JsonObject
|
||||
|
||||
if (typeof siblingDoc[field.name] !== 'object') {
|
||||
|
||||
@@ -63,7 +63,8 @@ export const promise = async <T>({
|
||||
let fieldData = siblingDoc?.[field.name!]
|
||||
const fieldIsLocalized = localization && fieldShouldBeLocalized({ field, parentIsLocalized })
|
||||
|
||||
// Run field beforeDuplicate hooks
|
||||
// Run field beforeDuplicate hooks.
|
||||
// These hooks are responsible for resetting the `id` field values of array and block rows. See `baseIDField`.
|
||||
if (Array.isArray(field.hooks?.beforeDuplicate)) {
|
||||
if (fieldIsLocalized) {
|
||||
const localeData: JsonObject = {}
|
||||
|
||||
@@ -28,20 +28,20 @@ export async function buildFolderWhereConstraints({
|
||||
}),
|
||||
]
|
||||
|
||||
if (typeof collectionConfig.admin?.baseListFilter === 'function') {
|
||||
const baseListFilterConstraint = await collectionConfig.admin.baseListFilter({
|
||||
limit: 0,
|
||||
locale: localeCode,
|
||||
page: 1,
|
||||
req,
|
||||
sort:
|
||||
sort ||
|
||||
(typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : 'id'),
|
||||
})
|
||||
const baseFilterConstraint = await (
|
||||
collectionConfig.admin?.baseFilter ?? collectionConfig.admin?.baseListFilter
|
||||
)?.({
|
||||
limit: 0,
|
||||
locale: localeCode,
|
||||
page: 1,
|
||||
req,
|
||||
sort:
|
||||
sort ||
|
||||
(typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : 'id'),
|
||||
})
|
||||
|
||||
if (baseListFilterConstraint) {
|
||||
constraints.push(baseListFilterConstraint)
|
||||
}
|
||||
if (baseFilterConstraint) {
|
||||
constraints.push(baseFilterConstraint)
|
||||
}
|
||||
|
||||
if (folderID) {
|
||||
|
||||
@@ -30,15 +30,17 @@ export function formatFolderOrDocumentItem({
|
||||
if (isUpload) {
|
||||
itemValue.filename = value.filename
|
||||
itemValue.mimeType = value.mimeType
|
||||
itemValue.url = isImage(value.mimeType)
|
||||
? getBestFitFromSizes({
|
||||
sizes: value.sizes,
|
||||
targetSizeMax: 520,
|
||||
targetSizeMin: 300,
|
||||
url: value.url,
|
||||
width: value.width,
|
||||
})
|
||||
: undefined
|
||||
itemValue.url =
|
||||
value.thumbnailURL ||
|
||||
(isImage(value.mimeType)
|
||||
? getBestFitFromSizes({
|
||||
sizes: value.sizes,
|
||||
targetSizeMax: 520,
|
||||
targetSizeMin: 300,
|
||||
url: value.url,
|
||||
width: value.width,
|
||||
})
|
||||
: undefined)
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -282,6 +282,7 @@ export const updateOperation = async <
|
||||
docWithLocales: result,
|
||||
draft: shouldSaveDraft,
|
||||
global: globalConfig,
|
||||
operation: 'update',
|
||||
payload,
|
||||
publishSpecificLocale,
|
||||
req,
|
||||
|
||||
@@ -873,6 +873,7 @@ export class BasePayload {
|
||||
this.config.jobs.scheduling
|
||||
) {
|
||||
await this.jobs.handleSchedules({
|
||||
allQueues: cronConfig.allQueues,
|
||||
queue: cronConfig.queue,
|
||||
})
|
||||
}
|
||||
@@ -891,6 +892,7 @@ export class BasePayload {
|
||||
}
|
||||
|
||||
await this.jobs.run({
|
||||
allQueues: cronConfig.allQueues,
|
||||
limit: cronConfig.limit ?? DEFAULT_LIMIT,
|
||||
queue: cronConfig.queue,
|
||||
silent: cronConfig.silent,
|
||||
@@ -1168,6 +1170,7 @@ export type {
|
||||
AfterRefreshHook as CollectionAfterRefreshHook,
|
||||
AuthCollection,
|
||||
AuthOperationsFromCollectionSlug,
|
||||
BaseFilter,
|
||||
BaseListFilter,
|
||||
BeforeChangeHook as CollectionBeforeChangeHook,
|
||||
BeforeDeleteHook as CollectionBeforeDeleteHook,
|
||||
|
||||
@@ -7,6 +7,13 @@ import type { TaskConfig } from './taskTypes.js'
|
||||
import type { WorkflowConfig } from './workflowTypes.js'
|
||||
|
||||
export type AutorunCronConfig = {
|
||||
/**
|
||||
* If you want to autoRUn jobs from all queues, set this to true.
|
||||
* If you set this to true, the `queue` property will be ignored.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
allQueues?: boolean
|
||||
/**
|
||||
* The cron schedule for the job.
|
||||
* @default '* * * * *' (every minute).
|
||||
@@ -43,6 +50,8 @@ export type AutorunCronConfig = {
|
||||
limit?: number
|
||||
/**
|
||||
* The queue name for the job.
|
||||
*
|
||||
* @default 'default'
|
||||
*/
|
||||
queue?: string
|
||||
/**
|
||||
|
||||
@@ -45,11 +45,18 @@ export const handleSchedulesJobsEndpoint: Endpoint = {
|
||||
)
|
||||
}
|
||||
|
||||
const { queue } = req.query as {
|
||||
const { allQueues, queue } = req.query as {
|
||||
allQueues?: 'false' | 'true'
|
||||
queue?: string
|
||||
}
|
||||
|
||||
const { errored, queued, skipped } = await handleSchedules({ queue, req })
|
||||
const runAllQueues = allQueues && !(typeof allQueues === 'string' && allQueues === 'false')
|
||||
|
||||
const { errored, queued, skipped } = await handleSchedules({
|
||||
allQueues: runAllQueues,
|
||||
queue,
|
||||
req,
|
||||
})
|
||||
|
||||
return Response.json(
|
||||
{
|
||||
|
||||
@@ -56,7 +56,7 @@ export const runJobsEndpoint: Endpoint = {
|
||||
|
||||
if (shouldHandleSchedules && jobsConfig.scheduling) {
|
||||
// If should handle schedules and schedules are defined
|
||||
await req.payload.jobs.handleSchedules({ queue: runAllQueues ? undefined : queue, req })
|
||||
await req.payload.jobs.handleSchedules({ allQueues: runAllQueues, queue, req })
|
||||
}
|
||||
|
||||
const runJobsArgs: RunJobsArgs = {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import ObjectIdImport from 'bson-objectid'
|
||||
|
||||
import type { PayloadRequest } from '../../index.js'
|
||||
import type { JobLog, PayloadRequest } from '../../index.js'
|
||||
import type { RunJobsSilent } from '../localAPI.js'
|
||||
import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js'
|
||||
import type { TaskError } from './index.js'
|
||||
@@ -60,19 +60,6 @@ export async function handleTaskError({
|
||||
|
||||
const currentDate = getCurrentDate()
|
||||
|
||||
;(job.log ??= []).push({
|
||||
id: new ObjectId().toHexString(),
|
||||
completedAt: currentDate.toISOString(),
|
||||
error: errorJSON,
|
||||
executedAt: executedAt.toISOString(),
|
||||
input,
|
||||
output: output ?? {},
|
||||
parent: req.payload.config.jobs.addParentToTaskLog ? parent : undefined,
|
||||
state: 'failed',
|
||||
taskID,
|
||||
taskSlug,
|
||||
})
|
||||
|
||||
if (job.waitUntil) {
|
||||
// Check if waitUntil is in the past
|
||||
const waitUntil = new Date(job.waitUntil)
|
||||
@@ -100,6 +87,19 @@ export async function handleTaskError({
|
||||
maxRetries = retriesConfig.attempts
|
||||
}
|
||||
|
||||
const taskLogToPush: JobLog = {
|
||||
id: new ObjectId().toHexString(),
|
||||
completedAt: currentDate.toISOString(),
|
||||
error: errorJSON,
|
||||
executedAt: executedAt.toISOString(),
|
||||
input,
|
||||
output: output ?? {},
|
||||
parent: req.payload.config.jobs.addParentToTaskLog ? parent : undefined,
|
||||
state: 'failed',
|
||||
taskID,
|
||||
taskSlug,
|
||||
}
|
||||
|
||||
if (!taskStatus?.complete && (taskStatus?.totalTried ?? 0) >= maxRetries) {
|
||||
/**
|
||||
* Task reached max retries => workflow will not retry
|
||||
@@ -108,7 +108,9 @@ export async function handleTaskError({
|
||||
await updateJob({
|
||||
error: errorJSON,
|
||||
hasError: true,
|
||||
log: job.log,
|
||||
log: {
|
||||
$push: taskLogToPush,
|
||||
} as any,
|
||||
processing: false,
|
||||
totalTried: (job.totalTried ?? 0) + 1,
|
||||
waitUntil: job.waitUntil,
|
||||
@@ -168,7 +170,9 @@ export async function handleTaskError({
|
||||
await updateJob({
|
||||
error: hasFinalError ? errorJSON : undefined,
|
||||
hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried
|
||||
log: job.log,
|
||||
log: {
|
||||
$push: taskLogToPush,
|
||||
} as any,
|
||||
processing: false,
|
||||
totalTried: (job.totalTried ?? 0) + 1,
|
||||
waitUntil: job.waitUntil,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user