Compare commits

5 Commits

fix/versio ... feat/pushj

| Author | SHA1       | Date |
| ------ | ---------- | ---- |
|        | a8e2a835cc |      |
|        | b194ffc504 |      |
|        | d2ca782a3d |      |
|        | f61e7d06b0 |      |
|        | e84f43fca1 |      |
.github/actions/triage/action.yml (2 changes, vendored)

@@ -26,7 +26,7 @@ runs:
steps:
- name: Checkout code
if: ${{ github.event_name != 'pull_request' }}
uses: actions/checkout@v5
uses: actions/checkout@v4
- name: Run action
run: node ${{ github.action_path }}/dist/index.js
shell: sh
.github/workflows/audit-dependencies.yml (4 changes, vendored)

@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4
- name: Setup
uses: ./.github/actions/setup

@@ -34,7 +34,7 @@ jobs:
- name: Slack notification on failure
if: failure()
uses: slackapi/slack-github-action@v2.1.1
uses: slackapi/slack-github-action@v2.1.0
with:
webhook: ${{ inputs.debug == 'true' && secrets.SLACK_TEST_WEBHOOK_URL || secrets.SLACK_WEBHOOK_URL }}
webhook-type: incoming-webhook
.github/workflows/dispatch-event.yml (2 changes, vendored)

@@ -11,7 +11,7 @@ jobs:
name: Repository dispatch
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Dispatch event
if: ${{ github.event_name == 'workflow_dispatch' }}
.github/workflows/main.yml (24 changes, vendored)

@@ -6,6 +6,7 @@ on:
- opened
- reopened
- synchronize
- labeled
push:
branches:
- main

@@ -33,7 +34,7 @@ jobs:
- name: tune linux network
run: sudo ethtool -K eth0 tx off rx off

- uses: actions/checkout@v5
- uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
id: filter
with:

@@ -62,7 +63,7 @@ jobs:
lint:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -78,7 +79,7 @@ jobs:
runs-on: ubuntu-24.04

steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -98,7 +99,7 @@ jobs:
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_tests == 'true' }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -122,7 +123,7 @@ jobs:
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_tests == 'true' }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -184,7 +185,7 @@ jobs:
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5

steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -309,7 +310,7 @@ jobs:
env:
SUITE_NAME: ${{ matrix.suite }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -370,7 +371,6 @@ jobs:
# report-tag: ${{ matrix.suite }}
# job-summary: true

# This is unused, keeping it here for reference and possibly enabling in the future
tests-e2e-turbo:
runs-on: ubuntu-24.04
needs: [changes, build]

@@ -447,7 +447,7 @@ jobs:
env:
SUITE_NAME: ${{ matrix.suite }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -550,7 +550,7 @@ jobs:
MONGODB_VERSION: 6.0

steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -647,7 +647,7 @@ jobs:
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_tests == 'true' }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup

@@ -706,7 +706,7 @@ jobs:
actions: read # for fetching base branch bundle stats
pull-requests: write # for comments
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4

- name: Node setup
uses: ./.github/actions/setup
.github/workflows/post-release-templates.yml (4 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
release_tag: ${{ steps.determine_tag.outputs.release_tag }}
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4
with:
fetch-depth: 0
sparse-checkout: .github/workflows

@@ -54,7 +54,7 @@ jobs:
POSTGRES_DB: payloadtests
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4

- name: Setup
uses: ./.github/actions/setup
.github/workflows/post-release.yml (6 changes, vendored)

@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-24.04
if: ${{ github.event_name != 'workflow_dispatch' }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
- uses: ./.github/actions/release-commenter
continue-on-error: true
env:

@@ -43,9 +43,9 @@ jobs:
if: ${{ github.event_name != 'workflow_dispatch' }}
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4
- name: Github Releases To Discord
uses: SethCohen/github-releases-to-discord@v1.19.0
uses: SethCohen/github-releases-to-discord@v1.16.2
with:
webhook_url: ${{ secrets.DISCORD_RELEASES_WEBHOOK_URL }}
color: '16777215'
.github/workflows/pr-title.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
name: lint-pr-title
runs-on: ubuntu-24.04
steps:
- uses: amannn/action-semantic-pull-request@v6
- uses: amannn/action-semantic-pull-request@v5
id: lint_pr_title
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/publish-prerelease.yml (2 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
uses: actions/checkout@v4
- name: Setup
uses: ./.github/actions/setup
- name: Load npm token
.github/workflows/triage.yml (2 changes, vendored)

@@ -90,7 +90,7 @@ jobs:
if: github.event_name == 'issues'
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.base.ref }}
token: ${{ secrets.GITHUB_TOKEN }}
.gitignore (2 changes, vendored)

@@ -331,7 +331,5 @@ test/databaseAdapter.js
test/.localstack
test/google-cloud-storage
test/azurestoragedata/
/media-without-delete-access

licenses.csv
AGENTS.md (54 changes)

@@ -1,54 +0,0 @@
# Payload Monorepo Agent Instructions

## Project Structure

- Packages are located in the `packages/` directory.
- The main Payload package is `packages/payload`. This contains the core functionality.
- Database adapters are in `packages/db-*`.
- The UI package is `packages/ui`.
- The Next.js integration is in `packages/next`.
- Rich text editor packages are in `packages/richtext-*`.
- Storage adapters are in `packages/storage-*`.
- Email adapters are in `packages/email-*`.
- Plugins which add additional functionality are in `packages/plugin-*`.
- Documentation is in the `docs/` directory.
- Monorepo tooling is in the `tools/` directory.
- Test suites and configs are in the `test/` directory.
- LLMS.txt is at URL: https://payloadcms.com/llms.txt
- LLMS-FULL.txt is at URL: https://payloadcms.com/llms-full.txt

## Dev environment tips

- Any package can be built using a `pnpm build:*` script defined in the root `package.json`. These typically follow the format `pnpm build:<directory_name>`. The options are all of the top-level directories inside the `packages/` directory. Ex `pnpm build:db-mongodb` which builds the `packages/db-mongodb` package.
- ALL packages can be built with `pnpm build:all`.
- Use `pnpm dev` to start the monorepo dev server. This loads the default config located at `test/_community/config.ts`.
- Specific dev configs for each package can be run with `pnpm dev <directory_name>`. The options are all of the top-level directories inside the `test/` directory. Ex `pnpm dev fields` which loads the `test/fields/config.ts` config. The directory name can either encompass a single area of functionality or be the name of a specific package.

## Testing instructions

- There are unit, integration, and e2e tests in the monorepo.
- Unit tests can be run with `pnpm test:unit`.
- Integration tests can be run with `pnpm test:int`. Individual test suites can be run with `pnpm test:int <directory_name>`, which will point at `test/<directory_name>/int.spec.ts`.
- E2E tests can be run with `pnpm test:e2e`.
- All tests can be run with `pnpm test`.
- Prefer running `pnpm test:int` for verifying local code changes.

## PR Guidelines

- This repository follows conventional commits for PR titles
- PR Title format: <type>(<scope>): <title>. Title must start with a lowercase letter.
- Valid types are build, chore, ci, docs, examples, feat, fix, perf, refactor, revert, style, templates, test
- Prefer `feat` for new features and `fix` for bug fixes.
- Valid scopes are the following regex patterns: cpa, db-\*, db-mongodb, db-postgres, db-vercel-postgres, db-sqlite, drizzle, email-\*, email-nodemailer, email-resend, eslint, graphql, live-preview, live-preview-react, next, payload-cloud, plugin-cloud, plugin-cloud-storage, plugin-form-builder, plugin-import-export, plugin-multi-tenant, plugin-nested-docs, plugin-redirects, plugin-search, plugin-sentry, plugin-seo, plugin-stripe, richtext-\*, richtext-lexical, richtext-slate, storage-\*, storage-azure, storage-gcs, storage-uploadthing, storage-vercel-blob, storage-s3, translations, ui, templates, examples(\/(\w|-)+)?, deps
- Scopes should be chosen based upon the package(s) being modified. If multiple packages are being modified, choose the most relevant one or no scope at all.
- Example PR titles:
  - `feat(db-mongodb): add support for transactions`
  - `feat(richtext-lexical): add options to hide block handles`
  - `fix(ui): json field type ignoring editorOptions`

## Commit Guidelines

- This repository follows conventional commits for commit messages
- The first commit of a branch should follow the PR title format: <type>(<scope>): <title>. Follow the same rules as PR titles.
- Subsequent commits should prefer `chore` commits without a scope unless a specific package is being modified.
- These will eventually be squashed into the first commit when merging the PR.
@@ -98,7 +98,6 @@ The following options are available:
| -------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
| `avatar` | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| `autoLogin` | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| `autoRefresh` | Used to automatically refresh user tokens for users logged into the dashboard. [More details](../authentication/overview). |
| `components` | Component overrides that affect the entirety of the Admin Panel. [More details](../custom-components/overview). |
| `custom` | Any custom properties you wish to pass to the Admin Panel. |
| `dateFormat` | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |

@@ -108,7 +107,6 @@ The following options are available:
| `suppressHydrationWarning` | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| `theme` | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| `timezones` | Configure the timezone settings for the admin panel. [More details](#timezones) |
| `toast` | Customize the handling of toast messages within the Admin Panel. [More details](#toasts) |
| `user` | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |

<Banner type="success">

@@ -300,20 +298,3 @@ We validate the supported timezones array by checking the value against the list
`timezone: true`. See [Date Fields](../fields/overview#date) for more
information.
</Banner>

## Toast

The `admin.toast` configuration allows you to customize the handling of toast messages within the Admin Panel, such as increasing the duration they are displayed and limiting the number of visible toasts at once.

<Banner type="info">
**Note:** The Admin Panel currently uses the
[Sonner](https://sonner.emilkowal.ski) library for toast notifications.
</Banner>

The following options are available for the `admin.toast` configuration:

| Option | Description | Default |
| ---------- | ---------------------------------------------------------------------------------------------------------------- | ------- |
| `duration` | The length of time (in milliseconds) that a toast message is displayed. | `4000` |
| `expand` | If `true`, will expand the message stack so that all messages are shown simultaneously without user interaction. | `false` |
| `limit` | The maximum number of toasts that can be visible on the screen at once. | `5` |
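Taken together, the toast options above can be set in one place. A minimal sketch based only on the table (the values shown are arbitrary):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  admin: {
    toast: {
      duration: 6000, // keep each toast on screen for 6 seconds
      expand: true, // show the whole stack instead of collapsing it
      limit: 3, // never display more than 3 toasts at once
    },
  },
})
```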
@@ -33,7 +33,7 @@ export const Users: CollectionConfig = {
}
```



_Admin Panel screenshot depicting an Admins Collection with Auth enabled_

## Config Options

@@ -173,25 +173,6 @@ The following options are available:
| **`password`** | The password of the user to login as. This is only needed if `prefillOnly` is set to true |
| **`prefillOnly`** | If set to true, the login credentials will be prefilled but the user will still need to click the login button. |

## Auto-Refresh

Turning this property on will allow users to stay logged in indefinitely while their browser is open and on the admin panel, by automatically refreshing their authentication token before it expires.

To enable auto-refresh for user tokens, set `autoRefresh: true` in the [Payload Config](../admin/overview#admin-options) to:

```ts
import { buildConfig } from 'payload'

export default buildConfig({
// ...
// highlight-start
admin: {
autoRefresh: true,
},
// highlight-end
})
```

## Operations

All auth-related operations are available via Payload's REST, Local, and GraphQL APIs. These operations are automatically added to your Collection when you enable Authentication. [More details](./operations).
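As a small illustration of those operations, a sketch of the Local API `login` operation on an auth-enabled collection (the `users` slug and the credentials are placeholders):

```ts
// Log in against an auth-enabled collection via the Local API.
// The result contains the authenticated user document.
const result = await payload.login({
  collection: 'users',
  data: {
    email: 'dev@example.com',
    password: 'examplePassword',
  },
})
```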
@@ -141,7 +141,7 @@ The following options are available:
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| `pagination` | Set pagination-specific options for this Collection in the List View. [More details](#pagination). |
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
| `baseFilter` | Defines a default base filter which will be applied to the List View (along with any other filters applied by the user) and internal links in Lexical Editor, |

<Banner type="warning">
@@ -131,29 +131,6 @@ localization: {

Since the filtering happens at the root level of the application and its result is not calculated every time you navigate to a new page, you may want to call `router.refresh` in a custom component that watches when values that affect the result change. In the example above, you would want to do this when `supportedLocales` changes on the tenant document.
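A minimal sketch of such a component, assuming the watched value (here `supportedLocales`) is passed in as a prop; the component and prop names are hypothetical:

```tsx
'use client'
import { useEffect, useRef } from 'react'
import { useRouter } from 'next/navigation'

type Props = { supportedLocales?: string[] }

export function RefreshLocalesOnChange({ supportedLocales }: Props) {
  const router = useRouter()
  const previous = useRef(supportedLocales)

  useEffect(() => {
    // When the watched value changes, refresh the route so the root-level
    // locale filtering is recalculated on the server.
    if (JSON.stringify(previous.current) !== JSON.stringify(supportedLocales)) {
      previous.current = supportedLocales
      router.refresh()
    }
  }, [router, supportedLocales])

  return null
}
```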
## Experimental Options

Experimental options are features that may not be fully stable and may change or be removed in future releases.

These options can be enabled in your Payload Config under the `experimental` key. You can set them like this:

```ts
import { buildConfig } from 'payload'

export default buildConfig({
// ...
experimental: {
localizeStatus: true,
},
})
```

The following experimental options are available related to localization:

| Option | Description |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`localizeStatus`** | **Boolean.** When `true`, shows document status per locale in the admin panel instead of always showing the latest overall status. Opt-in for backwards compatibility. Defaults to `false`. |

## Field Localization

Payload Localization works on a **field** level—not a document level. In addition to configuring the base Payload Config to support Localization, you need to specify each field that you would like to localize.
@@ -70,7 +70,6 @@ The following options are available:
| **`admin`** | The configuration options for the Admin Panel, including Custom Components, Live Preview, etc. [More details](../admin/overview#admin-options). |
| **`bin`** | Register custom bin scripts for Payload to execute. [More Details](#custom-bin-scripts). |
| **`editor`** | The Rich Text Editor which will be used by `richText` fields. [More details](../rich-text/overview). |
| **`experimental`** | Configure experimental features for Payload. These may be unstable and may change or be removed in future releases. [More details](../experimental). |
| **`db`** \* | The Database Adapter which will be used by Payload. [More details](../database/overview). |
| **`serverURL`** | A string used to define the absolute URL of your app. This includes the protocol, for example `https://example.com`. No paths allowed, only protocol, domain and (optionally) port. |
| **`collections`** | An array of Collections for Payload to manage. [More details](./collections). |
@@ -158,7 +158,7 @@ export function MyCustomView(props: AdminViewServerProps) {

<Banner type="success">
**Tip:** For consistent layout and navigation, you may want to wrap your
Custom View with one of the built-in [Templates](./overview#templates).
Custom View with one of the built-in [Template](./overview#templates).
</Banner>

### View Templates

@@ -293,6 +293,7 @@ Here's an example of a custom `editMenuItems` component:

```tsx
import React from 'react'
import { PopupList } from '@payloadcms/ui'

import type { EditMenuItemsServerProps } from 'payload'

@@ -300,12 +301,12 @@ export const EditMenuItems = async (props: EditMenuItemsServerProps) => {
const href = `/custom-action?id=${props.id}`

return (
<>
<a href={href}>Custom Edit Menu Item</a>
<a href={href}>
<PopupList.ButtonGroup>
<PopupList.Button href={href}>Custom Edit Menu Item</PopupList.Button>
<PopupList.Button href={href}>
Another Custom Edit Menu Item - add as many as you need!
</a>
</>
</PopupList.Button>
</PopupList.ButtonGroup>
)
}
```
@@ -63,22 +63,3 @@ export const MyCollection: CollectionConfig = {
],
}
```

## Localized fields and MongoDB indexes

When you set `index: true` or `unique: true` on a localized field, MongoDB creates one index **per locale path** (e.g., `slug.en`, `slug.da-dk`, etc.). With many locales and indexed fields, this can quickly approach MongoDB's per-collection index limit.

If you know you'll query specifically by a locale, index only those locale paths using the collection-level `indexes` option instead of setting `index: true` on the localized field. This approach gives you more control and helps avoid unnecessary indexes.

```ts
import type { CollectionConfig } from 'payload'

export const Pages: CollectionConfig = {
fields: [{ name: 'slug', type: 'text', localized: true }],
indexes: [
// Index English slug only (rather than all locales)
{ fields: ['slug.en'] },
// You could also make it unique:
// { fields: ['slug.en'], unique: true },
],
}
```
@@ -60,21 +60,21 @@ You can access Mongoose models as follows:

## Using other MongoDB implementations

You can import the `compatibilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):
You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated):

```ts
import { mongooseAdapter, compatibilityOptions } from '@payloadcms/db-mongodb'
import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb'

export default buildConfig({
db: mongooseAdapter({
url: process.env.DATABASE_URI,
// For example, if you're using firestore:
...compatibilityOptions.firestore,
...compatabilityOptions.firestore,
}),
})
```

We export compatibility options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:
We export compatability options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations:

- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks).
- Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
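Building on those limitations, a minimal sketch for Azure Cosmos DB that combines the exported options shown above with the root-level `indexSortableFields` flag (the `DATABASE_URI` environment variable is assumed):

```ts
import { buildConfig } from 'payload'
import { compatibilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'

export default buildConfig({
  // ...
  // Cosmos DB requires sortable fields to be indexed at the root config level
  indexSortableFields: true,
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
    // Recommended Cosmos DB settings (e.g. disables transactions and join aggregations)
    ...compatibilityOptions.cosmosdb,
  }),
})
```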
@@ -1,66 +0,0 @@
|
||||
---
|
||||
title: Experimental Features
|
||||
label: Overview
|
||||
order: 10
|
||||
desc: Enable and configure experimental functionality within Payload. These featuresmay be unstable and may change or be removed without notice.
|
||||
keywords: experimental, unstable, beta, preview, features, configuration, Payload, cms, headless, javascript, node, react, nextjs
|
||||
---
|
||||
|
||||
Experimental features allow you to try out new functionality before it becomes a stable part of Payload. These features may still be in active development, may have incomplete functionality, and can change or be removed in future releases without warning.
|
||||
|
||||
## How It Works
|
||||
|
||||
Experimental features are configured via the root-level `experimental` property in your [Payload Config](../configuration/overview). This property contains individual feature flags, each flag can be configured independently, allowing you to selectively opt into specific functionality.
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
|
||||
const config = buildConfig({
|
||||
// ...
|
||||
experimental: {
|
||||
localizeStatus: true, // highlight-line
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Experimental Options
|
||||
|
||||
The following options are available:
|
||||
|
||||
| Option | Description |
|
||||
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`localizeStatus`** | **Boolean.** When `true`, shows document status per locale in the admin panel instead of always showing the latest overall status. Opt-in for backwards compatibility. Defaults to `false`. |
|
||||
|
||||
This list may change without notice.
|
||||
|
||||
## When to Use Experimental Features
|
||||
|
||||
You might enable an experimental feature when:
|
||||
|
||||
- You want early access to new capabilities before their stable release.
|
||||
- You can accept the risks of using potentially unstable functionality.
|
||||
- You are testing new features in a development or staging environment.
|
||||
- You wish to provide feedback to the Payload team on new functionality.
|
||||
|
||||
If you are working on a production application, carefully evaluate whether the benefits outweigh the risks. For most stable applications, it is recommended to wait until the feature is officially released.
|
||||
|
||||
<Banner type="success">
|
||||
<strong>Tip:</strong> To stay up to date on experimental features or share
|
||||
your feedback, visit the{' '}
|
||||
<a
|
||||
href="https://github.com/payloadcms/payload/discussions"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Payload GitHub Discussions
|
||||
</a>{' '}
|
||||
or{' '}
|
||||
<a
|
||||
href="https://github.com/payloadcms/payload/issues"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
open an issue
|
||||
</a>
|
||||
.
|
||||
</Banner>
|
||||
@@ -81,7 +81,7 @@ To install a Database Adapter, you can run **one** of the following commands:

#### 2. Copy Payload files into your Next.js app folder

Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/%28payload%29) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](<https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)>) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:

```plaintext
app/
@@ -162,11 +162,6 @@ const result = await payload.find({
})
```

<Banner type="info">
`pagination`, `page`, and `limit` are three related properties [documented
here](/docs/queries/pagination).
</Banner>
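For reference, a minimal sketch of how those three properties combine in a single Local API query (the `posts` collection slug is a placeholder):

```ts
// Fetch the second page of ten documents; setting `pagination: false`
// would instead return every matching document without paginating.
const secondPage = await payload.find({
  collection: 'posts',
  limit: 10,
  page: 2,
  pagination: true,
})
```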
### Find by ID#collection-find-by-id

```js

@@ -207,7 +207,7 @@ Everything mentioned above applies to local development as well, but there are a

### Enable Turbopack

<Banner type="warning">
**Note:** In the future this will be the default. Use at your own risk.
**Note:** In the future this will be the default. Use as your own risk.
</Banner>

Add `--turbo` to your dev script to significantly speed up your local development server start time.
@@ -79,7 +79,6 @@ formBuilderPlugin({
text: true,
textarea: true,
select: true,
radio: true,
email: true,
state: true,
country: true,

@@ -294,46 +293,14 @@ Maps to a `textarea` input on your front-end. Used to collect a multi-line strin

Maps to a `select` input on your front-end. Used to display a list of options.

| Property | Type | Description |
| -------------- | -------- | ------------------------------------------------------------------------------- |
| `name` | string | The name of the field. |
| `label` | string | The label of the field. |
| `defaultValue` | string | The default value of the field. |
| `placeholder` | string | The placeholder text for the field. |
| `width` | string | The width of the field on the front-end. |
| `required` | checkbox | Whether or not the field is required when submitted. |
| `options` | array | An array of objects that define the select options. See below for more details. |

#### Select Options

Each option in the `options` array defines a selectable choice for the select field.

| Property | Type | Description |
| -------- | ------ | ----------------------------------- |
| `label` | string | The display text for the option. |
| `value` | string | The value submitted for the option. |

### Radio

Maps to radio button inputs on your front-end. Used to allow users to select a single option from a list of choices.

| Property | Type | Description |
| -------------- | -------- | ------------------------------------------------------------------------------ |
| `name` | string | The name of the field. |
| `label` | string | The label of the field. |
| `defaultValue` | string | The default value of the field. |
| `width` | string | The width of the field on the front-end. |
| `required` | checkbox | Whether or not the field is required when submitted. |
| `options` | array | An array of objects that define the radio options. See below for more details. |

#### Radio Options

Each option in the `options` array defines a selectable choice for the radio field.

| Property | Type | Description |
| -------- | ------ | ----------------------------------- |
| `label` | string | The display text for the option. |
| `value` | string | The value submitted for the option. |

| Property | Type | Description |
| -------------- | -------- | -------------------------------------------------------- |
| `name` | string | The name of the field. |
| `label` | string | The label of the field. |
| `defaultValue` | string | The default value of the field. |
| `width` | string | The width of the field on the front-end. |
| `required` | checkbox | Whether or not the field is required when submitted. |
| `options` | array | An array of objects with `label` and `value` properties. |

### Email (field)
@@ -80,11 +80,6 @@ type MultiTenantPluginConfig<ConfigTypes = unknown> = {
* @default false
*/
isGlobal?: boolean
/**
* Opt out of adding the tenant field and place
* it manually using the `tenantField` export from the plugin
*/
customTenantField?: boolean
/**
* Overrides for the tenant field, will override the entire tenantField configuration
*/
@@ -148,12 +148,6 @@ export const Pages: CollectionConfig<'pages'> = {
}
```

<VideoDrawer
id="Snqjng_w-QU"
label="Watch default populate in action"
drawerTitle="How to easily optimize Payload CMS requests with defaultPopulate"
/>

<Banner type="warning">
**Important:** When using `defaultPopulate` on a collection with
[Uploads](/docs/fields/upload) enabled and you want to select the `url` field,
@@ -773,28 +773,3 @@ const res = await fetch(`${api}/${collectionSlug}?depth=1&locale=en`, {
},
})
```

### Passing as JSON

When using `X-Payload-HTTP-Method-Override`, it expects the body to be a query string. If you want to pass JSON instead, you can set the `Content-Type` to `application/json` and include the JSON body in the request.

#### Example

```ts
const res = await fetch(`${api}/${collectionSlug}/${id}`, {
// Only the findByID endpoint supports HTTP method overrides with JSON data
method: 'POST',
credentials: 'include',
headers: {
'Accept-Language': i18n.language,
'Content-Type': 'application/json',
'X-Payload-HTTP-Method-Override': 'GET',
},
body: JSON.stringify({
depth: 1,
locale: 'en',
}),
})
```

This can be more efficient for large JSON payloads, as you avoid converting data to and from query strings. However, only certain endpoints support this. Supported endpoints will read the parsed body under a `data` property, instead of reading from query parameters as with standard GET requests.
@@ -11,7 +11,7 @@ keywords: lexical, richtext, html

There are two main approaches to convert your Lexical-based rich text to HTML:

1. **Generate HTML on-demand (Recommended)**: Convert JSON to HTML wherever you need it, on-demand.
2. **Generate HTML within your Collection**: Create a new field that automatically converts your saved JSON content to HTML. This is not recommended because it adds overhead to the Payload API.
2. **Generate HTML within your Collection**: Create a new field that automatically converts your saved JSON content to HTML. This is not recommended because it adds overhead to the Payload API and may not work well with live preview.

### On-demand

@@ -101,7 +101,10 @@ export const MyRSCComponent = async ({

### HTML field

The `lexicalHTMLField()` helper converts JSON to HTML and saves it in a field that is updated every time you read it via an `afterRead` hook. It's generally not recommended, as it creates a column with duplicate content in another format.
The `lexicalHTMLField()` helper converts JSON to HTML and saves it in a field that is updated every time you read it via an `afterRead` hook. It's generally not recommended for two reasons:

1. It creates a column with duplicate content in another format.
2. In [client-side live preview](/docs/live-preview/client), it makes it not "live".

Consider using the [on-demand HTML converter above](/docs/rich-text/converting-html#on-demand-recommended) or the [JSX converter](/docs/rich-text/converting-jsx) unless you have a good reason.
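For comparison, the on-demand route referenced above is typically a single call, assuming the `convertLexicalToHTML` helper exported from `@payloadcms/richtext-lexical/html` and a rich text value such as `post.content`:

```ts
import { convertLexicalToHTML } from '@payloadcms/richtext-lexical/html'

// Convert the stored Lexical JSON to an HTML string at render time,
// instead of persisting a duplicate HTML column on the document.
const html = convertLexicalToHTML({ data: post.content })
```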
@@ -269,13 +269,11 @@ Lexical does not generate accurate type definitions for your richText fields for

The Rich Text Field editor configuration has an `admin` property with the following options:

| Property | Description |
| ------------------------------- | ----------------------------------------------------------------------------------------------------------- |
| **`placeholder`** | Set this property to define a placeholder string for the field. |
| **`hideGutter`** | Set this property to `true` to hide this field's gutter within the Admin Panel. |
| **`hideInsertParagraphAtEnd`** | Set this property to `true` to hide the "+" button that appears at the end of the editor. |
| **`hideDraggableBlockElement`** | Set this property to `true` to hide the draggable element that appears when you hover a node in the editor. |
| **`hideAddBlockButton`** | Set this property to `true` to hide the "+" button that appears when you hover a node in the editor. |

| Property | Description |
| ------------------------------ | ---------------------------------------------------------------------------------------- |
| **`placeholder`** | Set this property to define a placeholder string for the field. |
| **`hideGutter`** | Set this property to `true` to hide this field's gutter within the Admin Panel. |
| **`hideInsertParagraphAtEnd`** | Set this property to `true` to hide the "+" button that appears at the end of the editor |

### Disable the gutter
@@ -13,8 +13,8 @@ keywords: uploads, images, media, overview, documentation, Content Management Sy
</Banner>

<LightDarkImage
srcLight="https://payloadcms.com/images/docs/uploads-overview.jpg"
srcDark="https://payloadcms.com/images/docs/uploads-overview.jpg"
srcLight="https://payloadcms.com/images/docs/upload-admin.jpg"
srcDark="https://payloadcms.com/images/docs/upload-admin.jpg"
alt="Shows an Upload enabled collection in the Payload Admin Panel"
caption="Admin Panel screenshot depicting a Media Collection with Upload enabled"
/>
@@ -12,7 +12,7 @@ Extending on Payload's [Draft](/docs/versions/drafts) functionality, you can con
Autosave relies on Versions and Drafts being enabled in order to function.
</Banner>



_If Autosave is enabled, drafts will be created automatically as the document is modified and the Admin UI adds an indicator describing when the document was last saved to the top right of the sidebar._

## Options

@@ -14,7 +14,7 @@ Payload's Draft functionality builds on top of the Versions functionality to all

By enabling Versions with Drafts, your collections and globals can maintain _newer_, and _unpublished_ versions of your documents. It's perfect for cases where you might want to work on a document, update it and save your progress, but not necessarily make it publicly published right away. Drafts are extremely helpful when building preview implementations.



_If Drafts are enabled, the typical Save button is replaced with new actions which allow you to either save a draft, or publish your changes._

## Options

@@ -13,7 +13,7 @@ keywords: version history, revisions, audit log, draft, publish, restore, autosa

When enabled, Payload will automatically scaffold a new Collection in your database to store versions of your document(s) over time, and the Admin UI will be extended with additional views that allow you to browse document versions, view diffs in order to see exactly what has changed in your documents (and when they changed), and restore documents back to prior versions easily.



_Comparing an old version to a newer version of a document_

**With Versions, you can:**
@@ -1,6 +1,6 @@
{
"name": "payload-monorepo",
"version": "3.54.0",
"version": "3.50.0",
"private": true,
"type": "module",
"workspaces": [

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/admin-bar",
"version": "3.54.0",
"version": "3.50.0",
"description": "An admin bar for React apps using Payload",
"homepage": "https://payloadcms.com",
"repository": {

@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.54.0",
"version": "3.50.0",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.54.0",
"version": "3.50.0",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {
@@ -35,12 +35,7 @@ export const connect: Connect = async function connect(
}

try {
if (!this.connection) {
this.connection = await mongoose.createConnection(urlToConnect, connectionOptions).asPromise()
}

await this.connection.openUri(urlToConnect, connectionOptions)

this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
if (this.useAlternativeDropDatabase) {
if (this.connection.db) {
// Firestore doesn't support dropDatabase, so we monkey patch

@@ -80,8 +75,7 @@ export const connect: Connect = async function connect(
if (!hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----')
await this.connection.dropDatabase()

await mongoose.connection.dropDatabase()
this.payload.logger.info('---- DROPPED DATABASE ----')
}
}
@@ -17,16 +17,10 @@ export const create: Create = async function create(

const options: CreateOptions = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

let doc

if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}

transform({
adapter: this,
data,

@@ -14,10 +14,6 @@ export const createGlobal: CreateGlobal = async function createGlobal(
) {
const { globalConfig, Model } = getGlobal({ adapter: this, globalSlug })

if (!data.createdAt) {
;(data as any).createdAt = new Date().toISOString()
}

transform({
adapter: this,
data,

@@ -28,8 +24,6 @@ export const createGlobal: CreateGlobal = async function createGlobal(

const options: CreateOptions = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

let [result] = (await Model.create([data], options)) as any
@@ -12,7 +12,6 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
autosave,
createdAt,
globalSlug,
localeStatus,
parent,
publishedLocale,
req,

@@ -26,24 +25,18 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo

const options = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

const data = {
autosave,
createdAt,
latest: true,
localeStatus,
parent,
publishedLocale,
snapshot,
updatedAt,
version: versionData,
}
if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}

const fields = buildVersionGlobalFields(this.payload.config, globalConfig)

@@ -12,7 +12,6 @@ export const createVersion: CreateVersion = async function createVersion(
autosave,
collectionSlug,
createdAt,
localeStatus,
parent,
publishedLocale,
req,

@@ -30,24 +29,18 @@ export const createVersion: CreateVersion = async function createVersion(

const options = {
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

const data = {
autosave,
createdAt,
latest: true,
localeStatus,
parent,
publishedLocale,
snapshot,
updatedAt,
version: versionData,
}
if (!data.createdAt) {
data.createdAt = new Date().toISOString()
}

const fields = buildVersionCollectionFields(this.payload.config, collectionConfig)
@@ -1,11 +1,11 @@
import type { Destroy } from 'payload'

import mongoose from 'mongoose'

import type { MongooseAdapter } from './index.js'

export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
await this.connection.close()
await mongoose.disconnect()

for (const name of Object.keys(this.connection.models)) {
this.connection.deleteModel(name)
}
Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
}

@@ -331,7 +331,7 @@ export function mongooseAdapter({
}
}

export { compatibilityOptions } from './utilities/compatibilityOptions.js'
export { compatabilityOptions } from './utilities/compatabilityOptions.js'

/**
* Attempt to find migrations directory.
@@ -19,14 +19,11 @@ import { getBuildQueryPlugin } from './queries/getBuildQueryPlugin.js'
import { getDBName } from './utilities/getDBName.js'

export const init: Init = function init(this: MongooseAdapter) {
// Always create a scoped, **unopened** connection object
// (no URI here; models compile per-connection and do not require an open socket)
this.connection ??= mongoose.createConnection()

this.payload.config.collections.forEach((collection: SanitizedCollectionConfig) => {
const schemaOptions = this.collectionsSchemaOptions?.[collection.slug]

const schema = buildCollectionSchema(collection, this.payload, schemaOptions)

if (collection.versions) {
const versionModelName = getDBName({ config: collection, versions: true })

@@ -58,7 +55,7 @@ export const init: Init = function init(this: MongooseAdapter) {
const versionCollectionName =
this.autoPluralization === true && !collection.dbName ? undefined : versionModelName

this.versions[collection.slug] = this.connection.model(
this.versions[collection.slug] = mongoose.model(
versionModelName,
versionSchema,
versionCollectionName,

@@ -69,14 +66,14 @@ export const init: Init = function init(this: MongooseAdapter) {
const collectionName =
this.autoPluralization === true && !collection.dbName ? undefined : modelName

this.collections[collection.slug] = this.connection.model<any>(
this.collections[collection.slug] = mongoose.model<any>(
modelName,
schema,
collectionName,
) as CollectionModel
})

this.globals = buildGlobalModel(this) as GlobalModel
this.globals = buildGlobalModel(this.payload) as GlobalModel

this.payload.config.globals.forEach((global) => {
if (global.versions) {

@@ -104,7 +101,7 @@ export const init: Init = function init(this: MongooseAdapter) {
}),
)

this.versions[global.slug] = this.connection.model<any>(
this.versions[global.slug] = mongoose.model<any>(
versionModelName,
versionSchema,
versionModelName,
@@ -1,13 +1,14 @@
import type { Payload } from 'payload'

import mongoose from 'mongoose'

import type { MongooseAdapter } from '../index.js'
import type { GlobalModel } from '../types.js'

import { getBuildQueryPlugin } from '../queries/getBuildQueryPlugin.js'
import { buildSchema } from './buildSchema.js'

export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null => {
if (adapter.payload.config.globals && adapter.payload.config.globals.length > 0) {
export const buildGlobalModel = (payload: Payload): GlobalModel | null => {
if (payload.config.globals && payload.config.globals.length > 0) {
const globalsSchema = new mongoose.Schema(
{},
{ discriminatorKey: 'globalType', minimize: false, timestamps: true },

@@ -15,13 +16,9 @@ export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null =

globalsSchema.plugin(getBuildQueryPlugin())

const Globals = adapter.connection.model(
'globals',
globalsSchema,
'globals',
) as unknown as GlobalModel
const Globals = mongoose.model('globals', globalsSchema, 'globals') as unknown as GlobalModel

Object.values(adapter.payload.config.globals).forEach((globalConfig) => {
Object.values(payload.config.globals).forEach((globalConfig) => {
const globalSchema = buildSchema({
buildSchemaOptions: {
options: {

@@ -29,7 +26,7 @@ export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null =
},
},
configFields: globalConfig.fields,
payload: adapter.payload,
payload,
})
Globals.discriminator(globalConfig.slug, globalSchema)
})
@@ -63,10 +63,7 @@ const migrateModelWithBatching = async ({
},
},
})),
{
session, // Timestamps are manually added by the write transform
timestamps: false,
},
{ session },
)

skip += batchSize

@@ -179,13 +179,6 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(

for (let i = 0; i < result.docs.length; i++) {
const id = result.docs[i].parent

const localeStatus = result.docs[i].localeStatus || {}
if (locale && localeStatus[locale]) {
result.docs[i].status = localeStatus[locale]
result.docs[i].version._status = localeStatus[locale]
}

result.docs[i] = result.docs[i].version ?? {}
result.docs[i].id = id
}
@@ -26,8 +26,6 @@ export const updateGlobal: UpdateGlobal = async function updateGlobal(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

transform({ adapter: this, data, fields, globalSlug, operation: 'write' })

@@ -39,8 +39,6 @@ export async function updateGlobalVersion<T extends TypeWithID>(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

const query = await buildQuery({
@@ -1,4 +1,4 @@
import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose'
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { Job, UpdateJobs, Where } from 'payload'

import type { MongooseAdapter } from './index.js'

@@ -14,13 +14,9 @@ export const updateJobs: UpdateJobs = async function updateMany(
this: MongooseAdapter,
{ id, data, limit, req, returning, sort: sortArg, where: whereArg },
) {
if (
!(data?.log as object[])?.length &&
!(data.log && typeof data.log === 'object' && '$push' in data.log)
) {
if (!(data?.log as object[])?.length) {
delete data.log
}

const where = id ? { id: { equals: id } } : (whereArg as Where)

const { collectionConfig, Model } = getCollection({

@@ -40,8 +36,6 @@ export const updateJobs: UpdateJobs = async function updateMany(
lean: true,
new: true,
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

let query = await buildQuery({

@@ -51,44 +45,17 @@ export const updateJobs: UpdateJobs = async function updateMany(
where,
})

let updateData: UpdateQuery<any> = data

const $inc: Record<string, number> = {}
const $push: Record<string, { $each: any[] } | any> = {}

transform({
$inc,
$push,
adapter: this,
data,
fields: collectionConfig.fields,
operation: 'write',
})

const updateOps: UpdateQuery<any> = {}

if (Object.keys($inc).length) {
updateOps.$inc = $inc
}
if (Object.keys($push).length) {
updateOps.$push = $push
}
if (Object.keys(updateOps).length) {
updateOps.$set = updateData
updateData = updateOps
}
transform({ adapter: this, data, fields: collectionConfig.fields, operation: 'write' })

let result: Job[] = []

try {
if (id) {
if (returning === false) {
await Model.updateOne(query, updateData, options)
transform({ adapter: this, data, fields: collectionConfig.fields, operation: 'read' })

await Model.updateOne(query, data, options)
return null
} else {
const doc = await Model.findOneAndUpdate(query, updateData, options)
const doc = await Model.findOneAndUpdate(query, data, options)
result = doc ? [doc] : []
}
} else {

@@ -105,7 +72,7 @@ export const updateJobs: UpdateJobs = async function updateMany(
query = { _id: { $in: documentsToUpdate.map((doc) => doc._id) } }
}

await Model.updateMany(query, updateData, options)
await Model.updateMany(query, data, options)

if (returning === false) {
return null
@@ -58,8 +58,6 @@ export const updateMany: UpdateMany = async function updateMany(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

let query = await buildQuery({

@@ -38,8 +38,6 @@ export const updateOne: UpdateOne = async function updateOne(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

const query = await buildQuery({
@@ -58,18 +56,11 @@ export const updateOne: UpdateOne = async function updateOne(
const $push: Record<string, { $each: any[] } | any> = {}

transform({ $inc, $push, adapter: this, data, fields, operation: 'write' })

const updateOps: UpdateQuery<any> = {}

if (Object.keys($inc).length) {
updateOps.$inc = $inc
updateData = { $inc, $set: updateData }
}
if (Object.keys($push).length) {
updateOps.$push = $push
}
if (Object.keys(updateOps).length) {
updateOps.$set = updateData
updateData = updateOps
updateData = { $push, $set: updateData }
}

try {

@@ -45,8 +45,6 @@ export const updateVersion: UpdateVersion = async function updateVersion(
select,
}),
session: await getSession(this, req),
// Timestamps are manually added by the write transform
timestamps: false,
}

const query = await buildQuery({
@@ -2,9 +2,9 @@ import type { Args } from '../index.js'
|
||||
|
||||
/**
|
||||
* Each key is a mongo-compatible database and the value
|
||||
* is the recommended `mongooseAdapter` settings for compatibility.
|
||||
* is the recommended `mongooseAdapter` settings for compatability.
|
||||
*/
|
||||
export const compatibilityOptions = {
|
||||
export const compatabilityOptions = {
|
||||
cosmosdb: {
|
||||
transactionOptions: false,
|
||||
useJoinAggregations: false,
|
||||
@@ -12,7 +12,6 @@ export const compatibilityOptions = {
|
||||
},
|
||||
documentdb: {
|
||||
disableIndexHints: true,
|
||||
useJoinAggregations: false,
|
||||
},
|
||||
firestore: {
|
||||
disableIndexHints: true,
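For context, these presets are intended to be spread into the adapter configuration. A hedged usage sketch, assuming the `compatibilityOptions` spelling and the `@payloadcms/db-mongodb` entry point; the connection string is a placeholder, not taken from this diff:

```ts
import { compatibilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'

// Hypothetical usage: spread the preset for the target database so options
// such as `useJoinAggregations: false` or `disableIndexHints: true` apply.
export const db = mongooseAdapter({
  ...compatibilityOptions.documentdb,
  url: process.env.DATABASE_URI || '',
})
```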
|
||||
@@ -395,10 +395,6 @@ describe('transform', () => {
|
||||
data,
|
||||
fields: config.collections[0].fields,
|
||||
})
|
||||
if ('updatedAt' in data) {
|
||||
delete data.updatedAt
|
||||
}
|
||||
|
||||
const flattenValuesAfter = Object.values(flattenRelationshipValues(data))
|
||||
|
||||
flattenValuesAfter.forEach((value, i) => {
|
||||
|
||||
@@ -492,24 +492,11 @@ export const transform = ({
|
||||
if (value && typeof value === 'object' && '$push' in value) {
|
||||
const push = value.$push
|
||||
|
||||
if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
if (typeof push === 'object' && push !== null) {
|
||||
Object.entries(push).forEach(([localeKey, localeData]) => {
|
||||
if (Array.isArray(localeData)) {
|
||||
$push[`${parentPath}${field.name}.${localeKey}`] = { $each: localeData }
|
||||
} else if (typeof localeData === 'object') {
|
||||
$push[`${parentPath}${field.name}.${localeKey}`] = localeData
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if (Array.isArray(push)) {
|
||||
$push[`${parentPath}${field.name}`] = { $each: push }
|
||||
} else if (typeof push === 'object') {
|
||||
$push[`${parentPath}${field.name}`] = push
|
||||
}
|
||||
if (Array.isArray(push)) {
|
||||
$push[`${parentPath}${field.name}`] = { $each: push }
|
||||
} else if (typeof push === 'object') {
|
||||
$push[`${parentPath}${field.name}`] = push
|
||||
}
|
||||
|
||||
delete ref[field.name]
|
||||
}
|
||||
}
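The localized branch above turns a `$push` into locale-scoped dot paths so only the targeted locale's array is appended to. A small sketch of that key construction, using made-up field and locale names:

```ts
// Sketch: flatten { $push: { en: [...], de: {...} } } on a localized `items`
// field into Mongo dot paths keyed per locale.
const $push: Record<string, { $each: unknown[] } | unknown> = {}
const parentPath = ''
const fieldName = 'items'

const incoming: Record<string, unknown> = { de: { text: 'b' }, en: [{ text: 'a' }] }

Object.entries(incoming).forEach(([localeKey, localeData]) => {
  if (Array.isArray(localeData)) {
    // Arrays use $each so several rows can be appended in a single operation
    $push[`${parentPath}${fieldName}.${localeKey}`] = { $each: localeData }
  } else if (typeof localeData === 'object') {
    $push[`${parentPath}${fieldName}.${localeKey}`] = localeData
  }
})

// $push -> { 'items.de': { text: 'b' }, 'items.en': { $each: [{ text: 'a' }] } }
```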
|
||||
@@ -592,15 +579,4 @@ export const transform = ({
|
||||
parentIsLocalized,
|
||||
ref: data,
|
||||
})
|
||||
|
||||
if (operation === 'write') {
|
||||
if (typeof data.updatedAt === 'undefined') {
|
||||
// If data.updatedAt is explicitly set to `null` we should not set it - this means we don't want to change the value of updatedAt.
|
||||
data.updatedAt = new Date().toISOString()
|
||||
} else if (data.updatedAt === null) {
|
||||
// `updatedAt` may be explicitly set to null to disable updating it - if that is the case, we need to delete the property. Keeping it as null will
|
||||
// cause the database to think we want to set it to null, which we don't.
|
||||
delete data.updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/drizzle",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "A library of shared functions used by different payload database adapters",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -15,7 +15,6 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
autosave,
|
||||
createdAt,
|
||||
globalSlug,
|
||||
localeStatus,
|
||||
publishedLocale,
|
||||
req,
|
||||
returning,
|
||||
@@ -36,7 +35,6 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
autosave,
|
||||
createdAt,
|
||||
latest: true,
|
||||
localeStatus,
|
||||
publishedLocale,
|
||||
snapshot,
|
||||
updatedAt,
|
||||
|
||||
@@ -15,7 +15,6 @@ export async function createVersion<T extends TypeWithID>(
|
||||
autosave,
|
||||
collectionSlug,
|
||||
createdAt,
|
||||
localeStatus,
|
||||
parent,
|
||||
publishedLocale,
|
||||
req,
|
||||
@@ -41,7 +40,6 @@ export async function createVersion<T extends TypeWithID>(
|
||||
autosave,
|
||||
createdAt,
|
||||
latest: true,
|
||||
localeStatus,
|
||||
parent,
|
||||
publishedLocale,
|
||||
snapshot,
|
||||
|
||||
@@ -791,14 +791,9 @@ export const traverseFields = ({
|
||||
} else {
|
||||
shouldSelect = true
|
||||
}
|
||||
const tableName = fieldShouldBeLocalized({ field, parentIsLocalized })
|
||||
? `${currentTableName}${adapter.localesSuffix}`
|
||||
: currentTableName
|
||||
|
||||
if (shouldSelect) {
|
||||
args.extras[name] = sql
|
||||
.raw(`ST_AsGeoJSON("${adapter.tables[tableName][name].name}")::jsonb`)
|
||||
.as(name)
|
||||
args.extras[name] = sql.raw(`ST_AsGeoJSON(${toSnakeCase(name)})::jsonb`).as(name)
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
@@ -110,32 +110,19 @@ export const sanitizeQueryValue = ({
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to convert a single date value to ISO string
|
||||
const convertDateToISO = (item: unknown): unknown => {
|
||||
if (typeof item === 'string') {
|
||||
if (item === 'null' || item === '') {
|
||||
return null
|
||||
}
|
||||
const date = new Date(item)
|
||||
return Number.isNaN(date.getTime()) ? undefined : date.toISOString()
|
||||
} else if (typeof item === 'number') {
|
||||
return new Date(item).toISOString()
|
||||
} else if (item instanceof Date) {
|
||||
return item.toISOString()
|
||||
}
|
||||
return item
|
||||
}
|
||||
|
||||
if (field.type === 'date' && operator !== 'exists') {
|
||||
if (Array.isArray(formattedValue)) {
|
||||
// Handle arrays of dates for 'in' and 'not_in' operators
|
||||
formattedValue = formattedValue.map(convertDateToISO).filter((item) => item !== undefined)
|
||||
} else {
|
||||
const converted = convertDateToISO(val)
|
||||
if (converted === undefined) {
|
||||
return { operator, value: undefined }
|
||||
if (typeof val === 'string') {
|
||||
if (val === 'null' || val === '') {
|
||||
formattedValue = null
|
||||
} else {
|
||||
const date = new Date(val)
|
||||
if (Number.isNaN(date.getTime())) {
|
||||
return { operator, value: undefined }
|
||||
}
|
||||
formattedValue = date.toISOString()
|
||||
}
|
||||
formattedValue = converted
|
||||
} else if (typeof val === 'number') {
|
||||
formattedValue = new Date(val).toISOString()
|
||||
}
|
||||
}
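The refactor above moves date coercion into `convertDateToISO` so single values and `in` / `not_in` arrays are handled the same way. A standalone usage sketch with illustrative inputs:

```ts
// Same logic as the helper above: strings parse to ISO (or undefined when
// unparseable), numbers are treated as epoch millis, Dates are serialized.
const convertDateToISO = (item: unknown): unknown => {
  if (typeof item === 'string') {
    if (item === 'null' || item === '') {
      return null
    }
    const date = new Date(item)
    return Number.isNaN(date.getTime()) ? undefined : date.toISOString()
  } else if (typeof item === 'number') {
    return new Date(item).toISOString()
  } else if (item instanceof Date) {
    return item.toISOString()
  }
  return item
}

// Array case (e.g. an `in` operator): unparseable entries are dropped
const values = ['2024-01-01', 'not-a-date', 1700000000000]
  .map(convertDateToISO)
  .filter((item) => item !== undefined)
// -> ['2024-01-01T00:00:00.000Z', '2023-11-14T22:13:20.000Z']
```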
|
||||
|
||||
|
||||
@@ -36,17 +36,15 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
where: combinedWhere,
|
||||
})
|
||||
|
||||
for (let i = 0; i < result.docs.length; i++) {
|
||||
const id = result.docs[i].parent
|
||||
const localeStatus = result.docs[i].localeStatus || {}
|
||||
if (locale && localeStatus[locale]) {
|
||||
result.docs[i].status = localeStatus[locale]
|
||||
result.docs[i].version._status = localeStatus[locale]
|
||||
}
|
||||
return {
|
||||
...result,
|
||||
docs: result.docs.map((doc) => {
|
||||
doc = {
|
||||
id: doc.parent,
|
||||
...doc.version,
|
||||
}
|
||||
|
||||
result.docs[i] = result.docs[i].version ?? {}
|
||||
result.docs[i].id = id
|
||||
return doc
|
||||
}),
|
||||
}
|
||||
|
||||
return result
|
||||
}
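The new return shape maps each raw version row onto a draft-shaped document instead of mutating the result in place. A minimal sketch of that reshaping, with a simplified row type:

```ts
// Sketch: a version row keeps the document id on `parent` and the document
// fields under `version`; queryDrafts flattens the two into one object.
type VersionRow = { parent: number | string; version: Record<string, unknown> }

const toDraftDoc = (row: VersionRow): Record<string, unknown> => ({
  id: row.parent,
  ...row.version,
})

// { parent: 5, version: { _status: 'draft', title: 'Hello' } }
//   -> { id: 5, _status: 'draft', title: 'Hello' }
```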
|
||||
|
||||
@@ -129,23 +129,8 @@ export const traverseFields = ({
|
||||
const arrayTableName = adapter.tableNameMap.get(`${parentTableName}_${columnName}`)
|
||||
|
||||
if (isLocalized) {
|
||||
let value: {
|
||||
[locale: string]: unknown[]
|
||||
} = data[field.name] as any
|
||||
|
||||
let push = false
|
||||
if (typeof value === 'object' && '$push' in value) {
|
||||
value = value.$push as any
|
||||
push = true
|
||||
}
|
||||
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
Object.entries(value).forEach(([localeKey, _localeData]) => {
|
||||
let localeData = _localeData
|
||||
if (push && !Array.isArray(localeData)) {
|
||||
localeData = [localeData]
|
||||
}
|
||||
|
||||
if (typeof data[field.name] === 'object' && data[field.name] !== null) {
|
||||
Object.entries(data[field.name]).forEach(([localeKey, localeData]) => {
|
||||
if (Array.isArray(localeData)) {
|
||||
const newRows = transformArray({
|
||||
adapter,
|
||||
@@ -167,25 +152,22 @@ export const traverseFields = ({
|
||||
textsToDelete,
|
||||
withinArrayOrBlockLocale: localeKey,
|
||||
})
|
||||
|
||||
if (push) {
|
||||
if (!arraysToPush[arrayTableName]) {
|
||||
arraysToPush[arrayTableName] = []
|
||||
}
|
||||
arraysToPush[arrayTableName] = arraysToPush[arrayTableName].concat(newRows)
|
||||
} else {
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
if (!arrays[arrayTableName]) {
|
||||
arrays[arrayTableName] = []
|
||||
}
|
||||
arrays[arrayTableName] = arrays[arrayTableName].concat(newRows)
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
let value = data[field.name]
|
||||
let push = false
|
||||
if (typeof value === 'object' && '$push' in value) {
|
||||
if (
|
||||
// TODO do this for localized as well in DRY way
|
||||
|
||||
typeof value === 'object' &&
|
||||
'$push' in value
|
||||
) {
|
||||
value = Array.isArray(value.$push) ? value.$push : [value.$push]
|
||||
push = true
|
||||
}
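Both branches normalize the `$push` payload into a plain array before row transformation, so a single pushed item and a batch look identical downstream. A tiny sketch of that unwrapping with illustrative names:

```ts
// Sketch: detect the $push wrapper, unwrap it to an array of rows, and flag
// that those rows belong in `arraysToPush` rather than a full row rewrite.
const normalizePush = (value: unknown): { push: boolean; rows: unknown[] } => {
  if (value && typeof value === 'object' && '$push' in value) {
    const pushed = (value as { $push: unknown }).$push
    return { push: true, rows: Array.isArray(pushed) ? pushed : [pushed] }
  }

  return { push: false, rows: Array.isArray(value) ? value : [] }
}

// normalizePush({ $push: { text: 'a' } }) -> { push: true, rows: [{ text: 'a' }] }
```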
|
||||
@@ -585,19 +567,6 @@ export const traverseFields = ({
|
||||
valuesToTransform.forEach(({ localeKey, ref, value }) => {
|
||||
let formattedValue = value
|
||||
|
||||
if (field.type === 'date') {
|
||||
if (fieldName === 'updatedAt' && typeof formattedValue === 'undefined') {
|
||||
// let the db handle this. If formattedValue is explicitly set to `null` we should not set it - this means we don't want to change the value of updatedAt.
|
||||
formattedValue = new Date().toISOString()
|
||||
} else {
|
||||
if (typeof value === 'number' && !Number.isNaN(value)) {
|
||||
formattedValue = new Date(value).toISOString()
|
||||
} else if (value instanceof Date) {
|
||||
formattedValue = value.toISOString()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof value !== 'undefined') {
|
||||
if (value && field.type === 'point' && adapter.name !== 'sqlite') {
|
||||
formattedValue = sql`ST_GeomFromGeoJSON(${JSON.stringify(value)})`
|
||||
@@ -622,6 +591,19 @@ export const traverseFields = ({
|
||||
|
||||
formattedValue = sql.raw(`${columnName} + ${value.$inc}`)
|
||||
}
|
||||
|
||||
if (field.type === 'date') {
|
||||
if (typeof value === 'number' && !Number.isNaN(value)) {
|
||||
formattedValue = new Date(value).toISOString()
|
||||
} else if (value instanceof Date) {
|
||||
formattedValue = value.toISOString()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (field.type === 'date' && fieldName === 'updatedAt') {
|
||||
// let the db handle this
|
||||
formattedValue = new Date().toISOString()
|
||||
}
|
||||
|
||||
if (typeof formattedValue !== 'undefined') {
|
||||
|
||||
@@ -42,21 +42,15 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
upsertTarget,
|
||||
where,
|
||||
}: Args): Promise<T> => {
|
||||
if (operation === 'create' && !data.createdAt) {
|
||||
data.createdAt = new Date().toISOString()
|
||||
}
|
||||
|
||||
let insertedRow: Record<string, unknown> = { id }
|
||||
if (id && shouldUseOptimizedUpsertRow({ data, fields })) {
|
||||
const transformedForWrite = transformForWrite({
|
||||
const { arraysToPush, row } = transformForWrite({
|
||||
adapter,
|
||||
data,
|
||||
enableAtomicWrites: true,
|
||||
fields,
|
||||
tableName,
|
||||
})
|
||||
const { row } = transformedForWrite
|
||||
const { arraysToPush } = transformedForWrite
|
||||
|
||||
const drizzle = db as LibSQLDatabase
|
||||
|
||||
@@ -72,19 +66,10 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
})
|
||||
}
|
||||
|
||||
// If row.updatedAt is not set, delete it to avoid triggering hasDataToUpdate. `updatedAt` may be explicitly set to null to
|
||||
// disable triggering hasDataToUpdate.
|
||||
if (typeof row.updatedAt === 'undefined' || row.updatedAt === null) {
|
||||
delete row.updatedAt
|
||||
}
|
||||
|
||||
const hasDataToUpdate = row && Object.keys(row)?.length
|
||||
|
||||
// Then, handle regular row update
|
||||
|
||||
if (ignoreResult) {
|
||||
if (hasDataToUpdate) {
|
||||
// Only update row if there is something to update.
|
||||
// Example: if the data only consists of a single $push, calling insertArrays is enough - we don't need to update the row.
|
||||
if (row && Object.keys(row).length) {
|
||||
await drizzle
|
||||
.update(adapter.tables[tableName])
|
||||
.set(row)
|
||||
@@ -105,7 +90,7 @@ export const upsertRow = async <T extends Record<string, unknown> | TypeWithID>(
|
||||
const findManyKeysLength = Object.keys(findManyArgs).length
|
||||
const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0
|
||||
|
||||
if (!hasDataToUpdate) {
|
||||
if (!row || !Object.keys(row).length) {
|
||||
// Nothing to update => just fetch current row and return
|
||||
findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id)
|
||||
|
||||
|
||||
@@ -9,12 +9,7 @@ export const buildIndexName = ({
|
||||
name: string
|
||||
number?: number
|
||||
}): string => {
|
||||
let indexName = `${name}${number ? `_${number}` : ''}_idx`
|
||||
|
||||
if (indexName.length > 60) {
|
||||
const suffix = `${number ? `_${number}` : ''}_idx`
|
||||
indexName = `${name.slice(0, 60 - suffix.length)}${suffix}`
|
||||
}
|
||||
const indexName = `${name}${number ? `_${number}` : ''}_idx`
|
||||
|
||||
if (!adapter.indexes.has(indexName)) {
|
||||
adapter.indexes.add(indexName)
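One side of this hunk caps index names, which matters because Postgres silently truncates identifiers longer than 63 bytes; the cap trims the base name while preserving the optional numeric suffix and `_idx`. A self-contained sketch of that truncation:

```ts
// Sketch: trim the base name so the full identifier stays within a
// 60-character budget, preserving the numeric suffix and `_idx`.
export const buildTruncatedIndexName = (name: string, number?: number): string => {
  let indexName = `${name}${number ? `_${number}` : ''}_idx`

  if (indexName.length > 60) {
    const suffix = `${number ? `_${number}` : ''}_idx`
    indexName = `${name.slice(0, 60 - suffix.length)}${suffix}`
  }

  return indexName
}

// buildTruncatedIndexName('a'.repeat(80)).length === 60
```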
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-nodemailer",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Payload Nodemailer Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/email-resend",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "Payload Resend Email Adapter",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/graphql",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -22,7 +22,6 @@ export const formatName = (string: string): string => {
|
||||
.replace(/\)/g, '_')
|
||||
.replace(/'/g, '_')
|
||||
.replace(/ /g, '')
|
||||
.replace(/\[|\]/g, '_')
|
||||
|
||||
return formatted || '_'
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-react",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official React SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview-vue",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official Vue SDK for Payload Live Preview",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/live-preview",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"description": "The official live preview JavaScript SDK for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,12 +1,22 @@
|
||||
import type { FieldSchemaJSON } from 'payload'
|
||||
|
||||
import type { CollectionPopulationRequestHandler, LivePreviewMessageEvent } from './types.js'
|
||||
|
||||
import { isLivePreviewEvent } from './isLivePreviewEvent.js'
|
||||
import { mergeData } from './mergeData.js'
|
||||
|
||||
const _payloadLivePreview: {
|
||||
fieldSchema: FieldSchemaJSON | undefined
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
previousData: any
|
||||
} = {
|
||||
/**
|
||||
* For performance reasons, `fieldSchemaJSON` will only be sent once on the initial message
|
||||
* We need to cache this value so that it can be used across subsequent messages
|
||||
* To do this, save `fieldSchemaJSON` when it arrives as a global variable
|
||||
* Send this cached value to `mergeData`, instead of `eventData.fieldSchemaJSON` directly
|
||||
*/
|
||||
fieldSchema: undefined,
|
||||
/**
|
||||
* Each time the data is merged, cache the result as a `previousData` variable
|
||||
* This will ensure changes compound overtop of each other
|
||||
@@ -25,13 +35,26 @@ export const handleMessage = async <T extends Record<string, any>>(args: {
|
||||
const { apiRoute, depth, event, initialData, requestHandler, serverURL } = args
|
||||
|
||||
if (isLivePreviewEvent(event, serverURL)) {
|
||||
const { collectionSlug, data, globalSlug, locale } = event.data
|
||||
const { data, externallyUpdatedRelationship, fieldSchemaJSON, locale } = event.data
|
||||
|
||||
if (!_payloadLivePreview?.fieldSchema && fieldSchemaJSON) {
|
||||
_payloadLivePreview.fieldSchema = fieldSchemaJSON
|
||||
}
|
||||
|
||||
if (!_payloadLivePreview?.fieldSchema) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(
|
||||
'Payload Live Preview: No `fieldSchemaJSON` was received from the parent window. Unable to merge data.',
|
||||
)
|
||||
|
||||
return initialData
|
||||
}
|
||||
|
||||
const mergedData = await mergeData<T>({
|
||||
apiRoute,
|
||||
collectionSlug,
|
||||
depth,
|
||||
globalSlug,
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema: _payloadLivePreview.fieldSchema,
|
||||
incomingData: data,
|
||||
initialData: _payloadLivePreview?.previousData || initialData,
|
||||
locale,
|
||||
|
||||
@@ -4,5 +4,6 @@ export { isLivePreviewEvent } from './isLivePreviewEvent.js'
|
||||
export { mergeData } from './mergeData.js'
|
||||
export { ready } from './ready.js'
|
||||
export { subscribe } from './subscribe.js'
|
||||
export { traverseRichText } from './traverseRichText.js'
|
||||
export type { LivePreviewMessageEvent } from './types.js'
|
||||
export { unsubscribe } from './unsubscribe.js'
|
||||
|
||||
@@ -1,60 +1,115 @@
|
||||
import type { CollectionPopulationRequestHandler } from './types.js'
|
||||
import type { DocumentEvent, FieldSchemaJSON, PaginatedDocs } from 'payload'
|
||||
|
||||
const defaultRequestHandler: CollectionPopulationRequestHandler = ({
|
||||
import type { CollectionPopulationRequestHandler, PopulationsByCollection } from './types.js'
|
||||
|
||||
import { traverseFields } from './traverseFields.js'
|
||||
|
||||
const defaultRequestHandler = ({
|
||||
apiPath,
|
||||
data,
|
||||
endpoint,
|
||||
serverURL,
|
||||
}: {
|
||||
apiPath: string
|
||||
endpoint: string
|
||||
serverURL: string
|
||||
}) => {
|
||||
const url = `${serverURL}${apiPath}/${endpoint}`
|
||||
|
||||
return fetch(url, {
|
||||
body: JSON.stringify(data),
|
||||
credentials: 'include',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Payload-HTTP-Method-Override': 'GET',
|
||||
},
|
||||
method: 'POST',
|
||||
})
|
||||
}
|
||||
|
||||
// Relationships are only updated when their `id` or `relationTo` changes, by comparing the old and new values
|
||||
// This needs to also happen when locale changes, except this is not part of the API response
|
||||
// Instead, we keep track of the old locale ourselves and trigger a re-population when it changes
|
||||
let prevLocale: string | undefined
|
||||
|
||||
export const mergeData = async <T extends Record<string, any>>(args: {
|
||||
apiRoute?: string
|
||||
collectionSlug?: string
|
||||
/**
|
||||
* @deprecated Use `requestHandler` instead
|
||||
*/
|
||||
collectionPopulationRequestHandler?: CollectionPopulationRequestHandler
|
||||
depth?: number
|
||||
globalSlug?: string
|
||||
externallyUpdatedRelationship?: DocumentEvent
|
||||
fieldSchema: FieldSchemaJSON
|
||||
incomingData: Partial<T>
|
||||
initialData: T
|
||||
locale?: string
|
||||
requestHandler?: CollectionPopulationRequestHandler
|
||||
returnNumberOfRequests?: boolean
|
||||
serverURL: string
|
||||
}): Promise<T> => {
|
||||
}): Promise<
|
||||
{
|
||||
_numberOfRequests?: number
|
||||
} & T
|
||||
> => {
|
||||
const {
|
||||
apiRoute,
|
||||
collectionSlug,
|
||||
depth,
|
||||
globalSlug,
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema,
|
||||
incomingData,
|
||||
initialData,
|
||||
locale,
|
||||
returnNumberOfRequests,
|
||||
serverURL,
|
||||
} = args
|
||||
|
||||
const requestHandler = args.requestHandler || defaultRequestHandler
|
||||
const result = { ...initialData }
|
||||
|
||||
const result = await requestHandler({
|
||||
apiPath: apiRoute || '/api',
|
||||
data: {
|
||||
data: incomingData,
|
||||
depth,
|
||||
locale,
|
||||
},
|
||||
endpoint: encodeURI(
|
||||
`${globalSlug ? 'globals/' : ''}${collectionSlug ?? globalSlug}${collectionSlug ? `/${initialData.id}` : ''}`,
|
||||
),
|
||||
serverURL,
|
||||
}).then((res) => res.json())
|
||||
const populationsByCollection: PopulationsByCollection = {}
|
||||
|
||||
return result
|
||||
traverseFields({
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema,
|
||||
incomingData,
|
||||
localeChanged: prevLocale !== locale,
|
||||
populationsByCollection,
|
||||
result,
|
||||
})
|
||||
|
||||
await Promise.all(
|
||||
Object.entries(populationsByCollection).map(async ([collection, populations]) => {
|
||||
let res: PaginatedDocs
|
||||
|
||||
const ids = new Set(populations.map(({ id }) => id))
|
||||
const requestHandler =
|
||||
args.collectionPopulationRequestHandler || args.requestHandler || defaultRequestHandler
|
||||
|
||||
try {
|
||||
res = await requestHandler({
|
||||
apiPath: apiRoute || '/api',
|
||||
endpoint: encodeURI(
|
||||
`${collection}?depth=${depth}&limit=${ids.size}&where[id][in]=${Array.from(ids).join(',')}${locale ? `&locale=${locale}` : ''}`,
|
||||
),
|
||||
serverURL,
|
||||
}).then((res) => res.json())
|
||||
|
||||
if (res?.docs?.length > 0) {
|
||||
res.docs.forEach((doc) => {
|
||||
populationsByCollection[collection]?.forEach((population) => {
|
||||
if (population.id === doc.id) {
|
||||
population.ref[population.accessor] = doc
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err) // eslint-disable-line no-console
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
prevLocale = locale
|
||||
|
||||
return {
|
||||
...result,
|
||||
...(returnNumberOfRequests
|
||||
? { _numberOfRequests: Object.keys(populationsByCollection).length }
|
||||
: {}),
|
||||
}
|
||||
}
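The population step batches every missing relationship value into one request per collection via a `where[id][in]` query, then writes each returned doc back onto the reference that asked for it. A reduced sketch of that flow; the `fetchDocs` callback stands in for the request handler:

```ts
// Sketch: one request per collection for all needed ids, then assignment by
// reference into the merged result.
type Population = { accessor: number | string; id: number | string; ref: Record<string, any> }

export const populateCollection = async (
  collection: string,
  populations: Population[],
  fetchDocs: (endpoint: string) => Promise<{ docs: Array<{ id: number | string }> }>,
): Promise<void> => {
  const ids = new Set(populations.map(({ id }) => id))

  const res = await fetchDocs(
    encodeURI(`${collection}?limit=${ids.size}&where[id][in]=${Array.from(ids).join(',')}`),
  )

  res.docs.forEach((doc) => {
    populations.forEach((population) => {
      if (population.id === doc.id) {
        population.ref[population.accessor] = doc
      }
    })
  })
}
```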
|
||||
|
||||
packages/live-preview/src/traverseFields.ts (new file, 299 lines)
@@ -0,0 +1,299 @@
|
||||
import type { DocumentEvent, FieldSchemaJSON } from 'payload'
|
||||
|
||||
import type { PopulationsByCollection } from './types.js'
|
||||
|
||||
import { traverseRichText } from './traverseRichText.js'
|
||||
|
||||
export const traverseFields = <T extends Record<string, any>>(args: {
|
||||
externallyUpdatedRelationship?: DocumentEvent
|
||||
fieldSchema: FieldSchemaJSON
|
||||
incomingData: T
|
||||
localeChanged: boolean
|
||||
populationsByCollection: PopulationsByCollection
|
||||
result: Record<string, any>
|
||||
}): void => {
|
||||
const {
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema: fieldSchemas,
|
||||
incomingData,
|
||||
localeChanged,
|
||||
populationsByCollection,
|
||||
result,
|
||||
} = args
|
||||
|
||||
fieldSchemas.forEach((fieldSchema) => {
|
||||
if ('name' in fieldSchema && typeof fieldSchema.name === 'string') {
|
||||
const fieldName = fieldSchema.name
|
||||
|
||||
switch (fieldSchema.type) {
|
||||
case 'array':
|
||||
if (
|
||||
!incomingData[fieldName] &&
|
||||
incomingData[fieldName] !== undefined &&
|
||||
result?.[fieldName] !== undefined
|
||||
) {
|
||||
result[fieldName] = []
|
||||
}
|
||||
|
||||
if (Array.isArray(incomingData[fieldName])) {
|
||||
result[fieldName] = incomingData[fieldName].map((incomingRow, i) => {
|
||||
if (!result[fieldName]) {
|
||||
result[fieldName] = []
|
||||
}
|
||||
|
||||
if (!result[fieldName][i]) {
|
||||
result[fieldName][i] = {}
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema: fieldSchema.fields!,
|
||||
incomingData: incomingRow,
|
||||
localeChanged,
|
||||
populationsByCollection,
|
||||
result: result[fieldName][i],
|
||||
})
|
||||
|
||||
return result[fieldName][i]
|
||||
})
|
||||
}
|
||||
|
||||
break
|
||||
|
||||
case 'blocks':
|
||||
if (Array.isArray(incomingData[fieldName])) {
|
||||
result[fieldName] = incomingData[fieldName].map((incomingBlock, i) => {
|
||||
const incomingBlockJSON = fieldSchema.blocks?.[incomingBlock.blockType]
|
||||
|
||||
if (!result[fieldName]) {
|
||||
result[fieldName] = []
|
||||
}
|
||||
|
||||
if (
|
||||
!result[fieldName][i] ||
|
||||
result[fieldName][i].id !== incomingBlock.id ||
|
||||
result[fieldName][i].blockType !== incomingBlock.blockType
|
||||
) {
|
||||
result[fieldName][i] = {
|
||||
blockType: incomingBlock.blockType,
|
||||
}
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema: incomingBlockJSON!.fields!,
|
||||
incomingData: incomingBlock,
|
||||
localeChanged,
|
||||
populationsByCollection,
|
||||
result: result[fieldName][i],
|
||||
})
|
||||
|
||||
return result[fieldName][i]
|
||||
})
|
||||
} else {
|
||||
result[fieldName] = []
|
||||
}
|
||||
|
||||
break
|
||||
|
||||
case 'group':
|
||||
// falls through
|
||||
case 'tabs':
|
||||
if (!result[fieldName]) {
|
||||
result[fieldName] = {}
|
||||
}
|
||||
|
||||
traverseFields({
|
||||
externallyUpdatedRelationship,
|
||||
fieldSchema: fieldSchema.fields!,
|
||||
incomingData: incomingData[fieldName] || {},
|
||||
localeChanged,
|
||||
populationsByCollection,
|
||||
result: result[fieldName],
|
||||
})
|
||||
|
||||
break
|
||||
|
||||
case 'relationship':
|
||||
// falls through
|
||||
case 'upload':
|
||||
// Handle `hasMany` relationships
|
||||
if (fieldSchema.hasMany && Array.isArray(incomingData[fieldName])) {
|
||||
if (!result[fieldName] || !incomingData[fieldName].length) {
|
||||
result[fieldName] = []
|
||||
}
|
||||
|
||||
incomingData[fieldName].forEach((incomingRelation, i) => {
|
||||
// Handle `hasMany` polymorphic
|
||||
if (Array.isArray(fieldSchema.relationTo)) {
|
||||
// if the field doesn't exist on the result, create it
|
||||
// the value will be populated later
|
||||
if (!result[fieldName][i]) {
|
||||
result[fieldName][i] = {
|
||||
relationTo: incomingRelation.relationTo,
|
||||
}
|
||||
}
|
||||
|
||||
const oldID = result[fieldName][i]?.value?.id
|
||||
const oldRelation = result[fieldName][i]?.relationTo
|
||||
const newID = incomingRelation.value
|
||||
const newRelation = incomingRelation.relationTo
|
||||
|
||||
const hasChanged = newID !== oldID || newRelation !== oldRelation
|
||||
|
||||
const hasUpdated =
|
||||
newRelation === externallyUpdatedRelationship?.entitySlug &&
|
||||
newID === externallyUpdatedRelationship?.id
|
||||
|
||||
if (hasChanged || hasUpdated || localeChanged) {
|
||||
if (!populationsByCollection[newRelation]) {
|
||||
populationsByCollection[newRelation] = []
|
||||
}
|
||||
|
||||
populationsByCollection[newRelation].push({
|
||||
id: incomingRelation.value,
|
||||
accessor: 'value',
|
||||
ref: result[fieldName][i],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Handle `hasMany` monomorphic
|
||||
const hasChanged = incomingRelation !== result[fieldName][i]?.id
|
||||
|
||||
const hasUpdated =
|
||||
fieldSchema.relationTo === externallyUpdatedRelationship?.entitySlug &&
|
||||
incomingRelation === externallyUpdatedRelationship?.id
|
||||
|
||||
if (hasChanged || hasUpdated || localeChanged) {
|
||||
if (!populationsByCollection[fieldSchema.relationTo!]) {
|
||||
populationsByCollection[fieldSchema.relationTo!] = []
|
||||
}
|
||||
|
||||
populationsByCollection[fieldSchema.relationTo!]?.push({
|
||||
id: incomingRelation,
|
||||
accessor: i,
|
||||
ref: result[fieldName],
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
} else {
|
||||
// Handle `hasOne` polymorphic
|
||||
if (Array.isArray(fieldSchema.relationTo)) {
|
||||
// if the field doesn't exist on the result, create it
|
||||
// the value will be populated later
|
||||
if (!result[fieldName]) {
|
||||
result[fieldName] = {
|
||||
relationTo: incomingData[fieldName]?.relationTo,
|
||||
}
|
||||
}
|
||||
|
||||
const hasNewValue =
|
||||
incomingData[fieldName] &&
|
||||
typeof incomingData[fieldName] === 'object' &&
|
||||
incomingData[fieldName] !== null
|
||||
|
||||
const hasOldValue =
|
||||
result[fieldName] &&
|
||||
typeof result[fieldName] === 'object' &&
|
||||
result[fieldName] !== null
|
||||
|
||||
const newID = hasNewValue
|
||||
? typeof incomingData[fieldName].value === 'object'
|
||||
? incomingData[fieldName].value.id
|
||||
: incomingData[fieldName].value
|
||||
: ''
|
||||
|
||||
const oldID = hasOldValue
|
||||
? typeof result[fieldName].value === 'object'
|
||||
? result[fieldName].value.id
|
||||
: result[fieldName].value
|
||||
: ''
|
||||
|
||||
const newRelation = hasNewValue ? incomingData[fieldName].relationTo : ''
|
||||
const oldRelation = hasOldValue ? result[fieldName].relationTo : ''
|
||||
|
||||
const hasChanged = newID !== oldID || newRelation !== oldRelation
|
||||
|
||||
const hasUpdated =
|
||||
newRelation === externallyUpdatedRelationship?.entitySlug &&
|
||||
newID === externallyUpdatedRelationship?.id
|
||||
|
||||
// if the new value/relation is different from the old value/relation
|
||||
// populate the new value, otherwise leave it alone
|
||||
if (hasChanged || hasUpdated || localeChanged) {
|
||||
// if the new value is not empty, populate it
|
||||
// otherwise set the value to null
|
||||
if (newID) {
|
||||
if (!populationsByCollection[newRelation]) {
|
||||
populationsByCollection[newRelation] = []
|
||||
}
|
||||
|
||||
populationsByCollection[newRelation].push({
|
||||
id: newID,
|
||||
accessor: 'value',
|
||||
ref: result[fieldName],
|
||||
})
|
||||
} else {
|
||||
result[fieldName] = null
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Handle `hasOne` monomorphic
|
||||
const newID: number | string | undefined =
|
||||
(incomingData[fieldName] &&
|
||||
typeof incomingData[fieldName] === 'object' &&
|
||||
incomingData[fieldName].id) ||
|
||||
incomingData[fieldName]
|
||||
|
||||
const oldID: number | string | undefined =
|
||||
(result[fieldName] &&
|
||||
typeof result[fieldName] === 'object' &&
|
||||
result[fieldName].id) ||
|
||||
result[fieldName]
|
||||
|
||||
const hasChanged = newID !== oldID
|
||||
|
||||
const hasUpdated =
|
||||
fieldSchema.relationTo === externallyUpdatedRelationship?.entitySlug &&
|
||||
newID === externallyUpdatedRelationship?.id
|
||||
|
||||
// if the new value is different from the old value
|
||||
// populate the new value, otherwise leave it alone
|
||||
if (hasChanged || hasUpdated || localeChanged) {
|
||||
// if the new value is not empty, populate it
|
||||
// otherwise set the value to null
|
||||
if (newID) {
|
||||
if (!populationsByCollection[fieldSchema.relationTo!]) {
|
||||
populationsByCollection[fieldSchema.relationTo!] = []
|
||||
}
|
||||
|
||||
populationsByCollection[fieldSchema.relationTo!]?.push({
|
||||
id: newID,
|
||||
accessor: fieldName,
|
||||
ref: result as Record<string, unknown>,
|
||||
})
|
||||
} else {
|
||||
result[fieldName] = null
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break
|
||||
case 'richText':
|
||||
result[fieldName] = traverseRichText({
|
||||
externallyUpdatedRelationship,
|
||||
incomingData: incomingData[fieldName],
|
||||
populationsByCollection,
|
||||
result: result[fieldName],
|
||||
})
|
||||
|
||||
break
|
||||
|
||||
default:
|
||||
result[fieldName] = incomingData[fieldName]
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
packages/live-preview/src/traverseRichText.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
import type { DocumentEvent } from 'payload'
|
||||
|
||||
import type { PopulationsByCollection } from './types.js'
|
||||
|
||||
export const traverseRichText = ({
|
||||
externallyUpdatedRelationship,
|
||||
incomingData,
|
||||
populationsByCollection,
|
||||
result,
|
||||
}: {
|
||||
externallyUpdatedRelationship?: DocumentEvent
|
||||
incomingData: any
|
||||
populationsByCollection: PopulationsByCollection
|
||||
result: any
|
||||
}): any => {
|
||||
if (Array.isArray(incomingData)) {
|
||||
if (!result) {
|
||||
result = []
|
||||
}
|
||||
|
||||
result = incomingData.map((item, index) => {
|
||||
if (!result[index]) {
|
||||
result[index] = item
|
||||
}
|
||||
|
||||
return traverseRichText({
|
||||
externallyUpdatedRelationship,
|
||||
incomingData: item,
|
||||
populationsByCollection,
|
||||
result: result[index],
|
||||
})
|
||||
})
|
||||
} else if (incomingData && typeof incomingData === 'object') {
|
||||
if (!result) {
|
||||
result = {}
|
||||
}
|
||||
|
||||
// Remove keys from `result` that do not appear in `incomingData`
|
||||
// There's likely another way to do this,
|
||||
// But recursion and references make this very difficult
|
||||
Object.keys(result).forEach((key) => {
|
||||
if (!(key in incomingData)) {
|
||||
delete result[key]
|
||||
}
|
||||
})
|
||||
|
||||
// Iterate over the keys of `incomingData` and populate `result`
|
||||
Object.keys(incomingData).forEach((key) => {
|
||||
if (!result[key]) {
|
||||
// Instantiate the key in `result` if it doesn't exist
|
||||
// Ensure its type matches the type of the `incomingData`
|
||||
// We don't have a schema to check against here
|
||||
result[key] =
|
||||
incomingData[key] && typeof incomingData[key] === 'object'
|
||||
? Array.isArray(incomingData[key])
|
||||
? []
|
||||
: {}
|
||||
: undefined
|
||||
}
|
||||
|
||||
const isRelationship = key === 'value' && 'relationTo' in incomingData
|
||||
|
||||
if (isRelationship) {
|
||||
// populate if the value is missing, still a bare id, or has no keys besides id
|
||||
const needsPopulation =
|
||||
!result.value ||
|
||||
typeof result.value !== 'object' ||
|
||||
(typeof result.value === 'object' &&
|
||||
Object.keys(result.value).length === 1 &&
|
||||
'id' in result.value)
|
||||
|
||||
const hasChanged =
|
||||
result &&
|
||||
typeof result === 'object' &&
|
||||
result.value.id === externallyUpdatedRelationship?.id
|
||||
|
||||
if (needsPopulation || hasChanged) {
|
||||
if (!populationsByCollection[incomingData.relationTo]) {
|
||||
populationsByCollection[incomingData.relationTo] = []
|
||||
}
|
||||
|
||||
populationsByCollection[incomingData.relationTo]?.push({
|
||||
id:
|
||||
incomingData[key] && typeof incomingData[key] === 'object'
|
||||
? incomingData[key].id
|
||||
: incomingData[key],
|
||||
accessor: 'value',
|
||||
ref: result,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
result[key] = traverseRichText({
|
||||
externallyUpdatedRelationship,
|
||||
incomingData: incomingData[key],
|
||||
populationsByCollection,
|
||||
result: result[key],
|
||||
})
|
||||
}
|
||||
})
|
||||
} else {
|
||||
result = incomingData
|
||||
}
|
||||
|
||||
return result
|
||||
}
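Within rich text, a relationship node is re-fetched when its `value` is missing, is still a bare id, or carries nothing but an `id` key. A compact sketch of that check on its own:

```ts
// Sketch: decide whether a rich-text relationship value still needs population.
const needsPopulation = (value: unknown): boolean => {
  if (!value || typeof value !== 'object') {
    return true // missing, or still a raw id
  }

  const keys = Object.keys(value as Record<string, unknown>)
  // A lone `id` key means the related doc has not been fetched yet
  return keys.length === 1 && keys[0] === 'id'
}

// needsPopulation(42) === true
// needsPopulation({ id: 42 }) === true
// needsPopulation({ id: 42, title: 'Post' }) === false
```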
|
||||
@@ -1,13 +1,11 @@
|
||||
import type { DocumentEvent } from 'payload'
|
||||
import type { DocumentEvent, FieldSchemaJSON } from 'payload'
|
||||
|
||||
export type CollectionPopulationRequestHandler = ({
|
||||
apiPath,
|
||||
data,
|
||||
endpoint,
|
||||
serverURL,
|
||||
}: {
|
||||
apiPath: string
|
||||
data: Record<string, any>
|
||||
endpoint: string
|
||||
serverURL: string
|
||||
}) => Promise<Response>
|
||||
@@ -16,11 +14,18 @@ export type LivePreviewArgs = {}
|
||||
|
||||
export type LivePreview = void
|
||||
|
||||
export type PopulationsByCollection = {
|
||||
[slug: string]: Array<{
|
||||
accessor: number | string
|
||||
id: number | string
|
||||
ref: Record<string, unknown>
|
||||
}>
|
||||
}
|
||||
|
||||
export type LivePreviewMessageEvent<T> = MessageEvent<{
|
||||
collectionSlug?: string
|
||||
data: T
|
||||
externallyUpdatedRelationship?: DocumentEvent
|
||||
globalSlug?: string
|
||||
fieldSchemaJSON: FieldSchemaJSON
|
||||
locale?: string
|
||||
type: 'payload-live-preview'
|
||||
}>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/next",
|
||||
"version": "3.54.0",
|
||||
"version": "3.50.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -17,11 +17,6 @@ export async function refresh({ config }: { config: any }) {
|
||||
throw new Error('Cannot refresh token: user not authenticated')
|
||||
}
|
||||
|
||||
const existingCookie = await getExistingAuthToken(payload.config.cookiePrefix)
|
||||
if (!existingCookie) {
|
||||
return { message: 'No valid token found to refresh', success: false }
|
||||
}
|
||||
|
||||
const collection: CollectionSlug | undefined = result.user.collection
|
||||
const collectionConfig = payload.collections[collection]
|
||||
|
||||
@@ -40,10 +35,15 @@ export async function refresh({ config }: { config: any }) {
|
||||
return { message: 'Token refresh failed', success: false }
|
||||
}
|
||||
|
||||
const existingCookie = await getExistingAuthToken(payload.config.cookiePrefix)
|
||||
if (!existingCookie) {
|
||||
return { message: 'No valid token found to refresh', success: false }
|
||||
}
|
||||
|
||||
await setPayloadAuthCookie({
|
||||
authConfig: collectionConfig.config.auth,
|
||||
cookiePrefix: payload.config.cookiePrefix,
|
||||
token: refreshResult.refreshedToken,
|
||||
token: existingCookie.value,
|
||||
})
|
||||
|
||||
return { message: 'Token refreshed successfully', success: true }
|
||||
|
||||
@@ -9,7 +9,7 @@ const handlerBuilder =
|
||||
async (
|
||||
request: Request,
|
||||
args: {
|
||||
params: Promise<{ slug?: string[] }>
|
||||
params: Promise<{ slug: string[] }>
|
||||
},
|
||||
): Promise<Response> => {
|
||||
const awaitedConfig = await config
|
||||
|
||||
@@ -178,9 +178,7 @@ export const buildCollectionFolderView = async (
|
||||
permissions?.collections?.[config.folders.slug]?.create
|
||||
? config.folders.slug
|
||||
: null,
|
||||
resolvedFolderID && permissions?.collections?.[collectionSlug]?.create
|
||||
? collectionSlug
|
||||
: null,
|
||||
permissions?.collections?.[collectionSlug]?.create ? collectionSlug : null,
|
||||
].filter(Boolean),
|
||||
baseFolderPath: `/collections/${collectionSlug}/${config.folders.slug}`,
|
||||
breadcrumbs,
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import type {
|
||||
AdminViewServerProps,
|
||||
SanitizedDocumentPermissions,
|
||||
SanitizedFieldsPermissions,
|
||||
} from 'payload'
|
||||
import type { AdminViewServerProps } from 'payload'
|
||||
|
||||
import { buildFormState } from '@payloadcms/ui/utilities/buildFormState'
|
||||
import React from 'react'
|
||||
|
||||
import { getDocPreferences } from '../Document/getDocPreferences.js'
|
||||
import { getDocumentData } from '../Document/getDocumentData.js'
|
||||
import { getDocumentPermissions } from '../Document/getDocumentPermissions.js'
|
||||
import { CreateFirstUserClient } from './index.client.js'
|
||||
import './index.scss'
|
||||
|
||||
@@ -46,27 +43,18 @@ export async function CreateFirstUserView({ initPageResult }: AdminViewServerPro
|
||||
user: req.user,
|
||||
})
|
||||
|
||||
const baseFields: SanitizedFieldsPermissions = Object.fromEntries(
|
||||
collectionConfig.fields
|
||||
.filter((f): f is { name: string } & typeof f => 'name' in f && typeof f.name === 'string')
|
||||
.map((f) => [f.name, { create: true, read: true, update: true }]),
|
||||
)
|
||||
|
||||
// In create-first-user we should always allow all fields
|
||||
const docPermissionsForForm: SanitizedDocumentPermissions = {
|
||||
create: true,
|
||||
delete: true,
|
||||
fields: baseFields,
|
||||
read: true,
|
||||
readVersions: true,
|
||||
update: true,
|
||||
}
|
||||
// Get permissions
|
||||
const { docPermissions } = await getDocumentPermissions({
|
||||
collectionConfig,
|
||||
data,
|
||||
req,
|
||||
})
|
||||
|
||||
// Build initial form state from data
|
||||
const { state: formState } = await buildFormState({
|
||||
collectionSlug: collectionConfig.slug,
|
||||
data,
|
||||
docPermissions: docPermissionsForForm,
|
||||
docPermissions,
|
||||
docPreferences,
|
||||
locale: locale?.code,
|
||||
operation: 'create',
|
||||
@@ -81,7 +69,7 @@ export async function CreateFirstUserView({ initPageResult }: AdminViewServerPro
|
||||
<h1>{req.t('general:welcome')}</h1>
|
||||
<p>{req.t('authentication:beginCreateFirstUser')}</p>
|
||||
<CreateFirstUserClient
|
||||
docPermissions={docPermissionsForForm}
|
||||
docPermissions={docPermissions}
|
||||
docPreferences={docPreferences}
|
||||
initialState={formState}
|
||||
loginWithUsername={loginWithUsername}
|
||||
|
||||
@@ -208,7 +208,7 @@ export const renderDocument = async ({
|
||||
globalSlug,
|
||||
locale: locale?.code,
|
||||
operation,
|
||||
readOnly: isTrashedDoc || isLocked,
|
||||
readOnly: isTrashedDoc,
|
||||
renderAllFields: true,
|
||||
req,
|
||||
schemaPath: collectionSlug || globalSlug,
|
||||
@@ -333,7 +333,6 @@ export const renderDocument = async ({
|
||||
}
|
||||
|
||||
const documentSlots = renderDocumentSlots({
|
||||
id,
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
hasSavePermission,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type {
|
||||
BeforeDocumentControlsServerPropsOnly,
|
||||
DefaultServerFunctionArgs,
|
||||
DocumentSlots,
|
||||
EditMenuItemsServerPropsOnly,
|
||||
PayloadRequest,
|
||||
@@ -26,11 +27,10 @@ export const renderDocumentSlots: (args: {
|
||||
collectionConfig?: SanitizedCollectionConfig
|
||||
globalConfig?: SanitizedGlobalConfig
|
||||
hasSavePermission: boolean
|
||||
id?: number | string
|
||||
permissions: SanitizedDocumentPermissions
|
||||
req: PayloadRequest
|
||||
}) => DocumentSlots = (args) => {
|
||||
const { id, collectionConfig, globalConfig, hasSavePermission, req } = args
|
||||
const { collectionConfig, globalConfig, hasSavePermission, req } = args
|
||||
|
||||
const components: DocumentSlots = {} as DocumentSlots
|
||||
|
||||
@@ -39,7 +39,6 @@ export const renderDocumentSlots: (args: {
|
||||
const isPreviewEnabled = collectionConfig?.admin?.preview || globalConfig?.admin?.preview
|
||||
|
||||
const serverProps: ServerProps = {
|
||||
id,
|
||||
i18n: req.i18n,
|
||||
payload: req.payload,
|
||||
user: req.user,
|
||||
@@ -170,11 +169,10 @@ export const renderDocumentSlots: (args: {
|
||||
return components
|
||||
}
|
||||
|
||||
export const renderDocumentSlotsHandler: ServerFunction<{
|
||||
collectionSlug: string
|
||||
id?: number | string
|
||||
}> = async (args) => {
|
||||
const { id, collectionSlug, req } = args
|
||||
export const renderDocumentSlotsHandler: ServerFunction<{ collectionSlug: string }> = async (
|
||||
args,
|
||||
) => {
|
||||
const { collectionSlug, req } = args
|
||||
|
||||
const collectionConfig = req.payload.collections[collectionSlug]?.config
|
||||
|
||||
@@ -189,7 +187,6 @@ export const renderDocumentSlotsHandler: ServerFunction<{
|
||||
})
|
||||
|
||||
return renderDocumentSlots({
|
||||
id,
|
||||
collectionConfig,
|
||||
hasSavePermission,
|
||||
permissions: docPermissions,
|
||||
|
||||
@@ -15,16 +15,6 @@ import './index.scss'
|
||||
|
||||
const baseClass = 'logout'
|
||||
|
||||
/**
|
||||
* This component should **just** be the inactivity route and do nothing with logging the user out.
|
||||
*
|
||||
* It currently handles too much, the auth provider should just log the user out and then
|
||||
* we could remove the useEffect in this file. So instead of the logout button
|
||||
* being an anchor link, it should be a button that calls `logOut` in the provider.
|
||||
*
|
||||
* This view is still useful if cookies attempt to refresh and fail, i.e. the user
|
||||
* is logged out due to inactivity.
|
||||
*/
|
||||
export const LogoutClient: React.FC<{
|
||||
adminRoute: string
|
||||
inactivity?: boolean
|
||||
@@ -36,11 +26,9 @@ export const LogoutClient: React.FC<{
|
||||
|
||||
const { startRouteTransition } = useRouteTransition()
|
||||
|
||||
const isLoggedIn = React.useMemo(() => {
|
||||
return Boolean(user?.id)
|
||||
}, [user?.id])
|
||||
const [isLoggedOut, setIsLoggedOut] = React.useState<boolean>(!user)
|
||||
|
||||
const navigatingToLoginRef = React.useRef(false)
|
||||
const logOutSuccessRef = React.useRef(false)
|
||||
|
||||
const [loginRoute] = React.useState(() =>
|
||||
formatAdminURL({
|
||||
@@ -57,25 +45,26 @@ export const LogoutClient: React.FC<{
|
||||
const router = useRouter()
|
||||
|
||||
const handleLogOut = React.useCallback(async () => {
|
||||
if (!inactivity && !navigatingToLoginRef.current) {
|
||||
navigatingToLoginRef.current = true
|
||||
await logOut()
|
||||
const loggedOut = await logOut()
|
||||
setIsLoggedOut(loggedOut)
|
||||
|
||||
if (!inactivity && loggedOut && !logOutSuccessRef.current) {
|
||||
toast.success(t('authentication:loggedOutSuccessfully'))
|
||||
logOutSuccessRef.current = true
|
||||
startRouteTransition(() => router.push(loginRoute))
|
||||
return
|
||||
}
|
||||
}, [inactivity, logOut, loginRoute, router, startRouteTransition, t])
|
||||
|
||||
useEffect(() => {
|
||||
if (isLoggedIn) {
|
||||
if (!isLoggedOut) {
|
||||
void handleLogOut()
|
||||
} else if (!navigatingToLoginRef.current) {
|
||||
navigatingToLoginRef.current = true
|
||||
} else {
|
||||
startRouteTransition(() => router.push(loginRoute))
|
||||
}
|
||||
}, [handleLogOut, isLoggedIn, loginRoute, router, startRouteTransition])
|
||||
}, [handleLogOut, isLoggedOut, loginRoute, router, startRouteTransition])
|
||||
|
||||
if (!isLoggedIn && inactivity) {
|
||||
if (isLoggedOut && inactivity) {
|
||||
return (
|
||||
<div className={`${baseClass}__wrap`}>
|
||||
<h2>{t('authentication:loggedOutInactivity')}</h2>
|
||||
|
||||
@@ -4,8 +4,8 @@ import type { ClientCollectionConfig, ClientGlobalConfig } from 'payload'
|
||||
import type React from 'react'
|
||||
|
||||
import { getTranslation } from '@payloadcms/translations'
|
||||
import { useConfig, useDocumentTitle, useLocale, useStepNav, useTranslation } from '@payloadcms/ui'
|
||||
import { formatAdminURL } from 'payload/shared'
|
||||
import { useConfig, useLocale, useStepNav, useTranslation } from '@payloadcms/ui'
|
||||
import { fieldAffectsData, formatAdminURL } from 'payload/shared'
|
||||
import { useEffect } from 'react'
|
||||
|
||||
export const SetStepNav: React.FC<{
|
||||
@@ -15,6 +15,7 @@ export const SetStepNav: React.FC<{
|
||||
readonly isTrashed?: boolean
|
||||
versionToCreatedAtFormatted?: string
|
||||
versionToID?: string
|
||||
versionToUseAsTitle?: Record<string, string> | string
|
||||
}> = ({
|
||||
id,
|
||||
collectionConfig,
|
||||
@@ -22,12 +23,12 @@ export const SetStepNav: React.FC<{
|
||||
isTrashed,
|
||||
versionToCreatedAtFormatted,
|
||||
versionToID,
|
||||
versionToUseAsTitle,
|
||||
}) => {
|
||||
const { config } = useConfig()
|
||||
const { setStepNav } = useStepNav()
|
||||
const { i18n, t } = useTranslation()
|
||||
const locale = useLocale()
|
||||
const { title } = useDocumentTitle()
|
||||
|
||||
useEffect(() => {
|
||||
const {
|
||||
@@ -37,7 +38,24 @@ export const SetStepNav: React.FC<{
|
||||
if (collectionConfig) {
|
||||
const collectionSlug = collectionConfig.slug
|
||||
|
||||
const useAsTitle = collectionConfig.admin?.useAsTitle || 'id'
|
||||
const pluralLabel = collectionConfig.labels?.plural
|
||||
let docLabel = `[${t('general:untitled')}]`
|
||||
|
||||
const fields = collectionConfig.fields
|
||||
|
||||
const titleField = fields.find(
|
||||
(f) => fieldAffectsData(f) && 'name' in f && f.name === useAsTitle,
|
||||
)
|
||||
|
||||
if (titleField && versionToUseAsTitle) {
|
||||
docLabel =
|
||||
'localized' in titleField && titleField.localized
|
||||
? versionToUseAsTitle?.[locale.code] || docLabel
|
||||
: versionToUseAsTitle
|
||||
} else if (useAsTitle === 'id') {
|
||||
docLabel = String(id)
|
||||
}
|
||||
|
||||
const docBasePath: `/${string}` = isTrashed
|
||||
? `/collections/${collectionSlug}/trash/${id}`
|
||||
@@ -65,14 +83,14 @@ export const SetStepNav: React.FC<{
|
||||
|
||||
nav.push(
|
||||
{
|
||||
label: title,
|
||||
label: docLabel,
|
||||
url: formatAdminURL({
|
||||
adminRoute,
|
||||
path: docBasePath,
|
||||
}),
|
||||
},
|
||||
{
|
||||
label: t('version:versions'),
|
||||
label: 'Versions',
|
||||
url: formatAdminURL({
|
||||
adminRoute,
|
||||
path: `${docBasePath}/versions`,
|
||||
@@ -100,7 +118,7 @@ export const SetStepNav: React.FC<{
|
||||
}),
|
||||
},
|
||||
{
|
||||
label: t('version:versions'),
|
||||
label: 'Versions',
|
||||
url: formatAdminURL({
|
||||
adminRoute,
|
||||
path: `/globals/${globalSlug}/versions`,
|
||||
@@ -121,7 +139,7 @@ export const SetStepNav: React.FC<{
|
||||
i18n,
|
||||
collectionConfig,
|
||||
globalConfig,
|
||||
title,
|
||||
versionToUseAsTitle,
|
||||
versionToCreatedAtFormatted,
|
||||
versionToID,
|
||||
])
|
||||
|
||||
@@ -40,6 +40,7 @@ export const DefaultVersionView: React.FC<DefaultVersionsViewProps> = ({
|
||||
VersionToCreatedAtLabel,
|
||||
versionToID,
|
||||
versionToStatus,
|
||||
versionToUseAsTitle,
|
||||
}) => {
|
||||
const { config, getEntityConfig } = useConfig()
|
||||
const { code } = useLocale()
|
||||
@@ -274,6 +275,7 @@ export const DefaultVersionView: React.FC<DefaultVersionsViewProps> = ({
|
||||
isTrashed={isTrashed}
|
||||
versionToCreatedAtFormatted={versionToCreatedAtFormatted}
|
||||
versionToID={versionToID}
|
||||
versionToUseAsTitle={versionToUseAsTitle}
|
||||
/>
|
||||
<Gutter className={`${baseClass}__diff-wrap`}>
|
||||
<SelectedLocalesContext value={{ selectedLocales: locales.map((locale) => locale.name) }}>
|
||||
|
||||
@@ -21,4 +21,5 @@ export type DefaultVersionsViewProps = {
|
||||
VersionToCreatedAtLabel: React.ReactNode
|
||||
versionToID?: string
|
||||
versionToStatus?: string
|
||||
versionToUseAsTitle?: string
|
||||
}
|
||||
|
||||
@@ -15,18 +15,18 @@ import {
|
||||
type PayloadComponent,
|
||||
type PayloadRequest,
|
||||
type SanitizedFieldPermissions,
|
||||
type SanitizedFieldsPermissions,
|
||||
type VersionField,
|
||||
} from 'payload'
|
||||
import {
|
||||
fieldIsID,
|
||||
fieldShouldBeLocalized,
|
||||
getFieldPaths,
|
||||
getFieldPermissions,
|
||||
getUniqueListBy,
|
||||
tabHasName,
|
||||
} from 'payload/shared'
|
||||
|
||||
import { diffComponents } from './fields/index.js'
|
||||
import { getFieldPathsModified } from './utilities/getFieldPathsModified.js'
|
||||
|
||||
export type BuildVersionFieldsArgs = {
|
||||
clientSchemaMap: ClientFieldSchemaMap
|
||||
@@ -34,8 +34,12 @@ export type BuildVersionFieldsArgs = {
|
||||
Record<FieldTypes, PayloadComponent<FieldDiffServerProps, FieldDiffClientProps>>
|
||||
>
|
||||
entitySlug: string
|
||||
fieldPermissions:
|
||||
| {
|
||||
[key: string]: SanitizedFieldPermissions
|
||||
}
|
||||
| true
|
||||
fields: Field[]
|
||||
fieldsPermissions: SanitizedFieldsPermissions
|
||||
i18n: I18nClient
|
||||
modifiedOnly: boolean
|
||||
nestingLevel?: number
|
||||
@@ -60,8 +64,8 @@ export const buildVersionFields = ({
|
||||
clientSchemaMap,
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
fieldPermissions,
|
||||
fields,
|
||||
fieldsPermissions,
|
||||
i18n,
|
||||
modifiedOnly,
|
||||
nestingLevel = 0,
|
||||
@@ -86,7 +90,7 @@ export const buildVersionFields = ({
|
||||
continue
|
||||
}
|
||||
|
||||
const { indexPath, path, schemaPath } = getFieldPaths({
|
||||
const { indexPath, path, schemaPath } = getFieldPathsModified({
|
||||
field,
|
||||
index: fieldIndex,
|
||||
parentIndexPath,
|
||||
@@ -127,12 +131,12 @@ export const buildVersionFields = ({
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
field,
|
||||
fieldPermissions,
|
||||
i18n,
|
||||
indexPath,
|
||||
locale,
|
||||
modifiedOnly,
|
||||
nestingLevel,
|
||||
parentFieldsPermissions: fieldsPermissions,
|
||||
parentIsLocalized: true,
|
||||
parentPath,
|
||||
parentSchemaPath,
|
||||
@@ -154,11 +158,11 @@ export const buildVersionFields = ({
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
field,
|
||||
fieldPermissions,
|
||||
i18n,
|
||||
indexPath,
|
||||
modifiedOnly,
|
||||
nestingLevel,
|
||||
parentFieldsPermissions: fieldsPermissions,
|
||||
parentIsLocalized: parentIsLocalized || ('localized' in field && field.localized),
|
||||
parentPath,
|
||||
parentSchemaPath,
|
||||
@@ -194,12 +198,12 @@ const buildVersionField = ({
|
||||
customDiffComponents,
|
||||
entitySlug,
|
||||
field,
|
||||
fieldPermissions,
|
||||
i18n,
|
||||
indexPath,
|
||||
locale,
|
||||
modifiedOnly,
|
||||
nestingLevel,
|
||||
parentFieldsPermissions,
|
||||
parentIsLocalized,
|
||||
parentPath,
|
||||
parentSchemaPath,
|
||||
@@ -216,7 +220,6 @@ const buildVersionField = ({
|
||||
locale?: string
|
||||
modifiedOnly?: boolean
|
||||
nestingLevel: number
|
||||
parentFieldsPermissions: SanitizedFieldsPermissions
|
||||
parentIsLocalized: boolean
|
||||
path: string
|
||||
schemaPath: string
|
||||
@@ -224,35 +227,18 @@ const buildVersionField = ({
|
||||
valueTo: unknown
|
||||
} & Omit<
|
||||
BuildVersionFieldsArgs,
|
||||
| 'fields'
|
||||
| 'fieldsPermissions'
|
||||
| 'parentIndexPath'
|
||||
| 'versionFromSiblingData'
|
||||
| 'versionToSiblingData'
|
||||
'fields' | 'parentIndexPath' | 'versionFromSiblingData' | 'versionToSiblingData'
|
||||
>): BaseVersionField | null => {
|
||||
let hasReadPermission: boolean = false
|
||||
let fieldPermissions: SanitizedFieldPermissions | undefined = undefined
|
||||
|
||||
if (typeof parentFieldsPermissions === 'boolean') {
|
||||
hasReadPermission = parentFieldsPermissions
|
||||
fieldPermissions = parentFieldsPermissions
|
||||
} else {
|
||||
if ('name' in field) {
|
||||
fieldPermissions = parentFieldsPermissions?.[field.name]
|
||||
if (typeof fieldPermissions === 'boolean') {
|
||||
hasReadPermission = fieldPermissions
|
||||
} else if (typeof fieldPermissions?.read === 'boolean') {
|
||||
hasReadPermission = fieldPermissions.read
|
||||
}
|
||||
} else {
|
||||
// If the field is unnamed and parentFieldsPermissions is an object, its sub-fields will decide their read permissions state.
|
||||
// As far as this field is concerned, we are allowed to read it, as we need to reach its sub-fields to determine their read permissions.
|
||||
hasReadPermission = true
|
||||
}
|
||||
}
|
||||
const { permissions, read: hasReadPermission } = getFieldPermissions({
|
||||
field,
|
||||
operation: 'read',
|
||||
parentName: parentPath?.includes('.')
|
||||
? parentPath.split('.')[parentPath.split('.').length - 1]
|
||||
: parentPath,
|
||||
permissions: fieldPermissions,
|
||||
})
|
||||
|
||||
if (!hasReadPermission) {
|
||||
// HasReadPermission is only valid if the field has a name. E.g. for a tabs field it would incorrectly return `false`.
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -300,7 +286,7 @@ const buildVersionField = ({
indexPath: tabIndexPath,
path: tabPath,
schemaPath: tabSchemaPath,
} = getFieldPaths({
} = getFieldPathsModified({
field: tabAsField,
index: tabIndex,
parentIndexPath: indexPath,
@@ -308,21 +294,18 @@ const buildVersionField = ({
parentSchemaPath,
})

let tabFieldsPermissions: SanitizedFieldsPermissions = undefined
let tabPermissions: typeof fieldPermissions = undefined

// The tabs field does not have its own permissions as it's unnamed => use parentFieldsPermissions
if (typeof parentFieldsPermissions === 'boolean') {
tabFieldsPermissions = parentFieldsPermissions
} else {
if (typeof permissions === 'boolean') {
tabPermissions = permissions
} else if (permissions && typeof permissions === 'object') {
if ('name' in tab) {
const tabPermissions = parentFieldsPermissions?.[tab.name]
if (typeof tabPermissions === 'boolean') {
tabFieldsPermissions = tabPermissions
} else {
tabFieldsPermissions = tabPermissions?.fields
}
tabPermissions =
typeof permissions.fields?.[tab.name] === 'object'
? permissions.fields?.[tab.name].fields
: permissions.fields?.[tab.name]
} else {
tabFieldsPermissions = parentFieldsPermissions
tabPermissions = permissions.fields
}
}

@@ -332,25 +315,21 @@ const buildVersionField = ({
clientSchemaMap,
customDiffComponents,
entitySlug,
fieldPermissions: tabPermissions,
fields: tab.fields,
fieldsPermissions: tabFieldsPermissions,
i18n,
modifiedOnly,
nestingLevel: nestingLevel + 1,
parentIndexPath: isNamedTab ? '' : tabIndexPath,
parentIsLocalized: parentIsLocalized || tab.localized,
parentPath: isNamedTab ? tabPath : 'name' in field ? path : parentPath,
parentSchemaPath: isNamedTab
? tabSchemaPath
: 'name' in field
? schemaPath
: parentSchemaPath,
parentPath: isNamedTab ? tabPath : path,
parentSchemaPath: isNamedTab ? tabSchemaPath : parentSchemaPath,
req,
selectedLocales,
versionFromSiblingData: 'name' in tab ? valueFrom?.[tab.name] : valueFrom,
versionToSiblingData: 'name' in tab ? valueTo?.[tab.name] : valueTo,
}).versionFields,
label: typeof tab.label === 'function' ? tab.label({ i18n, t: i18n.t }) : tab.label,
label: tab.label,
}
if (tabVersion?.fields?.length) {
baseVersionField.tabs.push(tabVersion)
@@ -360,19 +339,15 @@ const buildVersionField = ({
if (modifiedOnly && !baseVersionField.tabs.length) {
return null
}
} // At this point, we are dealing with a `row`, `collapsible`, `array`, etc
} // At this point, we are dealing with a `row`, `collapsible`, etc
else if ('fields' in field) {
let subFieldsPermissions: SanitizedFieldsPermissions = undefined
let subfieldPermissions: typeof fieldPermissions = undefined

if ('name' in field && typeof fieldPermissions !== 'undefined') {
// Named fields like arrays
subFieldsPermissions =
typeof fieldPermissions === 'boolean' ? fieldPermissions : fieldPermissions.fields
} else {
// Unnamed fields like collapsible and row inherit directly from parent permissions
subFieldsPermissions = parentFieldsPermissions
if (typeof permissions === 'boolean') {
subfieldPermissions = permissions
} else if (permissions && typeof permissions === 'object') {
subfieldPermissions = permissions.fields
}

if (field.type === 'array' && (valueTo || valueFrom)) {
const maxLength = Math.max(
Array.isArray(valueTo) ? valueTo.length : 0,
@@ -388,15 +363,15 @@ const buildVersionField = ({
clientSchemaMap,
customDiffComponents,
entitySlug,
fieldPermissions: subfieldPermissions,
fields: field.fields,
fieldsPermissions: subFieldsPermissions,
i18n,
modifiedOnly,
nestingLevel: nestingLevel + 1,
parentIndexPath: 'name' in field ? '' : indexPath,
parentIsLocalized: parentIsLocalized || field.localized,
parentPath: ('name' in field ? path : parentPath) + '.' + i,
parentSchemaPath: 'name' in field ? schemaPath : parentSchemaPath,
parentPath: path + '.' + i,
parentSchemaPath: schemaPath,
req,
selectedLocales,
versionFromSiblingData: fromRow,
@@ -412,8 +387,8 @@ const buildVersionField = ({
clientSchemaMap,
customDiffComponents,
entitySlug,
fieldPermissions: subfieldPermissions,
fields: field.fields,
fieldsPermissions: subFieldsPermissions,
i18n,
modifiedOnly,
nestingLevel: field.type !== 'row' ? nestingLevel + 1 : nestingLevel,
@@ -470,19 +445,16 @@ const buildVersionField = ({
}
}

let blockFieldsPermissions: SanitizedFieldsPermissions = undefined
let blockPermissions: typeof fieldPermissions = undefined

// fieldPermissions will be set here, as the blocks field has a name
if (typeof fieldPermissions === 'boolean') {
blockFieldsPermissions = fieldPermissions
} else if (typeof fieldPermissions?.blocks === 'boolean') {
blockFieldsPermissions = fieldPermissions.blocks
if (permissions === true) {
blockPermissions = true
} else {
const permissionsBlockSpecific = fieldPermissions?.blocks?.[blockSlugToMatch]
if (typeof permissionsBlockSpecific === 'boolean') {
blockFieldsPermissions = permissionsBlockSpecific
const permissionsBlockSpecific = permissions?.blocks?.[blockSlugToMatch]
if (permissionsBlockSpecific === true) {
blockPermissions = true
} else {
blockFieldsPermissions = permissionsBlockSpecific?.fields
blockPermissions = permissionsBlockSpecific?.fields
}
}

@@ -490,15 +462,15 @@ const buildVersionField = ({
clientSchemaMap,
customDiffComponents,
entitySlug,
fieldPermissions: blockPermissions,
fields,
fieldsPermissions: blockFieldsPermissions,
i18n,
modifiedOnly,
nestingLevel: nestingLevel + 1,
parentIndexPath: 'name' in field ? '' : indexPath,
parentIsLocalized: parentIsLocalized || ('localized' in field && field.localized),
parentPath: ('name' in field ? path : parentPath) + '.' + i,
parentSchemaPath: ('name' in field ? schemaPath : parentSchemaPath) + '.' + toBlock.slug,
parentPath: path + '.' + i,
parentSchemaPath: schemaPath + '.' + toBlock.slug,
req,
selectedLocales,
versionFromSiblingData: fromRow,
@@ -524,8 +496,7 @@ const buildVersionField = ({
*/
diffMethod: 'diffWordsWithSpace',
field: clientField,
fieldPermissions:
typeof fieldPermissions === 'undefined' ? parentFieldsPermissions : fieldPermissions,
fieldPermissions: typeof permissions === 'object' ? permissions.fields : permissions,
parentIsLocalized,

nestingLevel: nestingLevel ? nestingLevel : undefined,

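Both sides of the buildVersionField hunks above answer the same two questions: may the current user read this field, and which permissions apply to its sub-fields? The following is a minimal sketch of that resolution for a single field, using simplified stand-in types rather than Payload's actual SanitizedFieldsPermissions / SanitizedFieldPermissions definitions, and mirroring only the branching visible in the diff; resolveReadPermission is a hypothetical helper, not code from either branch.

// Simplified stand-in types, for illustration only.
type FieldPermission = boolean | { fields?: FieldsPermissions; read?: boolean }
type FieldsPermissions = boolean | { [fieldName: string]: FieldPermission }

// Resolve read access for one field from its parent's permissions map.
const resolveReadPermission = (
  parentFieldsPermissions: FieldsPermissions,
  fieldName?: string,
): { fieldPermissions?: FieldPermission; hasReadPermission: boolean } => {
  // A bare boolean covers the field and everything beneath it.
  if (typeof parentFieldsPermissions === 'boolean') {
    return {
      fieldPermissions: parentFieldsPermissions,
      hasReadPermission: parentFieldsPermissions,
    }
  }
  // Unnamed fields (rows, collapsibles, tabs) defer to their sub-fields,
  // so they are treated as readable at this level.
  if (!fieldName) {
    return { hasReadPermission: true }
  }
  const fieldPermissions = parentFieldsPermissions[fieldName]
  if (typeof fieldPermissions === 'boolean') {
    return { fieldPermissions, hasReadPermission: fieldPermissions }
  }
  return { fieldPermissions, hasReadPermission: fieldPermissions?.read === true }
}

// Example: resolveReadPermission({ title: { read: true } }, 'title')
// -> { fieldPermissions: { read: true }, hasReadPermission: true }

In the diff, one side performs this branching inline against parentFieldsPermissions, while the other delegates it to getFieldPermissions and then works from the returned { permissions, read } pair.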
@@ -25,7 +25,7 @@ export const Iterable: React.FC<FieldDiffClientProps> = ({
parentIsLocalized,
versionValue: valueTo,
}) => {
const { i18n, t } = useTranslation()
const { i18n } = useTranslation()
const { selectedLocales } = useSelectedLocales()
const { config } = useConfig()

@@ -73,9 +73,7 @@ export const Iterable: React.FC<FieldDiffClientProps> = ({
})

const rowNumber = String(i + 1).padStart(2, '0')
const rowLabel = fieldIsArrayType(field)
? `${t('general:item')} ${rowNumber}`
: `${t('fields:block')} ${rowNumber}`
const rowLabel = fieldIsArrayType(field) ? `Item ${rowNumber}` : `Block ${rowNumber}`

return (
<div className={`${baseClass}__row`} key={i}>

@@ -1,11 +1,6 @@
import type { PayloadRequest, RelationshipField, TypeWithID } from 'payload'

import {
fieldAffectsData,
fieldIsPresentationalOnly,
fieldShouldBeLocalized,
flattenTopLevelFields,
} from 'payload/shared'
import { fieldAffectsData, fieldIsPresentationalOnly, fieldShouldBeLocalized } from 'payload/shared'

import type { PopulatedRelationshipValue } from './index.js'

@@ -37,12 +32,7 @@ export const generateLabelFromValue = ({
const relatedCollection = req.payload.collections[relationTo].config

const useAsTitle = relatedCollection?.admin?.useAsTitle

const flattenedRelatedCollectionFields = flattenTopLevelFields(relatedCollection.fields, {
moveSubFieldsToTop: true,
})

const useAsTitleField = flattenedRelatedCollectionFields.find(
const useAsTitleField = relatedCollection.fields.find(
(f) => fieldAffectsData(f) && !fieldIsPresentationalOnly(f) && f.name === useAsTitle,
)
let titleFieldIsLocalized = false

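The change above swaps how the useAsTitle field is located: one side searches only the literal top-level fields array, while the other searches a list built with flattenTopLevelFields and moveSubFieldsToTop: true. A rough sketch of the difference flattening can make, using a toy field shape and a hand-rolled flatten helper rather than Payload's actual implementation:

// Illustrative only: toy field shape and flatten step, not Payload's flattenTopLevelFields.
type ToyField = { fields?: ToyField[]; name?: string; type: string }

const flattenTopLevel = (fields: ToyField[]): ToyField[] =>
  fields.flatMap((f) => (f.name ? [f] : f.fields ? flattenTopLevel(f.fields) : [f]))

const collectionFields: ToyField[] = [
  { type: 'collapsible', fields: [{ name: 'title', type: 'text' }] },
  { name: 'slug', type: 'text' },
]

// Direct top-level search misses 'title' because it sits inside the collapsible:
collectionFields.find((f) => f.name === 'title') // undefined

// Searching the flattened list finds it:
flattenTopLevel(collectionFields).find((f) => f.name === 'title') // { name: 'title', type: 'text' }

The sketch only illustrates why the two lookups can disagree when a collection keeps its title field inside an unnamed wrapper.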
@@ -223,8 +223,8 @@ export async function VersionView(props: DocumentViewServerProps) {
clientSchemaMap,
customDiffComponents: {},
entitySlug: collectionSlug || globalSlug,
fieldPermissions: docPermissions?.fields,
fields: (collectionConfig || globalConfig)?.fields,
fieldsPermissions: docPermissions?.fields,
i18n,
modifiedOnly,
parentIndexPath: '',
@@ -411,6 +411,11 @@ export async function VersionView(props: DocumentViewServerProps) {
})
}

const useAsTitleFieldName = collectionConfig?.admin?.useAsTitle || 'id'
const versionToUseAsTitle =
useAsTitleFieldName === 'id'
? String(versionTo.parent)
: versionTo.version?.[useAsTitleFieldName]
return (
<DefaultVersionView
canUpdate={docPermissions?.update}
@@ -425,6 +430,7 @@ export async function VersionView(props: DocumentViewServerProps) {
VersionToCreatedAtLabel={formatPill({ doc: versionTo, labelStyle: 'pill' })}
versionToID={versionTo.id}
versionToStatus={versionTo.version?._status}
versionToUseAsTitle={versionToUseAsTitle}
/>
)
}

@@ -19,7 +19,6 @@ type AutosaveCellProps = {
rowData: {
autosave?: boolean
id: number | string
localeStatus?: Record<string, 'draft' | 'published'>
publishedLocale?: string
version: {
_status: string

Some files were not shown because too many files have changed in this diff.