Compare commits

..

7 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Jessica Chowdhury | 78a1a71a95 | chore: add docs and default test for auth access control property | 2025-05-27 12:29:00 +01:00 |
| Jessica Chowdhury | 35473ebe6d | Merge branch 'main' into feat/auth-access | 2025-05-27 12:08:01 +01:00 |
| Jessica Chowdhury | 74ffff05be | Merge branch 'main' into feat/auth-access | 2025-03-31 14:19:08 +01:00 |
| Jessica Chowdhury | 6aa1db2fe0 | chore: merge with main and fix type error | 2025-03-17 14:35:24 +00:00 |
| Jessica Chowdhury | f0498f335a | Merge branch 'main' into feat/auth-access | 2025-03-17 14:34:09 +00:00 |
| Jessica Chowdhury | 4692813af7 | chore: add auth access e2e tests and fix type error | 2025-03-13 12:29:36 +00:00 |
| Jessica Chowdhury | aef39cf9ca | feat: adds auth option to collection access control | 2025-03-12 16:07:15 +00:00 |
1469 changed files with 22475 additions and 45580 deletions

34
.github/CODEOWNERS vendored
View File

@@ -1,34 +0,0 @@
# Order matters. The last matching pattern takes precedence
## Package Exports
**/exports/ @denolfe @DanRibbens
## Packages
/packages/create-payload-app/src/ @denolfe
/packages/email-*/src/ @denolfe
/packages/eslint-*/ @denolfe @AlessioGr
/packages/plugin-cloud-storage/src/ @denolfe
/packages/plugin-multi-tenant/src/ @JarrodMFlesch
/packages/richtext-*/src/ @AlessioGr
/packages/storage-*/src/ @denolfe
/packages/ui/src/ @jacobsfletch @AlessioGr @JarrodMFlesch
## Templates
/templates/_data/ @denolfe
/templates/_template/ @denolfe
## Build Files
**/jest.config.js @denolfe @AlessioGr
**/tsconfig*.json @denolfe @AlessioGr
## Root
/.github/ @denolfe
/.husky/ @denolfe
/.vscode/ @denolfe @AlessioGr
/package.json @denolfe
/tools/ @denolfe

View File

@@ -43,7 +43,6 @@ body:
- 'plugin: cloud'
- 'plugin: cloud-storage'
- 'plugin: form-builder'
- 'plugin: multi-tenant'
- 'plugin: nested-docs'
- 'plugin: richtext-lexical'
- 'plugin: richtext-slate'
@@ -60,7 +59,10 @@ body:
label: Environment Info
description: Paste output from `pnpm payload info` _or_ Payload, Node.js, and Next.js versions. Please avoid using "latest"—specific version numbers help us accurately diagnose and resolve issues.
render: text
placeholder: Run `pnpm payload info` in your terminal and paste the output here.
placeholder: |
Payload:
Node.js:
Next.js:
validations:
required: true

View File

@@ -4,17 +4,24 @@ description: |
inputs:
node-version:
description: Node.js version override
description: Node.js version
required: true
default: 23.11.0
pnpm-version:
description: Pnpm version override
description: Pnpm version
required: true
default: 9.7.1
pnpm-run-install:
description: Whether to run pnpm install
required: false
default: true
pnpm-restore-cache:
description: Whether to restore cache
required: false
default: true
pnpm-install-cache-key:
description: The cache key override for the pnpm install cache
description: The cache key for the pnpm install cache
default: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
outputs:
pnpm-store-path:
@@ -30,44 +37,15 @@ runs:
shell: bash
run: sudo ethtool -K eth0 tx off rx off
- name: Get versions from .tool-versions or use overrides
shell: bash
run: |
# if node-version input is provided, use it; otherwise, read from .tool-versions
if [ "${{ inputs.node-version }}" ]; then
echo "Node version override provided: ${{ inputs.node-version }}"
echo "NODE_VERSION=${{ inputs.node-version }}" >> $GITHUB_ENV
elif [ -f .tool-versions ]; then
NODE_VERSION=$(grep '^nodejs ' .tool-versions | awk '{print $2}')
echo "NODE_VERSION=$NODE_VERSION" >> $GITHUB_ENV
echo "Node version resolved to: $NODE_VERSION"
else
echo "No .tool-versions file found and no node-version input provided. Invalid configuration."
exit 1
fi
# if pnpm-version input is provided, use it; otherwise, read from .tool-versions
if [ "${{ inputs.pnpm-version }}" ]; then
echo "Pnpm version override provided: ${{ inputs.pnpm-version }}"
echo "PNPM_VERSION=${{ inputs.pnpm-version }}" >> $GITHUB_ENV
elif [ -f .tool-versions ]; then
PNPM_VERSION=$(grep '^pnpm ' .tool-versions | awk '{print $2}')
echo "PNPM_VERSION=$PNPM_VERSION" >> $GITHUB_ENV
echo "Pnpm version resolved to: $PNPM_VERSION"
else
echo "No .tool-versions file found and no pnpm-version input provided. Invalid configuration."
exit 1
fi
- name: Setup Node@${{ inputs.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
node-version: ${{ inputs.node-version }}
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
version: ${{ inputs.pnpm-version }}
run_install: false
- name: Get pnpm store path
@@ -77,25 +55,14 @@ runs:
echo "STORE_PATH=$STORE_PATH" >> $GITHUB_ENV
echo "Pnpm store path resolved to: $STORE_PATH"
- name: Compute Cache Key
shell: bash
run: |
if [ -n "${{ inputs.pnpm-install-cache-key }}" ]; then
PNPM_INSTALL_CACHE_KEY="${{ inputs.pnpm-install-cache-key }}"
else
PNPM_INSTALL_CACHE_KEY="pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}"
fi
echo "Computed PNPM_INSTALL_CACHE_KEY: $PNPM_INSTALL_CACHE_KEY"
echo "PNPM_INSTALL_CACHE_KEY=$PNPM_INSTALL_CACHE_KEY" >> $GITHUB_ENV
- name: Restore pnpm install cache
if: ${{ inputs.pnpm-restore-cache == 'true' }}
uses: actions/cache@v4
with:
path: ${{ env.STORE_PATH }}
key: ${{ env.PNPM_INSTALL_CACHE_KEY }}
key: ${{ inputs.pnpm-install-cache-key }}
restore-keys: |
pnpm-store-${{ env.PNPM_VERSION }}-
pnpm-store-${{ inputs.pnpm-version }}-
pnpm-store-
- name: Run pnpm install
@@ -105,5 +72,5 @@ runs:
# Set the cache key output
- run: |
echo "pnpm-install-cache-key=${{ env.PNPM_INSTALL_CACHE_KEY }}" >> $GITHUB_OUTPUT
echo "pnpm-install-cache-key=${{ inputs.pnpm-install-cache-key }}" >> $GITHUB_ENV
shell: bash

View File

@@ -40,7 +40,7 @@ There are a couple ways to run integration tests:
- **Granularly** - you can run individual tests in vscode by installing the Jest Runner plugin and using that to run individual tests. Clicking the `debug` button will run the test in debug mode allowing you to set break points.
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/.github/assets/int-debug.png" />
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/assets/images/github/int-debug.png" />
- **Manually** - you can run all int tests in the `/test/_community/int.spec.ts` file by running the following command:
@@ -57,7 +57,7 @@ The easiest way to run E2E tests is to install
Once they are installed you can open the `testing` tab in vscode sidebar and drill down to the test you want to run, i.e. `/test/_community/e2e.spec.ts`
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/.github/assets/e2e-debug.png" />
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/assets/images/github/e2e-debug.png" />
#### Notes

View File

@@ -1,30 +0,0 @@
#!/bin/bash
severity=${1:-"critical"}
audit_json=$(pnpm audit --prod --json)
output_file="audit_output.json"
echo "Auditing for ${severity} vulnerabilities..."
echo "${audit_json}" | jq --arg severity "${severity}" '
.advisories | to_entries |
map(select(.value.patched_versions != "<0.0.0" and .value.severity == $severity) |
{
package: .value.module_name,
vulnerable: .value.vulnerable_versions,
fixed_in: .value.patched_versions
}
)
' >$output_file
audit_length=$(jq 'length' $output_file)
if [[ "${audit_length}" -gt "0" ]]; then
echo "Actionable vulnerabilities found in the following packages:"
jq -r '.[] | "\u001b[1m\(.package)\u001b[0m vulnerable in \u001b[31m\(.vulnerable)\u001b[0m fixed in \u001b[32m\(.fixed_in)\u001b[0m"' $output_file | while read -r line; do echo -e "$line"; done
echo "Output written to ${output_file}"
exit 1
else
echo "No actionable vulnerabilities"
exit 0
fi

View File

@@ -1,53 +0,0 @@
name: audit-dependencies
on:
# Sundays at 2am EST
schedule:
- cron: '0 7 * * 0'
workflow_dispatch:
inputs:
audit-level:
description: The level of audit to run (low, moderate, high, critical)
required: false
default: critical
debug:
description: Enable debug logging
required: false
default: false
env:
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
jobs:
audit:
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup
uses: ./.github/actions/setup
- name: Run audit dependencies script
id: audit_dependencies
run: ./.github/workflows/audit-dependencies.sh ${{ inputs.audit-level }}
- name: Slack notification on failure
if: failure()
uses: slackapi/slack-github-action@v2.1.0
with:
webhook: ${{ inputs.debug == 'true' && secrets.SLACK_TEST_WEBHOOK_URL || secrets.SLACK_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
{
"username": "GitHub Actions Bot",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "🚨 Actionable vulnerabilities found: <https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Details>"
}
},
]
}

View File

@@ -1,4 +1,4 @@
name: ci
name: build
on:
pull_request:
@@ -6,7 +6,6 @@ on:
- opened
- reopened
- synchronize
- labeled
push:
branches:
- main
@@ -17,6 +16,8 @@ concurrency:
cancel-in-progress: true
env:
NODE_VERSION: 23.11.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
@@ -69,6 +70,10 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Lint
run: pnpm lint -- --quiet
@@ -83,6 +88,10 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- run: pnpm run build:all
env:
@@ -104,8 +113,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -128,8 +140,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -147,7 +162,6 @@ jobs:
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_tests == 'true' }}
name: int-${{ matrix.database }}
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
@@ -159,7 +173,6 @@ jobs:
- supabase
- sqlite
- sqlite-uuid
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
@@ -171,8 +184,7 @@ jobs:
services:
postgres:
# Custom postgres 17 docker image that supports both pg-vector and postgis: https://github.com/payloadcms/postgis-vector
image: ${{ (startsWith(matrix.database, 'postgres') ) && 'ghcr.io/payloadcms/postgis-vector:latest' || '' }}
image: ${{ (startsWith(matrix.database, 'postgres') ) && 'postgis/postgis:16-3.4' || '' }}
env:
# must specify password for PG Docker container image, see: https://registry.hub.docker.com/_/postgres?tab=description&page=1&name=10
POSTGRES_USER: ${{ env.POSTGRES_USER }}
@@ -189,8 +201,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -242,7 +257,6 @@ jobs:
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_tests == 'true' }}
name: e2e-${{ matrix.suite }}
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
@@ -297,141 +311,6 @@ jobs:
- plugin-cloud-storage
- plugin-form-builder
- plugin-import-export
- plugin-multi-tenant
- plugin-nested-docs
- plugin-seo
- sort
- versions
- uploads
env:
SUITE_NAME: ${{ matrix.suite }}
steps:
- uses: actions/checkout@v4
- name: Node setup
uses: ./.github/actions/setup
with:
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
- name: Restore build
uses: actions/cache@v4
with:
path: ./*
key: ${{ github.sha }}-${{ github.run_number }}
- name: Start LocalStack
run: pnpm docker:start
if: ${{ matrix.suite == 'plugin-cloud-storage' }}
- name: Store Playwright's Version
run: |
# Extract the version number using a more targeted regex pattern with awk
PLAYWRIGHT_VERSION=$(pnpm ls @playwright/test --depth=0 | awk '/@playwright\/test/ {print $2}')
echo "Playwright's Version: $PLAYWRIGHT_VERSION"
echo "PLAYWRIGHT_VERSION=$PLAYWRIGHT_VERSION" >> $GITHUB_ENV
- name: Cache Playwright Browsers for Playwright's Version
id: cache-playwright-browsers
uses: actions/cache@v4
with:
path: ~/.cache/ms-playwright
key: playwright-browsers-${{ env.PLAYWRIGHT_VERSION }}
- name: Setup Playwright - Browsers and Dependencies
if: steps.cache-playwright-browsers.outputs.cache-hit != 'true'
run: pnpm exec playwright install --with-deps chromium
- name: Setup Playwright - Dependencies-only
if: steps.cache-playwright-browsers.outputs.cache-hit == 'true'
run: pnpm exec playwright install-deps chromium
- name: E2E Tests
run: PLAYWRIGHT_JSON_OUTPUT_NAME=results_${{ matrix.suite }}.json pnpm test:e2e:prod:ci:noturbo ${{ matrix.suite }}
env:
PLAYWRIGHT_JSON_OUTPUT_NAME: results_${{ matrix.suite }}.json
NEXT_TELEMETRY_DISABLED: 1
- uses: actions/upload-artifact@v4
if: always()
with:
name: test-results-${{ matrix.suite }}
path: test/test-results/
if-no-files-found: ignore
retention-days: 1
# Disabled until this is fixed: https://github.com/daun/playwright-report-summary/issues/156
# - uses: daun/playwright-report-summary@v3
# with:
# report-file: results_${{ matrix.suite }}.json
# report-tag: ${{ matrix.suite }}
# job-summary: true
tests-e2e-turbo:
runs-on: ubuntu-24.04
needs: [changes, build]
if: >-
needs.changes.outputs.needs_tests == 'true' &&
(
contains(github.event.pull_request.labels.*.name, 'run-e2e-turbo') ||
github.event.label.name == 'run-e2e-turbo'
)
name: e2e-turbo-${{ matrix.suite }}
strategy:
fail-fast: false
matrix:
# find test -type f -name 'e2e.spec.ts' | sort | xargs dirname | xargs -I {} basename {}
suite:
- _community
- access-control
- admin__e2e__general
- admin__e2e__list-view
- admin__e2e__document-view
- admin-bar
- admin-root
- auth
- auth-basic
- bulk-edit
- joins
- field-error-states
- fields-relationship
- fields__collections__Array
- fields__collections__Blocks
- fields__collections__Blocks#config.blockreferences.ts
- fields__collections__Checkbox
- fields__collections__Collapsible
- fields__collections__ConditionalLogic
- fields__collections__CustomID
- fields__collections__Date
- fields__collections__Email
- fields__collections__Indexed
- fields__collections__JSON
- fields__collections__Number
- fields__collections__Point
- fields__collections__Radio
- fields__collections__Relationship
- fields__collections__Row
- fields__collections__Select
- fields__collections__Tabs
- fields__collections__Tabs2
- fields__collections__Text
- fields__collections__UI
- fields__collections__Upload
- hooks
- lexical__collections__Lexical__e2e__main
- lexical__collections__Lexical__e2e__blocks
- lexical__collections__Lexical__e2e__blocks#config.blockreferences.ts
- lexical__collections__RichText
- query-presets
- form-state
- live-preview
- localization
- locked-documents
- i18n
- plugin-cloud-storage
- plugin-form-builder
- plugin-import-export
- plugin-multi-tenant
- plugin-nested-docs
- plugin-seo
- sort
@@ -445,8 +324,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -489,7 +371,7 @@ jobs:
- uses: actions/upload-artifact@v4
if: always()
with:
name: test-results-turbo${{ matrix.suite }}
name: test-results-${{ matrix.suite }}
path: test/test-results/
if-no-files-found: ignore
retention-days: 1
@@ -501,32 +383,24 @@ jobs:
# report-tag: ${{ matrix.suite }}
# job-summary: true
# Build listed templates with packed local packages and then runs their int and e2e tests
build-and-test-templates:
# Build listed templates with packed local packages
build-templates:
runs-on: ubuntu-24.04
needs: [changes, build]
if: ${{ needs.changes.outputs.needs_build == 'true' }}
name: build-template-${{ matrix.template }}-${{ matrix.database }}
needs: build
strategy:
fail-fast: false
matrix:
include:
- template: blank
database: mongodb
- template: website
database: mongodb
- template: with-payload-cloud
database: mongodb
- template: with-vercel-mongodb
database: mongodb
# Postgres
- template: with-postgres
database: postgres
- template: with-vercel-postgres
database: postgres
@@ -536,6 +410,8 @@ jobs:
# - template: with-vercel-website
# database: postgres
name: ${{ matrix.template }}-${{ matrix.database }}
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
@@ -548,8 +424,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -596,45 +475,6 @@ jobs:
env:
NODE_OPTIONS: --max-old-space-size=8096
- name: Store Playwright's Version
run: |
# Extract the version number using a more targeted regex pattern with awk
PLAYWRIGHT_VERSION=$(pnpm ls @playwright/test --depth=0 | awk '/@playwright\/test/ {print $2}')
echo "Playwright's Version: $PLAYWRIGHT_VERSION"
echo "PLAYWRIGHT_VERSION=$PLAYWRIGHT_VERSION" >> $GITHUB_ENV
- name: Cache Playwright Browsers for Playwright's Version
id: cache-playwright-browsers
uses: actions/cache@v4
with:
path: ~/.cache/ms-playwright
key: playwright-browsers-${{ env.PLAYWRIGHT_VERSION }}
- name: Setup Playwright - Browsers and Dependencies
if: steps.cache-playwright-browsers.outputs.cache-hit != 'true'
run: pnpm exec playwright install --with-deps chromium
- name: Setup Playwright - Dependencies-only
if: steps.cache-playwright-browsers.outputs.cache-hit == 'true'
run: pnpm exec playwright install-deps chromium
- name: Runs Template Int Tests
run: pnpm --filter ${{ matrix.template }} run test:int
env:
NODE_OPTIONS: --max-old-space-size=8096
PAYLOAD_DATABASE: ${{ matrix.database }}
POSTGRES_URL: ${{ env.POSTGRES_URL }}
MONGODB_URL: mongodb://localhost:27017/payloadtests
- name: Runs Template E2E Tests
run: PLAYWRIGHT_JSON_OUTPUT_NAME=results_${{ matrix.template }}.json pnpm --filter ${{ matrix.template }} test:e2e
env:
NODE_OPTIONS: --max-old-space-size=8096
PAYLOAD_DATABASE: ${{ matrix.database }}
POSTGRES_URL: ${{ env.POSTGRES_URL }}
MONGODB_URL: mongodb://localhost:27017/payloadtests
NEXT_TELEMETRY_DISABLED: 1
tests-type-generation:
runs-on: ubuntu-24.04
needs: [changes, build]
@@ -645,8 +485,11 @@ jobs:
- name: Node setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
- name: Restore build
uses: actions/cache@v4
@@ -667,7 +510,7 @@ jobs:
needs:
- lint
- build
- build-and-test-templates
- build-templates
- tests-unit
- tests-int
- tests-e2e
@@ -690,34 +533,3 @@ jobs:
- run: |
echo github.ref: ${{ github.ref }}
echo isV3: ${{ github.ref == 'refs/heads/main' }}
analyze:
runs-on: ubuntu-latest
needs: [changes, build]
timeout-minutes: 5
permissions:
contents: read # for checkout repository
actions: read # for fetching base branch bundle stats
pull-requests: write # for comments
steps:
- uses: actions/checkout@v4
- name: Node setup
uses: ./.github/actions/setup
with:
pnpm-run-install: false
pnpm-restore-cache: false # Full build is restored below
- name: Restore build
uses: actions/cache@v4
with:
path: ./*
key: ${{ github.sha }}-${{ github.run_number }}
- run: pnpm run build:bundle-for-analysis # Esbuild packages that haven't already been built in the build step for the purpose of analyzing bundle size
env:
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
- name: Analyze esbuild bundle size
uses: exoego/esbuild-bundle-analyzer@v1
with:
metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json'

View File

@@ -7,6 +7,8 @@ on:
workflow_dispatch:
env:
NODE_VERSION: 23.11.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
@@ -58,6 +60,9 @@ jobs:
- name: Setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
- name: Start PostgreSQL
uses: CasperWA/postgresql-action@v1.2

View File

@@ -12,6 +12,8 @@ on:
default: ''
env:
NODE_VERSION: 23.11.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry

View File

@@ -7,6 +7,8 @@ on:
workflow_dispatch:
env:
NODE_VERSION: 23.11.0
PNPM_VERSION: 9.7.1
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
@@ -21,6 +23,9 @@ jobs:
uses: actions/checkout@v4
- name: Setup
uses: ./.github/actions/setup
with:
node-version: ${{ env.NODE_VERSION }}
pnpm-version: ${{ env.PNPM_VERSION }}
- name: Load npm token
run: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:

7
.gitignore vendored
View File

@@ -22,13 +22,6 @@ meta_server.json
meta_index.json
meta_shared.json
packages/payload/esbuild
packages/ui/esbuild
packages/next/esbuild
packages/richtext-lexical/esbuild
audit_output.json
.turbo
# Ignore test directory media folder/files

View File

@@ -6,8 +6,6 @@
"source.fixAll.eslint": "explicit"
},
"editor.formatOnSaveMode": "file",
"files.insertFinalNewline": true,
"files.trimTrailingWhitespace": true,
"eslint.rules.customizations": [
// Silence some warnings that will get auto-fixed
{ "rule": "perfectionist/*", "severity": "off", "fixable": true },

View File

@@ -87,43 +87,41 @@ You can run the entire test suite using `pnpm test`. If you wish to only run e2e
By default, `pnpm test:int` will only run int tests against MongoDB. To run int tests against Postgres, you can use `pnpm test:int:postgres`. You will need to have Postgres installed on your system for this to work.
### Pull Requests
### Commits
For all Pull Requests, you should be extremely descriptive about both your problem and proposed solution. If there are any affected open or closed issues, please leave the issue number in your PR description.
We use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) for our commit messages. Please follow this format when creating commits. Here are some examples:
All commits within a PR are squashed when merged, using the PR title as the commit message. For that reason, please use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) for your PR titles.
- `feat: adds new feature`
- `fix: fixes bug`
- `docs: adds documentation`
- `chore: does chore`
Here are some examples:
Here's a breakdown of the format. At the top-level, we use the following types to categorize our commits:
- `feat: add new feature`
- `fix: fix bug`
- `docs: add documentation`
- `test: add/fix tests`
- `refactor: refactor code`
- `chore: anything that does not fit into the above categories`
If applicable, you must indicate the affected packages in parentheses to "scope" the changes. Changes to the core `payload` package do not require scoping.
Here are some examples:
- `feat(ui): add new feature`
- `fix(richtext-lexical): fix bug`
- `feat`: new feature that adds functionality. These are automatically added to the changelog when creating new releases.
- `fix`: a fix to an existing feature. These are automatically added to the changelog when creating new releases.
- `docs`: changes to [docs](./docs) only. These do not appear in the changelog.
- `chore`: changes to code that is neither a fix nor a feature (e.g. refactoring, adding tests, etc.). These do not appear in the changelog.
If you are committing to [templates](./templates) or [examples](./examples), use the `chore` type with the proper scope, like this:
- `chore(templates): adds feature to template`
- `chore(examples): fixes bug in example`
## Pull Requests
For all Pull Requests, you should be extremely descriptive about both your problem and proposed solution. If there are any affected open or closed issues, please leave the issue number in your PR message.
## Previewing docs
This is how you can preview changes you made locally to the docs:
1. Clone our [website repository](https://github.com/payloadcms/website)
2. Run `pnpm install`
2. Run `yarn install`
3. Duplicate the `.env.example` file and rename it to `.env`
4. Add a `DOCS_DIR` environment variable to the `.env` file which points to the absolute path of your modified docs folder. For example `DOCS_DIR=/Users/yourname/Documents/GitHub/payload/docs`
5. Run `pnpm fetchDocs:local`. If this was successful, you should see no error messages and the following output: _Docs successfully written to /.../website/src/app/docs.json_. There could be error messages if you have incorrect markdown in your local docs folder. In this case, it will tell you how you can fix it
6. You're done! Now you can start the website locally using `pnpm dev` and preview the docs under [http://localhost:3000/docs/local](http://localhost:3000/docs/local)
5. Run `yarn run fetchDocs:local`. If this was successful, you should see no error messages and the following output: _Docs successfully written to /.../website/src/app/docs.json_. There could be error messages if you have incorrect markdown in your local docs folder. In this case, it will tell you how you can fix it
6. You're done! Now you can start the website locally using `yarn run dev` and preview the docs under [http://localhost:3000/docs/](http://localhost:3000/docs/)
## Internationalization (i18n)

View File

@@ -45,7 +45,7 @@ There are a couple ways to do this:
- **Granularly** - you can run individual tests in vscode by installing the Jest Runner plugin and using that to run individual tests. Clicking the `debug` button will run the test in debug mode allowing you to set break points.
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/.github/assets/int-debug.png" />
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/assets/images/github/int-debug.png" />
- **Manually** - you can run all int tests in the `/test/_community/int.spec.ts` file by running the following command:
@@ -62,7 +62,7 @@ The easiest way to run E2E tests is to install
Once they are installed you can open the `testing` tab in vscode sidebar and drill down to the test you want to run, i.e. `/test/_community/e2e.spec.ts`
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/.github/assets/e2e-debug.png" />
<img src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/assets/images/github/e2e-debug.png" />
#### Notes

View File

@@ -42,6 +42,7 @@ export const CollectionWithAccessControl: CollectionConfig = {
// Auth-enabled Collections only
admin: () => {...},
auth: () => {...},
unlock: () => {...},
// Version-enabled Collections only
@@ -62,10 +63,11 @@ The following options are available:
If a Collection supports [`Authentication`](../authentication/overview), the following additional options are available:
| Function | Allows/Denies Access |
| ------------ | ---------------------------------------------------------------------------------------- |
| **`admin`** | Used to restrict access to the [Admin Panel](../admin/overview). [More details](#admin). |
| **`unlock`** | Used to restrict which users can access the `unlock` operation. [More details](#unlock). |
| Function | Allows/Denies Access |
| ------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`admin`** | Used to restrict access to the [Admin Panel](../admin/overview). [More details](#admin). |
| **`auth`** | Used to restrict access to the [Authentication Fields](../authentication/overview) that get automatically added to auth-enabled collections (i.e. email, username, password). [More details](#auth). |
| **`unlock`** | Used to restrict which users can access the `unlock` operation. [More details](#unlock). |
If a Collection supports [Versions](../versions/overview), the following additional options are available:
@@ -287,6 +289,33 @@ The following arguments are provided to the `admin` function:
| --------- | ----------------------------------------------------------------------------------------------------------------------------- |
| **`req`** | The [Request](https://developer.mozilla.org/en-US/docs/Web/API/Request) object containing the currently authenticated `user`. |
### Auth
Determines which users can access the [Authentication Fields](../authentication/overview) that are automatically added to auth-enabled collections, specifically `email`, `username`, and `password`. This access control function allows you to restrict read and update access to these fields. The function is applied to all of these fields; it is not currently possible to target specific fields, i.e. just `email` or just `password`.
To add Auth Access Control to a Collection, use the `auth` property in the [Collection Config](../configuration/collections):
```ts
import type { CollectionConfig } from 'payload'
export const CollectionWithAuthFieldAccess: CollectionConfig = {
// ...
access: {
// highlight-start
auth: ({ req: { user } }) => {
return user?.role === 'admin'
},
// highlight-end
},
}
```
The following arguments are provided to the `auth` function:
| Option | Description |
| --------- | ----------------------------------------------------------------------------------------------------------------------------- |
| **`req`** | The [Request](https://developer.mozilla.org/en-US/docs/Web/API/Request) object containing the currently authenticated `user`. |
### Unlock
Determines which users can [unlock](/docs/authentication/operations#unlock) other users who may be blocked from authenticating successfully due to [failing too many login attempts](/docs/authentication/overview#config-options).
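An `unlock` access control function follows the same pattern as the `auth` example above. A minimal sketch, assuming your users collection has a `role` field:
```ts
import type { CollectionConfig } from 'payload'

export const CollectionWithUnlockAccess: CollectionConfig = {
  // ...
  access: {
    // Only admins can run the unlock operation for locked-out users
    unlock: ({ req: { user } }) => user?.role === 'admin',
  },
}
```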

View File

@@ -32,18 +32,18 @@ The Admin Panel serves as the entire HTTP layer for Payload, providing a full CR
Once you [install Payload](../getting-started/installation), the following files and directories will be created in your app:
```plaintext
app
├─ (payload)
├── admin
├─── [[...segments]]
app/
├─ (payload)/
├── admin/
├─── [[...segments]]/
├──── page.tsx
├──── not-found.tsx
├── api
├─── [...slug]
├── api/
├─── [...slug]/
├──── route.ts
├── graphql
├── graphql/
├──── route.ts
├── graphql-playground
├── graphql-playground/
├──── route.ts
├── custom.scss
├── layout.tsx
@@ -84,30 +84,30 @@ import { buildConfig } from 'payload'
const config = buildConfig({
// ...
// highlight-start
admin: {
// highlight-line
// ...
},
// highlight-end
})
```
The following options are available:
| Option | Description |
| -------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
| `avatar` | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| `autoLogin` | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| `components` | Component overrides that affect the entirety of the Admin Panel. [More details](../custom-components/overview). |
| `custom` | Any custom properties you wish to pass to the Admin Panel. |
| `dateFormat` | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| `meta` | Base metadata to use for the Admin Panel. [More details](./metadata). |
| `routes` | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| `suppressHydrationWarning` | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| `theme` | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| `timezones` | Configure the timezone settings for the admin panel. [More details](#timezones) |
| `user` | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
| Option | Description |
| ------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------- |
| **`avatar`** | Set account profile picture. Options: `gravatar`, `default` or a custom React component. |
| **`autoLogin`** | Used to automate log-in for dev and demonstration convenience. [More details](../authentication/overview). |
| **`buildPath`** | Specify an absolute path for where to store the built Admin bundle used in production. Defaults to `path.resolve(process.cwd(), 'build')`. |
| **`components`** | Component overrides that affect the entirety of the Admin Panel. [More details](../custom-components/overview). |
| **`custom`** | Any custom properties you wish to pass to the Admin Panel. |
| **`dateFormat`** | The date format that will be used for all dates within the Admin Panel. Any valid [date-fns](https://date-fns.org/) format pattern can be used. |
| **`livePreview`** | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| **`meta`** | Base metadata to use for the Admin Panel. [More details](./metadata). |
| **`routes`** | Replace built-in Admin Panel routes with your own custom routes. [More details](#customizing-routes). |
| **`suppressHydrationWarning`** | If set to `true`, suppresses React hydration mismatch warnings during the hydration of the root `<html>` tag. Defaults to `false`. |
| **`theme`** | Restrict the Admin Panel theme to use only one of your choice. Default is `all`. |
| **`timezones`** | Configure the timezone settings for the admin panel. [More details](#timezones) |
| **`user`** | The `slug` of the Collection that you want to allow to login to the Admin Panel. [More details](#the-admin-user-collection). |
<Banner type="success">
**Reminder:** These are the _root-level_ options for the Admin Panel. You can
@@ -187,12 +187,6 @@ The following options are available:
| `graphQL` | `/graphql` | The [GraphQL API](../graphql/overview) base path. |
| `graphQLPlayground` | `/graphql-playground` | The GraphQL Playground. |
<Banner type="warning">
**Important:** Changing Root-level Routes also requires a change to [Project
Structure](#project-structure) to match the new route. [More
details](#customizing-root-level-routes).
</Banner>
<Banner type="success">
**Tip:** You can easily add _new_ routes to the Admin Panel through [Custom
Endpoints](../rest-api/overview#custom-endpoints) and [Custom
@@ -203,29 +197,13 @@ The following options are available:
You can change the Root-level Routes as needed, such as to mount the Admin Panel at the root of your application.
This change, however, also requires a change to your [Project Structure](#project-structure) to match the new route.
For example, if you set `routes.admin` to `/`:
```ts
import { buildConfig } from 'payload'
const config = buildConfig({
// ...
routes: {
admin: '/', // highlight-line
},
})
```
Then you would need to completely remove the `admin` directory from the project structure:
Changing Root-level Routes also requires a change to [Project Structure](#project-structure) to match the new route. For example, if you set `routes.admin` to `/`, you would need to completely remove the `admin` directory from the project structure:
```plaintext
app
├─ (payload)
├── [[...segments]]
app/
├─ (payload)/
├── [[...segments]]/
├──── ...
├── layout.tsx
```
<Banner type="warning">

View File

@@ -981,15 +981,7 @@ const MyComponent: React.FC = () => {
## useTableColumns
Returns properties and methods to manipulate table columns:
| Property | Description |
| ------------------------ | ------------------------------------------------------------------------------------------ |
| **`columns`** | The current state of columns including their active status and configuration |
| **`LinkedCellOverride`** | A component override for linked cells in the table |
| **`moveColumn`** | A method to reorder columns. Accepts `{ fromIndex: number, toIndex: number }` as arguments |
| **`resetColumnsState`** | A method to reset columns back to their default configuration as defined in the collection config |
| **`setActiveColumns`** | A method to set specific columns to active state while preserving the existing column order. Accepts an array of column names to activate |
| **`toggleColumn`** | A method to toggle a single column's visibility. Accepts a column name as string |
Returns methods to manipulate table columns
```tsx
'use client'
@@ -997,30 +989,17 @@ import { useTableColumns } from '@payloadcms/ui'
const MyComponent: React.FC = () => {
// highlight-start
const { setActiveColumns, resetColumnsState } = useTableColumns()
const { setActiveColumns } = useTableColumns()
const activateSpecificColumns = () => {
// Only activates the id and createdAt columns
// Other columns retain their current active/inactive state
// The original column order is preserved
setActiveColumns(['id', 'createdAt'])
}
const resetToDefaults = () => {
// Resets to the default columns defined in the collection config
resetColumnsState()
const resetColumns = () => {
setActiveColumns(['id', 'createdAt', 'updatedAt'])
}
// highlight-end
return (
<div>
<button type="button" onClick={activateSpecificColumns}>
Activate Specific Columns
</button>
<button type="button" onClick={resetToDefaults}>
Reset To Defaults
</button>
</div>
<button type="button" onClick={resetColumns}>
Reset columns
</button>
)
}
```

View File

@@ -25,12 +25,11 @@ A strategy is made up of the following:
The `authenticate` function is passed the following arguments:
| Argument | Description |
| ---------------------- | ------------------------------------------------------------------------------------------------------------------- |
| **`canSetHeaders`** \* | Whether or not the strategy is being executed from a context where response headers can be set. Default is `false`. |
| **`headers`** \* | The headers on the incoming request. Useful for retrieving identifiable information on a request. |
| **`payload`** \* | The Payload class. Useful for authenticating the identifiable information against Payload. |
| **`isGraphQL`** | Whether or not the strategy is being executed within the GraphQL endpoint. Default is `false`. |
| Argument | Description |
| ---------------- | ------------------------------------------------------------------------------------------------- |
| **`headers`** \* | The headers on the incoming request. Useful for retrieving identifiable information on a request. |
| **`payload`** \* | The Payload class. Useful for authenticating the identifiable information against Payload. |
| **`isGraphQL`** | Whether or not the request was made from a GraphQL endpoint. Default is `false`. |
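To make those arguments concrete, here is an illustrative sketch of a custom strategy that checks a shared-secret header and resolves a user through the Local API. The strategy name, header, and lookup are hypothetical, and the exact return shape should be taken from the Example Strategy below rather than from this sketch:
```ts
import type { CollectionConfig } from 'payload'

export const Users: CollectionConfig = {
  slug: 'users',
  auth: {
    strategies: [
      {
        name: 'shared-secret', // hypothetical strategy name
        authenticate: async ({ headers, payload }) => {
          // `headers` holds identifiable information from the incoming request
          const secret = headers.get('x-shared-secret') // hypothetical header
          if (!secret || secret !== process.env.SHARED_SECRET) {
            return { user: null }
          }

          // `payload` can be used to verify that information against your data
          const { docs } = await payload.find({
            collection: 'users',
            where: { email: { equals: 'service@example.com' } }, // hypothetical lookup
            limit: 1,
          })

          return { user: docs[0] ? { collection: 'users', ...docs[0] } : null }
        },
      },
    ],
  },
  fields: [],
}
```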
### Example Strategy

View File

@@ -23,9 +23,6 @@ Example:
```ts
const user = await fetch('http://localhost:3000/api/users/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
email: 'dev@payloadcms.com',
password: 'password',

View File

@@ -180,22 +180,19 @@ As Payload sets HTTP-only cookies, logging out cannot be done by just removing a
**Example REST API logout**:
```ts
const res = await fetch(
'http://localhost:3000/api/[collection-slug]/logout?allSessions=false',
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
const res = await fetch('http://localhost:3000/api/[collection-slug]/logout', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
)
})
```
**Example GraphQL Mutation**:
```
mutation {
logoutUser(allSessions: false)
logout[collection-singular-label]
}
```
@@ -206,10 +203,6 @@ mutation {
docs](../local-api/server-functions#reusable-payload-server-functions).
</Banner>
#### Logging out with sessions enabled
By default, logging out will only end the session pertaining to the JWT that was used to log out with. However, you can pass `allSessions: true` to the logout operation in order to end all sessions for the user logging out.
## Refresh
Allows for "refreshing" JWTs. If your user has a token that is about to expire, but the user is still active and using the app, you might want to use the `refresh` operation to receive a new token by executing this operation via the authenticated user.

View File

@@ -91,7 +91,6 @@ The following options are available:
| **`strategies`** | Advanced - an array of custom authentication strategies to extend this collection's authentication with. [More details](./custom-strategies). |
| **`tokenExpiration`** | How long (in seconds) to keep the user logged in. JWTs and HTTP-only cookies will both expire at the same time. |
| **`useAPIKey`** | Payload Authentication provides for API keys to be set on each user within an Authentication-enabled Collection. [More details](./api-keys). |
| **`useSessions`** | True by default. Set to `false` to use stateless JWTs for authentication instead of sessions. |
| **`verify`** | Set to `true` or pass an object with verification options to require users to verify by email before they are allowed to log into your app. [More details](./email#email-verification). |
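For illustration, a few of these options combined in an auth-enabled collection might look like the sketch below (values are arbitrary examples):
```ts
import type { CollectionConfig } from 'payload'

export const Users: CollectionConfig = {
  slug: 'users',
  auth: {
    tokenExpiration: 7200, // keep users logged in for 2 hours (in seconds)
    useAPIKey: true, // allow an API key to be set on each user
    verify: true, // require email verification before login
  },
  fields: [],
}
```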
### Login With Username
@@ -143,17 +142,14 @@ import { buildConfig } from 'payload'
export default buildConfig({
// ...
// highlight-start
admin: {
autoLogin:
process.env.NODE_ENV === 'development'
? {
email: 'test@example.com',
password: 'test',
prefillOnly: true,
}
: false,
},
autoLogin:
process.env.NEXT_PUBLIC_ENABLE_AUTOLOGIN === 'true'
? {
email: 'test@example.com',
password: 'test',
prefillOnly: true,
}
: false,
// highlight-end
})
```
@@ -179,7 +175,7 @@ All auth-related operations are available via Payload's REST, Local, and GraphQL
## Strategies
Out of the box Payload ships with three powerful Authentication strategies:
Out of the box Payload ships with a three powerful Authentication strategies:
- [HTTP-Only Cookies](./cookies)
- [JSON Web Tokens (JWT)](./jwt)
@@ -202,43 +198,3 @@ API Keys can be enabled on auth collections. These are particularly useful when
### Custom Strategies
There are cases where these may not be enough for your application. Payload is extendable by design so you can wire up your own strategy when you need to. [More details](./custom-strategies).
### Access Control
Default auth fields including `email`, `username`, and `password` can be overridden by defining a custom field with the same name in your collection config. This allows you to customize the field — including access control — while preserving the underlying auth functionality. For example, you might want to restrict the `email` field from being updated once it is created, or only allow it to be read by certain user roles. You can achieve this by redefining the field and setting access rules accordingly.
Here's an example of how to restrict access to default auth fields:
```ts
import type { CollectionConfig } from 'payload'
export const Auth: CollectionConfig = {
slug: 'users',
auth: true,
fields: [
{
name: 'email', // or 'username'
type: 'text',
access: {
create: () => true,
read: () => false,
update: () => false,
},
},
{
name: 'password', // this will be applied to all password-related fields including new password, confirm password.
type: 'text',
hidden: true, // needed only for the password field to prevent duplication in the Admin panel
access: {
update: () => false,
},
},
],
}
```
**Note:**
- Access functions apply across the application, i.e. if `read` access is disabled on `email`, the field will not appear in the Admin panel UI or API.
- Restricting `read` on the `email` or `username` disables the **Unlock** action in the Admin panel as this function requires access to a user-identifying field.
- When overriding the `password` field, you may need to include `hidden: true` to prevent duplicate fields being displayed in the Admin panel.

View File

@@ -85,7 +85,6 @@ The following options are available:
| `defaultPopulate` | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). |
| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. |
| `forceSelect` | Specify which fields should always be selected, regardless of the `select` query, which can be useful to ensure the field exists for access control / hooks |
| `disableBulkEdit` | Disable the bulk edit operation for the collection in the admin panel and the REST API |
_\* An asterisk denotes that a property is required._
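As a rough sketch of how a few of these options might be combined (the select-style object shapes shown for `defaultPopulate` and `forceSelect` are assumptions; see their linked docs for the exact syntax):
```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  defaultPopulate: {
    title: true, // only return `title` when this collection is populated from other documents
  },
  forceSelect: {
    status: true, // always selected so access control / hooks can rely on it
  },
  disableBulkEdit: true, // hide bulk edit in the admin panel and REST API
  fields: [
    { name: 'title', type: 'text' },
    { name: 'status', type: 'text' },
  ],
}
```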
@@ -121,33 +120,26 @@ export const MyCollection: CollectionConfig = {
The following options are available:
| Option | Description |
| ---------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `group` | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| `hidden` | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| `hooks` | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| `useAsTitle` | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. |
| `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. |
| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `folders` | A boolean to enable folders for a given collection. Defaults to `false`. [More details](../folders/overview). |
| `meta` | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](../admin/metadata). |
| `preview` | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](../admin/preview). |
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
<Banner type="warning">
**Note:** If you set `useAsTitle` to a relationship or join field, it will use
only the ID of the related document(s) as the title. To display a specific
field (i.e. title) from the related document instead, create a virtual field
that extracts the desired data, and set `useAsTitle` to that virtual field.
</Banner>
| Option | Description |
| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `group` | Text or localization object used to group Collection and Global links in the admin navigation. Set to `false` to hide the link from the navigation while keeping its routes accessible. |
| `hidden` | Set to true or a function, called with the current user, returning true to exclude this Collection from navigation and admin routing. |
| `hooks` | Admin-specific hooks for this Collection. [More details](../hooks/collections). |
| `useAsTitle` | Specify a top-level field to use for a document title throughout the Admin Panel. If no field is defined, the ID of the document is used as the title. A field with `virtual: true` cannot be used as the title, unless it's linked to a relationship. |
| `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). |
| `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. |
| `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. |
| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `folders` | A boolean to enable folders for a given collection. Defaults to `false`. [More details](../folders/overview). |
| `meta` | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](../admin/metadata). |
| `preview` | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](../admin/preview). |
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
| `components` | Swap in your own React components to be used within this Collection. [More details](#custom-components). |
| `listSearchableFields` | Specify which fields should be searched in the List search view. [More details](#list-searchable-fields). |
| `pagination` | Set pagination-specific options for this Collection. [More details](#pagination). |
| `baseListFilter` | You can define a default base filter for this collection's List view, which will be merged into any filters that the user performs. |
### Custom Components
@@ -202,15 +194,13 @@ export const MyCollection: CollectionConfig = {
The following options are available:
| Option | Description |
| ------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `beforeDocumentControls` | Inject custom components before the Save / Publish buttons. [More details](../custom-components/edit-view#beforedocumentcontrols). |
| `editMenuItems` | Inject custom components within the 3-dot menu dropdown located in the document controls bar. [More details](../custom-components/edit-view#editmenuitems). |
| `SaveButton` | Replace the default Save Button within the Edit View. [Drafts](../versions/drafts) must be disabled. [More details](../custom-components/edit-view#savebutton). |
| `SaveDraftButton` | Replace the default Save Draft Button within the Edit View. [Drafts](../versions/drafts) must be enabled and autosave must be disabled. [More details](../custom-components/edit-view#savedraftbutton). |
| `PublishButton` | Replace the default Publish Button within the Edit View. [Drafts](../versions/drafts) must be enabled. [More details](../custom-components/edit-view#publishbutton). |
| `PreviewButton` | Replace the default Preview Button within the Edit View. [Preview](../admin/preview) must be enabled. [More details](../custom-components/edit-view#previewbutton). |
| `Upload` | Replace the default Upload component within the Edit View. [Upload](../upload/overview) must be enabled. [More details](../custom-components/edit-view#upload). |
| Option | Description |
| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `SaveButton` | Replace the default Save Button within the Edit View. [Drafts](../versions/drafts) must be disabled. [More details](../custom-components/edit-view#savebutton). |
| `SaveDraftButton` | Replace the default Save Draft Button within the Edit View. [Drafts](../versions/drafts) must be enabled and autosave must be disabled. [More details](../custom-components/edit-view#savedraftbutton). |
| `PublishButton` | Replace the default Publish Button within the Edit View. [Drafts](../versions/drafts) must be enabled. [More details](../custom-components/edit-view#publishbutton). |
| `PreviewButton` | Replace the default Preview Button within the Edit View. [Preview](../admin/preview) must be enabled. [More details](../custom-components/edit-view#previewbutton). |
| `Upload` | Replace the default Upload component within the Edit View. [Upload](../upload/overview) must be enabled. [More details](../custom-components/edit-view#upload). |
<Banner type="success">
**Note:** For details on how to build Custom Components, see [Building Custom

View File

@@ -51,7 +51,7 @@ For more granular control, pass a configuration object instead. Payload exposes
| Property | Description |
| -------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `Component` \* | Pass in the component path that should be rendered when a user navigates to this route. |
| `path` \* | Any valid URL path or array of paths that [`path-to-regexp`](https://www.npmjs.com/package/path-to-regex) understands. Must begin with a forward slash (`/`). |
| `path` \* | Any valid URL path or array of paths that [`path-to-regexp`](https://www.npmjs.com/package/path-to-regex) understands. |
| `exact` | Boolean. When true, will only match if the path matches the `usePathname()` exactly. |
| `strict` | When true, a path that has a trailing slash will only match a `location.pathname` with a trailing slash. This has no effect when there are additional URL segments in the pathname. |
| `sensitive` | When true, will match if the path is case sensitive. |
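Putting those properties together, a custom view entry might look like this sketch (the view key and component path are hypothetical, and the config location assumes the `admin.components.views` map shown elsewhere in these docs):
```ts
import { buildConfig } from 'payload'

const config = buildConfig({
  // ...
  admin: {
    components: {
      views: {
        myCustomView: {
          Component: '/components/MyCustomView', // hypothetical component path
          path: '/my-custom-view', // must begin with a forward slash
          exact: true, // only match this exact pathname
        },
      },
    },
  },
})
```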

View File

@@ -30,6 +30,7 @@ export const MyCollectionOrGlobalConfig: CollectionConfig = {
// - api
// - versions
// - version
// - livePreview
// - [key: string]
// See below for more details
},
@@ -87,7 +88,7 @@ export const MyCollection: CollectionConfig = {
### Edit View
The Edit View is where users interact with individual Collection and Global Documents. This is where they can view, edit, and save their content. The Edit View is keyed under the `default` property in the `views.edit` object.
For more information on customizing the Edit View, see the [Edit View](./edit-view) documentation.
@@ -106,8 +107,8 @@ export const MyCollection: CollectionConfig = {
components: {
views: {
edit: {
myCustomView: {
Component: '/path/to/MyCustomView',
myCustomTab: {
Component: '/path/to/MyCustomTab',
path: '/my-custom-tab',
// highlight-start
tab: {
@@ -115,14 +116,13 @@ export const MyCollection: CollectionConfig = {
},
// highlight-end
},
anotherCustomView: {
anotherCustomTab: {
Component: '/path/to/AnotherCustomView',
path: '/another-custom-view',
// highlight-start
tab: {
label: 'Another Custom View',
href: '/another-custom-view',
order: '100',
},
// highlight-end
},
@@ -143,7 +143,6 @@ The following options are available for tabs:
| ----------- | ------------------------------------------------------------------------------------------------------------- |
| `label` | The label to display in the tab. |
| `href` | The URL to navigate to when the tab is clicked. This is optional and defaults to the tab's `path`. |
| `order` | The order in which the tab appears in the navigation. Can be set on default and custom tabs. |
| `Component` | The component to render in the tab. This can be a Server or Client component. [More details](#tab-components) |
### Tab Components

View File

@@ -101,16 +101,15 @@ export const MyCollection: CollectionConfig = {
The following options are available:
| Path | Description |
| ------------------------ | ---------------------------------------------------------------------------------------------------------------------------- |
| `beforeDocumentControls` | Inject custom components before the Save / Publish buttons. [More details](#beforedocumentcontrols). |
| `editMenuItems` | Inject custom components within the 3-dot menu dropdown located in the document control bar. [More details](#editmenuitems). |
| `SaveButton` | A button that saves the current document. [More details](#savebutton). |
| `SaveDraftButton` | A button that saves the current document as a draft. [More details](#savedraftbutton). |
| `PublishButton` | A button that publishes the current document. [More details](#publishbutton). |
| `PreviewButton` | A button that previews the current document. [More details](#previewbutton). |
| `Description` | A description of the Collection. [More details](#description). |
| `Upload` | A file upload component. [More details](#upload). |
| Path | Description |
| ------------------------ | ---------------------------------------------------------------------------------------------------- |
| `beforeDocumentControls` | Inject custom components before the Save / Publish buttons. [More details](#beforedocumentcontrols). |
| `SaveButton` | A button that saves the current document. [More details](#savebutton). |
| `SaveDraftButton` | A button that saves the current document as a draft. [More details](#savedraftbutton). |
| `PublishButton` | A button that publishes the current document. [More details](#publishbutton). |
| `PreviewButton` | A button that previews the current document. [More details](#previewbutton). |
| `Description` | A description of the Collection. [More details](#description). |
| `Upload` | A file upload component. [More details](#upload). |
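For example, a Collection that injects a component before the document controls and swaps out the Preview Button might look like the following sketch (the component paths are illustrative):

```ts
import type { CollectionConfig } from 'payload'

export const Pages: CollectionConfig = {
  slug: 'pages',
  admin: {
    components: {
      edit: {
        // Illustrative paths, replace with your own components
        beforeDocumentControls: ['/components/StatusBadge'],
        PreviewButton: '/components/CustomPreviewButton',
      },
    },
  },
  fields: [],
}
```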
#### Globals
@@ -135,15 +134,14 @@ export const MyGlobal: GlobalConfig = {
The following options are available:
| Path | Description |
| ------------------------ | ---------------------------------------------------------------------------------------------------------------------------- |
| `beforeDocumentControls` | Inject custom components before the Save / Publish buttons. [More details](#beforedocumentcontrols). |
| `editMenuItems` | Inject custom components within the 3-dot menu dropdown located in the document control bar. [More details](#editmenuitems). |
| `SaveButton` | A button that saves the current document. [More details](#savebutton). |
| `SaveDraftButton` | A button that saves the current document as a draft. [More details](#savedraftbutton). |
| `PublishButton` | A button that publishes the current document. [More details](#publishbutton). |
| `PreviewButton` | A button that previews the current document. [More details](#previewbutton). |
| `Description` | A description of the Global. [More details](#description). |
| Path | Description |
| ------------------------ | ---------------------------------------------------------------------------------------------------- |
| `beforeDocumentControls` | Inject custom components before the Save / Publish buttons. [More details](#beforedocumentcontrols). |
| `SaveButton` | A button that saves the current document. [More details](#savebutton). |
| `SaveDraftButton` | A button that saves the current document as a draft. [More details](#savedraftbutton). |
| `PublishButton` | A button that publishes the current document. [More details](#publishbutton). |
| `PreviewButton` | A button that previews the current document. [More details](#previewbutton). |
| `Description` | A description of the Global. [More details](#description). |
### SaveButton
@@ -262,92 +260,6 @@ export function MyCustomDocumentControlButton(
}
```
### editMenuItems
The `editMenuItems` property allows you to inject custom components into the 3-dot menu dropdown located in the document controls bar. This dropdown contains default options including `Create New`, `Duplicate`, `Delete`, and other options when additional features are enabled. Any custom components you add will appear below these default items.
To add `editMenuItems`, use the `components.edit.editMenuItems` property in your [Collection Config](../configuration/collections):
#### Config Example
```ts
import type { CollectionConfig } from 'payload'
export const Pages: CollectionConfig = {
slug: 'pages',
admin: {
components: {
edit: {
// highlight-start
editMenuItems: ['/path/to/CustomEditMenuItem'],
// highlight-end
},
},
},
}
```
Here's an example of a custom `editMenuItems` component:
#### Server Component
```tsx
import React from 'react'
import { PopupList } from '@payloadcms/ui'
import type { EditMenuItemsServerProps } from 'payload'
export const EditMenuItems = async (props: EditMenuItemsServerProps) => {
const href = `/custom-action?id=${props.id}`
return (
<PopupList.ButtonGroup>
<PopupList.Button href={href}>Custom Edit Menu Item</PopupList.Button>
<PopupList.Button href={href}>
Another Custom Edit Menu Item - add as many as you need!
</PopupList.Button>
</PopupList.ButtonGroup>
)
}
```
#### Client Component
```tsx
'use client'
import React from 'react'
import { PopupList } from '@payloadcms/ui'
import type { EditViewMenuItemClientProps } from 'payload'
export const EditMenuItems = (props: EditViewMenuItemClientProps) => {
const handleClick = () => {
console.log('Custom button clicked!')
}
return (
<PopupList.ButtonGroup>
<PopupList.Button onClick={handleClick}>
Custom Edit Menu Item
</PopupList.Button>
<PopupList.Button onClick={handleClick}>
Another Custom Edit Menu Item - add as many as you need!
</PopupList.Button>
</PopupList.ButtonGroup>
)
}
```
<Banner type="info">
**Styling:** Use Payload's built-in `PopupList.Button` to ensure your menu
items automatically match the default dropdown styles. If you want a different
look, you can customize the appearance by passing your own `className` to
`PopupList.Button`, or use a completely custom button built with a standard
HTML `button` element or any other component that fits your design
preferences.
</Banner>
### SaveDraftButton
The `SaveDraftButton` property allows you to render a custom Save Draft Button in the Edit View.

View File

@@ -94,7 +94,6 @@ The following options are available:
| `beforeListTable` | An array of custom components to inject before the table of documents in the List View. [More details](#beforelisttable). |
| `afterList` | An array of custom components to inject after the list of documents in the List View. [More details](#afterlist). |
| `afterListTable` | An array of custom components to inject after the table of documents in the List View. [More details](#afterlisttable). |
| `listMenuItems` | An array of components to render within a menu next to the List Controls (after the Columns and Filters options) |
| `Description` | A component to render a description of the Collection. [More details](#description). |
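As a rough sketch, injecting components around the List View table might look like this (the component paths are illustrative, and the options are assumed to sit under `admin.components` of the Collection as listed above):

```ts
import type { CollectionConfig } from 'payload'

export const Posts: CollectionConfig = {
  slug: 'posts',
  admin: {
    components: {
      // Illustrative paths, replace with your own components
      beforeListTable: ['/components/ListBanner'],
      afterListTable: ['/components/ListFooter'],
    },
  },
  fields: [],
}
```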
### beforeList

View File

@@ -372,13 +372,13 @@ export default function MyCustomLogo() {
}
```
### header
### Header
The `header` property allows you to inject Custom Components above the Payload header.
The `Header` property allows you to inject Custom Components above the Payload header.
Examples of custom header components might include an announcements banner, a notifications bar, or anything else you'd like to display at the top of the Admin Panel in a prominent location.
To add `header` components, use the `admin.components.header` property in your Payload Config:
To add `Header` components, use the `admin.components.header` property in your Payload Config:
```ts
import { buildConfig } from 'payload'
@@ -388,14 +388,14 @@ export default buildConfig({
admin: {
// highlight-start
components: {
header: ['/path/to/your/component'],
Header: ['/path/to/your/component'],
},
// highlight-end
},
})
```
Here is an example of a simple `header` component:
Here is an example of a simple `Header` component:
```tsx
export default function MyCustomHeader() {

View File

@@ -183,13 +183,13 @@ Depending on which Database Adapter you use, your migration workflow might diffe
In relational databases, migrations will be **required** for non-development database environments. But with MongoDB, you might only need to run migrations once in a while (or never even need them).
#### MongoDB#mongodb-migrations
#### MongoDB
In MongoDB, you'll only really need to run migrations when you change your database shape and you have lots of existing data that you'd like to transform from Shape A to Shape B.
In this case, you can create a migration by running `pnpm payload migrate:create`, and then write the logic that you need to perform to migrate your documents to their new shape. You can then either run your migrations in CI before you build / deploy, or you can run them locally, against your production database, by using your production database connection string on your local computer and running the `pnpm payload migrate` command.
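As a loose sketch, a MongoDB migration that reshapes existing documents might look like this, assuming the generated migration file exports `up` and `down` functions that receive the initialized `payload` instance (the collection slug and field names below are hypothetical):

```ts
import type { Payload } from 'payload'

export async function up({ payload }: { payload: Payload }): Promise<void> {
  // Transform existing documents from Shape A to Shape B
  const { docs } = await payload.find({
    collection: 'posts',
    pagination: false,
  })

  for (const doc of docs) {
    await payload.update({
      collection: 'posts',
      id: doc.id,
      data: {
        // Hypothetical reshaping: copy an old field into a new one
        excerpt: (doc as { summary?: string }).summary ?? '',
      },
    })
  }
}

export async function down({ payload }: { payload: Payload }): Promise<void> {
  // Optionally revert the transformation here
}
```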
#### Postgres#postgres-migrations
#### Postgres
In relational databases like Postgres, migrations are a bit more important, because each time you add a new field or a new collection, you'll need to update the shape of your database to match your Payload Config (otherwise you'll see errors upon trying to read / write your data).

View File

@@ -41,7 +41,6 @@ export default buildConfig({
| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). |
| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include in the result but it doesn't exist in Payload, you can set this to `true`. Be careful, as Payload access control _won't_ work for this data. |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `disableFallbackSort` | Set to `true` to stop the adapter from adding a fallback sort when sorting by non-unique fields. The fallback sort ensures a consistent order of results, but it can affect performance in some cases. |
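Putting a few of these options together, a configuration sketch might look like this (the connection string and chosen options are illustrative):

```ts
import { buildConfig } from 'payload'
import { mongooseAdapter } from '@payloadcms/db-mongodb'

export default buildConfig({
  // ...
  db: mongooseAdapter({
    url: process.env.DATABASE_URI || '',
    // Keep unknown keys on returned documents instead of stripping them
    allowAdditionalKeys: true,
    // Respect an `id` passed to create operations
    allowIDOnCreate: true,
  }),
})
```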
## Access to Mongoose models

View File

@@ -80,8 +80,6 @@ export default buildConfig({
| `afterSchemaInit` | Drizzle schema hook. Runs after the schema is built. [More Details](#afterschemainit) |
| `generateSchemaOutputFile` | Override generated schema from `payload generate:db-schema` file path. Defaults to `{CWD}/src/payload-generated.schema.ts` |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `readReplicas` | An array of DB read replicas connection strings, can be used to offload read-heavy traffic. |
| `blocksAsJSON` | Store blocks as a JSON column instead of using the relational structure which can improve performance with a large amount of blocks |
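For example, a configuration sketch using a few of these options might look like the following (the connection strings and schema output path are illustrative):

```ts
import { buildConfig } from 'payload'
import { postgresAdapter } from '@payloadcms/db-postgres'

export default buildConfig({
  // ...
  db: postgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URI || '',
    },
    // Illustrative override of the generated schema output path
    generateSchemaOutputFile: './src/payload-generated.schema.ts',
    allowIDOnCreate: true,
  }),
})
```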
## Access to Drizzle

View File

@@ -50,7 +50,6 @@ export default buildConfig({
| `generateSchemaOutputFile` | Override generated schema from `payload generate:db-schema` file path. Defaults to `{CWD}/src/payload-generated.schema.ts` |
| `autoIncrement` | Pass `true` to enable SQLite [AUTOINCREMENT](https://www.sqlite.org/autoinc.html) for primary keys to ensure the same ID cannot be reused from deleted rows |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `blocksAsJSON` | Store blocks as a JSON column instead of using the relational structure which can improve performance with a large amount of blocks |
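A configuration sketch using these options might look like this (the database URL is illustrative):

```ts
import { buildConfig } from 'payload'
import { sqliteAdapter } from '@payloadcms/db-sqlite'

export default buildConfig({
  // ...
  db: sqliteAdapter({
    client: {
      url: process.env.DATABASE_URI || 'file:./payload.db',
    },
    // Use SQLite AUTOINCREMENT for primary keys
    autoIncrement: true,
  }),
})
```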
## Access to Drizzle

View File

@@ -37,7 +37,7 @@ export const MyGroupField: Field = {
| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`name`** | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
| **`fields`** \* | Array of field types to nest within this Group. |
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Defaults to the field name, if defined. |
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Required when `name` is undefined; otherwise defaults to the `name` converted to words. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. |
| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). |
@@ -113,7 +113,8 @@ export const ExampleCollection: CollectionConfig = {
## Presentational group fields
You can also use the Group field to only visually group fields without affecting the data structure. Not defining a label will render just the grouped fields.
You can also use the Group field to create a presentational group of fields. This is useful when you want to group fields together visually without affecting the data structure.
The label will be required when a `name` is not provided.
```ts
import type { CollectionConfig } from 'payload'

View File

@@ -305,23 +305,6 @@ The following additional properties are provided in the `ctx` object:
| `req` | The current HTTP request object. Contains `payload`, `user`, etc. |
| `event` | Either `onChange` or `submit` depending on the current action. Used as a performance opt-in. [More details](#async-field-validations). |
#### Localized and Built-in Error Messages
You can return localized error messages by utilizing the translation function provided in the `req` object:
```ts
import type { Field } from 'payload'
export const MyField: Field = {
type: 'text',
name: 'myField',
validate: (value, { req: { t } }) =>
Boolean(value) || t('validation:required'), // highlight-line
}
```
This way you can use [Custom Translations](https://payloadcms.com/docs/configuration/i18n#custom-translations) as well as Payload's built-in error messages (like `validation:required` used in the example above). For a full list of available translation strings, see the [English translation file](https://github.com/payloadcms/payload/blob/main/packages/translations/src/languages/en.ts) of Payload.
#### Reusing Default Field Validations
When using custom validation functions, Payload will use yours in place of the default. However, you might want to simply augment the default validation with your own custom logic.
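As a rough sketch, augmenting a default validator might look like this. It assumes the default validators (such as `text`) are exported from `payload/shared`, matching the import list shown in this section; adjust the import to wherever they are exported from in your version:

```ts
import type { Field } from 'payload'
// Assumption: default validators are exported from 'payload/shared'
import { text } from 'payload/shared'

export const myField: Field = {
  name: 'title',
  type: 'text',
  validate: (value, options) => {
    // Run your custom rule first...
    if (typeof value === 'string' && value.includes('  ')) {
      return 'Title must not contain double spaces'
    }
    // ...then fall back to Payload's default text validation
    return text(value, options)
  },
}
```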
@@ -351,6 +334,7 @@ import {
code,
date,
email,
group,
json,
number,
point,

View File

@@ -54,7 +54,7 @@ export const MySelectField: Field = {
| **`enumName`** | Custom enum name for this field when using SQL Database Adapter ([Postgres](/docs/database/postgres)). Auto-generated from name if not defined. |
| **`dbName`** | Custom table name (if `hasMany` set to `true`) for this field when using SQL Database Adapter ([Postgres](/docs/database/postgres)). Auto-generated from name if not defined. |
| **`interfaceName`** | Create a top level, reusable [Typescript interface](/docs/typescript/generating-types#custom-field-interfaces) & [GraphQL type](/docs/graphql/graphql-schema#custom-field-schemas). |
| **`filterOptions`** | Dynamically filter which options are available based on the user, data, etc. [More details](#filteroptions) |
| **`filterOptions`** | Dynamically filter which options are available based on the user, data, etc. [More details](#filterOptions) |
| **`typescriptSchema`** | Override field type generation with providing a JSON schema |
| **`virtual`** | Provide `true` to disable field in the database, or provide a string path to [link the field with a relationship](/docs/fields/relationship#linking-virtual-fields-with-relationships). See [Virtual Fields](https://payloadcms.com/blog/learn-how-virtual-fields-can-help-solve-common-cms-challenges) |
@@ -70,7 +70,7 @@ _\* An asterisk denotes that a property is required._
### filterOptions
Used to dynamically filter which options are available based on the current user, document data, or other criteria.
Used to dynamically filter which options are available based on the user, data, etc.
Some examples of this might include:

View File

@@ -23,12 +23,6 @@ On the payload config, you can configure the following settings under the `folde
// Type definition
type RootFoldersConfiguration = {
/**
* If true, the browse by folder view will be enabled
*
* @default true
*/
browseByFolder?: boolean
/**
   * An array of functions to be run when the folder collection is initialized
* This allows plugins to modify the collection configuration
@@ -88,16 +82,7 @@ To enable folders on a collection, you need to set the `admin.folders` property
```ts
// Type definition
type CollectionFoldersConfiguration =
| boolean
| {
/**
* If true, the collection will be included in the browse by folder view
*
* @default true
*/
browseByFolder?: boolean
}
type CollectionFoldersConfiguration = boolean
```
```ts

View File

@@ -16,15 +16,14 @@ The labels you provide for your Collections and Globals are used to name the Gra
At the top of your Payload Config you can define all the options to manage GraphQL.
| Option | Description |
| ---------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `mutations` | Any custom Mutations to be added in addition to what Payload provides. [More](/docs/graphql/extending) |
| `queries` | Any custom Queries to be added in addition to what Payload provides. [More](/docs/graphql/extending) |
| `maxComplexity` | A number used to set the maximum complexity allowed by requests [More](/docs/graphql/overview#query-complexity-limits) |
| `disablePlaygroundInProduction` | A boolean that, if `false`, will enable the GraphQL playground in production environments. Defaults to `true`. [More](/docs/graphql/overview#graphql-playground) |
| `disableIntrospectionInProduction` | A boolean that, if `false`, will enable GraphQL introspection in production environments. Defaults to `true`. |
| `disable` | A boolean that, if `true`, will disable GraphQL entirely. Defaults to `false`. |
| `validationRules` | A function that takes the ExecutionArgs and returns an array of ValidationRules. |
| Option | Description |
| ------------------------------- | ------------------------------------------------------------------------------------------------------------------------------- |
| `mutations` | Any custom Mutations to be added in addition to what Payload provides. [More](/docs/graphql/extending) |
| `queries` | Any custom Queries to be added in addition to what Payload provides. [More](/docs/graphql/extending) |
| `maxComplexity` | A number used to set the maximum complexity allowed by requests [More](/docs/graphql/overview#query-complexity-limits) |
| `disablePlaygroundInProduction` | A boolean that, if `false`, will enable the GraphQL playground. Defaults to `true`. [More](/docs/graphql/overview#graphql-playground) |
| `disable` | A boolean that, if `true`, will disable GraphQL entirely. Defaults to `false`. |
| `validationRules` | A function that takes the ExecutionArgs and returns an array of ValidationRules. |
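For example, these options live under the `graphQL` key of your Payload Config (the values shown are illustrative):

```ts
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  graphQL: {
    // Reject queries above this complexity score
    maxComplexity: 1000,
    // Keep the GraphQL Playground disabled in production
    disablePlaygroundInProduction: true,
  },
})
```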
## Collections

View File

@@ -121,7 +121,7 @@ The following arguments are provided to the `beforeValidate` hook:
### beforeChange
Immediately before validation, beforeChange hooks will run during create and update operations. At this stage, the data should be treated as unvalidated user input. There is no guarantee that required fields exist or that fields are in the correct format. As such, using this data for side effects requires manual validation. You can optionally modify the shape of the data to be saved.
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage, you can be confident that the data that will be saved to the document is valid in accordance to your field validations. You can optionally modify the shape of data to be saved.
```ts
import type { CollectionBeforeChangeHook } from 'payload'
@@ -160,7 +160,6 @@ The following arguments are provided to the `afterChange` hook:
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`collection`** | The [Collection](../configuration/collections) in which this Hook is running against. |
| **`context`** | Custom context passed between hooks. [More details](./context). |
| **`data`** | The incoming data passed through the operation. |
| **`doc`** | The resulting Document after changes are applied. |
| **`operation`** | The name of the operation that this hook is running within. |
| **`previousDoc`** | The Document before changes were applied. |
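For reference, a minimal `afterChange` hook using these arguments might look like the following sketch (the collection field and logging logic are illustrative):

```ts
import type { CollectionAfterChangeHook } from 'payload'

export const logSlugChanges: CollectionAfterChangeHook = async ({
  doc,
  previousDoc,
  operation,
  req,
}) => {
  // Log whenever an update changes the (hypothetical) `slug` field
  if (operation === 'update' && previousDoc?.slug !== doc?.slug) {
    req.payload.logger.info(`Slug changed: ${previousDoc?.slug} -> ${doc?.slug}`)
  }

  return doc
}
```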

View File

@@ -128,18 +128,20 @@ const MyCollection: CollectionConfig = {
## TypeScript
The default TypeScript interface for `context` is `{ [key: string]: unknown }`. If you prefer a more strict typing in your project or when authoring plugins for others, you can override this using the `declare module` syntax.
The default TypeScript interface for `context` is `{ [key: string]: unknown }`. If you prefer a more strict typing in your project or when authoring plugins for others, you can override this using the `declare` syntax.
This is known as [module augmentation / declaration merging](https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation), a TypeScript feature which allows us to add properties to existing types. Simply put this in any `.ts` or `.d.ts` file:
This is known as "type augmentation", a TypeScript feature which allows us to add types to existing types. Simply put this in any `.ts` or `.d.ts` file:
```ts
import { RequestContext as OriginalRequestContext } from 'payload'
declare module 'payload' {
// Augment the RequestContext interface to include your custom properties
export interface RequestContext {
// Create a new interface that merges your additional fields with the original one
export interface RequestContext extends OriginalRequestContext {
myObject?: string
// ...
}
}
```
This will add the property `myObject` with a type of string to every context object. Make sure to follow this example correctly, as module augmentation can mess up your types if you do it wrong.
This will add the property `myObject` with a type of string to every context object. Make sure to follow this example correctly, as type augmentation can mess up your types if you do it wrong.
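Once augmented, the custom property is type-checked anywhere `context` is accepted, for example when passing it through the Local API (the collection slug, ID, and value below are purely illustrative):

```ts
import { getPayload } from 'payload'
import config from '@payload-config'

const payload = await getPayload({ config })

await payload.update({
  collection: 'posts', // illustrative collection slug
  id: 'replace-with-a-real-id',
  data: { title: 'Updated title' },
  // `myObject` is now typed according to the augmented RequestContext
  context: { myObject: 'hello' },
})
```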

View File

@@ -128,7 +128,6 @@ The following arguments are provided to the `afterChange` hook:
| ----------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`global`** | The [Global](../configuration/globals) in which this Hook is running against. |
| **`context`** | Custom context passed between hooks. [More details](./context). |
| **`data`** | The incoming data passed through the operation. |
| **`doc`** | The resulting Document after changes are applied. |
| **`previousDoc`** | The Document before changes were applied. |
| **`req`** | The [Web Request](https://developer.mozilla.org/en-US/docs/Web/API/Request) object. This is mocked for [Local API](../local-api/overview) operations. |

View File

@@ -68,26 +68,3 @@ Here's a quick overview:
- Workflows are groupings of specific tasks which should be run in-order, and can be retried from a specific point of failure
- A Job is an instance of a single task or workflow which will be executed
- A Queue is a way to segment your jobs into different "groups" - for example, some to run nightly, and others to run every 10 minutes
### Visualizing Jobs in the Admin UI
By default, the internal `payload-jobs` collection is hidden from the Payload Admin Panel. To make this collection visible for debugging or inspection purposes, you can override its configuration using `jobsCollectionOverrides`.
```ts
import { buildConfig } from 'payload'
export default buildConfig({
// ... other config
jobs: {
// ... other job settings
jobsCollectionOverrides: ({ defaultJobsCollection }) => {
if (!defaultJobsCollection.admin) {
defaultJobsCollection.admin = {}
}
defaultJobsCollection.admin.hidden = false
return defaultJobsCollection
},
},
})
```

View File

@@ -30,9 +30,7 @@ As mentioned above, you can queue jobs, but the jobs won't run unless a worker p
### Cron jobs
The `jobs.autoRun` property allows you to configure cron jobs that automatically run queued jobs at specified intervals. Note that this does not _queue_ new jobs - only _runs_ jobs that are already in the specified queue.
**Example**:
You can use the `jobs.autoRun` property to configure cron jobs:
```ts
export default buildConfig({
@@ -82,12 +80,6 @@ await fetch('/api/payload-jobs/run?limit=100&queue=nightly', {
This endpoint is automatically mounted for you and is helpful in conjunction with serverless platforms like Vercel, where you might want to use Vercel Cron to invoke a serverless function that executes your jobs.
**Query Parameters:**
- `limit`: The maximum number of jobs to run in this invocation (default: 10).
- `queue`: The name of the queue to run jobs from. If not specified, jobs will be run from the `default` queue.
- `allQueues`: If set to `true`, all jobs from all queues will be run. This will ignore the `queue` parameter.
**Vercel Cron Example**
If you're deploying on Vercel, you can add a `vercel.json` file in the root of your project that configures Vercel Cron to invoke the `run` endpoint on a cron schedule.
@@ -145,15 +137,11 @@ If you want to process jobs programmatically from your server-side code, you can
**Run all jobs:**
```ts
// Run all jobs from the `default` queue - default limit is 10
const results = await payload.jobs.run()
// You can customize the queue name and limit by passing them as arguments:
await payload.jobs.run({ queue: 'nightly', limit: 100 })
// Run all jobs from all queues:
await payload.jobs.run({ allQueues: true })
// You can provide a where clause to filter the jobs that should be run:
await payload.jobs.run({
where: { 'input.message': { equals: 'secret' } },
@@ -170,22 +158,10 @@ const results = await payload.jobs.runByID({
### Bin script
Finally, you can process jobs via the bin script that comes with Payload out of the box. By default, this script will run jobs from the `default` queue, with a limit of 10 jobs per invocation:
Finally, you can process jobs via the bin script that comes with Payload out of the box.
```sh
npx payload jobs:run
```
You can override the default queue and limit by passing the `--queue` and `--limit` flags:
```sh
npx payload jobs:run --queue myQueue --limit 15
```
If you want to run all jobs from all queues, you can pass the `--all-queues` flag:
```sh
npx payload jobs:run --all-queues
npx payload jobs:run --queue default --limit 10
```
In addition, the bin script allows you to pass a `--cron` flag to the `jobs:run` command to run the jobs on a scheduled, cron basis:

View File

@@ -329,7 +329,7 @@ available:
// responseHeaders: { ... } // returned headers from the response
// }
const result = await payload.auth({ headers, canSetHeaders: false })
const result = await payload.auth({ headers })
```
### Login

View File

@@ -393,7 +393,7 @@ export default function LoginForm() {
### Logout
Logs out the current user by clearing the authentication cookie and current sessions.
Logs out the current user by clearing the authentication cookie.
#### Importing the `logout` function
@@ -401,7 +401,7 @@ Logs out the current user by clearing the authentication cookie and current sess
import { logout } from '@payloadcms/next/auth'
```
Similar to the login function, you now need to pass your Payload config to this function and this cannot be done in a client component. Use a helper server function as shown below. To ensure all sessions are cleared, set `allSessions: true` in the options. If you wish to log out but keep current sessions active, you can set this to `false` or leave it `undefined`.
Similar to the login function, you now need to pass your Payload config to this function and this cannot be done in a client component. Use a helper server function as shown below.
```ts
'use server'
@@ -411,7 +411,7 @@ import config from '@payload-config'
export async function logoutAction() {
try {
return await logout({ allSessions: true, config })
return await logout({ config })
} catch (error) {
throw new Error(
`Logout failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
@@ -434,7 +434,7 @@ export default function LogoutButton() {
### Refresh
Refreshes the authentication token and current session for the logged-in user.
Refreshes the authentication token for the logged-in user.
#### Importing the `refresh` function
@@ -453,6 +453,7 @@ import config from '@payload-config'
export async function refreshAction() {
try {
return await refresh({
collection: 'users', // pass your collection slug
config,
})
} catch (error) {

View File

@@ -470,31 +470,6 @@ formBuilderPlugin({
})
```
### Preventing generated schema naming conflicts
Plugin fields can cause GraphQL type name collisions with your own blocks or collections. This results in errors like:
```plaintext
Error: Schema must contain uniquely named types but contains multiple types named "Country"
```
You can resolve this by overriding:
- `graphQL.singularName` in your collection config (for GraphQL schema conflicts)
- `interfaceName` in your block config
- `interfaceName` in the plugin field config
```ts
// payload.config.ts
formBuilderPlugin({
fields: {
country: {
interfaceName: 'CountryFormBlock', // overrides the generated type name to avoid a conflict
},
},
})
```
## Email
This plugin relies on the [email configuration](../email/overview) defined in your Payload configuration. It will read from your config and attempt to send your emails using the credentials provided.

View File

@@ -16,8 +16,8 @@ This plugin sets up multi-tenancy for your application from within your [Admin P
If you need help, check out our [Community
Help](https://payloadcms.com/community-help). If you think you've found a bug,
please [open a new
issue](https://github.com/payloadcms/payload/issues/new/choose) with as much
detail as possible.
issue](https://github.com/payloadcms/payload/issues/new?assignees=&labels=plugin%3A%multi-tenant&template=bug_report.md&title=plugin-multi-tenant%3A)
with as much detail as possible.
</Banner>
## Core features
@@ -35,7 +35,7 @@ This plugin sets up multi-tenancy for your application from within your [Admin P
By default this plugin cleans up documents when a tenant is deleted. You should ensure you have
strong access control on your tenants collection to prevent deletions by unauthorized users.
You can disable this behavior by setting `cleanupAfterTenantDelete` to `false` in the plugin options.
</Banner>
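For example, turning the cleanup behavior off might look like the following sketch (the tenant-enabled collections map shown here is illustrative and should mirror your own setup):

```ts
import { buildConfig } from 'payload'
import { multiTenantPlugin } from '@payloadcms/plugin-multi-tenant'

export default buildConfig({
  // ...
  plugins: [
    multiTenantPlugin({
      // Illustrative tenant-enabled collections
      collections: {
        pages: {},
      },
      // Keep tenant documents when a tenant is deleted
      cleanupAfterTenantDelete: false,
    }),
  ],
})
```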

View File

@@ -180,8 +180,8 @@ and `createBreadcrumbField` methods. They will merge your customizations overtop
```ts
import type { CollectionConfig } from 'payload'
import { createParentField } from '@payloadcms/plugin-nested-docs'
import { createBreadcrumbsField } from '@payloadcms/plugin-nested-docs'
import { createParentField } from '@payloadcms/plugin-nested-docs/fields'
import { createBreadcrumbsField } from '@payloadcms/plugin-nested-docs/fields'
const examplePageConfig: CollectionConfig = {
slug: 'pages',

View File

@@ -74,32 +74,16 @@ import * as Sentry from '@sentry/nextjs'
const config = buildConfig({
collections: [Pages, Media],
plugins: [sentryPlugin({ Sentry })],
plugins: [
sentryPlugin({
Sentry,
}),
],
})
export default config
```
## Instrumenting Database Queries
If you want Sentry to capture Postgres query performance traces, you need to inject the Sentry-patched `pg` driver into the Postgres adapter. This ensures Sentrys instrumentation hooks into your database calls.
```ts
import * as Sentry from '@sentry/nextjs'
import { buildConfig } from 'payload'
import { sentryPlugin } from '@payloadcms/plugin-sentry'
import { postgresAdapter } from '@payloadcms/db-postgres'
import pg from 'pg'
export default buildConfig({
db: postgresAdapter({
pool: { connectionString: process.env.DATABASE_URL },
pg, // Inject the patched pg driver for Sentry instrumentation
}),
plugins: [sentryPlugin({ Sentry })],
})
```
## Options
- `Sentry` : Sentry | **required**

View File

@@ -115,7 +115,6 @@ Set the `uploadsCollection` to your application's upload-enabled collection slug
##### `tabbedUI`
When the `tabbedUI` property is `true`, it appends an `SEO` tab onto your config using Payload's [Tabs Field](../fields/tabs). If your collection is not already tab-enabled, meaning the first field in your config is not of type `tabs`, then one will be created for you called `Content`. Defaults to `false`.
Note that the order of plugins or fields in your config may affect whether or not the plugin can smartly merge tabs with your existing fields. If you have a complex structure we recommend you [make use of the fields directly](#direct-use-of-fields) instead of relying on this config option.
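For reference, enabling the plugin with `tabbedUI` might look like the following sketch (the collection slugs are illustrative):

```ts
import { buildConfig } from 'payload'
import { seoPlugin } from '@payloadcms/plugin-seo'

export default buildConfig({
  // ...
  plugins: [
    seoPlugin({
      collections: ['pages'], // illustrative collection slugs
      uploadsCollection: 'media',
      // Appends an `SEO` tab using Payload's Tabs Field
      tabbedUI: true,
    }),
  ],
})
```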
<Banner type="info">
If you wish to continue to use top-level or sidebar fields with `tabbedUI`,

View File

@@ -1,30 +0,0 @@
---
title: Building without a DB connection
label: Building without a DB connection
order: 10
desc: You don't want to have a DB connection while building your Docker container? Learn how to prevent that!
keywords: deployment, production, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, nextjs
---
One of the most common problems when building a site for production, especially with Docker, is the DB connection requirement.
The important note is that Payload by itself does not have this requirement, but [Next.js' SSG](https://nextjs.org/docs/pages/building-your-application/rendering/static-site-generation) does if any of your route segments have SSG enabled (which is the default, unless you opted out or used a [Dynamic API](https://nextjs.org/docs/app/deep-dive/caching#dynamic-apis)) and use the Payload Local API.
Solutions:
## Using the experimental-build-mode Next.js build flag
You can run Next.js build using the `pnpx next build --experimental-build-mode compile` command to only compile the code without static generation, which does not require a DB connection. In that case, your pages will be rendered dynamically, but after that, you can still generate static pages using the `pnpx next build --experimental-build-mode generate` command when you have a DB connection.
[Next.js documentation](https://nextjs.org/docs/pages/api-reference/cli/next#next-build-options)
## Opting-out of SSG
You can opt out of SSG by adding this to all of the route segment files:
```ts
export const dynamic = 'force-dynamic'
```
**Note that it will disable static optimization and your site will be slower**.
More on [Next.js documentation](https://nextjs.org/docs/app/deep-dive/caching#opting-out-2)

View File

@@ -150,7 +150,7 @@ Follow the docs to configure any one of these storage providers. For local devel
## Docker
This is an example of a multi-stage docker build of Payload for production. Ensure you are setting your environment
variables on deployment, like `PAYLOAD_SECRET`, `PAYLOAD_CONFIG_PATH`, and `DATABASE_URI` if needed. If you don't want to have a DB connection and your build requires that, learn [here](./building-without-a-db-connection) how to prevent that.
variables on deployment, like `PAYLOAD_SECRET`, `PAYLOAD_CONFIG_PATH`, and `DATABASE_URI` if needed.
In your Next.js config, set the `output` property `standalone`.
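For example, a Next.js config with standalone output might look like this sketch (assuming the `withPayload` wrapper from `@payloadcms/next`):

```ts
// next.config.mjs
import { withPayload } from '@payloadcms/next/withPayload'

/** @type {import('next').NextConfig} */
const nextConfig = {
  // Produce a standalone server bundle for the Docker image
  output: 'standalone',
}

export default withPayload(nextConfig)
```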

View File

@@ -46,12 +46,11 @@ const config = buildConfig({
The following options are available for Query Presets:
| Option | Description |
| ------------------- | ------------------------------------------------------------------------------------------------------------------------------- |
| `access` | Used to define custom collection-level access control that applies to all presets. [More details](#access-control). |
| `filterConstraints` | Used to define which constraints are available to users when managing presets. [More details](#constraint-access-control). |
| `constraints` | Used to define custom document-level access control that apply to individual presets. [More details](#document-access-control). |
| `labels` | Custom labels to use for the Query Presets collection. |
| Option | Description |
| ------------- | ------------------------------------------------------------------------------------------------------------------------------- |
| `access` | Used to define custom collection-level access control that applies to all presets. [More details](#access-control). |
| `constraints` | Used to define custom document-level access control that apply to individual presets. [More details](#document-access-control). |
| `labels` | Custom labels to use for the Query Presets collection. |
## Access Control
@@ -60,7 +59,7 @@ Query Presets are subject to the same [Access Control](../access-control/overvie
Access Control for Query Presets can be customized in two ways:
1. [Collection Access Control](#collection-access-control): Applies to all presets. These rules are not controllable by the user and are statically defined in the config.
2. [Document Access Control](#document-access-control): Applies to each individual preset. These rules are controllable by the user and are dynamically defined on each record in the database.
2. [Document Access Control](#document-access-control): Applies to each individual preset. These rules are controllable by the user and are saved to the document.
### Collection Access Control
@@ -98,7 +97,7 @@ This example restricts all Query Presets to users with the role of `admin`.
### Document Access Control
You can also define access control rules that apply to each specific preset. Users have the ability to define and modify these rules on the fly as they manage presets. These are saved dynamically in the database on each record.
You can also define access control rules that apply to each specific preset. Users have the ability to define and modify these rules on the fly as they manage presets. These are saved dynamically in the database on each document.
When a user manages a preset, document-level access control options will be available to them in the Admin Panel for each operation.
@@ -151,8 +150,8 @@ const config = buildConfig({
}),
},
],
// highlight-end
},
// highlight-end
},
})
```
@@ -172,39 +171,3 @@ The following options are available for each constraint:
| `value` | The value to store in the database when this constraint is selected. |
| `fields` | An array of fields to render when this constraint is selected. |
| `access` | A function that determines the access control rules for this constraint. |
### Constraint Access Control
Used to dynamically filter which constraints are available based on the current user, document data, or other criteria.
Some examples of this might include:
- Ensuring that only "admins" are allowed to make a preset available to "everyone"
- Preventing the "onlyMe" option from being selected based on a hypothetical "disablePrivatePresets" checkbox
When a user lacks the permission to set a constraint, the option will either be hidden from them, or disabled if it is already saved to that preset.
To do this, you can use the `filterConstraints` property in your [Payload Config](../configuration/overview):
```ts
import { buildConfig } from 'payload'
const config = buildConfig({
// ...
queryPresets: {
// ...
// highlight-start
filterConstraints: ({ req, options }) =>
!req.user?.roles?.includes('admin')
? options.filter(
(option) =>
(typeof option === 'string' ? option : option.value) !==
'everyone',
)
: options,
// highlight-end
},
})
```
The `filterConstraints` function receives the same arguments as [`filterOptions`](../fields/select#filteroptions) in the [Select field](../fields/select).

View File

@@ -738,7 +738,7 @@ Payload supports a method override feature that allows you to send GET requests
### How to Use
To use this feature, include the `X-Payload-HTTP-Method-Override` header set to `GET` in your POST request. The parameters should be sent in the body of the request with the `Content-Type` set to `application/x-www-form-urlencoded`.
To use this feature, include the `X-HTTP-Method-Override` header set to `GET` in your POST request. The parameters should be sent in the body of the request with the `Content-Type` set to `application/x-www-form-urlencoded`.
### Example
@@ -753,7 +753,7 @@ const res = await fetch(`${api}/${collectionSlug}`, {
headers: {
'Accept-Language': i18n.language,
'Content-Type': 'application/x-www-form-urlencoded',
'X-Payload-HTTP-Method-Override': 'GET',
'X-HTTP-Method-Override': 'GET',
},
body: qs.stringify({
depth: 1,

View File

@@ -1,485 +0,0 @@
---
description: Features officially maintained by Payload.
keywords: lexical, rich text, editor, headless cms, official, features
label: Official Features
order: 35
title: Official Features
---
Below are all the Rich Text Features Payload offers. Everything is customizable; you can [create your own features](/docs/rich-text/custom-features), modify ours and share them with the community.
## Features Overview
| Feature Name | Included by default | Description |
| ------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`BoldFeature`** | Yes | Adds support for bold text formatting. |
| **`ItalicFeature`** | Yes | Adds support for italic text formatting. |
| **`UnderlineFeature`** | Yes | Adds support for underlined text formatting. |
| **`StrikethroughFeature`** | Yes | Adds support for strikethrough text formatting. |
| **`SubscriptFeature`** | Yes | Adds support for subscript text formatting. |
| **`SuperscriptFeature`** | Yes | Adds support for superscript text formatting. |
| **`InlineCodeFeature`** | Yes | Adds support for inline code formatting. |
| **`ParagraphFeature`** | Yes | Provides entries in both the slash menu and toolbar dropdown for explicit paragraph creation or conversion. |
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
| **`AlignFeature`** | Yes | Adds support for text alignment (left, center, right, justify) |
| **`IndentFeature`** | Yes | Adds support for text indentation with toolbar buttons |
| **`UnorderedListFeature`** | Yes | Adds support for unordered lists (ul) |
| **`OrderedListFeature`** | Yes | Adds support for ordered lists (ol) |
| **`ChecklistFeature`** | Yes | Adds support for interactive checklists |
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes |
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
| **`HorizontalRuleFeature`** | Yes | Adds support for horizontal rules / separators. Basically displays an `<hr>` element |
| **`InlineToolbarFeature`** | Yes | Provides a floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
| **`FixedToolbarFeature`** | No | Provides a persistent toolbar pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
| **`TreeViewFeature`** | No | Provides a debug box under the editor, which allows you to see the current editor state live, the DOM, as well as time travel. Very useful for debugging |
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
| **`TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. |
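All of these features are passed to the editor via its `features` property. A minimal sketch of extending the defaults might look like this (the specific features chosen here are only examples):

```ts
import {
  BlocksFeature,
  FixedToolbarFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

export const editor = lexicalEditor({
  features: ({ defaultFeatures }) => [
    ...defaultFeatures,
    // Pin a persistent toolbar to the top of the editor
    FixedToolbarFeature(),
    // Enable Payload Blocks inside the editor
    BlocksFeature({
      blocks: [], // add your Block configs here
    }),
  ],
})
```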
## In depth
### BoldFeature
- Description: Adds support for bold text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Markdown Support: `**bold**` or `__bold__`
- Keyboard Shortcut: Ctrl/Cmd + B
### ItalicFeature
- Description: Adds support for italic text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Markdown Support: `*italic*` or `_italic_`
- Keyboard Shortcut: Ctrl/Cmd + I
### UnderlineFeature
- Description: Adds support for underlined text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Keyboard Shortcut: Ctrl/Cmd + U
### StrikethroughFeature
- Description: Adds support for strikethrough text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Markdown Support: `~~strikethrough~~`
### SubscriptFeature
- Description: Adds support for subscript text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
### SuperscriptFeature
- Description: Adds support for superscript text formatting, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
### InlineCodeFeature
- Description: Adds support for inline code formatting with distinct styling, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Markdown Support: \`code\`
### ParagraphFeature
- Description: Provides entries in both the slash menu and toolbar dropdown for explicit paragraph creation or conversion.
- Included by default: Yes
### HeadingFeature
- Description: Adds support for heading nodes (H1-H6) with toolbar dropdown and slash menu entries for each enabled heading size.
- Included by default: Yes
- Markdown Support: `#`, `##`, `###`, ..., at start of line.
- Types:
```typescript
type HeadingFeatureProps = {
enabledHeadingSizes?: HeadingTagType[] // ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']
}
```
- Usage example:
```typescript
HeadingFeature({
enabledHeadingSizes: ['h1', 'h2', 'h3'], // Default: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']
})
```
### AlignFeature
- Description: Allows text alignment (left, center, right, justify), along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Keyboard Shortcut: Ctrl/Cmd + Shift + L/E/R/J (left/center/right/justify)
### IndentFeature
- Description: Adds support for text indentation, along with buttons to apply it in both fixed and inline toolbars.
- Included by default: Yes
- Keyboard Shortcut: Tab (increase), Shift + Tab (decrease)
- Types:
```typescript
type IndentFeatureProps = {
/**
* The nodes that should not be indented. "type" property of the nodes you don't want to be indented.
* These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist.
*/
disabledNodes?: string[]
/**
* If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode.
* Instead, Tab will only be used for block-level indentation.
* @default false
*/
disableTabNode?: boolean
}
```
- Usage example:
```typescript
// Allow block-level indentation only
IndentFeature({
disableTabNode: true,
})
```
### UnorderedListFeature
- Description: Adds support for unordered lists (bullet points) with toolbar dropdown and slash menu entries.
- Included by default: Yes
- Markdown Support: `-`, `*`, or `+` at start of line
### OrderedListFeature
- Description: Adds support for ordered lists (numbered lists) with toolbar dropdown and slash menu entries.
- Included by default: Yes
- Markdown Support: `1.` at start of line
### ChecklistFeature
- Description: Adds support for interactive checklists with toolbar dropdown and slash menu entries.
- Included by default: Yes
- Markdown Support: `- [ ]` (unchecked) or `- [x]` (checked)
### LinkFeature
- Description: Allows creation of internal and external links with toolbar buttons and automatic URL conversion.
- Included by default: Yes
- Markdown Support: `[anchor](url)`
- Types:
```typescript
type LinkFeatureServerProps = {
/**
* Disables the automatic creation of links
* from URLs typed or pasted into the editor,
* @default false
*/
disableAutoLinks?: 'creationOnly' | true
/**
* A function or array defining additional fields for the link feature.
* These will be displayed in the link editor drawer.
*/
fields?:
| ((args: {
config: SanitizedConfig
defaultFields: FieldAffectingData[]
}) => (Field | FieldAffectingData)[])
| Field[]
/**
* Sets a maximum population depth for the internal
* doc default field of link, regardless of the
* remaining depth when the field is reached.
*/
maxDepth?: number
} & ExclusiveLinkCollectionsProps
type ExclusiveLinkCollectionsProps =
| {
disabledCollections?: CollectionSlug[]
enabledCollections?: never
}
| {
disabledCollections?: never
enabledCollections?: CollectionSlug[]
}
```
- Usage example:
```typescript
LinkFeature({
fields: ({ defaultFields }) => [
...defaultFields,
{
name: 'rel',
type: 'select',
options: ['noopener', 'noreferrer', 'nofollow'],
},
],
enabledCollections: ['pages', 'posts'], // Collections for internal links
maxDepth: 2, // Population depth for internal links
disableAutoLinks: false, // Allow auto-conversion of URLs
})
```
### RelationshipFeature
- Description: Allows creation of block-level relationships to other documents with toolbar button and slash menu entry.
- Included by default: Yes
- Types:
```typescript
type RelationshipFeatureProps = {
/**
* Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached.
*/
maxDepth?: number
} & ExclusiveRelationshipFeatureProps
type ExclusiveRelationshipFeatureProps =
| {
disabledCollections?: CollectionSlug[]
enabledCollections?: never
}
| {
disabledCollections?: never
enabledCollections?: CollectionSlug[]
}
```
- Usage example:
```typescript
RelationshipFeature({
disabledCollections: ['users'], // Collections to exclude
maxDepth: 2, // Population depth for relationships
})
```
### UploadFeature
- Description: Allows creation of upload/media nodes with toolbar button and slash menu entry, supports all file types.
- Included by default: Yes
- Types:
```typescript
type UploadFeatureProps = {
collections?: {
[collection: CollectionSlug]: {
fields: Field[]
}
}
/**
* Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached.
*/
maxDepth?: number
}
```
- Usage example:
```typescript
UploadFeature({
collections: {
uploads: {
fields: [
{
name: 'caption',
type: 'text',
label: 'Caption',
},
{
name: 'alt',
type: 'text',
label: 'Alt Text',
},
],
},
},
maxDepth: 1, // Population depth for uploads
})
```
### BlockquoteFeature
- Description: Allows creation of blockquotes with toolbar button and slash menu entry.
- Included by default: Yes
- Markdown Support: `> quote text`
### HorizontalRuleFeature
- Description: Adds support for horizontal rules/separators with toolbar button and slash menu entry.
- Included by default: Yes
- Markdown Support: `---`
### InlineToolbarFeature
- Description: Provides a floating toolbar that appears when text is selected, containing formatting options relevant to selected text.
- Included by default: Yes
### FixedToolbarFeature
- Description: Provides a persistent toolbar pinned to the top of the editor that's always visible.
- Included by default: No
- Types:
```typescript
type FixedToolbarFeatureProps = {
/**
* @default false
* If this is enabled, the toolbar will apply
* to the focused editor, not the editor with
* the FixedToolbarFeature.
*/
applyToFocusedEditor?: boolean
/**
* Custom configurations for toolbar groups
* Key is the group key (e.g. 'format', 'indent', 'align')
* Value is a partial ToolbarGroup object that will
* be merged with the default configuration
*/
customGroups?: CustomGroups
/**
* @default false
* If there is a parent editor with a fixed toolbar,
* this will disable the toolbar for this editor.
*/
disableIfParentHasFixedToolbar?: boolean
}
```
- Usage example:
```typescript
FixedToolbarFeature({
applyToFocusedEditor: false, // Apply to focused editor
customGroups: {
format: {
// Custom configuration for format group
},
},
})
```
### BlocksFeature
- Description: Allows use of Payload's Blocks Field directly in the editor with toolbar buttons and slash menu entries for each block type.
- Included by default: No
- Types:
```typescript
type BlocksFeatureProps = {
blocks?: (Block | BlockSlug)[] | Block[]
inlineBlocks?: (Block | BlockSlug)[] | Block[]
}
```
- Usage example:
```typescript
BlocksFeature({
blocks: [
{
slug: 'callout',
fields: [
{
name: 'text',
type: 'text',
required: true,
},
],
},
],
inlineBlocks: [
{
slug: 'mention',
fields: [
{
name: 'name',
type: 'text',
required: true,
},
],
},
],
})
```
### TreeViewFeature
- Description: Provides a debug panel below the editor showing the editor's internal state, DOM tree, and time travel debugging.
- Included by default: No
### EXPERIMENTAL_TableFeature
- Description: Adds support for tables with toolbar button and slash menu entry for creation and editing.
- Included by default: No
### TextStateFeature
- Description: Allows storing key-value attributes in text nodes with inline styles and toolbar dropdown for style selection.
- Included by default: No
- Types:
```typescript
type TextStateFeatureProps = {
/**
* The keys of the top-level object (stateKeys) represent the attributes that the textNode can have (e.g., color).
* The values of the top-level object (stateValues) represent the values that the attribute can have (e.g., red, blue, etc.).
* Within the stateValue, you can define inline styles and labels.
*/
state: { [stateKey: string]: StateValues }
}
type StateValues = {
[stateValue: string]: {
css: StyleObject
label: string
}
}
type StyleObject = {
[K in keyof PropertiesHyphenFallback]?:
| Extract<PropertiesHyphenFallback[K], string>
| undefined
}
```
- Usage example:
```typescript
// We offer default colors that have good contrast and look good in dark and light mode.
import { defaultColors, TextStateFeature } from '@payloadcms/richtext-lexical'
TextStateFeature({
// prettier-ignore
state: {
color: {
...defaultColors,
// fancy gradients!
galaxy: { label: 'Galaxy', css: { background: 'linear-gradient(to right, #0000ff, #ff0000)', color: 'white' } },
sunset: { label: 'Sunset', css: { background: 'linear-gradient(to top, #ff5f6d, #6a3093)' } },
},
// You can have both colored and underlined text at the same time.
// If you don't want that, you should group them within the same key.
// (just like I did with defaultColors and my fancy gradients)
underline: {
'solid': { label: 'Solid', css: { 'text-decoration': 'underline', 'text-underline-offset': '4px' } },
// You'll probably want to use the CSS light-dark() utility.
'yellow-dashed': { label: 'Yellow Dashed', css: { 'text-decoration': 'underline dashed', 'text-decoration-color': 'light-dark(#EAB308,yellow)', 'text-underline-offset': '4px' } },
},
},
}),
```
This is what the example above will look like:
<LightDarkImage
srcDark="https://payloadcms.com/images/docs/text-state-feature.png"
srcLight="https://payloadcms.com/images/docs/text-state-feature.png"
alt="Example usage in light and dark mode for TextStateFeature with defaultColors and some custom styles"
/>

View File

@@ -138,9 +138,39 @@ import { CallToAction } from '../blocks/CallToAction'
| **`defaultFeatures`** | This opinionated array contains all "recommended" default features. You can see which features are included in the default features in the table below. |
| **`rootFeatures`** | This array contains all features that are enabled in the root richText editor (the one defined in the payload.config.ts). If this field is the root richText editor, or if the root richText editor is not a lexical editor, this array will be empty. |
## Official Features
## Features overview
You can find more information about the official features in our [official features docs](../rich-text/official-features).
Here's an overview of all the included features:
| Feature Name | Included by default | Description |
| ----------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **`BoldFeature`** | Yes | Handles the bold text format |
| **`ItalicFeature`** | Yes | Handles the italic text format |
| **`UnderlineFeature`** | Yes | Handles the underline text format |
| **`StrikethroughFeature`** | Yes | Handles the strikethrough text format |
| **`SubscriptFeature`** | Yes | Handles the subscript text format |
| **`SuperscriptFeature`** | Yes | Handles the superscript text format |
| **`InlineCodeFeature`** | Yes | Handles the inline-code text format |
| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs |
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
| **`AlignFeature`** | Yes | Allows you to align text left, center, and right |
| **`IndentFeature`** | Yes | Allows you to indent text with the tab key |
| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) |
| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) |
| **`ChecklistFeature`** | Yes | Adds checklists |
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes |
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `<hr>` element |
| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
| **`EXPERIMENTAL_TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. |
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
## Creating your own, custom Feature
@@ -306,17 +336,3 @@ You can customize the placeholder (the text that appears in the editor when it's
}),
}
```
## Detecting empty editor state
When you first type into a rich text field and subsequently delete everything through the admin panel, its value changes from `null` to a JSON object containing an empty paragraph.
If needed, you can reset the field value to `null` programmatically - for example, by using a custom hook to detect when the editor is empty.
This also applies to fields like `text` and `textArea`, which could be stored as either `null` or an empty value in the database. Since the empty value for richText is a JSON object, checking for emptiness is a bit more involved - so Payload provides a utility for it:
```ts
import { hasText } from '@payloadcms/richtext-lexical/shared'
hasText(richtextData)
```
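For example, a sketch of a field-level `beforeChange` hook that stores `null` instead of an "empty paragraph" document - the hook and field names here are hypothetical:
```ts
import type { FieldHook } from 'payload'

import { hasText } from '@payloadcms/richtext-lexical/shared'

// Normalize an empty editor state back to null before it is saved.
export const normalizeEmptyRichText: FieldHook = ({ value }) =>
  value && hasText(value) ? value : null

// Attached to a richText field, e.g.:
// { name: 'content', type: 'richText', hooks: { beforeChange: [normalizeEmptyRichText] } }
```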

View File

@@ -81,7 +81,6 @@ The default `elements` available in Payload are:
- `link`
- `ol`
- `ul`
- `li`
- `textAlign`
- `indent`
- [`relationship`](#relationship-element)

View File

@@ -95,7 +95,6 @@ _An asterisk denotes that an option is required._
| **`adminThumbnail`** | Set the way that the [Admin Panel](../admin/overview) will display thumbnails for this Collection. [More](#admin-thumbnails) |
| **`bulkUpload`** | Allow users to upload in bulk from the list view. Defaults to `true`. |
| **`cacheTags`** | Set to `false` to disable the cache tag set in the UI for the admin thumbnail component. Useful for when CDNs don't allow certain cache queries. |
| **`constructorOptions`** | An object passed to the Sharp image library that accepts any Constructor options and applies them to the upload file. [More](https://sharp.pixelplumbing.com/api-constructor/) |
| **`crop`** | Set to `false` to disable the cropping tool in the [Admin Panel](../admin/overview). Crop is enabled by default. [More](#crop-and-focal-point-selector) |
| **`disableLocalStorage`** | Completely disable uploading files to disk locally. [More](#disabling-local-upload-storage) |
| **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). |
@@ -109,14 +108,11 @@ _An asterisk denotes that an option is required._
| **`mimeTypes`** | Restrict mimeTypes in the file picker. Array of valid mimetypes or mimetype wildcards [More](#mimetypes) |
| **`pasteURL`** | Controls whether files can be uploaded from remote URLs by pasting them into the Upload field. **Enabled by default.** Accepts `false` to disable or an object with an `allowList` of valid remote URLs. [More](#uploading-files-from-remote-urls) |
| **`resizeOptions`** | An object passed to the Sharp image library to resize the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize) |
| **`skipSafeFetch`** | Set to an `allowList` to skip the safe fetch check when fetching external files. Set to `true` to skip the safe fetch for all documents in this collection. Defaults to `false`. |
| **`allowRestrictedFileTypes`** | Set to `true` to allow restricted file types. If your Collection has defined [mimeTypes](#mimetypes), restricted file verification will be skipped. Defaults to `false`. [More](#restricted-file-types) |
| **`staticDir`** | The folder directory to use to store media in. Can be either an absolute path or relative to the directory that contains your config. Defaults to your collection slug |
| **`trimOptions`** | An object passed to the Sharp image library to trim the uploaded file. [More](https://sharp.pixelplumbing.com/api-resize#trim) |
| **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. |
| **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. |
| **`hideRemoveFile`** | Set to `true` to prevent the admin UI having a way to remove an existing file while editing. |
| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) |
### Payload-wide Upload Options
@@ -305,47 +301,6 @@ export const Media: CollectionConfig = {
}
```
## Restricted File Types
Possibly problematic file types are automatically restricted from being uploaded to your application.
If your Collection has defined [mimeTypes](#mimetypes) or has set `allowRestrictedFileTypes` to `true`, restricted file verification will be skipped.
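For instance, a minimal sketch (collection slug is illustrative) of opting a single collection out of this verification:
```ts
import type { CollectionConfig } from 'payload'

export const Downloads: CollectionConfig = {
  slug: 'downloads',
  upload: {
    // Skips restricted-file verification for every file in this collection.
    allowRestrictedFileTypes: true,
  },
}
```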
Restricted file types and extensions:
| File Extensions | MIME Type |
| ------------------------------------ | ----------------------------------------------- |
| `exe`, `dll` | `application/x-msdownload` |
| `exe`, `com`, `app`, `action` | `application/x-executable` |
| `bat`, `cmd` | `application/x-msdos-program` |
| `exe`, `com` | `application/x-ms-dos-executable` |
| `dmg` | `application/x-apple-diskimage` |
| `deb` | `application/x-debian-package` |
| `rpm` | `application/x-redhat-package-manager` |
| `exe`, `dll` | `application/vnd.microsoft.portable-executable` |
| `msi` | `application/x-msi` |
| `jar`, `ear`, `war` | `application/java-archive` |
| `desktop` | `application/x-desktop` |
| `cpl` | `application/x-cpl` |
| `lnk` | `application/x-ms-shortcut` |
| `pkg` | `application/x-apple-installer` |
| `htm`, `html`, `shtml`, `xhtml` | `text/html` |
| `php`, `phtml` | `application/x-httpd-php` |
| `js`, `jse` | `text/javascript` |
| `jsp` | `application/x-jsp` |
| `py` | `text/x-python` |
| `rb` | `text/x-ruby` |
| `pl` | `text/x-perl` |
| `ps1`, `psc1`, `psd1`, `psh`, `psm1` | `application/x-powershell` |
| `vbe`, `vbs` | `application/x-vbscript` |
| `ws`, `wsc`, `wsf`, `wsh` | `application/x-ms-wsh` |
| `scr` | `application/x-msdownload` |
| `asp`, `aspx` | `application/x-asp` |
| `hta` | `application/x-hta` |
| `reg` | `application/x-registry` |
| `url` | `application/x-url` |
| `workflow` | `application/x-workflow` |
| `command` | `application/x-command` |
## MimeTypes
Specifying the `mimeTypes` property can restrict what files are allowed from the user's file picker. This accepts an array of strings, each of which can be any valid mimetype or a mimetype wildcard.
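As a short sketch (collection slug is illustrative), restricting uploads to images and PDFs could look like this:
```ts
import type { CollectionConfig } from 'payload'

export const Media: CollectionConfig = {
  slug: 'media',
  upload: {
    // Wildcards such as 'image/*' match any image mimetype.
    mimeTypes: ['image/*', 'application/pdf'],
  },
}
```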
@@ -454,7 +409,7 @@ To fetch files from **restricted URLs** that would otherwise be blocked by CORS,
Here's how to configure the pasteURL option to control remote URL fetching:
```ts
```
import type { CollectionConfig } from 'payload'
export const Media: CollectionConfig = {
@@ -467,7 +422,7 @@ export const Media: CollectionConfig = {
pathname: '',
port: '',
protocol: 'https',
search: '',
search: ''
},
{
hostname: 'example.com',
@@ -479,24 +434,6 @@ export const Media: CollectionConfig = {
}
```
You can also adjust server-side fetching at the upload level. This does not affect the `CORS` policy the way the `pasteURL` option does, but it allows you to skip the safe fetch check for specific URLs.
```ts
import type { CollectionConfig } from 'payload'
export const Media: CollectionConfig = {
slug: 'media',
upload: {
skipSafeFetch: [
{
hostname: 'example.com',
pathname: '/images/*',
},
],
},
}
```
##### Accepted Values for `pasteURL`
| Option | Description |
@@ -520,44 +457,3 @@ _An asterisk denotes that an option is required._
## Access Control
All files that are uploaded to each Collection automatically support the `read` [Access Control](/docs/access-control/overview) function from the Collection itself. You can use this to control who should be allowed to see your uploads, and who should not.
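As a brief sketch, an upload-enabled collection that only lets logged-in users read (and therefore download) its files might look like this:
```ts
import type { CollectionConfig } from 'payload'

export const Media: CollectionConfig = {
  slug: 'media',
  access: {
    // Only authenticated users may read these files.
    read: ({ req: { user } }) => Boolean(user),
  },
  upload: true,
}
```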
## Modifying response headers
You can modify the response headers for files by specifying the `modifyResponseHeaders` option in your upload config. This option accepts an object with existing headers and allows you to manipulate the response headers for media files.
### Modifying existing headers
With this method you can interface directly with the `Headers` object, modifying the existing headers to append or remove entries.
```ts
import type { CollectionConfig } from 'payload'
export const Media: CollectionConfig = {
slug: 'media',
upload: {
modifyResponseHeaders: ({ headers }) => {
headers.set('X-Frame-Options', 'DENY') // You can directly set headers without returning
},
},
}
```
### Return new headers
You can also return a new `Headers` object with the modified headers. This is useful if you want to set new headers or remove existing ones.
```ts
import type { CollectionConfig } from 'payload'
export const Media: CollectionConfig = {
slug: 'media',
upload: {
modifyResponseHeaders: ({ headers }) => {
const newHeaders = new Headers(headers) // Copy existing headers
newHeaders.set('X-Frame-Options', 'DENY') // Set new header
return newHeaders
},
},
}
```

View File

@@ -84,7 +84,7 @@ pnpm add @payloadcms/storage-s3
- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
- When deploying to Vercel, server uploads are limited to 4.5 MB. Set `clientUploads` to `true` to upload directly from the client. You must allow the CORS PUT method on the bucket for your website.
- Configure `signedDownloads` (either globally or per-collection in `collections`) to use [presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) for downloading files. This can improve performance for large files (like videos) while still respecting your access control. Additionally, with `signedDownloads.shouldUseSignedURL` you can specify whether Payload should use a presigned URL, if you want to use this feature only for specific files.
- Configure `signedDownloads` (either globally or per-collection in `collections`) to use [presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) for downloading files. This can improve performance for large files (like videos) while still respecting your access control.
```ts
import { s3Storage } from '@payloadcms/storage-s3'
@@ -100,14 +100,6 @@ export default buildConfig({
'media-with-prefix': {
prefix,
},
'media-with-presigned-downloads': {
// Filter only mp4 files
signedDownloads: {
shouldUseSignedURL: ({ collection, filename, req }) => {
return filename.endsWith('.mp4')
},
},
},
},
bucket: process.env.S3_BUCKET,
config: {

View File

@@ -51,37 +51,12 @@ Within the Admin UI, if drafts are enabled, a document can be shown with one of
#### Updating or creating drafts
If you enable drafts on a collection or global, the `create` and `update` operations for REST, GraphQL, and Local APIs expose a new option called `draft` which allows you to specify if you are creating or updating a **draft**, or if you're just sending your changes straight to the published document.
For example:
```ts
// REST API
POST /api/your-collection?draft=true
// Local API
await payload.create({
collection: 'your-collection',
data: {
// your data here
},
draft: true, // This is required to create a draft
})
// GraphQL
mutation {
createYourCollection(data: { ... }, draft: true) {
// ...
}
}
```
If you enable drafts on a collection or global, the `create` and `update` operations for REST, GraphQL, and Local APIs expose a new option called `draft` which allows you to specify if you are creating or updating a **draft**, or if you're just sending your changes straight to the published document. For example, if you pass the query parameter `?draft=true` to a REST `create` or `update` operation, your action will be treated as if you are creating a `draft` and not a published document. By default, the `draft` argument is set to `false`.
**Required fields**
If `draft` is enabled while creating or updating a document, all fields are considered optional, so that you can save drafts that are incomplete.
Setting `_status: "draft"` will not bypass the required fields. You need to set `draft: true` as shown in the previous examples.
#### Reading drafts vs. published documents
In addition to the `draft` argument within `create` and `update` operations, a `draft` argument is also exposed for `find` and `findByID` operations.
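As a sketch (collection slug is illustrative, and a `payload` instance from `getPayload` is assumed), requesting the newest drafts via the Local API looks like this:
```ts
// With draft: true, Payload returns the latest draft version of each
// document when it is newer than the published version.
const pages = await payload.find({
  collection: 'pages',
  draft: true,
})
```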

View File

@@ -1,12 +1,14 @@
import configPromise from '@payload-config'
import { getPayload } from 'payload'
export const GET = async (request: Request) => {
export const GET = async () => {
const payload = await getPayload({
config: configPromise,
})
return Response.json({
message: 'This is an example of a custom route.',
const data = await payload.find({
collection: 'users',
})
return Response.json(data)
}

View File

@@ -1,5 +1,5 @@
{
"name": "astro-website",
"name": "website",
"version": "0.0.1",
"type": "module",
"scripts": {

View File

@@ -14,12 +14,12 @@ export const Header = () => {
<picture>
<source
media="(prefers-color-scheme: dark)"
srcSet="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-light.svg"
srcSet="https://raw.githubusercontent.com/payloadcms/payload/master/src/admin/assets/images/payload-logo-light.svg"
/>
<Image
alt="Payload Logo"
height={30}
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-dark.svg"
src="https://raw.githubusercontent.com/payloadcms/payload/master/src/admin/assets/images/payload-logo-dark.svg"
width={150}
/>
</picture>

View File

@@ -1,5 +1,4 @@
import type { CollectionConfig } from 'payload'
import { sanitizeUserDataForEmail } from 'payload/shared'
import { generateEmailHTML } from '../email/generateEmailHTML'
@@ -27,7 +26,7 @@ export const Newsletter: CollectionConfig = {
.sendEmail({
from: 'sender@example.com',
html: await generateEmailHTML({
content: `<p>${doc.name ? `Hi ${sanitizeUserDataForEmail(doc.name)}!` : 'Hi!'} We'll be in touch soon...</p>`,
content: `<p>${doc.name ? `Hi ${doc.name}!` : 'Hi!'} We'll be in touch soon...</p>`,
headline: 'Welcome to the newsletter!',
}),
subject: 'Thanks for signing up!',

View File

@@ -1,7 +1,5 @@
import { generateEmailHTML } from './generateEmailHTML'
import { sanitizeUserDataForEmail } from 'payload/shared'
type User = {
email: string
name?: string
@@ -18,7 +16,7 @@ export const generateVerificationEmail = async (
const { token, user } = args
return generateEmailHTML({
content: `<p>Hi${user.name ? ' ' + sanitizeUserDataForEmail(user.name) : ''}! Validate your account by clicking the button below.</p>`,
content: `<p>Hi${user.name ? ' ' + user.name : ''}! Validate your account by clicking the button below.</p>`,
cta: {
buttonLabel: 'Verify',
url: `${process.env.PAYLOAD_PUBLIC_SERVER_URL}/verify?token=${token}&email=${user.email}`,

View File

@@ -3,8 +3,14 @@
width: var(--base);
.stroke {
stroke-width: 2px;
stroke-width: 1px;
fill: none;
stroke: currentColor;
}
&:local() {
.stroke {
stroke-width: 2px;
}
}
}

View File

@@ -2,4 +2,4 @@
/// <reference types="next/image-types/global" />
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.

View File

@@ -3,7 +3,6 @@
"version": "1.0.0",
"description": "Payload Live Preview example.",
"license": "MIT",
"type": "module",
"main": "dist/server.js",
"scripts": {
"build": "cross-env NODE_OPTIONS=--no-deprecation next build",

File diff suppressed because it is too large

View File

@@ -27,12 +27,12 @@ export const Header = async () => {
<picture>
<source
media="(prefers-color-scheme: dark)"
srcSet="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-light.svg"
srcSet="https://raw.githubusercontent.com/payloadcms/payload/master/src/admin/assets/images/payload-logo-light.svg"
/>
<Image
alt="Payload Logo"
height={30}
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-dark.svg"
src="https://raw.githubusercontent.com/payloadcms/payload/master/src/admin/assets/images/payload-logo-dark.svg"
width={150}
/>
</picture>

View File

@@ -1,9 +1,8 @@
import type { MigrateUpArgs } from '@payloadcms/db-mongodb'
import type { Page } from '../payload-types'
import { DefaultDocumentIDType } from 'payload'
export const home = (id: DefaultDocumentIDType): Partial<Page> => ({
export const home: Partial<Page> = {
slug: 'home',
richText: [
{
@@ -42,26 +41,11 @@ export const home = (id: DefaultDocumentIDType): Partial<Page> => ({
{
text: ' you can edit this page in the admin panel and see the changes reflected here in real time.',
},
...(id
? [
{
text: ' To get started, visit ',
},
{
type: 'link',
children: [{ text: 'this page' }],
linkType: 'custom',
newTab: true,
url: `/admin/collections/pages/${id}/preview`,
},
{ text: '.' },
]
: []),
],
},
],
title: 'Home',
})
}
export const examplePage: Partial<Page> = {
slug: 'example-page',
@@ -99,18 +83,11 @@ export async function up({ payload }: MigrateUpArgs): Promise<void> {
data: examplePage as any, // eslint-disable-line
})
const { id: ogHomePageID } = await payload.create({
collection: 'pages',
data: {
title: 'Home',
richText: [],
},
})
const homepageJSON = JSON.parse(JSON.stringify(home))
const { id: homePageID } = await payload.update({
id: ogHomePageID,
const { id: homePageID } = await payload.create({
collection: 'pages',
data: home(ogHomePageID),
data: homepageJSON,
})
await payload.updateGlobal({
@@ -144,7 +121,7 @@ export async function up({ payload }: MigrateUpArgs): Promise<void> {
type: 'custom',
label: 'Dashboard',
reference: undefined,
url: '/admin',
url: 'http://localhost:3000/admin',
},
},
],

View File

@@ -6,66 +6,10 @@
* and re-run `payload generate:types` to regenerate this file.
*/
/**
* Supported timezones in IANA format.
*
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "supportedTimezones".
*/
export type SupportedTimezones =
| 'Pacific/Midway'
| 'Pacific/Niue'
| 'Pacific/Honolulu'
| 'Pacific/Rarotonga'
| 'America/Anchorage'
| 'Pacific/Gambier'
| 'America/Los_Angeles'
| 'America/Tijuana'
| 'America/Denver'
| 'America/Phoenix'
| 'America/Chicago'
| 'America/Guatemala'
| 'America/New_York'
| 'America/Bogota'
| 'America/Caracas'
| 'America/Santiago'
| 'America/Buenos_Aires'
| 'America/Sao_Paulo'
| 'Atlantic/South_Georgia'
| 'Atlantic/Azores'
| 'Atlantic/Cape_Verde'
| 'Europe/London'
| 'Europe/Berlin'
| 'Africa/Lagos'
| 'Europe/Athens'
| 'Africa/Cairo'
| 'Europe/Moscow'
| 'Asia/Riyadh'
| 'Asia/Dubai'
| 'Asia/Baku'
| 'Asia/Karachi'
| 'Asia/Tashkent'
| 'Asia/Calcutta'
| 'Asia/Dhaka'
| 'Asia/Almaty'
| 'Asia/Jakarta'
| 'Asia/Bangkok'
| 'Asia/Shanghai'
| 'Asia/Singapore'
| 'Asia/Tokyo'
| 'Asia/Seoul'
| 'Australia/Brisbane'
| 'Australia/Sydney'
| 'Pacific/Guam'
| 'Pacific/Noumea'
| 'Pacific/Auckland'
| 'Pacific/Fiji';
export interface Config {
auth: {
users: UserAuthOperations;
};
blocks: {};
collections: {
pages: Page;
users: User;

View File

@@ -2,7 +2,6 @@
import { cn } from '@/utilities/ui'
import useClickableCard from '@/utilities/useClickableCard'
import Link from 'next/link'
import { useLocale } from 'next-intl'
import React, { Fragment } from 'react'
import type { Post } from '@/payload-types'
@@ -17,7 +16,6 @@ export const Card: React.FC<{
showCategories?: boolean
title?: string
}> = (props) => {
const locale = useLocale()
const { card, link } = useClickableCard({})
const { className, doc, relationTo, showCategories, title: titleFromProps } = props
@@ -27,7 +25,7 @@ export const Card: React.FC<{
const hasCategories = categories && Array.isArray(categories) && categories.length > 0
const titleToUse = titleFromProps || title
const sanitizedDescription = description?.replace(/\s/g, ' ') // replace non-breaking space with white space
const href = `/${locale}/${relationTo}/${slug}`
const href = `/${relationTo}/${slug}`
return (
<article

View File

@@ -6,7 +6,7 @@ export const Logo = () => {
<img
alt="Payload Logo"
className="max-w-[9.375rem] invert dark:invert-0"
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-light.svg"
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/admin/assets/images/payload-logo-light.svg"
/>
)
}

View File

@@ -21,7 +21,7 @@ export async function Footer({ locale }: { locale: TypedLocale }) {
<img
alt="Payload Logo"
className="max-w-[6rem] invert-0"
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/ui/src/assets/payload-logo-light.svg"
src="https://raw.githubusercontent.com/payloadcms/payload/main/packages/payload/src/admin/assets/images/payload-logo-light.svg"
/>
</picture>
</Link>

View File

@@ -53,7 +53,7 @@ export default buildConfig({
admin: {
components: {
// The `BeforeLogin` component renders a message that you see while logging into your admin panel.
// Feel free to delete this at any time. Simply remove the line below.
// Feel free to delete this at any time. Simply remove the line below and the import `BeforeLogin` statement on line 15.
beforeLogin: ['@/components/BeforeLogin'],
afterDashboard: ['@/components/AfterDashboard'],
},

View File

@@ -14,12 +14,9 @@ export const superAdminOrTenantAdminAccess: Access = ({ req }) => {
return true
}
const adminTenantAccessIDs = getUserTenantIDs(req.user, 'tenant-admin')
const requestedTenant = req?.data?.tenant
if (requestedTenant && adminTenantAccessIDs.includes(requestedTenant)) {
return true
return {
tenant: {
in: getUserTenantIDs(req.user, 'tenant-admin'),
},
}
return false
}

View File

@@ -1,6 +1,6 @@
import type { Access } from 'payload'
import type { Tenant, User } from '../../../payload-types'
import type { User } from '../../../payload-types'
import { isSuperAdmin } from '../../../access/isSuperAdmin'
import { getUserTenantIDs } from '../../../utilities/getUserTenantIDs'
@@ -14,20 +14,9 @@ export const createAccess: Access<User> = ({ req }) => {
return true
}
if (!isSuperAdmin(req.user) && req.data?.roles?.includes('super-admin')) {
return false
}
const adminTenantAccessIDs = getUserTenantIDs(req.user, 'tenant-admin')
const requestedTenants: Tenant['id'][] =
req.data?.tenants?.map((t: { tenant: Tenant['id'] }) => t.tenant) ?? []
const hasAccessToAllRequestedTenants = requestedTenants.every((tenantID) =>
adminTenantAccessIDs.includes(tenantID),
)
if (hasAccessToAllRequestedTenants) {
if (adminTenantAccessIDs.length) {
return true
}

View File

@@ -1,12 +1,14 @@
import configPromise from '@payload-config'
import { getPayload } from 'payload'
export const GET = async (request: Request) => {
export const GET = async () => {
const payload = await getPayload({
config: configPromise,
})
return Response.json({
message: 'This is an example of a custom route.',
const data = await payload.find({
collection: 'users',
})
return Response.json(data)
}

View File

@@ -1,22 +1,17 @@
{
"name": "payload-monorepo",
"version": "3.47.0",
"version": "3.39.1",
"private": true,
"type": "module",
"workspaces": [
"packages/*",
"test/*"
],
"scripts": {
"bf": "pnpm run build:force",
"build": "pnpm run build:core",
"build:admin-bar": "turbo build --filter \"@payloadcms/admin-bar\"",
"build:all": "turbo build --filter \"!blank\" --filter \"!website\"",
"build:all": "turbo build",
"build:app": "next build",
"build:app:analyze": "cross-env ANALYZE=true next build",
"build:bundle-for-analysis": "turbo run build:bundle-for-analysis",
"build:clean": "pnpm clean:build",
"build:core": "turbo build --filter \"!@payloadcms/plugin-*\" --filter \"!@payloadcms/storage-*\" --filter \"!blank\" --filter \"!website\"",
"build:core": "turbo build --filter \"!@payloadcms/plugin-*\" --filter \"!@payloadcms/storage-*\"",
"build:core:force": "pnpm clean:build && pnpm build:core --no-cache --force",
"build:create-payload-app": "turbo build --filter create-payload-app",
"build:db-mongodb": "turbo build --filter \"@payloadcms/db-mongodb\"",
@@ -65,12 +60,11 @@
"clean:build": "node ./scripts/delete-recursively.js 'media/' '**/dist/' '**/.cache/' '**/.next/' '**/.turbo/' '**/tsconfig.tsbuildinfo' '**/payload*.tgz' '**/meta_*.json'",
"clean:build:allowtgz": "node ./scripts/delete-recursively.js 'media/' '**/dist/' '**/.cache/' '**/.next/' '**/.turbo/' '**/tsconfig.tsbuildinfo' '**/meta_*.json'",
"clean:cache": "node ./scripts/delete-recursively.js node_modules/.cache! packages/payload/node_modules/.cache! .next/*",
"dev": "cross-env NODE_OPTIONS=\"--no-deprecation --max-old-space-size=16384\" tsx ./test/dev.ts",
"dev": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts",
"dev:generate-db-schema": "pnpm runts ./test/generateDatabaseSchema.ts",
"dev:generate-graphql-schema": "pnpm runts ./test/generateGraphQLSchema.ts",
"dev:generate-importmap": "pnpm runts ./test/generateImportMap.ts",
"dev:generate-types": "pnpm runts ./test/generateTypes.ts",
"dev:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --start-memory-db",
"dev:postgres": "cross-env PAYLOAD_DATABASE=postgres pnpm runts ./test/dev.ts",
"dev:prod": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod",
"dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
@@ -80,9 +74,9 @@
"docker:start": "docker compose -f test/docker-compose.yml up -d",
"docker:stop": "docker compose -f test/docker-compose.yml down",
"force:build": "pnpm run build:core:force",
"lint": "turbo run lint --log-order=grouped --continue --filter \"!blank\" --filter \"!website\"",
"lint": "turbo run lint --log-order=grouped --continue",
"lint-staged": "lint-staged",
"lint:fix": "turbo run lint:fix --log-order=grouped --continue --filter \"!blank\" --filter \"!website\"",
"lint:fix": "turbo run lint:fix --log-order=grouped --continue",
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
"prepare": "husky",
"prepare-run-test-against-prod": "pnpm bf && rm -rf test/packed && rm -rf test/node_modules && rm -rf app && rm -f test/pnpm-lock.yaml && pnpm run script:pack --all --no-build --dest test/packed && pnpm runts test/setupProd.ts && cd test && pnpm i --ignore-workspace && cd ..",
@@ -90,11 +84,7 @@
"publish-prerelease": "pnpm --filter releaser publish-prerelease",
"reinstall": "pnpm clean:all && pnpm install",
"release": "pnpm --filter releaser release --tag latest",
"runts": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" node --no-deprecation --no-experimental-strip-types --import @swc-node/register/esm-register",
"script:audit": "pnpm audit -P",
"script:audit:critical": "pnpm audit -P --audit-level critical",
"script:audit:high": "pnpm audit -P --audit-level high",
"script:audit:moderate": "pnpm audit -P --audit-level moderate",
"runts": "cross-env NODE_OPTIONS=--no-deprecation node --no-deprecation --import @swc-node/register/esm-register",
"script:build-template-with-local-pkgs": "pnpm --filter scripts build-template-with-local-pkgs",
"script:gen-templates": "pnpm --filter scripts gen-templates",
"script:gen-templates:build": "pnpm --filter scripts gen-templates --build",
@@ -103,20 +93,18 @@
"script:pack": "pnpm --filter scripts pack-all-to-dest",
"pretest": "pnpm build",
"test": "pnpm test:int && pnpm test:components && pnpm test:e2e",
"test:components": "cross-env NODE_OPTIONS=\" --no-deprecation --no-experimental-strip-types\" jest --config=jest.components.config.js",
"test:components": "cross-env NODE_OPTIONS=\" --no-deprecation\" jest --config=jest.components.config.js",
"test:e2e": "pnpm runts ./test/runE2E.ts",
"test:e2e:debug": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PWDEBUG=1 DISABLE_LOGGING=true playwright test",
"test:e2e:headed": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true playwright test --headed",
"test:e2e:noturbo": "pnpm runts ./test/runE2E.ts --no-turbo",
"test:e2e:debug": "cross-env NODE_OPTIONS=--no-deprecation NODE_NO_WARNINGS=1 PWDEBUG=1 DISABLE_LOGGING=true playwright test",
"test:e2e:headed": "cross-env NODE_OPTIONS=--no-deprecation NODE_NO_WARNINGS=1 DISABLE_LOGGING=true playwright test --headed",
"test:e2e:prod": "pnpm prepare-run-test-against-prod && pnpm runts ./test/runE2E.ts --prod",
"test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod",
"test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo",
"test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int": "cross-env NODE_OPTIONS=\"--no-deprecation\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand",
"test:types": "tstyche",
"test:unit": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=jest.config.js --runInBand",
"translateNewKeys": "pnpm --filter @tools/scripts run generateTranslations:core"
"test:unit": "cross-env NODE_OPTIONS=\"--no-deprecation\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=jest.config.js --runInBand",
"translateNewKeys": "pnpm --filter translations run translateNewKeys"
},
"lint-staged": {
"**/package.json": "sort-package-json",
@@ -125,29 +113,28 @@
"prettier --write",
"eslint --cache --fix"
],
"templates/website/**/*": "sh -c \"cd templates/website; pnpm install --no-frozen-lockfile --ignore-workspace; pnpm run lint --fix\"",
"templates/**/pnpm-lock.yaml": "pnpm runts scripts/remove-template-lock-files.ts",
"tsconfig.base.json": "node scripts/reset-tsconfig.js"
"tsconfig.json": "node scripts/reset-tsconfig.js"
},
"devDependencies": {
"@jest/globals": "29.7.0",
"@libsql/client": "0.14.0",
"@next/bundle-analyzer": "15.3.2",
"@payloadcms/db-postgres": "file:./packages/db-postgres",
"@payloadcms/drizzle": "file:./packages/drizzle",
"@next/bundle-analyzer": "15.3.0",
"@payloadcms/db-postgres": "workspace:*",
"@payloadcms/eslint-config": "workspace:*",
"@payloadcms/eslint-plugin": "workspace:*",
"@payloadcms/live-preview-react": "workspace:*",
"@payloadcms/translations": "file:./packages/translations",
"@playwright/test": "1.50.0",
"@sentry/nextjs": "^8.33.1",
"@sentry/node": "^8.33.1",
"@swc-node/register": "1.10.10",
"@swc/cli": "0.7.7",
"@swc/jest": "0.2.38",
"@swc-node/register": "1.10.9",
"@swc/cli": "0.6.0",
"@swc/jest": "0.2.37",
"@types/fs-extra": "^11.0.2",
"@types/jest": "29.5.12",
"@types/minimist": "1.2.5",
"@types/node": "22.15.30",
"@types/node": "22.5.4",
"@types/react": "19.1.0",
"@types/react-dom": "19.1.2",
"@types/shelljs": "0.8.15",
@@ -157,8 +144,8 @@
"create-payload-app": "workspace:*",
"cross-env": "7.0.3",
"dotenv": "16.4.7",
"drizzle-kit": "0.31.4",
"drizzle-orm": "0.44.2",
"drizzle-kit": "0.28.0",
"drizzle-orm": "0.36.1",
"escape-html": "^1.0.3",
"execa": "5.1.1",
"form-data": "3.0.1",
@@ -168,12 +155,10 @@
"jest": "29.7.0",
"lint-staged": "15.2.7",
"minimist": "1.2.8",
"mongodb-memory-server": "10.1.4",
"next": "15.3.2",
"mongodb-memory-server": "^10",
"next": "15.3.0",
"open": "^10.1.0",
"p-limit": "^5.0.0",
"payload": "file:./packages/payload",
"pg": "8.16.3",
"playwright": "1.50.0",
"playwright-core": "1.50.0",
"prettier": "3.5.3",
@@ -184,11 +169,11 @@
"shelljs": "0.8.5",
"slash": "3.0.0",
"sort-package-json": "^2.10.0",
"swc-plugin-transform-remove-imports": "4.0.4",
"swc-plugin-transform-remove-imports": "3.1.0",
"tempy": "1.0.1",
"tstyche": "^3.1.1",
"tsx": "4.19.2",
"turbo": "^2.5.4",
"turbo": "^2.3.3",
"typescript": "5.7.3"
},
"packageManager": "pnpm@9.7.1",
@@ -201,11 +186,16 @@
"copyfiles": "$copyfiles",
"cross-env": "$cross-env",
"dotenv": "$dotenv",
"drizzle-orm": "$drizzle-orm",
"graphql": "^16.8.1",
"mongodb-memory-server": "$mongodb-memory-server",
"react": "$react",
"react-dom": "$react-dom",
"typescript": "$typescript"
}
}
},
"workspaces:": [
"packages/*",
"test/*"
]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/admin-bar",
"version": "3.47.0",
"version": "3.39.1",
"description": "An admin bar for React apps using Payload",
"homepage": "https://payloadcms.com",
"repository": {

View File

@@ -1,6 +1,6 @@
{
"name": "create-payload-app",
"version": "3.47.0",
"version": "3.39.1",
"homepage": "https://payloadcms.com",
"repository": {
"type": "git",
@@ -16,7 +16,6 @@
"url": "https://payloadcms.com"
}
],
"sideEffects": false,
"type": "module",
"exports": {
"./types": {
@@ -61,7 +60,7 @@
"dependencies": {
"@clack/prompts": "^0.7.0",
"@sindresorhus/slugify": "^1.1.0",
"@swc/core": "1.11.29",
"@swc/core": "1.10.12",
"arg": "^5.0.0",
"chalk": "^4.1.0",
"comment-json": "^4.2.3",
@@ -77,7 +76,7 @@
"@types/esprima": "^4.0.6",
"@types/fs-extra": "^9.0.12",
"@types/jest": "29.5.12",
"@types/node": "22.15.30"
"@types/node": "22.5.4"
},
"engines": {
"node": "^18.20.2 || >=20.9.0"

View File

@@ -16,15 +16,12 @@ export const configurePluginProject = ({
const devPayloadConfigPath = path.resolve(projectDirPath, './dev/payload.config.ts')
const devTsConfigPath = path.resolve(projectDirPath, './dev/tsconfig.json')
const indexTsPath = path.resolve(projectDirPath, './src/index.ts')
const devImportMapPath = path.resolve(projectDirPath, './dev/app/(payload)/admin/importMap.js')
const devPayloadConfig = fse.readFileSync(devPayloadConfigPath, 'utf8')
const devTsConfig = fse.readFileSync(devTsConfigPath, 'utf8')
const indexTs = fse.readFileSync(indexTsPath, 'utf-8')
const devImportMap = fse.readFileSync(devImportMapPath, 'utf-8')
const updatedTsConfig = devTsConfig.replaceAll('plugin-package-name-placeholder', projectName)
const updatedImportMap = devImportMap.replaceAll('plugin-package-name-placeholder', projectName)
let updatedIndexTs = indexTs.replaceAll('plugin-package-name-placeholder', projectName)
const pluginExportVariableName = toCamelCase(projectName)
@@ -46,5 +43,4 @@ export const configurePluginProject = ({
fse.writeFileSync(devPayloadConfigPath, updatedPayloadConfig)
fse.writeFileSync(devTsConfigPath, updatedTsConfig)
fse.writeFileSync(indexTsPath, updatedIndexTs)
fse.writeFileSync(devImportMapPath, updatedImportMap)
}

View File

@@ -7,13 +7,13 @@ import path from 'path'
import type { CliArgs, DbType, ProjectExample, ProjectTemplate } from '../types.js'
import { createProject, updatePackageJSONDependencies } from './create-project.js'
import { createProject } from './create-project.js'
import { dbReplacements } from './replacements.js'
import { getValidTemplates } from './templates.js'
import { manageEnvFiles } from './manage-env-files.js'
describe('createProject', () => {
let projectDir: string
beforeAll(() => {
// eslint-disable-next-line no-console
console.log = jest.fn()
@@ -63,30 +63,6 @@ describe('createProject', () => {
expect(packageJson.name).toStrictEqual(projectName)
})
it('updates project name in plugin template importMap file', async () => {
const projectName = 'my-custom-plugin'
const template: ProjectTemplate = {
name: 'plugin',
type: 'plugin',
description: 'Template for creating a Payload plugin',
url: 'https://github.com/payloadcms/payload/templates/plugin',
}
await createProject({
cliArgs: { ...args, '--local-template': 'plugin' } as CliArgs,
packageManager,
projectDir,
projectName,
template,
})
const importMapPath = path.resolve(projectDir, './dev/app/(payload)/admin/importMap.js')
const importMapFile = fse.readFileSync(importMapPath, 'utf-8')
expect(importMapFile).not.toContain('plugin-package-name-placeholder')
expect(importMapFile).toContain('my-custom-plugin')
})
it('creates example', async () => {
const projectName = 'custom-server-example'
const example: ProjectExample = {
@@ -179,36 +155,74 @@ describe('createProject', () => {
expect(content).toContain(dbReplacement.configReplacement().join('\n'))
})
})
describe('managing env files', () => {
it('updates .env files without overwriting existing data', async () => {
const envFilePath = path.join(projectDir, '.env')
const envExampleFilePath = path.join(projectDir, '.env.example')
describe('updates package.json', () => {
it('updates package name and bumps workspace versions', async () => {
const latestVersion = '3.0.0'
const initialJSON = {
name: 'test-project',
version: '1.0.0',
dependencies: {
'@payloadcms/db-mongodb': 'workspace:*',
payload: 'workspace:*',
'@payloadcms/ui': 'workspace:*',
},
}
fse.ensureDirSync(projectDir)
fse.ensureFileSync(envFilePath)
fse.ensureFileSync(envExampleFilePath)
const correctlyModifiedJSON = {
name: 'test-project',
version: '1.0.0',
dependencies: {
'@payloadcms/db-mongodb': `${latestVersion}`,
payload: `${latestVersion}`,
'@payloadcms/ui': `${latestVersion}`,
},
}
const initialEnvContent = `CUSTOM_VAR=custom-value\nDATABASE_URI=old-connection\n`
const initialEnvExampleContent = `CUSTOM_VAR=custom-value\nDATABASE_URI=old-connection\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n`
updatePackageJSONDependencies({
latestVersion,
packageJson: initialJSON,
fse.writeFileSync(envFilePath, initialEnvContent)
fse.writeFileSync(envExampleFilePath, initialEnvExampleContent)
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseType: 'mongodb',
databaseUri: 'mongodb://localhost:27017/test',
payloadSecret: 'test-secret',
projectDir,
template: undefined,
})
expect(initialJSON).toEqual(correctlyModifiedJSON)
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toContain('CUSTOM_VAR=custom-value')
expect(updatedEnvContent).toContain('DATABASE_URI=mongodb://localhost:27017/test')
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=test-secret')
const updatedEnvExampleContent = fse.readFileSync(envExampleFilePath, 'utf-8')
expect(updatedEnvExampleContent).toContain('CUSTOM_VAR=custom-value')
expect(updatedEnvContent).toContain('DATABASE_URI=mongodb://localhost:27017/test')
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=test-secret')
})
it('creates .env and .env.example if they do not exist', async () => {
const envFilePath = path.join(projectDir, '.env')
const envExampleFilePath = path.join(projectDir, '.env.example')
fse.ensureDirSync(projectDir)
if (fse.existsSync(envFilePath)) fse.removeSync(envFilePath)
if (fse.existsSync(envExampleFilePath)) fse.removeSync(envExampleFilePath)
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseUri: '',
payloadSecret: '',
projectDir,
template: undefined,
})
expect(fse.existsSync(envFilePath)).toBe(true)
expect(fse.existsSync(envExampleFilePath)).toBe(true)
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toContain('DATABASE_URI=your-connection-string-here')
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=YOUR_SECRET_HERE')
const updatedEnvExampleContent = fse.readFileSync(envExampleFilePath, 'utf-8')
expect(updatedEnvExampleContent).toContain('DATABASE_URI=your-connection-string-here')
expect(updatedEnvExampleContent).toContain('PAYLOAD_SECRET=YOUR_SECRET_HERE')
})
})
})

View File

@@ -129,11 +129,7 @@ export async function createProject(
const spinner = p.spinner()
spinner.start('Checking latest Payload version...')
const payloadVersion = await getLatestPackageVersion({ packageName: 'payload' })
spinner.stop(`Found latest version of Payload ${payloadVersion}`)
await updatePackageJSON({ latestVersion: payloadVersion, projectDir, projectName })
await updatePackageJSON({ projectDir, projectName })
if ('template' in args) {
if (args.template.type === 'plugin') {
@@ -148,14 +144,17 @@ export async function createProject(
}
}
await manageEnvFiles({
cliArgs,
databaseType: dbDetails?.type,
databaseUri: dbDetails?.dbUri,
payloadSecret: generateSecret(),
projectDir,
template: 'template' in args ? args.template : undefined,
})
// Call manageEnvFiles before initializing Git
if (dbDetails) {
await manageEnvFiles({
cliArgs,
databaseType: dbDetails.type,
databaseUri: dbDetails.dbUri,
payloadSecret: generateSecret(),
projectDir,
template: 'template' in args ? args.template : undefined,
})
}
// Remove yarn.lock file. This is only desired in Payload Cloud.
const lockPath = path.resolve(projectDir, 'pnpm-lock.yaml')
@@ -181,105 +180,17 @@ export async function createProject(
}
}
/**
* Reads the package.json file into an object and then does the following:
* - Sets the `name` property to the provided `projectName`.
* - Bumps the payload packages from workspace:* to the latest version.
* - Writes the updated object back to the package.json file.
*/
export async function updatePackageJSON(args: {
/**
* The latest version of Payload to use in the package.json.
*/
latestVersion: string
projectDir: string
/**
* The name of the project to set in package.json.
*/
projectName: string
}): Promise<void> {
const { latestVersion, projectDir, projectName } = args
const { projectDir, projectName } = args
const packageJsonPath = path.resolve(projectDir, 'package.json')
try {
const packageObj = await fse.readJson(packageJsonPath)
packageObj.name = projectName
updatePackageJSONDependencies({
latestVersion,
packageJson: packageObj,
})
await fse.writeJson(packageJsonPath, packageObj, { spaces: 2 })
} catch (err: unknown) {
warning(`Unable to update name in package.json. ${err instanceof Error ? err.message : ''}`)
}
}
/**
* Recursively updates a JSON object to replace all instances of `workspace:` with the latest version pinned.
*
* Does not return and instead modifies the `packageJson` object in place.
*/
export function updatePackageJSONDependencies(args: {
latestVersion: string
packageJson: Record<string, unknown>
}): void {
const { latestVersion, packageJson } = args
const updatedDependencies = Object.entries(packageJson.dependencies || {}).reduce(
(acc, [key, value]) => {
if (typeof value === 'string' && value.startsWith('workspace:')) {
acc[key] = `${latestVersion}`
} else if (key === 'payload' || key.startsWith('@payloadcms')) {
acc[key] = `${latestVersion}`
} else {
acc[key] = value
}
return acc
},
{} as Record<string, string>,
)
packageJson.dependencies = updatedDependencies
}
/**
* Fetches the latest version of a package from the NPM registry.
*
* Used in determining the latest version of Payload to use in the generated templates.
*/
async function getLatestPackageVersion({
packageName = 'payload',
}: {
/**
* Package name to fetch the latest version for based on the NPM registry URL
*
* Eg. for `'payload'`, it will fetch the version from `https://registry.npmjs.org/payload`
*
* @default 'payload'
*/
packageName?: string
}): Promise<string> {
try {
const response = await fetch(`https://registry.npmjs.org/-/package/${packageName}/dist-tags`)
const data = await response.json()
// Monster chaining for type safety just checking for data.latest
const latestVersion =
data &&
typeof data === 'object' &&
'latest' in data &&
data.latest &&
typeof data.latest === 'string'
? data.latest
: null
if (!latestVersion) {
throw new Error(`No latest version found for package: ${packageName}`)
}
return latestVersion
} catch (error) {
console.error('Error fetching Payload version:', error)
throw error
}
}

View File

@@ -17,7 +17,7 @@ export async function downloadTemplate({
}) {
const branchOrTag = template.url.split('#')?.[1] || 'latest'
const url = `https://codeload.github.com/payloadcms/payload/tar.gz/${branchOrTag}`
const filter = `payload-${branchOrTag.replace(/^v/, '').replaceAll('/', '-')}/templates/${template.name}/`
const filter = `payload-${branchOrTag.replace(/^v/, '')}/templates/${template.name}/`
if (debug) {
debugLog(`Using template url: ${template.url}`)

View File

@@ -7,7 +7,7 @@ export async function getExamples({ branch }: { branch: string }): Promise<Proje
const response = await fetch(url)
const examplesResponseList = (await response.json()) as { name: string; path: string }[]
const examplesResponseList: { name: string; path: string }[] = await response.json()
const examples: ProjectExample[] = examplesResponseList.map((example) => ({
name: example.name,

View File

@@ -1,165 +0,0 @@
import { jest } from '@jest/globals'
import fs from 'fs'
import fse from 'fs-extra'
import * as os from 'node:os'
import path from 'path'
import type { CliArgs } from '../types.js'
import { manageEnvFiles } from './manage-env-files.js'
describe('createProject', () => {
let projectDir: string
let envFilePath = ''
let envExampleFilePath = ''
beforeAll(() => {
// eslint-disable-next-line no-console
console.log = jest.fn()
})
beforeEach(() => {
const tempDirectory = fs.realpathSync(os.tmpdir())
projectDir = `${tempDirectory}/${Math.random().toString(36).substring(7)}`
envFilePath = path.join(projectDir, '.env')
envExampleFilePath = path.join(projectDir, '.env.example')
if (fse.existsSync(envFilePath)) {
fse.removeSync(envFilePath)
}
fse.ensureDirSync(projectDir)
})
afterEach(() => {
if (fse.existsSync(projectDir)) {
fse.rmSync(projectDir, { recursive: true })
}
})
it('generates .env using defaults (not from .env.example)', async () => {
// ensure no .env.example exists so that the default values are used
// the `manageEnvFiles` function will look for .env.example in the file system
if (fse.existsSync(envExampleFilePath)) {
fse.removeSync(envExampleFilePath)
}
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseUri: '', // omitting this will ensure the default vars are used
payloadSecret: '', // omitting this will ensure the default vars are used
projectDir,
template: undefined,
})
expect(fse.existsSync(envFilePath)).toBe(true)
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toBe(
`# Added by Payload\nPAYLOAD_SECRET=YOUR_SECRET_HERE\nDATABASE_URI=your-connection-string-here`,
)
})
it('generates .env from .env.example', async () => {
// create or override the .env.example file with a connection string that will NOT be overridden
fse.ensureFileSync(envExampleFilePath)
fse.writeFileSync(
envExampleFilePath,
`DATABASE_URI=example-connection-string\nCUSTOM_VAR=custom-value\n`,
)
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseUri: '', // omitting this will ensure the `.env.example` vars are used
payloadSecret: '', // omitting this will ensure the `.env.example` vars are used
projectDir,
template: undefined,
})
expect(fse.existsSync(envFilePath)).toBe(true)
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toBe(
`DATABASE_URI=example-connection-string\nCUSTOM_VAR=custom-value\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n# Added by Payload`,
)
})
it('updates existing .env without overriding vars', async () => {
// create an existing .env file with some custom variables that should NOT be overridden
fse.ensureFileSync(envFilePath)
fse.writeFileSync(
envFilePath,
`CUSTOM_VAR=custom-value\nDATABASE_URI=example-connection-string\n`,
)
// create an .env.example file to ensure that its contents DO NOT override existing .env vars
fse.ensureFileSync(envExampleFilePath)
fse.writeFileSync(
envExampleFilePath,
`CUSTOM_VAR=custom-value-2\nDATABASE_URI=example-connection-string-2\n`,
)
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseUri: '', // omitting this will ensure the `.env` vars are kept
payloadSecret: '', // omitting this will ensure the `.env` vars are kept
projectDir,
template: undefined,
})
expect(fse.existsSync(envFilePath)).toBe(true)
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toBe(
`# Added by Payload\nPAYLOAD_SECRET=YOUR_SECRET_HERE\nDATABASE_URI=example-connection-string\nCUSTOM_VAR=custom-value`,
)
})
it('sanitizes .env based on selected database type', async () => {
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseType: 'mongodb', // this mimics the CLI selection and will be used as the DATABASE_URI
databaseUri: 'mongodb://localhost:27017/test', // this mimics the CLI selection and will be used as the DATABASE_URI
payloadSecret: 'test-secret', // this mimics the CLI selection and will be used as the PAYLOAD_SECRET
projectDir,
template: undefined,
})
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContent).toBe(
`# Added by Payload\nPAYLOAD_SECRET=test-secret\nDATABASE_URI=mongodb://localhost:27017/test`,
)
// delete the generated .env file and do it again, but this time, omit the databaseUri to ensure the default is generated
fse.removeSync(envFilePath)
await manageEnvFiles({
cliArgs: {
'--debug': true,
} as CliArgs,
databaseType: 'mongodb', // this mimics the CLI selection and will be used as the DATABASE_URI
databaseUri: '', // omit this to ensure the default is generated based on the selected database type
payloadSecret: 'test-secret',
projectDir,
template: undefined,
})
const updatedEnvContentWithDefault = fse.readFileSync(envFilePath, 'utf-8')
expect(updatedEnvContentWithDefault).toBe(
`# Added by Payload\nPAYLOAD_SECRET=test-secret\nDATABASE_URI=mongodb://127.0.0.1/your-database-name`,
)
})
})

View File

@@ -6,42 +6,21 @@ import type { CliArgs, DbType, ProjectTemplate } from '../types.js'
import { debug, error } from '../utils/log.js'
import { dbChoiceRecord } from './select-db.js'
const sanitizeEnv = ({
contents,
databaseType,
databaseUri,
payloadSecret,
}: {
contents: string
databaseType: DbType | undefined
databaseUri?: string
payloadSecret?: string
}): string => {
const updateEnvExampleVariables = (
contents: string,
databaseType: DbType | undefined,
payloadSecret?: string,
databaseUri?: string,
): string => {
const seenKeys = new Set<string>()
// add defaults
let withDefaults = contents
if (
!contents.includes('DATABASE_URI') &&
!contents.includes('POSTGRES_URL') &&
!contents.includes('MONGODB_URI')
) {
withDefaults += '\nDATABASE_URI=your-connection-string-here'
}
if (!contents.includes('PAYLOAD_SECRET')) {
withDefaults += '\nPAYLOAD_SECRET=YOUR_SECRET_HERE'
}
let updatedEnv = withDefaults
const updatedEnv = contents
.split('\n')
.map((line) => {
if (line.startsWith('#') || !line.includes('=')) {
return line
}
const [key, value] = line.split('=')
const [key] = line.split('=')
if (!key) {
return
@@ -49,7 +28,6 @@ const sanitizeEnv = ({
if (key === 'DATABASE_URI' || key === 'POSTGRES_URL' || key === 'MONGODB_URI') {
const dbChoice = databaseType ? dbChoiceRecord[databaseType] : null
if (dbChoice) {
const placeholderUri = databaseUri
? databaseUri
@@ -58,8 +36,6 @@ const sanitizeEnv = ({
databaseType === 'vercel-postgres'
? `POSTGRES_URL=${placeholderUri}`
: `DATABASE_URI=${placeholderUri}`
} else {
line = `${key}=${value}`
}
}
@@ -80,10 +56,6 @@ const sanitizeEnv = ({
.reverse()
.join('\n')
if (!updatedEnv.includes('# Added by Payload')) {
updatedEnv = `# Added by Payload\n${updatedEnv}`
}
return updatedEnv
}
@@ -91,7 +63,7 @@ const sanitizeEnv = ({
export async function manageEnvFiles(args: {
cliArgs: CliArgs
databaseType?: DbType
databaseUri?: string
databaseUri: string
payloadSecret: string
projectDir: string
template?: ProjectTemplate
@@ -105,63 +77,70 @@ export async function manageEnvFiles(args: {
return
}
const pathToEnvExample = path.join(projectDir, '.env.example')
const envExamplePath = path.join(projectDir, '.env.example')
const envPath = path.join(projectDir, '.env')
let exampleEnv: null | string = ''
const emptyEnvContent = `# Added by Payload\nDATABASE_URI=your-connection-string-here\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n`
try {
let updatedExampleContents: string
if (template?.type === 'plugin') {
if (debugFlag) {
debug(`plugin template detected - no .env or .env.example added`)
}
return
}
// If there's a .env.example file, use it to create or update the .env file
if (fs.existsSync(pathToEnvExample)) {
const envExampleContents = await fs.readFile(pathToEnvExample, 'utf8')
exampleEnv = sanitizeEnv({
contents: envExampleContents,
if (!fs.existsSync(envExamplePath)) {
updatedExampleContents = updateEnvExampleVariables(
emptyEnvContent,
databaseType,
databaseUri,
payloadSecret,
})
databaseUri,
)
await fs.writeFile(envExamplePath, updatedExampleContents)
if (debugFlag) {
debug(`.env.example file successfully read`)
debug(`.env.example file successfully created`)
}
} else {
const envExampleContents = await fs.readFile(envExamplePath, 'utf8')
const mergedEnvs = envExampleContents + '\n' + emptyEnvContent
updatedExampleContents = updateEnvExampleVariables(
mergedEnvs,
databaseType,
payloadSecret,
databaseUri,
)
await fs.writeFile(envExamplePath, updatedExampleContents)
if (debugFlag) {
debug(`.env.example file successfully updated`)
}
}
// If there's no .env file, create it using the .env.example content (if it exists)
if (!fs.existsSync(envPath)) {
const envContent = sanitizeEnv({
contents: exampleEnv,
const envContent = updateEnvExampleVariables(
emptyEnvContent,
databaseType,
databaseUri,
payloadSecret,
})
databaseUri,
)
await fs.writeFile(envPath, envContent)
if (debugFlag) {
debug(`.env file successfully created`)
}
} else {
// If the .env file already exists, sanitize it as-is
const envContents = await fs.readFile(envPath, 'utf8')
const updatedEnvContents = sanitizeEnv({
contents: envContents,
const mergedEnvs = envContents + '\n' + emptyEnvContent
const updatedEnvContents = updateEnvExampleVariables(
mergedEnvs,
databaseType,
databaseUri,
payloadSecret,
})
databaseUri,
)
await fs.writeFile(envPath, updatedEnvContents)
if (debugFlag) {
debug(`.env file successfully updated`)
}
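For reference, the `emptyEnvContent` default used above renders to the following file contents (a plain rendering of the escaped string, not new behaviour):

# Added by Payload
DATABASE_URI=your-connection-string-here
PAYLOAD_SECRET=YOUR_SECRET_HERE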

View File

@@ -19,7 +19,7 @@ export async function getLatestPackageVersion({
}) {
try {
const response = await fetch(`https://registry.npmjs.org/${packageName}`)
const data = (await response.json()) as { 'dist-tags': { latest: string } }
const data = await response.json()
const latestVersion = data['dist-tags'].latest
if (debug) {
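A standalone sketch of the typed variant shown above; the registry endpoint and the `dist-tags` shape come from the diff, while the package name and logging are illustrative:

const packageName = 'payload' // hypothetical package
const response = await fetch(`https://registry.npmjs.org/${packageName}`)
const data = (await response.json()) as { 'dist-tags': { latest: string } }
console.log(`latest ${packageName} version:`, data['dist-tags'].latest)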

View File

@@ -1,7 +1,3 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
// Do not include DOM and DOM.Iterable as this is a server-only package.
"lib": ["ES2022"],
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "3.47.0",
"version": "3.39.1",
"description": "The officially supported MongoDB database adapter for Payload",
"homepage": "https://payloadcms.com",
"repository": {
@@ -17,7 +17,6 @@
"url": "https://payloadcms.com"
}
],
"sideEffects": false,
"type": "module",
"exports": {
".": {
@@ -47,7 +46,7 @@
"prepublishOnly": "pnpm clean && pnpm turbo build"
},
"dependencies": {
"mongoose": "8.15.1",
"mongoose": "8.9.5",
"mongoose-paginate-v2": "1.8.5",
"prompts": "2.4.2",
"uuid": "10.0.0"
@@ -57,8 +56,8 @@
"@types/mongoose-aggregate-paginate-v2": "1.0.12",
"@types/prompts": "^2.4.5",
"@types/uuid": "10.0.0",
"mongodb": "6.16.0",
"mongodb-memory-server": "10.1.4",
"mongodb": "6.12.0",
"mongodb-memory-server": "^10",
"payload": "workspace:*"
},
"peerDependencies": {

View File

@@ -40,9 +40,6 @@ export const connect: Connect = async function connect(
// If we are running a replica set with MongoDB Memory Server,
// wait until the replica set elects a primary before proceeding
if (this.mongoMemoryServer) {
this.payload.logger.info(
'Waiting for MongoDB Memory Server replica set to elect a primary...',
)
await new Promise((resolve) => setTimeout(resolve, 2000))
}
@@ -53,7 +50,7 @@ export const connect: Connect = async function connect(
this.beginTransaction = defaultBeginTransaction()
}
if (!hotReload) {
if (!this.mongoMemoryServer && !hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----')
await mongoose.connection.dropDatabase()

View File

@@ -1,49 +1,29 @@
import type { DeleteVersions, FlattenedField } from 'payload'
import { APIError, buildVersionCollectionFields, buildVersionGlobalFields } from 'payload'
import { buildVersionCollectionFields, type DeleteVersions } from 'payload'
import type { MongooseAdapter } from './index.js'
import type { CollectionModel } from './types.js'
import { buildQuery } from './queries/buildQuery.js'
import { getCollection, getGlobal } from './utilities/getEntity.js'
import { getCollection } from './utilities/getEntity.js'
import { getSession } from './utilities/getSession.js'
export const deleteVersions: DeleteVersions = async function deleteVersions(
this: MongooseAdapter,
{ collection: collectionSlug, globalSlug, locale, req, where },
{ collection: collectionSlug, locale, req, where },
) {
let fields: FlattenedField[]
let VersionsModel: CollectionModel
if (globalSlug) {
const { globalConfig, Model } = getGlobal({
adapter: this,
globalSlug,
versions: true,
})
fields = buildVersionGlobalFields(this.payload.config, globalConfig, true)
VersionsModel = Model
} else if (collectionSlug) {
const { collectionConfig, Model } = getCollection({
adapter: this,
collectionSlug,
versions: true,
})
fields = buildVersionCollectionFields(this.payload.config, collectionConfig, true)
VersionsModel = Model
} else {
throw new APIError('Either collection or globalSlug must be passed.')
}
const { collectionConfig, Model } = getCollection({
adapter: this,
collectionSlug,
versions: true,
})
const session = await getSession(this, req)
const query = await buildQuery({
adapter: this,
fields,
fields: buildVersionCollectionFields(this.payload.config, collectionConfig, true),
locale,
where,
})
await VersionsModel.deleteMany(query, { session })
await Model.deleteMany(query, { session })
}

View File

@@ -5,7 +5,11 @@ import mongoose from 'mongoose'
import type { MongooseAdapter } from './index.js'
export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
await mongoose.disconnect()
if (this.mongoMemoryServer) {
await this.mongoMemoryServer.stop()
} else {
await mongoose.disconnect()
}
Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
}

View File

@@ -118,13 +118,6 @@ export interface Args {
*/
useFacet?: boolean
} & ConnectOptions
/**
* We add a secondary sort based on `createdAt` to ensure that results are always returned in the same order when sorting by a non-unique field.
 * This is because MongoDB does not otherwise guarantee the order of results; however, in very large datasets the extra sort could affect performance.
*
* Set to `true` to disable this behaviour.
*/
disableFallbackSort?: boolean
/** Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false */
disableIndexHints?: boolean
/**
@@ -138,7 +131,6 @@ export interface Args {
*/
mongoMemoryServer?: MongoMemoryReplSet
prodMigrations?: Migration[]
transactionOptions?: false | TransactionOptions
/** The URL to connect to MongoDB, or false to start Payload without connecting */
@@ -206,7 +198,6 @@ export function mongooseAdapter({
autoPluralization = true,
collectionsSchemaOptions = {},
connectOptions,
disableFallbackSort = false,
disableIndexHints = false,
ensureIndexes = false,
migrationDir: migrationDirArg,
@@ -260,7 +251,6 @@ export function mongooseAdapter({
deleteOne,
deleteVersions,
destroy,
disableFallbackSort,
find,
findGlobal,
findGlobalVersions,
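As context for the `disableFallbackSort` option touched above, a hedged configuration sketch for the variant that includes it. The package name matches the package.json shown earlier, the `url` option is assumed from the surrounding Args doc comment, and the connection string is illustrative:

import { mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  url: process.env.DATABASE_URI || 'mongodb://127.0.0.1/your-database-name',
  // Opt out of the automatic secondary `createdAt` sort on non-unique sort fields
  disableFallbackSort: true,
})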

View File

@@ -245,13 +245,10 @@ export async function buildSearchParam({
value: { $in },
}
} else {
const nextSubPath = pathsToQuery[i + 1]?.path
if (nextSubPath) {
relationshipQuery = {
value: {
[nextSubPath]: { $in },
},
}
relationshipQuery = {
value: {
_id: { $in },
},
}
}
}
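To illustrate the two branches above, the resulting relationship queries take roughly these shapes (field names and ids are hypothetical):

const $in = ['related-doc-id-1', 'related-doc-id-2']
// when a further sub-path remains on the related document (e.g. querying post.author.name):
const bySubPath = { value: { 'author.name': { $in } } }
// otherwise, match the related document ids directly:
const byId = { value: { _id: { $in } } }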

View File

@@ -1,121 +0,0 @@
import type { Config, SanitizedConfig } from 'payload'
import { sanitizeConfig } from 'payload'
import { buildSortParam } from './buildSortParam.js'
import { MongooseAdapter } from '../index.js'
let config: SanitizedConfig
describe('builds sort params', () => {
beforeAll(async () => {
config = await sanitizeConfig({
localization: {
defaultLocale: 'en',
fallback: true,
locales: ['en', 'es'],
},
} as Config)
})
it('adds a fallback on non-unique field', () => {
const result = buildSortParam({
config,
parentIsLocalized: false,
fields: [
{
name: 'title',
type: 'text',
},
{
name: 'order',
type: 'number',
},
],
locale: 'en',
sort: 'order',
timestamps: true,
adapter: {
disableFallbackSort: false,
} as MongooseAdapter,
})
expect(result).toStrictEqual({ order: 'asc', createdAt: 'desc' })
})
it('adds a fallback when sort isn\'t provided', () => {
const result = buildSortParam({
config,
parentIsLocalized: false,
fields: [
{
name: 'title',
type: 'text',
},
{
name: 'order',
type: 'number',
},
],
locale: 'en',
sort: undefined,
timestamps: true,
adapter: {
disableFallbackSort: false,
} as MongooseAdapter,
})
expect(result).toStrictEqual({ createdAt: 'desc' })
})
it('does not add a fallback on non-unique field when disableFallbackSort is true', () => {
const result = buildSortParam({
config,
parentIsLocalized: false,
fields: [
{
name: 'title',
type: 'text',
},
{
name: 'order',
type: 'number',
},
],
locale: 'en',
sort: 'order',
timestamps: true,
adapter: {
disableFallbackSort: true,
} as MongooseAdapter,
})
expect(result).toStrictEqual({ order: 'asc' })
})
// This behaviour should hold even when disableFallbackSort is false
it('does not add a fallback on unique field', () => {
const result = buildSortParam({
config,
parentIsLocalized: false,
fields: [
{
name: 'title',
type: 'text',
},
{
name: 'order',
type: 'number',
unique: true, // Marking this field as unique
},
],
locale: 'en',
sort: 'order',
timestamps: true,
adapter: {
disableFallbackSort: false,
} as MongooseAdapter,
})
expect(result).toStrictEqual({ order: 'asc' })
})
})

View File

@@ -19,7 +19,7 @@ type Args = {
fields: FlattenedField[]
locale?: string
parentIsLocalized?: boolean
sort?: Sort
sort: Sort
sortAggregation?: PipelineStage[]
timestamps: boolean
versions?: boolean
@@ -77,9 +77,6 @@ const relationshipSort = ({
) {
const relationshipPath = segments.slice(0, i + 1).join('.')
let sortFieldPath = segments.slice(i + 1, segments.length).join('.')
if (sortFieldPath.endsWith('.id')) {
sortFieldPath = sortFieldPath.split('.').slice(0, -1).join('.')
}
if (Array.isArray(field.relationTo)) {
throw new APIError('Not supported')
}
@@ -153,12 +150,6 @@ export const buildSortParam = ({
sort = [sort]
}
// We use this flag to determine whether the sort targets a unique field, which decides whether to add a fallback sort.
const isUniqueSort = sort.some((item) => {
const field = getFieldByPath({ fields, path: item })
return field?.field?.unique
})
// In the case of Mongo, when sorting by a field that is not unique, the results are not guaranteed to be in the same order each time.
// So we add a fallback sort to ensure that the results are always in the same order.
let fallbackSort = '-id'
@@ -167,12 +158,7 @@ export const buildSortParam = ({
fallbackSort = '-createdAt'
}
const includeFallbackSort =
!adapter.disableFallbackSort &&
!isUniqueSort &&
!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))
if (includeFallbackSort) {
if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
sort.push(fallbackSort)
}

View File

@@ -417,7 +417,7 @@ export const sanitizeQueryValue = ({
return buildExistsQuery(
formattedValue,
path,
!['checkbox', 'relationship', 'upload'].includes(field.type),
!['relationship', 'upload'].includes(field.type),
)
}
}

View File

@@ -1,5 +1,5 @@
import type { MongooseUpdateQueryOptions } from 'mongoose'
import type { Job, UpdateJobs, Where } from 'payload'
import type { BaseJob, UpdateJobs, Where } from 'payload'
import type { MongooseAdapter } from './index.js'
@@ -47,7 +47,7 @@ export const updateJobs: UpdateJobs = async function updateMany(
transform({ adapter: this, data, fields: collectionConfig.fields, operation: 'write' })
let result: Job[] = []
let result: BaseJob[] = []
try {
if (id) {

View File

@@ -55,7 +55,6 @@ export const updateOne: UpdateOne = async function updateOne(
try {
if (returning === false) {
await Model.updateOne(query, data, options)
transform({ adapter: this, data, fields, operation: 'read' })
return null
} else {
result = await Model.findOneAndUpdate(query, data, options)

Some files were not shown because too many files have changed in this diff.