Compare commits

..

111 Commits

Author SHA1 Message Date
Elliot DeNolf
e31f72da8e chore(release): plugin-nested-docs/1.0.12 [skip ci] 2024-02-23 14:31:26 -05:00
Elliot DeNolf
7aa058d604 chore(release): db-mongodb/1.4.3 [skip ci] 2024-02-23 14:31:11 -05:00
Elliot DeNolf
64e80d242e chore(release): payload/2.11.2 [skip ci] 2024-02-23 14:29:37 -05:00
Dan Ribbens
e8f2ca484e feat(db-postgres): configurable custom schema to use (#5047)
* feat(db-postgres): configurable custom schema to use

* test(db-postgres): use public schema

* chore(db-postgres): simplify drop schema

* chore: add postgres-custom-schema test to ci

* chore: add custom schema to postgres ci

* chore(db-postgres): custom schema in migrate

* chore: ci postgres wait condition
2024-02-23 12:48:06 -05:00
Dan Ribbens
ceca5c4e97 fix(db-postgres): set _parentID for array nested localized fields (#5117)
* fix(db-postgres): find missing path for nested arrays

* fix(db-postgres): set _parentID for array nested localized fields

* fix: afterRead fallbackLocale causing locale data loss

* chore(richtext-lexical): updated args to match payload type change

* test: simplify localization e2e duplicate
2024-02-23 12:44:30 -05:00
Dan Ribbens
ee13736288 chore(plugin-nested-docs): payload added to peerDependencies (#5143) 2024-02-23 12:37:42 -05:00
Dan Ribbens
815bdfac0b fix(db-mongodb): unique sparse for not required fields (#5114)
* fix(db-mongodb): unique sparse for not required fields

* chore(db-mongodb): cleanup sparse index condition

* test: indexed field fix
2024-02-23 12:36:34 -05:00
Dan Ribbens
7a7f0ed7e8 fix: disabling API Key does not remove the key (#5145)
* fix: disabling API Key does not remove the key

* chore: encryptKey hook return null

* chore: fix auth e2e test setup
2024-02-23 12:31:14 -05:00
Dan Ribbens
ad42d541b3 fix: transaction error from access endpoint (#5156)
* fix: transaction error from access endpoint

* chore: fix async race condition in getEntityPolicies
2024-02-23 12:28:43 -05:00
Elliot DeNolf
32ed95e1ee fix: handle thrown errors in config-level afterError hook (#5147) 2024-02-21 16:44:16 -05:00
Yunsup Sim
70e57fef18 fix: Add Context Provider in EditMany Component (#5005)
* fix: Add  Context Provider in EditMany Component

* test: Fix e2e test error
2024-02-21 16:39:34 -05:00
Jarrod Flesch
0a07f607b9 fix: only replace the drawer content with full edit component if it exists (#5144) 2024-02-21 15:44:09 -05:00
Piotr Rogowski
3918fc7c21 chore(plugin-seo): add pl translations (#5021) 2024-02-21 15:01:38 -05:00
Martin Chełminiak
13f71ac475 chore: console error for missing script when running npm run payload (#5078) 2024-02-19 10:13:50 -05:00
Ben Regenspan
07720e777a docs: Reword Hooks Overview re: server-only execution (#5070) 2024-02-19 09:56:55 -05:00
Sondre Ørland
efff47e400 chore: translation for image cropping in norwegian bokmål (#5113) 2024-02-19 09:47:32 -05:00
Elliot DeNolf
453ac218ea chore: reorder changelog 2024-02-17 01:38:13 -05:00
Elliot DeNolf
d4b09bd9cd chore(release): richtext-lexical/0.7.0 [skip ci] 2024-02-16 15:00:13 -05:00
Elliot DeNolf
dd67e03fc1 chore(release): plugin-search/1.1.0 [skip ci] 2024-02-16 15:00:03 -05:00
Elliot DeNolf
548de80bee chore(release): db-postgres/0.6.0 [skip ci] 2024-02-16 14:59:51 -05:00
Elliot DeNolf
2c05fbbb5e chore(release): plugin-form-builder/1.2.1 [skip ci] 2024-02-16 14:59:15 -05:00
Elliot DeNolf
9b54659818 chore(release): db-mongodb/1.4.2 [skip ci] 2024-02-16 14:58:57 -05:00
Elliot DeNolf
e9f550406e chore(release): payload/2.11.1 [skip ci] 2024-02-16 14:57:28 -05:00
Dan Ribbens
98b87e2278 feat(plugin-search): add req to beforeSync args for transactions (#5068)
* feat(plugin-search): pass `req` to beforeSync to support using transactions

* fix(plugin-search): hooks do not respect transactions

* chore(plugin-search): await hooks

* chore: remove eslint disable comments
2024-02-16 14:20:40 -05:00
Dan Ribbens
5f3d0169be fix: filterOptions errors cause transaction to abort (#5079)
* fix: filterOptions errors cause transaction to abort

* fix(db-mongodb): uncaught abortTransaction race condition

* chore: remove test that is not adding value

* chore: limit options on errors in filterOptions

* chore: limit options when an error occurs in filterOptions
2024-02-16 13:33:40 -05:00
Dan Ribbens
35c2a085ef fix(db-postgres): query using blockType (#5044)
* fix(db-postgres): query using blockType

* chore: cleanup commented lines
2024-02-16 13:30:26 -05:00
Dan Ribbens
1ac943ed5e fix: remove collection findByID caching (#5034) 2024-02-16 13:23:57 -05:00
Elliot DeNolf
25cee8bb10 fix(uploads): account for serverURL when retrieving external file (#5102) 2024-02-16 13:11:18 -05:00
Elliot DeNolf
419aef452d chore: add .localstack to gitignore 2024-02-16 12:53:28 -05:00
Elliot DeNolf
ea52489126 Merge pull request #4295 from payloadcms/test/plugin-cloud-storage-emulators
test(plugin-cloud-storage): use localstack for tests
2024-02-16 12:47:13 -05:00
Elliot DeNolf
e80c70acae test: cleanup 2024-02-16 12:33:07 -05:00
Elliot DeNolf
70b0064d0b test: adjust adapter log message 2024-02-16 11:32:00 -05:00
Elliot DeNolf
9636bf6efd test: rename .env -> .env.emulated, safely assert bucket contents 2024-02-16 11:31:14 -05:00
Elliot DeNolf
8f4d0da4e0 test: conditionally run plugin-cloud-storage 2024-02-16 11:31:14 -05:00
Elliot DeNolf
f0f1dbdcb0 ci: pnpm docker:start 2024-02-16 11:31:14 -05:00
Elliot DeNolf
a895aee8b1 ci: add localstack aws envs 2024-02-16 11:31:14 -05:00
Elliot DeNolf
aa1dac08c1 ci: add localstack setup 2024-02-16 11:31:14 -05:00
Elliot DeNolf
b8cd1c6ba4 test(plugin-cloud-storage): add test against localstack 2024-02-16 11:31:12 -05:00
Elliot DeNolf
6344464bc6 test(plugin-cloud-storage): add single docker compose for all emulators 2024-02-16 11:30:55 -05:00
Dan Ribbens
5d4022f144 fix(db-mongodb): find versions pagination (#5091) 2024-02-15 16:14:01 -05:00
Dan Ribbens
bf942fdfa6 feat(db-postgres): reconnect after disconnection from database (#5086) 2024-02-15 16:09:17 -05:00
Dan Ribbens
d6c25783cf feat(db-postgres): adds idType to use uuid or serial id columns (#3864)
* feat(db-postgres): WIP adds idType to use uuid or serial id columns

* chore: add postgres-uuid test ci

* chore: add postgres-uuid env vars

* chore: sanitizeQueryValue prevent invalid types

* fix(db-postgres): invalid parentID of nested arrays
2024-02-15 16:06:37 -05:00
Dan Ribbens
82e9d31127 fix(plugin-form-builder): hooks do not respect transactions (#5069)
* fix(plugin-form-builder): hooks do not respect transactions

* chore(plugin-form-builder): linting and cleanup
2024-02-15 15:55:42 -05:00
Elliot DeNolf
399e606b34 chore: use ref for pnpm overrides (#5081) 2024-02-13 12:37:43 -05:00
Alessio Gravili
0d18822062 feat(richtext-lexical)!: Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
* feat(richtext-lexical): Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground

* chore: upgrade lexical version used in monorepo
2024-02-12 17:54:50 +01:00
Alessio Gravili
00fc0343da feat(richtext-lexical): AddBlock handle for all nodes, even if they aren't empty paragraphs (#5063) 2024-02-12 16:11:41 +01:00
Alessio Gravili
6323965c65 fix(richtext-lexical): do not remove adjacent paragraph node when inserting certain nodes in empty editor (#5061) 2024-02-12 14:27:58 +01:00
Máté Tallósi
6d6823c3e5 feat(richtext-lexical): add justify aligment to AlignFeature (#4035) (#4868) 2024-02-12 14:27:12 +01:00
Alessio Gravili
ca70298436 chore: upgrade nodemon versions (#5059) 2024-02-12 14:11:57 +01:00
Elliot DeNolf
4f565759f6 chore(release): payload/2.11.0 [skip ci] 2024-02-09 16:12:03 -05:00
Jarrod Flesch
df39602758 feat: exposes collapsible provider with more functionality (#5043) 2024-02-09 10:38:30 -05:00
Elliot DeNolf
6ea6172afa chore(release): db-postgres/0.5.2 [skip ci] 2024-02-09 09:06:15 -05:00
Elliot DeNolf
486774796d chore(release): db-mongodb/1.4.1 [skip ci] 2024-02-09 09:06:06 -05:00
Elliot DeNolf
1cd1c38764 chore(release): payload/2.10.1 [skip ci] 2024-02-09 09:04:42 -05:00
Elliot DeNolf
f6d7da7510 fix: clearable cells handle null values (#5038) 2024-02-09 08:59:38 -05:00
Elliot DeNolf
cdc4cb971b fix(db-mongodb): handle null values with exists (#5037) 2024-02-09 08:58:10 -05:00
Elliot DeNolf
e0191b54e1 chore(release): richtext-lexical/0.6.1 [skip ci] 2024-02-08 11:49:02 -05:00
Alessio Gravili
2315781f18 fix(richtext-lexical): make editor reactive to initialValue changes (#5010) 2024-02-08 15:30:21 +01:00
Elliot DeNolf
a0a58e7fd2 fix: query relationships by explicit id field (#5022) 2024-02-07 14:18:13 -05:00
Jessica Chowdhury
e1813fb884 fix: ensures docs with the same id are shown in relationship field select (#4859) 2024-02-07 14:04:03 -05:00
Elliot DeNolf
da184d40ec fix(db-postgres): handle nested docs with drafts (#5012) 2024-02-06 21:27:33 -05:00
Elliot DeNolf
ca8675f89d chore(release): plugin-seo/2.2.1 [skip ci] 2024-02-06 15:41:58 -05:00
Elliot DeNolf
e8c6c9338d chore(release): db-postgres/0.5.1 [skip ci] 2024-02-06 15:41:35 -05:00
Elliot DeNolf
558534aff8 chore(release): richtext-lexical/0.6.0 [skip ci] 2024-02-06 15:41:04 -05:00
Elliot DeNolf
29c901ba9b chore(release): payload/2.10.0 [skip ci] 2024-02-06 15:38:33 -05:00
Elliot DeNolf
f3876c2a39 fix(db-postgres): localized field sort count (#4997)
* fix(db-postgres): localized field sort count

* test: localized sort doc count
2024-02-06 11:44:54 -05:00
Elliot DeNolf
c3a3942969 fix(db-postgres): filtering relationships with drafts enabled (#4998)
* fix(db-postgres): filtering relationships with drafts enabled

* test: draft relationship filtering
2024-02-06 11:16:27 -05:00
Paul
23b135b963 fix(templates): fix conflicting routes with Nextjs (#4725)
* Updated templates and readme to note conflicting routes

* Move information in readmes to blockquotes and move next-api to just next

* Remove unnecessary notes
2024-02-06 10:56:20 -05:00
Daniel Kirchhof
e3c8105cc2 feat: use deletion success message from server if provided (#4966) 2024-02-06 10:17:53 -05:00
Jarrod Flesch
2c71aaef75 chore: standardize req passed through the local API (#4994) 2024-02-05 15:43:59 -05:00
brachypelma
922fb9b7fa docs: added link to JSON field type (#4989) 2024-02-05 13:13:28 -05:00
Elliot DeNolf
0740d5095e fix(migrations): safely create migration file when no name passed (#4995) 2024-02-05 10:43:19 -05:00
Jacob Fletcher
b392d656fe chore(examples/multi-tentant): resets lastLoggedInTenant when none found (#4984) 2024-02-02 15:50:12 -05:00
Elliot DeNolf
c0eef90cdc ci: supabase (#4983)
* ci: supabase

* test: add supabase to adapter record

* test: adjust index tests conditional
2024-02-02 15:26:18 -05:00
Dan Ribbens
db22cbdf21 fix(plugin-seo): tabbedUI with email field causes duplicate field (#4944)
* fix(plugin-seo): tabbedUI with email field causes duplicate field

* chore(plugin-seo): code comment
2024-02-02 14:53:51 -05:00
Dan Ribbens
1e8a6b7899 feat: extend transactions to cover after and beforeOperation hooks (#4960)
* feat: extend transactions to cover after and beforeOperation hooks

* feat: use transactions in refresh operation

* docs: add req to beforeOperation and afterOperation args
2024-02-02 14:53:14 -05:00
Dan Ribbens
5d934ba02d feat: previousValue and previousSiblingDoc args added to beforeChange field hooks (#4958)
* feat: previousValue and previousSiblingDoc args added to beforeChange field hooks

* chore: fieldHook type docs
2024-02-02 13:59:11 -05:00
Elliot DeNolf
f651665f2f chore(deps): add pnpm overrides for common packages (#4980)
* chore(deps): add pnpm overrides for common packages

* chore(deps): bump @swc/core for compat w/ latest typescript
2024-02-02 13:21:57 -05:00
Elliot DeNolf
5d3659d48a fix(db-postgres): handle schema changes with supabase (#4968)
* fix(db-postgres): handle schema changes with supabase

* chore(deps): bump drizzle-orm for test suite
2024-02-02 11:29:32 -05:00
Dan Ribbens
47106d5a1a fix(db-postgres): indexes not creating for relationships, arrays, hasmany and blocks (#4976) 2024-02-02 11:16:21 -05:00
Jarrod Flesch
afa2b942e0 fix: ensures docPermissions fallback to collection permissions on create (#4969) 2024-02-01 16:54:52 -05:00
Dan Ribbens
20ddd0de5b chore: add indexes to preferences for performance (#4965) 2024-02-01 15:31:31 -05:00
Dan Ribbens
64f705c3c9 fix(db-postgres): indexes not created for non unique field names (#4967) 2024-02-01 15:21:15 -05:00
Daniel Shamburger
b30ea8aa6b docs: typo (#4948) 2024-01-31 15:40:55 -05:00
Mark Paolo Libunao
471d2113a7 feat: re-use existing logger instance passed to payload.init (#3124) 2024-01-31 15:40:14 -05:00
Alessio Gravili
8725d41164 feat: add more options to addFieldStatePromise so that it can be used for field flattening (#4799) 2024-01-30 23:02:58 +01:00
Dan Ribbens
0bd81aa25a fix(templates-ecommerce): hook errors for non-string ids (#4946) 2024-01-30 13:28:14 -05:00
Dan Ribbens
8c09ca9be5 examples: fix multi-tenant hooks to use transactions (#4955) 2024-01-30 13:26:13 -05:00
Alessio Gravili
90d7ee3e65 feat(richtext-lexical): Blocks: generate type definitions for blocks fields (#4529) 2024-01-30 16:51:18 +01:00
Kendell Joseph
58bbd8c00f chore(examples/hierarchy): adds hierarchy example (#4923) 2024-01-26 16:17:11 -05:00
Elliot DeNolf
003ad065c3 chore(release): plugin-cloud-storage/1.1.2 [skip ci] 2024-01-26 13:58:13 -05:00
Elliot DeNolf
70715926a8 chore(release): richtext-slate/1.4.0 [skip ci] 2024-01-26 13:57:45 -05:00
Elliot DeNolf
b3a6bfacf2 chore(release): db-postgres/0.5.0 [skip ci] 2024-01-26 13:57:34 -05:00
Elliot DeNolf
e1d9accb27 chore(release): db-mongodb/1.4.0 [skip ci] 2024-01-26 13:57:23 -05:00
Elliot DeNolf
f2f55a84cc chore(release): payload/2.9.0 [skip ci] 2024-01-26 13:55:20 -05:00
Dan Ribbens
eba53ba60a feat: forceAcceptWarning migration arg added to accept prompts (#4874)
* chore: gitignore test migrations

* feat: `forceAcceptWarning` migration args added to accept prompts

* chore: migrationDir env variable fallback

* chore: migrationDir testSuiteDir fallback

* chore: migrationDir testSuiteDir fallback fix

* chore: skip migrate down test
2024-01-26 13:48:53 -05:00
Dan Ribbens
f73d503fec fix(plugin-cloud-storage): slow get file performance large collections (#4927) 2024-01-26 13:43:55 -05:00
Dan Ribbens
6930c4e9f2 fix: upload input drawer does not show draft versions (#4903)
* chore: add field classname to upload field

* fix: upload input drawer does not show draft versions
2024-01-26 13:42:32 -05:00
Dan Ribbens
3eb681e847 fix: afterLogin hook write conflicts (#4904)
* fix: afterLogin hook conflict

* test: afterLogin hook returns for assertion

* chore: commit increment login attempt
2024-01-26 13:39:45 -05:00
Jarrod Flesch
cb4638cfa1 chore: make default views callable (#4928) 2024-01-26 13:38:36 -05:00
Dan Ribbens
b40e9f85a2 chore: use transactions in tests running mongoDB memory server (#4750)
* chore: use transactions in tests running mongoDB memory server

* chore: relationship test async setup changes

* chore: async test fix

* chore: flaky e2e localization test
2024-01-23 19:18:55 -05:00
Dan Ribbens
e5a7907a72 fix: remove No Options dropdown from hasMany fields (#4899) 2024-01-23 10:00:16 -05:00
Jarrod Flesch
3f25d1ca84 chore: re-orders request language detection (#4890) 2024-01-22 11:53:01 -05:00
Timothy Choi
d5720bea7b chore: add fieldIsGroupType type guard helper (#4872) 2024-01-19 14:19:35 -05:00
Jesse Sivonen
8ce15c8b07 fix(db-postgres): query unset relation (#4862) 2024-01-19 13:35:58 -05:00
Timothy Choi
9f5efef78f chore: lint #4766 (#4801)
* fix: import location for config test

* fix: linting fix
2024-01-18 09:25:52 -05:00
Dan Ribbens
dfba5222f3 fix(db-postgres): migrate down error (#4861) 2024-01-17 13:55:57 -05:00
Dan Ribbens
b99d24fcfa fix: migrate down missing filter for latest batch (#4860) 2024-01-17 13:54:30 -05:00
Elliot DeNolf
836ed77568 chore: update changelog [skip ci] 2024-01-16 16:39:42 -05:00
Elliot DeNolf
1c5d5b07c8 chore(release): plugin-seo/2.2.0 [skip ci] 2024-01-16 16:33:53 -05:00
Elliot DeNolf
da5f1f2240 chore(release): plugin-form-builder/1.2.0 [skip ci] 2024-01-16 16:33:44 -05:00
263 changed files with 14445 additions and 4650 deletions

View File

@@ -2,9 +2,9 @@ name: build
on:
pull_request:
types: [opened, reopened, synchronize]
types: [ opened, reopened, synchronize ]
push:
branches: ['main']
branches: [ 'main' ]
jobs:
changes:
@@ -15,25 +15,25 @@ jobs:
needs_build: ${{ steps.filter.outputs.needs_build }}
templates: ${{ steps.filter.outputs.templates }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 25
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
needs_build:
- '.github/workflows/**'
- 'packages/**'
- 'test/**'
- 'pnpm-lock.yaml'
- 'package.json'
templates:
- 'templates/**'
- name: Log all filter results
run: |
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
echo "templates: ${{ steps.filter.outputs.templates }}"
- uses: actions/checkout@v4
with:
fetch-depth: 25
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
needs_build:
- '.github/workflows/**'
- 'packages/**'
- 'test/**'
- 'pnpm-lock.yaml'
- 'package.json'
templates:
- 'templates/**'
- name: Log all filter results
run: |
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
echo "templates: ${{ steps.filter.outputs.templates }}"
core-build:
needs: changes
@@ -85,11 +85,15 @@ jobs:
strategy:
fail-fast: false
matrix:
database: [mongoose, postgres]
database: [mongoose, postgres, postgres-custom-schema, postgres-uuid, supabase]
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: payloadtests
AWS_ENDPOINT_URL: http://127.0.0.1:4566
AWS_ACCESS_KEY_ID: localstack
AWS_SECRET_ACCESS_KEY: localstack
AWS_REGION: us-east-1
steps:
- name: Use Node.js 18
@@ -109,6 +113,9 @@ jobs:
path: ./*
key: ${{ github.sha }}-${{ github.run_number }}
- name: Start LocalStack
run: pnpm docker:start
- name: Start PostgreSQL
uses: CasperWA/postgresql-action@v1.2
with:
@@ -116,15 +123,40 @@ jobs:
postgresql db: ${{ env.POSTGRES_DB }}
postgresql user: ${{ env.POSTGRES_USER }}
postgresql password: ${{ env.POSTGRES_PASSWORD }}
if: matrix.database == 'postgres'
if: startsWith(matrix.database, 'postgres')
- name: Install Supabase CLI
uses: supabase/setup-cli@v1
with:
version: latest
if: matrix.database == 'supabase'
- name: Initialize Supabase
run: |
supabase init
supabase start
if: matrix.database == 'supabase'
- name: Wait for PostgreSQL
run: sleep 30
if: startsWith(matrix.database, 'postgres')
- run: sleep 30
- name: Configure PostgreSQL
run: |
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE ROLE runner SUPERUSER LOGIN;"
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "SELECT version();"
echo "POSTGRES_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" >> $GITHUB_ENV
if: matrix.database == 'postgres'
if: startsWith(matrix.database, 'postgres')
- name: Configure PostgreSQL with custom schema
run: |
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE SCHEMA custom;"
if: matrix.database == 'postgres-custom-schema'
- name: Configure Supabase
run: |
echo "POSTGRES_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres" >> $GITHUB_ENV
if: matrix.database == 'supabase'
- name: Component Tests
run: pnpm test:components
@@ -142,7 +174,7 @@ jobs:
strategy:
fail-fast: false
matrix:
part: [1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8]
part: [ 1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8 ]
steps:
- name: Use Node.js 18
@@ -290,7 +322,7 @@ jobs:
strategy:
fail-fast: false
matrix:
template: [blank, website, ecommerce]
template: [ blank, website, ecommerce ]
steps:
- uses: actions/checkout@v4

.gitignore
View File

@@ -6,7 +6,9 @@ dist
test-results
.devcontainer
.localstack
/migrations
.localstack
# Created by https://www.toptal.com/developers/gitignore/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos,windows,webstorm,sublimetext,visualstudiocode

View File

@@ -1,5 +1,5 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<method v="2" />
</configuration>
</component>

View File

@@ -1,5 +1,5 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<method v="2" />
</configuration>
</component>

View File

@@ -1,11 +1,120 @@
## [2.11.2](https://github.com/payloadcms/payload/compare/v2.11.1...v2.11.2) (2024-02-23)
### Features
* **db-postgres:** configurable custom schema to use ([#5047](https://github.com/payloadcms/payload/issues/5047)) ([e8f2ca4](https://github.com/payloadcms/payload/commit/e8f2ca484ee56cd7767d5111e46ebd24752ff8de))
### Bug Fixes
* Add Context Provider in EditMany Component ([#5005](https://github.com/payloadcms/payload/issues/5005)) ([70e57fe](https://github.com/payloadcms/payload/commit/70e57fef184f7fcf56344ea755465f246f2253a5))
* **db-mongodb:** unique sparse for not required fields ([#5114](https://github.com/payloadcms/payload/issues/5114)) ([815bdfa](https://github.com/payloadcms/payload/commit/815bdfac0b0afbff2a20e54d5aee64b90f6b3a77))
* **db-postgres:** set _parentID for array nested localized fields ([#5117](https://github.com/payloadcms/payload/issues/5117)) ([ceca5c4](https://github.com/payloadcms/payload/commit/ceca5c4e97f53f1346797a31b6abfc0375e98215))
* disabling API Key does not remove the key ([#5145](https://github.com/payloadcms/payload/issues/5145)) ([7a7f0ed](https://github.com/payloadcms/payload/commit/7a7f0ed7e8132253be607c111c160163b84bd770))
* handle thrown errors in config-level afterError hook ([#5147](https://github.com/payloadcms/payload/issues/5147)) ([32ed95e](https://github.com/payloadcms/payload/commit/32ed95e1ee87409db234f1b7bd6d2e462fd9ed5d))
* only replace the drawer content with full edit component if it exists ([#5144](https://github.com/payloadcms/payload/issues/5144)) ([0a07f60](https://github.com/payloadcms/payload/commit/0a07f607b9fb1217ad956cd05b2a84a4042a19ca))
* transaction error from access endpoint ([#5156](https://github.com/payloadcms/payload/issues/5156)) ([ad42d54](https://github.com/payloadcms/payload/commit/ad42d541b342ed56463b81cee6d6307df6f06d7f))
## [2.11.1](https://github.com/payloadcms/payload/compare/v2.11.0...v2.11.1) (2024-02-16)
### Features
* **db-postgres:** adds idType to use uuid or serial id columns ([#3864](https://github.com/payloadcms/payload/issues/3864)) ([d6c2578](https://github.com/payloadcms/payload/commit/d6c25783cfa97983bf9db27ceb5ccd39a62c62f1))
* **db-postgres:** reconnect after disconnection from database ([#5086](https://github.com/payloadcms/payload/issues/5086)) ([bf942fd](https://github.com/payloadcms/payload/commit/bf942fdfa6ea9c26cf05295cc9db646bf31fa622))
* **plugin-search:** add req to beforeSync args for transactions ([#5068](https://github.com/payloadcms/payload/issues/5068)) ([98b87e2](https://github.com/payloadcms/payload/commit/98b87e22782c0a788f79326f22be05a6b176ad74))
* **richtext-lexical:** add justify aligment to AlignFeature ([#4035](https://github.com/payloadcms/payload/issues/4035)) ([#4868](https://github.com/payloadcms/payload/issues/4868)) ([6d6823c](https://github.com/payloadcms/payload/commit/6d6823c3e5609a58eeeeb8d043945a762f9463df))
* **richtext-lexical:** AddBlock handle for all nodes, even if they aren't empty paragraphs ([#5063](https://github.com/payloadcms/payload/issues/5063)) ([00fc034](https://github.com/payloadcms/payload/commit/00fc0343dabf184d5bab418d47c403b3ad11698f))
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground ([#5066](https://github.com/payloadcms/payload/issues/5066)) ([0d18822](https://github.com/payloadcms/payload/commit/0d18822062275c1826c8e2c3da2571a2b3483310))
### Bug Fixes
* **db-mongodb:** find versions pagination ([#5091](https://github.com/payloadcms/payload/issues/5091)) ([5d4022f](https://github.com/payloadcms/payload/commit/5d4022f1445e2809c01cb1dd599280f0a56cdc6e))
* **db-postgres:** query using blockType ([#5044](https://github.com/payloadcms/payload/issues/5044)) ([35c2a08](https://github.com/payloadcms/payload/commit/35c2a085efa6d5ad59779960874bc9728a17e3a0))
* filterOptions errors cause transaction to abort ([#5079](https://github.com/payloadcms/payload/issues/5079)) ([5f3d016](https://github.com/payloadcms/payload/commit/5f3d0169bee21e1c0963dbd7ede9fe5f1c46a5a5))
* **plugin-form-builder:** hooks do not respect transactions ([#5069](https://github.com/payloadcms/payload/issues/5069)) ([82e9d31](https://github.com/payloadcms/payload/commit/82e9d31127c8df83c5bed92a5ffdab76d331900f))
* remove collection findByID caching ([#5034](https://github.com/payloadcms/payload/issues/5034)) ([1ac943e](https://github.com/payloadcms/payload/commit/1ac943ed5e8416883b863147fdf3c23380955559))
* **richtext-lexical:** do not remove adjacent paragraph node when inserting certain nodes in empty editor ([#5061](https://github.com/payloadcms/payload/issues/5061)) ([6323965](https://github.com/payloadcms/payload/commit/6323965c652ea68dffeb716957b124d165b9ce96))
* **uploads:** account for serverURL when retrieving external file ([#5102](https://github.com/payloadcms/payload/issues/5102)) ([25cee8b](https://github.com/payloadcms/payload/commit/25cee8bb102bf80b3a4bfb4b4e46712722cc7f0d))
### ⚠ BREAKING CHANGES: @payloadcms/richtext-lexical
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
- You HAVE to make sure that any versions of the lexical packages (IF you have any installed) match the lexical version which richtext-lexical uses: v0.13.1. If you do not do this, you may be plagued by React useContext / "cannot find active editor state" errors
- Updates to lexical's API, e.g. the removal of INTERNAL_isPointSelection, could be breaking depending on your code. Please consult the [lexical changelog](https://github.com/facebook/lexical/blob/main/CHANGELOG.md).
## [2.11.0](https://github.com/payloadcms/payload/compare/v2.10.1...v2.11.0) (2024-02-09)
### Features
* exposes collapsible provider with more functionality ([#5043](https://github.com/payloadcms/payload/issues/5043)) ([df39602](https://github.com/payloadcms/payload/commit/df39602758ae8dc3765bb48e51f7a657babfa559))
## [2.10.1](https://github.com/payloadcms/payload/compare/v2.10.0...v2.10.1) (2024-02-09)
### Bug Fixes
* clearable cells handle null values ([#5038](https://github.com/payloadcms/payload/issues/5038)) ([f6d7da7](https://github.com/payloadcms/payload/commit/f6d7da751039df25066b51bb91d6453e1a4efd82))
* **db-mongodb:** handle null values with exists ([#5037](https://github.com/payloadcms/payload/issues/5037)) ([cdc4cb9](https://github.com/payloadcms/payload/commit/cdc4cb971b9180ba2ed09741f5af1a3c18292828))
* **db-postgres:** handle nested docs with drafts ([#5012](https://github.com/payloadcms/payload/issues/5012)) ([da184d4](https://github.com/payloadcms/payload/commit/da184d40ece74bffb224002eb5df8f6987d65043))
* ensures docs with the same id are shown in relationship field select ([#4859](https://github.com/payloadcms/payload/issues/4859)) ([e1813fb](https://github.com/payloadcms/payload/commit/e1813fb884e0dc84203fcbab87527a99a4d3a5d7))
* query relationships by explicit id field ([#5022](https://github.com/payloadcms/payload/issues/5022)) ([a0a58e7](https://github.com/payloadcms/payload/commit/a0a58e7fd20dff54d210c968f4d5defd67441bdd))
* **richtext-lexical:** make editor reactive to initialValue changes ([#5010](https://github.com/payloadcms/payload/issues/5010)) ([2315781](https://github.com/payloadcms/payload/commit/2315781f1891ddde4b4c5f2f0cfa1c17af85b7a9))
## [2.10.0](https://github.com/payloadcms/payload/compare/v2.9.0...v2.10.0) (2024-02-06)
### Features
* add more options to addFieldStatePromise so that it can be used for field flattening ([#4799](https://github.com/payloadcms/payload/issues/4799)) ([8725d41](https://github.com/payloadcms/payload/commit/8725d411645bb0270376e235669f46be2227ecc0))
* extend transactions to cover after and beforeOperation hooks ([#4960](https://github.com/payloadcms/payload/issues/4960)) ([1e8a6b7](https://github.com/payloadcms/payload/commit/1e8a6b7899f7b1e6451cc4d777602208478b483c))
* previousValue and previousSiblingDoc args added to beforeChange field hooks ([#4958](https://github.com/payloadcms/payload/issues/4958)) ([5d934ba](https://github.com/payloadcms/payload/commit/5d934ba02d07d98f781ce983228858ee5ce5c226))
* re-use existing logger instance passed to payload.init ([#3124](https://github.com/payloadcms/payload/issues/3124)) ([471d211](https://github.com/payloadcms/payload/commit/471d2113a790dc0d54b2f8ed84e6899310efd600))
* **richtext-lexical:** Blocks: generate type definitions for blocks fields ([#4529](https://github.com/payloadcms/payload/issues/4529)) ([90d7ee3](https://github.com/payloadcms/payload/commit/90d7ee3e6535d51290fc734b284ff3811dbda1f8))
* use deletion success message from server if provided ([#4966](https://github.com/payloadcms/payload/issues/4966)) ([e3c8105](https://github.com/payloadcms/payload/commit/e3c8105cc2ed6fdf8007d97cd7b5556fc71ed724))
### Bug Fixes
* **db-postgres:** filtering relationships with drafts enabled ([#4998](https://github.com/payloadcms/payload/issues/4998)) ([c3a3942](https://github.com/payloadcms/payload/commit/c3a39429697e9d335e9be199e7caafb82eb26219))
* **db-postgres:** handle schema changes with supabase ([#4968](https://github.com/payloadcms/payload/issues/4968)) ([5d3659d](https://github.com/payloadcms/payload/commit/5d3659d48ad8bbf5d96fbcd80434d2287cab97e0))
* **db-postgres:** indexes not created for non unique field names ([#4967](https://github.com/payloadcms/payload/issues/4967)) ([64f705c](https://github.com/payloadcms/payload/commit/64f705c3c94148972f67e8175e718015760d6430))
* **db-postgres:** indexes not creating for relationships, arrays, hasmany and blocks ([#4976](https://github.com/payloadcms/payload/issues/4976)) ([47106d5](https://github.com/payloadcms/payload/commit/47106d5a1af2ebd073fbbc6e474174c3d3835e5c))
* **db-postgres:** localized field sort count ([#4997](https://github.com/payloadcms/payload/issues/4997)) ([f3876c2](https://github.com/payloadcms/payload/commit/f3876c2a39efe19a1864213306725aadcc14f130))
* ensures docPermissions fallback to collection permissions on create ([#4969](https://github.com/payloadcms/payload/issues/4969)) ([afa2b94](https://github.com/payloadcms/payload/commit/afa2b942e0aad90c55744ae13e0ffe1cefa4585d))
* **migrations:** safely create migration file when no name passed ([#4995](https://github.com/payloadcms/payload/issues/4995)) ([0740d50](https://github.com/payloadcms/payload/commit/0740d5095ee1aef13e4e37f6b174d529f0f2d993))
* **plugin-seo:** tabbedUI with email field causes duplicate field ([#4944](https://github.com/payloadcms/payload/issues/4944)) ([db22cbd](https://github.com/payloadcms/payload/commit/db22cbdf21a39ed0604ab96c57ca4242eac82ce7))
## [2.9.0](https://github.com/payloadcms/payload/compare/v2.8.2...v2.9.0) (2024-01-26)
### Features
* forceAcceptWarning migration arg added to accept prompts ([#4874](https://github.com/payloadcms/payload/issues/4874)) ([eba53ba](https://github.com/payloadcms/payload/commit/eba53ba60afd7c5d37389377ed06a9b556058d49))
### Bug Fixes
* afterLogin hook write conflicts ([#4904](https://github.com/payloadcms/payload/issues/4904)) ([3eb681e](https://github.com/payloadcms/payload/commit/3eb681e847e9c55eaaa69c22bea4f4e66c7eac36))
* **db-postgres:** migrate down error ([#4861](https://github.com/payloadcms/payload/issues/4861)) ([dfba522](https://github.com/payloadcms/payload/commit/dfba5222f3abf3f236dc9212a28e1aec7d7214d5))
* **db-postgres:** query unset relation ([#4862](https://github.com/payloadcms/payload/issues/4862)) ([8ce15c8](https://github.com/payloadcms/payload/commit/8ce15c8b07800397a50dcf790c263ed5b3cfad53))
* migrate down missing filter for latest batch ([#4860](https://github.com/payloadcms/payload/issues/4860)) ([b99d24f](https://github.com/payloadcms/payload/commit/b99d24fcfa698c493ea01c41621201abe18fabe3))
* **plugin-cloud-storage:** slow get file performance large collections ([#4927](https://github.com/payloadcms/payload/issues/4927)) ([f73d503](https://github.com/payloadcms/payload/commit/f73d503fecdfa5cefdc26ab9aad60b00563f881e))
* remove No Options dropdown from hasMany fields ([#4899](https://github.com/payloadcms/payload/issues/4899)) ([e5a7907](https://github.com/payloadcms/payload/commit/e5a7907a72c1371447ac2f71fce213ed22246092))
* upload input drawer does not show draft versions ([#4903](https://github.com/payloadcms/payload/issues/4903)) ([6930c4e](https://github.com/payloadcms/payload/commit/6930c4e9f2200853121391ad8f8df48ea66c40a4))
## [2.8.2](https://github.com/payloadcms/payload/compare/v2.8.1...v2.8.2) (2024-01-16)
### Features
* **db-postgres:** support drizzle logging config ([#4809](https://github.com/payloadcms/payload/issues/4809)) ([371353f](https://github.com/payloadcms/payload/commit/371353f1535fbab4ebd9f56fc14fd10a30eec289))
* **plugin-form-builder:** add validation for form ID when creating a submission
* **plugin-seo:** allow field and interface overrides
* **plugin-form-builder:** add validation for form ID when creating a submission ([#4730](https://github.com/payloadcms/payload/pull/4730))
* **plugin-seo:** add support for interfaceName and fieldOverrides ([#4695](https://github.com/payloadcms/payload/pull/4695))
### Bug Fixes
@@ -14,6 +123,7 @@
* **db-postgres:** Remove duplicate keys from response ([#4747](https://github.com/payloadcms/payload/issues/4747)) ([eb9e771](https://github.com/payloadcms/payload/commit/eb9e771a9ca03636486d36654f215b73435574cb))
* **db-postgres:** validateExistingBlockIsIdentical with arrays ([3b88adc](https://github.com/payloadcms/payload/commit/3b88adc7d0594af63ce190c40c9ee3905df67a31))
* **db-postgres:** validateExistingBlockIsIdentical with other tables ([0647c87](https://github.com/payloadcms/payload/commit/0647c870f15dc1b122734b678c2abeb6f56377d4))
* **plugin-seo:** fix missing spread operator in URL generator function ([#4723](https://github.com/payloadcms/payload/pull/4723))
* removes max-width from field-types class & correctly sets it on uploads ([#4829](https://github.com/payloadcms/payload/issues/4829)) ([ee5390a](https://github.com/payloadcms/payload/commit/ee5390aaca37a4154cde8392b60f091ec3e5175c))
## [2.8.1](https://github.com/payloadcms/payload/compare/v2.8.0...v2.8.1) (2024-01-12)

View File

@@ -635,6 +635,37 @@ export const CustomArrayManager = () => {
]}
/>
### useCollapsible
The `useCollapsible` hook allows you to control parent collapsibles:
| Property | Description |
|---------------------------|--------------------------------------------------------------------------------------------------------------------|
| **`collapsed`**           | State of the collapsible. `true` if collapsed, `false` if open                                                      |
| **`isVisible`**           | If nested, determines whether the nearest collapsible is visible. `true` if no parent is closed, `false` otherwise  |
| **`toggle`**              | Toggles the state of the nearest collapsible                                                                         |
| **`withinCollapsible`**   | Determines whether you are within another collapsible                                                                |
**Example:**
```tsx
import React from 'react'
import { useCollapsible } from 'payload/components/utilities'
const CustomComponent: React.FC = () => {
const { collapsed, toggle } = useCollapsible()
return (
<div>
<p className="field-type">I am {collapsed ? 'closed' : 'open'}</p>
<button onClick={toggle} type="button">
Toggle
</button>
</div>
)
}
```
### useDocumentInfo
The `useDocumentInfo` hook provides lots of information about the document currently being edited, including the following:
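For instance, a custom admin component can read details about the document being edited (a minimal sketch assuming the Payload 2.x export from `payload/components/utilities`; only a couple of the available properties are shown):
```tsx
import React from 'react'
import { useDocumentInfo } from 'payload/components/utilities'

const DocumentInfoDisplay: React.FC = () => {
  // id is undefined until the document has been saved at least once
  const { id, collection } = useDocumentInfo()

  return (
    <span>
      Editing {collection?.slug ?? 'document'}: {id ?? 'unsaved'}
    </span>
  )
}
```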

View File

@@ -37,11 +37,12 @@ export default buildConfig({
### Options
| Option | Description |
| ----------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `pool` | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`. |
| `push` | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored. |
| Option | Description |
|----------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `pool` | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`. |
| `push` | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored. |
| `schemaName` | A string for the postgres schema to use, defaults to 'public'. |
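For example, pointing the adapter at a non-default schema might look like this (a minimal sketch assuming a `POSTGRES_URL` connection string; the `custom` schema name is illustrative only):
```ts
import { buildConfig } from 'payload/config'
import { postgresAdapter } from '@payloadcms/db-postgres'

export default buildConfig({
  // collections, admin config, etc. omitted for brevity
  db: postgresAdapter({
    pool: {
      connectionString: process.env.POSTGRES_URL,
    },
    // Payload's tables are created in the "custom" schema instead of "public"
    schemaName: 'custom',
  }),
})
```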
### Access to Drizzle

View File

@@ -28,7 +28,7 @@ This field uses the `monaco-react` editor syntax highlighting.
| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |

View File

@@ -46,6 +46,7 @@ export const Page: CollectionConfig = {
- [Date](/docs/fields/date) - date / time field that saves a timestamp
- [Email](/docs/fields/email) - validates the entry is a properly formatted email
- [Group](/docs/fields/group) - nest fields within an object
- [JSON](/docs/fields/json) - saves actual JSON in the database
- [Number](/docs/fields/number) - field that enforces that its value be a number
- [Point](/docs/fields/point) - geometric coordinates for location data
- [Radio](/docs/fields/radio) - radio button group, allowing only one value to be selected

View File

@@ -38,7 +38,7 @@ caption="Admin panel screenshot of a Relationship field"
| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |
| **`access`** | Provide field-based access control to denote what users can see and do with this field's data. [More](/docs/fields/overview#field-level-access-control) |

View File

@@ -75,6 +75,7 @@ import { CollectionBeforeOperationHook } from 'payload/types'
const beforeOperationHook: CollectionBeforeOperationHook = async ({
args, // original arguments passed into the operation
operation, // name of the operation
req, // full express request
}) => {
return args // return modified operation arguments as necessary
}
@@ -209,6 +210,7 @@ import { CollectionAfterOperationHook } from 'payload/types'
const afterOperationHook: CollectionAfterOperationHook = async ({
args, // arguments passed into the operation
operation, // name of the operation
req, // full express request
result, // the result of the operation, before modifications
}) => {
return result // return modified result as necessary

View File

@@ -6,7 +6,8 @@ desc: Hooks can be added to any fields, and optionally modify the return value o
keywords: hooks, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
---
Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up functionalities to be easily reusable across your projects.
Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up
functionalities to be easily reusable across your projects.
**Example use cases include:**
@@ -46,7 +47,8 @@ const ExampleField: Field = {
## Arguments and return values
All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on which field hook you are utilizing.
All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on
which field hook you are utilizing.
<Banner type="success">
<strong>Tip:</strong>
@@ -69,10 +71,10 @@ Field Hooks receive one `args` argument that contains the following properties:
| **`operation`** | A string relating to which operation the field type is currently executing within. Useful within `beforeValidate`, `beforeChange`, and `afterChange` hooks to differentiate between `create` and `update` operations. |
| **`originalDoc`** | The full original document in `update` operations. In the `afterChange` hook, this is the resulting document of the operation. |
| **`previousDoc`** | The document before changes were applied, only in `afterChange` hooks. |
| **`previousSiblingDoc`** | The sibling data from the previous document in `afterChange` hook. |
| **`previousSiblingDoc`** | The sibling data of the document before changes were applied. Only available in `beforeChange` and `afterChange` hooks. |
| **`req`** | The Express `request` object. It is mocked for Local API operations. |
| **`value`** | The value of the field. |
| **`previousValue`** | The previous value of the field, before changes were applied, only in `afterChange` hooks. |
| **`previousValue`** | The previous value of the field, before changes, only in `beforeChange` and `afterChange` hooks. |
| **`context`** | Context passed to this hook. More info can be found under [Context](/docs/hooks/context) |
| **`field`** | The field which the hook is running against. |
| **`collection`** | The collection which the field belongs to. If the field belongs to a global, this will be null. |
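To illustrate the two newly documented arguments, a `beforeChange` field hook can compare the incoming value against `previousValue` (a hypothetical sketch; the `price` field and the logging are illustrative only):
```ts
import { Field } from 'payload/types'

const priceField: Field = {
  name: 'price', // hypothetical field, for illustration only
  type: 'number',
  hooks: {
    beforeChange: [
      ({ value, previousValue, previousSiblingDoc, operation }) => {
        // previousValue and previousSiblingDoc are only meaningful on updates
        if (operation === 'update' && value !== previousValue) {
          console.log(`price changing from ${previousValue} to ${value}`)
          console.log('sibling data before this change:', previousSiblingDoc)
        }
        return value // return the value that should be stored
      },
    ],
  },
}
```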
@@ -80,7 +82,8 @@ Field Hooks receive one `args` argument that contains the following properties:
#### Return value
All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may optionally return the value that should be used within the field.
All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may
optionally return the value that should be used within the field.
<Banner type="warning">
<strong>Important</strong>
@@ -92,11 +95,14 @@ All field hooks can optionally modify the return value of the field before the o
## Examples of Field Hooks
To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the true potential of field-level hooks lies in their adaptability to a wide array of use cases.
To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the
flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the
true potential of field-level hooks lies in their adaptability to a wide array of use cases.
### beforeValidate
Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes validation.
Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes
validation.
```ts
import { Field } from 'payload/types'
@@ -113,11 +119,15 @@ const usernameField: Field = {
}
```
In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is stored in a consistent format in the database.
In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of
the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is
stored in a consistent format in the database.
### beforeChange
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage, you can be confident that the field data that will be saved to the document is valid in accordance to your field validations.
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage,
you can be confident that the field data that will be saved to the document is valid in accordance to your field
validations.
```ts
import { Field } from 'payload/types'
@@ -136,11 +146,14 @@ const emailField: Field = {
}
```
In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs additional validation or transformation on the email field value. This allows for operation-specific logic to be applied to the field.
In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs
additional validation or transformation on the email field value. This allows for operation-specific logic to be applied
to the field.
### afterChange
The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful for post-processing or triggering side effects based on the new value of the field.
The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful
for post-processing or triggering side effects based on the new value of the field.
```ts
import { Field } from 'payload/types'
@@ -165,11 +178,15 @@ const membershipStatusField: Field = {
}
```
In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or notifying them about changes in their membership benefits.
In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their
membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it
logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or
notifying them about changes in their membership benefits.
### afterRead
The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or transforming the field data for output.
The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or
transforming the field data for output.
```ts
import { Field } from 'payload/types'
@@ -186,8 +203,9 @@ const dateField: Field = {
}
```
Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more user-friendly.
Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format
using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more
user-friendly.
## TypeScript

View File

@@ -36,7 +36,7 @@ If your Hook simply performs a side-effect, such as updating a CRM, it might be
#### Server-only execution
Payload Hooks do not have any effect within the Payload Admin panel. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
Payload Hooks are only triggered on the server. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's client-side code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
## Hook Types

View File

@@ -98,6 +98,13 @@ On boot, a seed script is included to scaffold a basic database for you to use a
> NOTICE: seeding the database is destructive because it drops your current database to populate a fresh one from the seed template. Only run this command if you are starting a new project or can afford to lose your current data.
### Conflicting routes
> In a monorepo where routes are bootstrapped to the same host, they can conflict with Payload's own routes if they share the same name. In our template we've renamed the Next.js API routes to `next` to avoid this conflict.
>
> This can happen with any other route that conflicts with Payload, such as `admin`, and we recommend using different names for custom routes.
> Alternatively, you can rename Payload's own routes via the [configuration](https://payloadcms.com/docs/configuration/overview).
## Production
To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:

View File

@@ -1,5 +0,0 @@
import { NextResponse } from 'next/server'
export async function GET(): Promise<NextResponse> {
return NextResponse.json({ success: true })
}

View File

@@ -1,5 +0,0 @@
import { NextResponse } from 'next/server'
export async function POST(): Promise<NextResponse> {
return NextResponse.json({ success: true })
}

View File

@@ -0,0 +1,10 @@
import { NextResponse } from 'next/server'
/**
* The Next.js API routes can conflict with Payload's own routes if they share the same path
* To avoid this you can customise the path of Payload or the API route of Nextjs as we've done here
* See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
* */
export async function GET(): Promise<NextResponse> {
return NextResponse.json({ success: true })
}

View File

@@ -0,0 +1,10 @@
import { NextResponse } from 'next/server'
/**
* The Next.js API routes can conflict with Payload's own routes if they share the same path
* To avoid this you can customise the path of Payload or the API route of Nextjs as we've done here
* See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
* */
export async function POST(): Promise<NextResponse> {
return NextResponse.json({ success: true })
}

View File

@@ -0,0 +1,2 @@
DATABASE_URI=mongodb://127.0.0.1/payload-template-blank
PAYLOAD_SECRET=YOUR_SECRET_HERE

examples/hierarchy/.gitignore
View File

@@ -0,0 +1,6 @@
build
dist
/media
node_modules
.DS_Store
.env

View File

@@ -0,0 +1,8 @@
module.exports = {
printWidth: 100,
parser: 'typescript',
semi: false,
singleQuote: true,
trailingComma: 'all',
arrowParens: 'avoid',
}

View File

@@ -0,0 +1,58 @@
# Payload Hierarchy Example
This example demonstrates how to achieve a virtual hierarchy between documents in your [Payload](https://github.com/payloadcms/payload) application.
## Quick Start
To spin up the project locally, follow these steps:
1. First clone the repo
1. Then `cd YOUR_PROJECT_REPO && cp .env.example .env`
1. Next `yarn && yarn dev` (or `docker-compose up`, see [Docker](#docker))
1. Now `open http://localhost:3000/admin` to access the admin panel
1. Create your first admin user using the form on the page
That's it! Changes made in `./src` will be reflected in your app.
## How it works
This example achieves parent/child relationships between your documents through the use of virtual fields. When you query a document with the `?children=true` query param, an afterRead hook populates the document's children within its own tree.
For more information on how virtual fields work, see the [Official Virtual Fields Example](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields).
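As a concrete example, the populated tree can be requested through the REST API (a hedged sketch assuming the default `/api` route prefix and a placeholder document ID):
```ts
// Hypothetical client-side usage; replace ENTITY_ID with a real document ID
const res = await fetch('http://localhost:3000/api/entities/ENTITY_ID?children=true')
const entity = await res.json()

// `children` is the virtual field populated by the afterRead hook
console.log(entity.children)
```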
### Collections
See the [Collections](https://payloadcms.com/docs/configuration/collections) docs for details on how to extend any of this functionality.
- #### Users
The `users` collection is a default payload users collection.
- #### Entities
The `entities` collection can define any other entity as its parent. It also has a virtual field that populates children when a document is requested via the API with the `children=true` query param. See [Virtual Fields](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields) for more details on how virtual fields work.
The virtual field retrieves __all__ children, which include other entities and people.
- #### People
The `people` collection can define an array of parent entities. It also has an `allocation` field, which demonstrates attaching data to a parent-child relationship.
## Development
To spin up this example locally, follow the [Quick Start](#quick-start).
## Production
To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:
1. First invoke the `payload build` script by running `yarn build` or `npm run build` in your project root. This creates a `./build` directory with a production-ready admin bundle.
1. Then run `yarn serve` or `npm run serve` to run Node in production and serve Payload from the `./build` directory.
### Deployment
The easiest way to deploy your project is to use [Payload Cloud](https://payloadcms.com/new/import), a one-click hosting solution to deploy production-ready instances of your Payload apps directly from your GitHub repo. You can also deploy your app manually, check out the [deployment documentation](https://payloadcms.com/docs/production/deployment) for full details.
## Questions
If you have any issues or questions, reach out to us on [Discord](https://discord.com/invite/payload) or start a [GitHub discussion](https://github.com/payloadcms/payload/discussions).

View File

@@ -0,0 +1,6 @@
{
"$schema": "https://json.schemastore.org/nodemon.json",
"ext": "ts",
"exec": "ts-node src/server.ts -- -I",
"stdin": false
}

View File

@@ -0,0 +1,35 @@
{
"name": "hierarchy",
"description": "A hierarchy example with Payload",
"version": "1.0.0",
"main": "dist/server.js",
"license": "MIT",
"scripts": {
"dev": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts nodemon",
"build:payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload build",
"build:server": "tsc",
"build": "yarn copyfiles && yarn build:payload && yarn build:server",
"serve": "cross-env PAYLOAD_CONFIG_PATH=dist/payload.config.js NODE_ENV=production node dist/server.js",
"copyfiles": "copyfiles -u 1 \"src/**/*.{html,css,scss,ttf,woff,woff2,eot,svg,jpg,png}\" dist/",
"generate:types": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types",
"generate:graphQLSchema": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:graphQLSchema",
"payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload"
},
"dependencies": {
"@payloadcms/bundler-webpack": "^1.0.0",
"@payloadcms/db-mongodb": "^1.0.0",
"@payloadcms/plugin-cloud": "^3.0.0",
"@payloadcms/richtext-slate": "^1.0.0",
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"express": "^4.17.1",
"payload": "^2.0.0"
},
"devDependencies": {
"@types/express": "^4.17.9",
"copyfiles": "^2.4.1",
"nodemon": "^2.0.6",
"ts-node": "^9.1.1",
"typescript": "^4.8.4"
}
}

View File

@@ -0,0 +1,79 @@
import { CollectionConfig } from 'payload/types'
export const Entities: CollectionConfig = {
slug: 'entities',
admin: {
useAsTitle: 'name',
},
fields: [
{
name: 'name',
type: 'text',
required: true,
},
// - This field is populated by setting the query parameter 'children=true'
// - This is a virtual field used to track a child relationship
// - Only relationship information is returned by this field
// - Data beyond relationships is not stored in this field
{
name: 'children',
type: 'relationship',
relationTo: ['entities', 'people'],
access: {
create: () => false,
update: () => false,
},
hooks: {
afterRead: [
async ({ data, req }) => {
const { id } = data
if (!req.query.children) return
const people = await req.payload.find({
req,
collection: 'people',
where: {
'parents.parent': { equals: id },
},
limit: 0,
depth: 0,
pagination: false,
})
const entities = await req.payload.find({
req,
collection: 'entities',
where: {
parent: { equals: id },
},
limit: 0,
depth: 0,
pagination: false,
})
return [
...entities.docs.map(entity => {
return {
relationTo: 'entities',
value: entity,
}
}),
...people.docs.map(person => {
return {
relationTo: 'people',
value: person,
}
}),
]
},
],
},
},
{
name: 'parent',
type: 'relationship',
relationTo: 'entities',
},
],
}

View File

@@ -0,0 +1,32 @@
import { CollectionConfig } from 'payload/types'
export const People: CollectionConfig = {
slug: 'people',
admin: {
useAsTitle: 'name',
},
fields: [
{
name: 'name',
type: 'text',
required: true,
},
{
name: 'parents',
type: 'array',
fields: [
{
name: 'parent',
type: 'relationship',
relationTo: 'entities',
},
{
name: 'allocation',
type: 'number',
min: 0,
max: 100,
},
],
},
],
}

View File

@@ -0,0 +1,15 @@
import { CollectionConfig } from 'payload/types'
const Users: CollectionConfig = {
slug: 'users',
auth: true,
admin: {
useAsTitle: 'email',
},
fields: [
// Email added by default
// Add more fields as needed
],
}
export default Users

View File

@@ -0,0 +1,30 @@
import path from 'path'
import { payloadCloud } from '@payloadcms/plugin-cloud'
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { webpackBundler } from '@payloadcms/bundler-webpack'
import { slateEditor } from '@payloadcms/richtext-slate'
import { buildConfig } from 'payload/config'
import Users from './collections/Users'
import { Entities } from './collections/Entities'
import { People } from './collections/People'
export default buildConfig({
admin: {
user: Users.slug,
bundler: webpackBundler(),
},
editor: slateEditor({}),
collections: [Users, Entities, People],
typescript: {
outputFile: path.resolve(__dirname, 'payload-types.ts'),
},
graphQL: {
schemaOutputFile: path.resolve(__dirname, 'generated-schema.graphql'),
},
plugins: [payloadCloud()],
db: mongooseAdapter({
url: process.env.DATABASE_URI,
}),
})

View File

@@ -0,0 +1,27 @@
import express from 'express'
import payload from 'payload'
require('dotenv').config()
const app = express()
// Redirect root to Admin panel
app.get('/', (_, res) => {
res.redirect('/admin')
})
const start = async () => {
// Initialize Payload
await payload.init({
secret: process.env.PAYLOAD_SECRET,
express: app,
onInit: async () => {
payload.logger.info(`Payload Admin URL: ${payload.getAdminURL()}`)
},
})
// Add your own express routes here
app.listen(3000)
}
start()

View File

@@ -0,0 +1,22 @@
{
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"strict": false,
"esModuleInterop": true,
"skipLibCheck": true,
"outDir": "./dist",
"rootDir": "./src",
"jsx": "react",
"paths": {
"payload/generated-types": ["./src/payload-types.ts"]
}
},
"include": ["src"],
"exclude": ["node_modules", "dist", "build"],
"ts-node": {
"transpileOnly": true,
"swc": true
}
}

examples/hierarchy/yarn.lock Normal file

File diff suppressed because it is too large

View File

@@ -2,26 +2,27 @@ import type { AfterLoginHook } from 'payload/dist/collections/config/types'
export const recordLastLoggedInTenant: AfterLoginHook = async ({ req, user }) => {
try {
const relatedOrg = await req.payload.find({
collection: 'tenants',
where: {
'domains.domain': {
in: [req.headers.host],
},
},
depth: 0,
limit: 1,
})
if (relatedOrg.docs.length > 0) {
await req.payload.update({
id: user.id,
collection: 'users',
data: {
lastLoggedInTenant: relatedOrg.docs[0].id,
const relatedOrg = await req.payload
.find({
collection: 'tenants',
where: {
'domains.domain': {
in: [req.headers.host],
},
},
depth: 0,
limit: 1,
})
}
?.then(res => res.docs?.[0])
await req.payload.update({
id: user.id,
collection: 'users',
data: {
lastLoggedInTenant: relatedOrg?.id || null,
},
req,
})
} catch (err: unknown) {
req.payload.logger.error(`Error recording last logged in tenant for user ${user.id}: ${err}`)
}

View File

@@ -30,6 +30,7 @@ export const isSuperOrTenantAdmin = async (args: { req: PayloadRequest }): Promi
},
depth: 0,
limit: 1,
req,
})
// if this tenant does not exist, deny access

File diff suppressed because it is too large Load Diff

View File

@@ -15,9 +15,13 @@
"dev:generate-graphql-schema": "ts-node -T ./test/generateGraphQLSchema.ts",
"dev:generate-types": "ts-node -T ./test/generateTypes.ts",
"dev:postgres": "pnpm --filter payload run dev:postgres",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
"docker:stop": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml down",
"fix": "eslint \"packages/**/*.ts\" --fix",
"lint": "eslint \"packages/**/*.ts\"",
"lint-staged": "lint-staged",
"prepare": "husky install",
"pretest": "pnpm build",
"reinstall": "pnpm clean:unix && pnpm install",
"script:list-packages": "tsx ./scripts/list-packages.ts",
@@ -29,10 +33,10 @@
"test:e2e:headed": "cross-env DISABLE_LOGGING=true playwright test --headed",
"test:int:postgres": "cross-env PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"test:int": "cross-env DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"translateNewKeys": "pnpm --filter payload run translateNewKeys",
"prepare": "husky install"
"translateNewKeys": "pnpm --filter payload run translateNewKeys"
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@payloadcms/eslint-config": "workspace:*",
"@playwright/test": "1.40.1",
"@swc/cli": "^0.1.62",
@@ -64,7 +68,7 @@
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"dotenv": "8.6.0",
"drizzle-orm": "0.28.5",
"drizzle-orm": "0.29.3",
"express": "4.18.2",
"form-data": "3.0.1",
"fs-extra": "10.1.0",
@@ -77,12 +81,12 @@
"jest": "29.7.0",
"jest-environment-jsdom": "29.7.0",
"jwt-decode": "3.1.2",
"lexical": "0.12.5",
"lexical": "0.13.1",
"lint-staged": "^14.0.1",
"minimist": "1.2.8",
"mongodb-memory-server": "^9",
"node-fetch": "2.6.12",
"nodemon": "3.0.2",
"nodemon": "3.0.3",
"prettier": "^3.0.3",
"prompts": "2.4.2",
"qs": "6.11.2",
@@ -94,7 +98,7 @@
"slash": "3.0.0",
"slate": "0.91.4",
"tempfile": "^3.0.0",
"ts-node": "10.9.1",
"ts-node": "10.9.2",
"turbo": "^1.11.1",
"typescript": "5.2.2",
"uuid": "^9.0.1"
@@ -104,6 +108,16 @@
"react-i18next": "11.18.6",
"react-router-dom": "5.3.4"
},
"pnpm": {
"overrides": {
"copyfiles": "$copyfiles",
"cross-env": "$cross-env",
"dotenv": "$dotenv",
"drizzle-orm": "$drizzle-orm",
"ts-node": "$ts-node",
"typescript": "$typescript"
}
},
"engines": {
"node": ">=14",
"pnpm": ">=8"

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "1.3.2",
"version": "1.4.3",
"description": "The officially supported MongoDB database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",

View File

@@ -29,15 +29,18 @@ export const connect: Connect = async function connect(this: MongooseAdapter, pa
urlToConnect = process.env.PAYLOAD_TEST_MONGO_URL
} else {
connectionOptions.dbName = 'payloadmemory'
const { MongoMemoryServer } = require('mongodb-memory-server')
const { MongoMemoryReplSet } = require('mongodb-memory-server')
const getPort = require('get-port')
const port = await getPort()
this.mongoMemoryServer = await MongoMemoryServer.create({
this.mongoMemoryServer = await MongoMemoryReplSet.create({
instance: {
dbName: 'payloadmemory',
port,
},
replSet: {
count: 3,
},
})
urlToConnect = this.mongoMemoryServer.getUri()

View File

@@ -32,7 +32,7 @@ export const createMigration: CreateMigration = async function createMigration({
// Check for predefined migration.
// Either passed in via --file or prefixed with @payloadcms/db-mongodb/
if (file || migrationName.startsWith('@payloadcms/db-mongodb/')) {
if (file || migrationName?.startsWith('@payloadcms/db-mongodb/')) {
if (!file) file = migrationName
const predefinedMigrationName = file.replace('@payloadcms/db-mongodb/', '')
@@ -59,8 +59,8 @@ export const createMigration: CreateMigration = async function createMigration({
const timestamp = `${formattedDate}_${formattedTime}`
const formattedName = migrationName.replace(/\W/g, '_')
const fileName = `${timestamp}_${formattedName}.ts`
const formattedName = migrationName?.replace(/\W/g, '_')
const fileName = migrationName ? `${timestamp}_${formattedName}.ts` : `${timestamp}_migration.ts`
const filePath = `${dir}/${fileName}`
fs.writeFileSync(filePath, migrationFileContent)
payload.logger.info({ msg: `Migration created at ${filePath}` })

View File

@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
lean: true,
leanWithId: true,
limit,
offset: skip || 0,
options,
page,
pagination,

View File

@@ -11,25 +11,30 @@ import type { MongooseAdapter } from '.'
/**
* Drop the current database and run all migrate up functions
*/
export async function migrateFresh(this: MongooseAdapter): Promise<void> {
export async function migrateFresh(
this: MongooseAdapter,
{ forceAcceptWarning = false }: { forceAcceptWarning?: boolean },
): Promise<void> {
const { payload } = this
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
{
onCancel: () => {
process.exit(0)
if (!forceAcceptWarning) {
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
},
)
{
onCancel: () => {
process.exit(0)
},
},
)
if (!acceptWarning) {
process.exit(0)
if (!acceptWarning) {
process.exit(0)
}
}
payload.logger.info({

View File

@@ -14,8 +14,10 @@ import type {
DateField,
EmailField,
Field,
FieldAffectingData,
GroupField,
JSONField,
NonPresentationalField,
NumberField,
PointField,
RadioField,
@@ -23,12 +25,12 @@ import type {
RichTextField,
RowField,
SelectField,
Tab,
TabsField,
TextField,
TextareaField,
UploadField,
} from 'payload/types'
import type { FieldAffectingData, NonPresentationalField, Tab, UnnamedTab } from 'payload/types'
import { Schema } from 'mongoose'
import {
@@ -61,7 +63,15 @@ const formatBaseSchema = (field: FieldAffectingData, buildSchemaOptions: BuildSc
unique: (!disableUnique && field.unique) || false,
}
if (schema.unique && (field.localized || draftsEnabled)) {
if (
schema.unique &&
(field.localized ||
draftsEnabled ||
(fieldAffectsData(field) &&
field.type !== 'group' &&
field.type !== 'tab' &&
field.required !== true))
) {
schema.sparse = true
}
@@ -79,7 +89,6 @@ const localizeSchema = (
) => {
if (fieldIsLocalized(entity) && localization && Array.isArray(localization.locales)) {
return {
localized: true,
type: localization.localeCodes.reduce(
(localeSchema, locale) => ({
...localeSchema,
@@ -89,6 +98,7 @@ const localizeSchema = (
_id: false,
},
),
localized: true,
}
}
return schema
@@ -140,7 +150,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
) => {
const baseSchema = {
...formatBaseSchema(field, buildSchemaOptions),
default: undefined,
type: [
buildSchema(config, field.fields, {
allowIDField: true,
@@ -153,6 +162,7 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
},
}),
],
default: undefined,
}
schema.add({
@@ -166,8 +176,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
buildSchemaOptions: BuildSchemaOptions,
): void => {
const fieldSchema = {
default: undefined,
type: [new Schema({}, { _id: false, discriminatorKey: 'blockType' })],
default: undefined,
}
schema.add({
@@ -187,12 +197,12 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
if (field.localized && config.localization) {
config.localization.localeCodes.forEach((localeCode) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore Possible incorrect typing in mongoose types, this works
// @ts-expect-error Possible incorrect typing in mongoose types, this works
schema.path(`${field.name}.${localeCode}`).discriminator(blockItem.slug, blockSchema)
})
} else {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore Possible incorrect typing in mongoose types, this works
// @ts-expect-error Possible incorrect typing in mongoose types, this works
schema.path(field.name).discriminator(blockItem.slug, blockSchema)
}
})
@@ -325,14 +335,14 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
buildSchemaOptions: BuildSchemaOptions,
): void => {
const baseSchema: SchemaTypeOptions<unknown> = {
type: {
type: String,
enum: ['Point'],
},
coordinates: {
type: [Number],
default: field.defaultValue || undefined,
required: false,
type: [Number],
},
type: {
enum: ['Point'],
type: String,
},
}
if (buildSchemaOptions.disableUnique && field.unique && field.localized) {
@@ -366,11 +376,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
): void => {
const baseSchema = {
...formatBaseSchema(field, buildSchemaOptions),
type: String,
enum: field.options.map((option) => {
if (typeof option === 'object') return option.value
return option
}),
type: String,
}
schema.add({
@@ -388,7 +398,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
if (field.localized && config.localization) {
schemaToReturn = {
localized: true,
type: config.localization.localeCodes.reduce((locales, locale) => {
let localeSchema: { [key: string]: any } = {}
@@ -396,56 +405,57 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
localeSchema = {
...formatBaseSchema(field, buildSchemaOptions),
_id: false,
relationTo: { enum: field.relationTo, type: String },
type: Schema.Types.Mixed,
relationTo: { type: String, enum: field.relationTo },
value: {
refPath: `${field.name}.${locale}.relationTo`,
type: Schema.Types.Mixed,
refPath: `${field.name}.${locale}.relationTo`,
},
}
} else {
localeSchema = {
...formatBaseSchema(field, buildSchemaOptions),
ref: field.relationTo,
type: Schema.Types.Mixed,
ref: field.relationTo,
}
}
return {
...locales,
[locale]: field.hasMany ? { default: undefined, type: [localeSchema] } : localeSchema,
[locale]: field.hasMany ? { type: [localeSchema], default: undefined } : localeSchema,
}
}, {}),
localized: true,
}
} else if (hasManyRelations) {
schemaToReturn = {
...formatBaseSchema(field, buildSchemaOptions),
_id: false,
relationTo: { enum: field.relationTo, type: String },
type: Schema.Types.Mixed,
relationTo: { type: String, enum: field.relationTo },
value: {
refPath: `${field.name}.relationTo`,
type: Schema.Types.Mixed,
refPath: `${field.name}.relationTo`,
},
}
if (field.hasMany) {
schemaToReturn = {
default: undefined,
type: [schemaToReturn],
default: undefined,
}
}
} else {
schemaToReturn = {
...formatBaseSchema(field, buildSchemaOptions),
ref: field.relationTo,
type: Schema.Types.Mixed,
ref: field.relationTo,
}
if (field.hasMany) {
schemaToReturn = {
default: undefined,
type: [schemaToReturn],
default: undefined,
}
}
}
@@ -488,11 +498,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
): void => {
const baseSchema = {
...formatBaseSchema(field, buildSchemaOptions),
type: String,
enum: field.options.map((option) => {
if (typeof option === 'object') return option.value
return option
}),
type: String,
}
if (buildSchemaOptions.draftsEnabled || !field.required) {
@@ -576,8 +586,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
): void => {
const baseSchema = {
...formatBaseSchema(field, buildSchemaOptions),
ref: field.relationTo,
type: Schema.Types.Mixed,
ref: field.relationTo,
}
schema.add({

View File

@@ -77,6 +77,7 @@ export const sanitizeQueryValue = ({
// Object equality requires the value to be the first key in the object that is being queried.
if (
operator === 'equals' &&
formattedValue &&
typeof formattedValue === 'object' &&
formattedValue.value &&
formattedValue.relationTo
@@ -156,6 +157,23 @@ export const sanitizeQueryValue = ({
if (operator === 'exists') {
formattedValue = formattedValue === 'true' || formattedValue === true
// Clearable fields
if (['relationship', 'select', 'upload'].includes(field.type)) {
if (formattedValue) {
return {
rawQuery: {
$and: [{ [path]: { $exists: true } }, { [path]: { $ne: null } }],
},
}
} else {
return {
rawQuery: {
$or: [{ [path]: { $exists: false } }, { [path]: { $eq: null } }],
},
}
}
}
}
return { operator: formattedOperator, val: formattedValue }

View File

@@ -17,7 +17,11 @@ export const rollbackTransaction: RollbackTransaction = async function rollbackT
}
// the first call for rollback should be aborted and deleted causing any other operations with the same transaction to fail
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
try {
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
} catch (error) {
// ignore the error as it is likely a race condition from multiple errors
}
delete this.sessions[id]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "0.4.0",
"version": "0.6.0",
"description": "The officially supported Postgres database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -22,7 +22,7 @@
"dependencies": {
"@libsql/client": "^0.3.1",
"console-table-printer": "2.11.2",
"drizzle-kit": "0.20.5-608ae62",
"drizzle-kit": "0.20.14-1f2c838",
"drizzle-orm": "0.29.3",
"pg": "8.11.3",
"prompts": "2.4.2",

View File

@@ -1,13 +1,51 @@
import type { Payload } from 'payload'
import type { Connect } from 'payload/database'
import { eq, sql } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { numeric, pgTable, timestamp, varchar } from 'drizzle-orm/pg-core'
import { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'
import { Pool } from 'pg'
import prompts from 'prompts'
import type { PostgresAdapter } from './types'
const connectWithReconnect = async function ({
adapter,
payload,
reconnect = false,
}: {
adapter: PostgresAdapter
payload: Payload
reconnect?: boolean
}) {
let result
if (!reconnect) {
result = await adapter.pool.connect()
} else {
try {
result = await adapter.pool.connect()
} catch (err) {
setTimeout(() => {
payload.logger.info('Reconnecting to postgres')
void connectWithReconnect({ adapter, payload, reconnect: true })
}, 1000)
}
}
if (!result) {
return
}
result.prependListener('error', (err) => {
try {
if (err.code === 'ECONNRESET') {
void connectWithReconnect({ adapter, payload, reconnect: true })
}
} catch (err) {
// swallow error
}
})
}
export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
this.schema = {
...this.tables,
@@ -17,14 +55,19 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
try {
this.pool = new Pool(this.poolOptions)
await this.pool.connect()
await connectWithReconnect({ adapter: this, payload })
const logger = this.logger || false
this.drizzle = drizzle(this.pool, { schema: this.schema, logger })
this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING TABLES ----')
await this.drizzle.execute(sql`drop schema public cascade;
create schema public;`)
this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
await this.drizzle.execute(
sql.raw(`
drop schema if exists ${this.schemaName || 'public'} cascade;
create schema ${this.schemaName || 'public'};
`),
)
this.payload.logger.info('---- DROPPED TABLES ----')
}
} catch (err) {
@@ -81,7 +124,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
await apply()
// Migration table def in order to use query using drizzle
const migrationsSchema = pgTable('payload_migrations', {
const migrationsSchema = this.pgSchema.table('payload_migrations', {
name: varchar('name'),
batch: numeric('batch'),
created_at: timestamp('created_at'),

View File

@@ -53,7 +53,7 @@ const getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({
export const createMigration: CreateMigration = async function createMigration(
this: PostgresAdapter,
{ migrationName, payload },
{ forceAcceptWarning, migrationName, payload },
) {
const dir = payload.db.migrationDir
if (!fs.existsSync(dir)) {
@@ -95,7 +95,7 @@ export const createMigration: CreateMigration = async function createMigration(
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
if (!sqlStatementsUp.length && !sqlStatementsDown.length) {
if (!sqlStatementsUp.length && !sqlStatementsDown.length && !forceAcceptWarning) {
const { confirm: shouldCreateBlankMigration } = await prompts(
{
name: 'confirm',

View File

@@ -158,7 +158,7 @@ export const findMany = async function find({
query: db
.select({
count: sql<number>`count
(*)`,
(DISTINCT ${adapter.tables[tableName].id})`,
})
.from(table)
.where(where),

View File

@@ -78,7 +78,7 @@ export const traverseFields = ({
with: {},
}
const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`
const arrayTableName = `${currentTableName}_${path}${toSnakeCase(field.name)}`
if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales
currentArgs.with[`${path}${field.name}`] = withArray

View File

@@ -42,7 +42,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
export function postgresAdapter(args: Args): PostgresAdapterResult {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(args.migrationDir)
const idType = args.idType || 'serial'
return createDatabaseAdapter<PostgresAdapter>({
name: 'postgres',
@@ -50,12 +50,15 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
drizzle: undefined,
enums: {},
fieldConstraints: {},
idType,
logger: args.logger,
pgSchema: undefined,
pool: undefined,
poolOptions: args.pool,
push: args.push,
relations: {},
schema: {},
schemaName: args.schemaName,
sessions: {},
tables: {},
@@ -68,7 +71,10 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
createGlobalVersion,
createMigration,
createVersion,
defaultIDType: 'number',
/**
* This represents how a default ID is treated in Payload as if it were a field type
*/
defaultIDType: idType === 'serial' ? 'number' : 'text',
deleteMany,
deleteOne,
deleteVersions,

View File

@@ -2,16 +2,21 @@
import type { Init } from 'payload/database'
import type { SanitizedCollectionConfig } from 'payload/types'
import { pgEnum } from 'drizzle-orm/pg-core'
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'
import toSnakeCase from 'to-snake-case'
import type { PostgresAdapter } from './types'
import { buildTable } from './schema/build'
import { getConfigIDType } from './schema/getConfigIDType'
export const init: Init = async function init(this: PostgresAdapter) {
if (this.schemaName) {
this.pgSchema = pgSchema(this.schemaName)
} else {
this.pgSchema = { table: pgTable }
}
if (this.payload.config.localization) {
this.enums.enum__locales = pgEnum(
'_locales',
@@ -24,9 +29,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection?.versions?.drafts,
disableUnique: false,
fields: collection.fields,
@@ -38,13 +43,11 @@ export const init: Init = async function init(this: PostgresAdapter) {
const versionsTableName = `_${tableName}_v`
const versionFields = buildVersionCollectionFields(collection)
const versionsParentIDColType = getConfigIDType(collection.fields)
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection.versions?.drafts,
disableUnique: true,
fields: versionFields,
@@ -59,9 +62,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global?.versions?.drafts,
disableUnique: false,
fields: global.fields,
@@ -75,9 +78,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global.versions?.drafts,
disableUnique: true,
fields: versionFields,

View File

@@ -39,7 +39,7 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
latestBatch = Number(migrationsInDB[0]?.batch)
}
} else {
await createMigrationTable(this.drizzle)
await createMigrationTable(this)
}
if (migrationsInDB.find((m) => m.batch === -1)) {

View File

@@ -37,7 +37,7 @@ export async function migrateDown(this: PostgresAdapter): Promise<void> {
}
const start = Date.now()
const req = {} as PayloadRequest
const req = { payload } as PayloadRequest
try {
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })

View File

@@ -14,33 +14,40 @@ import { parseError } from './utilities/parseError'
/**
* Drop the current database and run all migrate up functions
*/
export async function migrateFresh(this: PostgresAdapter): Promise<void> {
export async function migrateFresh(
this: PostgresAdapter,
{ forceAcceptWarning = false },
): Promise<void> {
const { payload } = this
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
{
onCancel: () => {
process.exit(0)
if (forceAcceptWarning === false) {
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
},
)
{
onCancel: () => {
process.exit(0)
},
},
)
if (!acceptWarning) {
process.exit(0)
if (!acceptWarning) {
process.exit(0)
}
}
payload.logger.info({
msg: `Dropping database.`,
})
await this.drizzle.execute(sql`drop schema public cascade;
create schema public;`)
await this.drizzle.execute(
sql.raw(`drop schema ${this.schemaName || 'public'} cascade;
create schema ${this.schemaName || 'public'};`),
)
const migrationFiles = await readMigrationFiles({ payload })
payload.logger.debug({

View File

@@ -1,4 +1,5 @@
import type { SQL } from 'drizzle-orm'
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
import type { Field, Where } from 'payload/types'
import { asc, desc } from 'drizzle-orm'
@@ -12,7 +13,7 @@ export type BuildQueryJoins = Record<string, SQL>
export type BuildQueryJoinAliases = {
condition: SQL
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
}[]
type BuildQueryArgs = {
@@ -75,6 +76,7 @@ const buildQuery = async function buildQuery({
pathSegments: sortPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: sortPath,
})
orderBy.column = sortTable?.[sortTableColumnName]
} catch (err) {

View File

@@ -1,8 +1,9 @@
/* eslint-disable no-param-reassign */
import type { SQL } from 'drizzle-orm'
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'
import { and, eq, sql } from 'drizzle-orm'
import { and, eq, like, sql } from 'drizzle-orm'
import { alias } from 'drizzle-orm/pg-core'
import { APIError } from 'payload/errors'
import { fieldAffectsData, tabHasName } from 'payload/types'
@@ -15,7 +16,7 @@ import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery'
type Constraint = {
columnName: string
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
value: unknown
}
@@ -26,12 +27,12 @@ type TableColumn = {
getNotNullColumnByValue?: (val: unknown) => string
pathSegments?: string[]
rawColumn?: SQL
table: GenericTable
table: GenericTable | PgTableWithColumns<any>
}
type Args = {
adapter: PostgresAdapter
aliasTable?: GenericTable
aliasTable?: GenericTable | PgTableWithColumns<any>
collectionPath: string
columnPrefix?: string
constraintPath?: string
@@ -44,6 +45,14 @@ type Args = {
rootTableName?: string
selectFields: Record<string, GenericColumn>
tableName: string
/**
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
*/
tableNameSuffix?: string
/**
* The raw value of the query before sanitization
*/
value: unknown
}
/**
* Transforms path to table and column name
@@ -65,6 +74,8 @@ export const getTableColumnFromPath = ({
rootTableName: incomingRootTableName,
selectFields,
tableName,
tableNameSuffix = '',
value,
}: Args): TableColumn => {
const fieldPath = incomingSegments[0]
let locale = incomingLocale
@@ -83,8 +94,8 @@ export const getTableColumnFromPath = ({
constraints,
field: {
name: 'id',
type: 'number',
},
type: adapter.idType === 'uuid' ? 'text' : 'number',
} as TextField | NumberField,
table: adapter.tables[newTableName],
}
}
@@ -125,6 +136,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}
case 'tab': {
@@ -134,7 +147,7 @@ export const getTableColumnFromPath = ({
aliasTable,
collectionPath,
columnPrefix: `${columnPrefix}${field.name}_`,
constraintPath,
constraintPath: `${constraintPath}${field.name}.`,
constraints,
fields: field.fields,
joinAliases,
@@ -144,6 +157,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}
return getTableColumnFromPath({
@@ -161,6 +176,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}
@@ -185,7 +202,7 @@ export const getTableColumnFromPath = ({
aliasTable,
collectionPath,
columnPrefix: `${columnPrefix}${field.name}_`,
constraintPath,
constraintPath: `${constraintPath}${field.name}.`,
constraints,
fields: field.fields,
joinAliases,
@@ -195,11 +212,13 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}
case 'array': {
newTableName = `${tableName}_${toSnakeCase(field.name)}`
newTableName = `${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`
constraintPath = `${constraintPath}${field.name}.%.`
if (locale && field.localized && adapter.payload.config.localization) {
joins[newTableName] = and(
@@ -232,12 +251,39 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
value,
})
}
case 'blocks': {
let blockTableColumn: TableColumn
let newTableName: string
// handle blockType queries
if (pathSegments[1] === 'blockType') {
// find the block config using the value
const blockTypes = Array.isArray(value) ? value : [value]
blockTypes.forEach((blockType) => {
const block = field.blocks.find((block) => block.slug === blockType)
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
joins[newTableName] = eq(
adapter.tables[tableName].id,
adapter.tables[newTableName]._parentID,
)
constraints.push({
columnName: '_path',
table: adapter.tables[newTableName],
value: pathSegments[0],
})
})
return {
constraints,
field,
getNotNullColumnByValue: () => 'id',
table: adapter.tables[tableName],
}
}
const hasBlockField = field.blocks.some((block) => {
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
constraintPath = `${constraintPath}${field.name}.%.`
@@ -258,6 +304,7 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields: blockSelectFields,
tableName: newTableName,
value,
})
} catch (error) {
// this is fine, not every block will have the field
@@ -298,9 +345,6 @@ export const getTableColumnFromPath = ({
table: blockTableColumn.table,
}
}
if (pathSegments[1] === 'blockType') {
throw new APIError('Querying on blockType is not supported')
}
break
}
@@ -317,21 +361,15 @@ export const getTableColumnFromPath = ({
// Join in the relationships table
joinAliases.push({
condition: eq(
(aliasTable || adapter.tables[rootTableName]).id,
aliasRelationshipTable.parent,
condition: and(
eq((aliasTable || adapter.tables[rootTableName]).id, aliasRelationshipTable.parent),
like(aliasRelationshipTable.path, `${constraintPath}${field.name}`),
),
table: aliasRelationshipTable,
})
selectFields[`${relationTableName}.path`] = aliasRelationshipTable.path
constraints.push({
columnName: 'path',
table: aliasRelationshipTable,
value: `${constraintPath}${field.name}`,
})
let newAliasTable
if (typeof field.relationTo === 'string') {
@@ -346,7 +384,7 @@ export const getTableColumnFromPath = ({
table: newAliasTable,
})
if (newCollectionPath === '') {
if (newCollectionPath === '' || newCollectionPath === 'id') {
return {
columnName: `${field.relationTo}ID`,
constraints,
@@ -394,6 +432,7 @@ export const getTableColumnFromPath = ({
rootTableName: newTableName,
selectFields,
tableName: newTableName,
value,
})
}
@@ -428,7 +467,7 @@ export const getTableColumnFromPath = ({
columnName: `${columnPrefix}${field.name}`,
constraints,
field,
pathSegments: pathSegments,
pathSegments,
table: targetTable,
}
}

View File

@@ -63,11 +63,7 @@ export async function parseParams({
where: condition,
})
if (builtConditions.length > 0) {
if (result) {
result = operatorMap[conditionOperator](result, ...builtConditions)
} else {
result = operatorMap[conditionOperator](...builtConditions)
}
result = operatorMap[conditionOperator](...builtConditions)
}
} else {
// It's a path - and there can be multiple comparisons on a single path.
@@ -77,6 +73,7 @@ export async function parseParams({
if (typeof pathOperators === 'object') {
for (const operator of Object.keys(pathOperators)) {
if (validOperators.includes(operator as Operator)) {
const val = where[relationOrPath][operator]
const {
columnName,
constraints: queryConstraints,
@@ -95,10 +92,9 @@ export async function parseParams({
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: val,
})
const val = where[relationOrPath][operator]
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
if (typeof value === 'string' && value.indexOf('%') > -1) {
constraints.push(operatorMap.like(constraintTable[col], value))
@@ -169,6 +165,7 @@ export async function parseParams({
}
const sanitizedQueryValue = sanitizeQueryValue({
adapter,
field,
operator,
relationOrPath,
@@ -207,6 +204,16 @@ export async function parseParams({
break
}
if (operator === 'equals' && queryValue === null) {
constraints.push(isNull(rawColumn || table[columnName]))
break
}
if (operator === 'not_equals' && queryValue === null) {
constraints.push(isNotNull(rawColumn || table[columnName]))
break
}
constraints.push(
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
)

View File

@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
import { createArrayFromCommaDelineated } from 'payload/utilities'
import type { PostgresAdapter } from '../types'
type SanitizeQueryValueArgs = {
adapter: PostgresAdapter
field: Field | TabAsField
operator: string
relationOrPath: string
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
}
export const sanitizeQueryValue = ({
adapter,
field,
operator: operatorArg,
relationOrPath,
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
) {
const allPossibleIDTypes: (number | string)[] = []
formattedValue.forEach((val) => {
if (typeof val === 'string') {
if (adapter.idType !== 'uuid' && typeof val === 'string') {
allPossibleIDTypes.push(val, parseInt(val))
} else if (typeof val === 'string') {
allPossibleIDTypes.push(val)
} else {
allPossibleIDTypes.push(val, String(val))
}

View File

@@ -1,35 +1,31 @@
/* eslint-disable no-param-reassign */
import type { Relation } from 'drizzle-orm'
import type { IndexBuilder, PgColumnBuilder, UniqueConstraintBuilder } from 'drizzle-orm/pg-core'
import type {
IndexBuilder,
PgColumnBuilder,
PgTableWithColumns,
UniqueConstraintBuilder,
} from 'drizzle-orm/pg-core'
import type { Field } from 'payload/types'
import { relations } from 'drizzle-orm'
import {
index,
integer,
numeric,
pgTable,
serial,
timestamp,
unique,
varchar,
} from 'drizzle-orm/pg-core'
import { index, integer, numeric, serial, timestamp, unique, varchar } from 'drizzle-orm/pg-core'
import { fieldAffectsData } from 'payload/types'
import toSnakeCase from 'to-snake-case'
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
import { getConfigIDType } from './getConfigIDType'
import { parentIDColumnMap } from './parentIDColumnMap'
import { setColumnID } from './setColumnID'
import { traverseFields } from './traverseFields'
type Args = {
adapter: PostgresAdapter
baseColumns?: Record<string, PgColumnBuilder>
baseExtraConfig?: Record<string, (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder>
buildTexts?: boolean
buildNumbers?: boolean
buildRelationships?: boolean
buildTexts?: boolean
disableNotNull: boolean
disableUnique: boolean
fields: Field[]
@@ -42,8 +38,8 @@ type Args = {
}
type Result = {
hasManyTextField: 'index' | boolean
hasManyNumberField: 'index' | boolean
hasManyTextField: 'index' | boolean
relationsToBuild: Map<string, string>
}
@@ -51,9 +47,9 @@ export const buildTable = ({
adapter,
baseColumns = {},
baseExtraConfig = {},
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
disableNotNull,
disableUnique = false,
fields,
@@ -77,39 +73,32 @@ export const buildTable = ({
const localesColumns: Record<string, PgColumnBuilder> = {}
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {}
let localesTable: GenericTable
let textsTable: GenericTable
let numbersTable: GenericTable
let localesTable: GenericTable | PgTableWithColumns<any>
let textsTable: GenericTable | PgTableWithColumns<any>
let numbersTable: GenericTable | PgTableWithColumns<any>
// Relationships to the base collection
const relationships: Set<string> = rootRelationships || new Set()
let relationshipsTable: GenericTable
let relationshipsTable: GenericTable | PgTableWithColumns<any>
// Drizzle relations
const relationsToBuild: Map<string, string> = new Map()
const idColType = getConfigIDType(fields)
const idColType: IDType = setColumnID({ adapter, columns, fields })
const idColTypeMap = {
integer: serial,
numeric,
varchar,
}
columns.id = idColTypeMap[idColType]('id').primaryKey()
;({
hasLocalizedField,
hasLocalizedManyTextField,
hasLocalizedManyNumberField,
hasLocalizedManyTextField,
hasLocalizedRelationshipField,
hasManyTextField,
hasManyNumberField,
hasManyTextField,
} = traverseFields({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columns,
disableNotNull,
disableUnique,
@@ -143,7 +132,7 @@ export const buildTable = ({
.notNull()
}
const table = pgTable(tableName, columns, (cols) => {
const table = adapter.pgSchema.table(tableName, columns, (cols) => {
const extraConfig = Object.entries(baseExtraConfig).reduce((config, [key, func]) => {
config[key] = func(cols)
return config
@@ -165,7 +154,7 @@ export const buildTable = ({
.references(() => table.id, { onDelete: 'cascade' })
.notNull()
localesTable = pgTable(localeTableName, localesColumns, (cols) => {
localesTable = adapter.pgSchema.table(localeTableName, localesColumns, (cols) => {
return Object.entries(localesIndexes).reduce(
(acc, [colName, func]) => {
acc[colName] = func(cols)
@@ -196,29 +185,29 @@ export const buildTable = ({
const textsTableName = `${rootTableName}_texts`
const columns: Record<string, PgColumnBuilder> = {
id: serial('id').primaryKey(),
text: varchar('text'),
order: integer('order').notNull(),
parent: parentIDColumnMap[idColType]('parent_id')
.references(() => table.id, { onDelete: 'cascade' })
.notNull(),
path: varchar('path').notNull(),
text: varchar('text'),
}
if (hasLocalizedManyTextField) {
columns.locale = adapter.enums.enum__locales('locale')
}
textsTable = pgTable(textsTableName, columns, (cols) => {
textsTable = adapter.pgSchema.table(textsTableName, columns, (cols) => {
const indexes: Record<string, IndexBuilder> = {
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
}
if (hasManyTextField === 'index') {
indexes.text_idx = index('text_idx').on(cols.text)
indexes.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
}
if (hasLocalizedManyTextField) {
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
indexes.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
}
return indexes
@@ -252,17 +241,20 @@ export const buildTable = ({
columns.locale = adapter.enums.enum__locales('locale')
}
numbersTable = pgTable(numbersTableName, columns, (cols) => {
numbersTable = adapter.pgSchema.table(numbersTableName, columns, (cols) => {
const indexes: Record<string, IndexBuilder> = {
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
}
if (hasManyNumberField === 'index') {
indexes.numberIdx = index('number_idx').on(cols.number)
indexes.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
}
if (hasLocalizedManyNumberField) {
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
indexes.localeParent = index(`${numbersTableName}_locale_parent`).on(
cols.locale,
cols.parent,
)
}
return indexes
@@ -297,7 +289,7 @@ export const buildTable = ({
relationships.forEach((relationTo) => {
const formattedRelationTo = toSnakeCase(relationTo)
let colType = 'integer'
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
const relatedCollectionCustomID = adapter.payload.collections[
relationTo
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
@@ -311,19 +303,23 @@ export const buildTable = ({
const relationshipsTableName = `${tableName}_rels`
relationshipsTable = pgTable(relationshipsTableName, relationshipColumns, (cols) => {
const result: Record<string, unknown> = {
order: index('order_idx').on(cols.order),
parentIdx: index('parent_idx').on(cols.parent),
pathIdx: index('path_idx').on(cols.path),
}
relationshipsTable = adapter.pgSchema.table(
relationshipsTableName,
relationshipColumns,
(cols) => {
const result: Record<string, unknown> = {
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
}
if (hasLocalizedRelationshipField) {
result.localeIdx = index('locale_idx').on(cols.locale)
}
if (hasLocalizedRelationshipField) {
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
}
return result
})
return result
},
)
adapter.tables[relationshipsTableName] = relationshipsTable
@@ -381,5 +377,5 @@ export const buildTable = ({
adapter.relations[`relations_${tableName}`] = tableRelations
return { hasManyTextField, hasManyNumberField, relationsToBuild }
return { hasManyNumberField, hasManyTextField, relationsToBuild }
}

View File

@@ -6,10 +6,11 @@ import type { GenericColumn } from '../types'
type CreateIndexArgs = {
columnName: string
name: string | string[]
tableName: string
unique?: boolean
}
export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
export const createIndex = ({ name, columnName, tableName, unique }: CreateIndexArgs) => {
return (table: { [x: string]: GenericColumn }) => {
let columns
if (Array.isArray(name)) {
@@ -20,7 +21,8 @@ export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
} else {
columns = [table[name]]
}
if (unique) return uniqueIndex(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
return index(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
if (unique)
return uniqueIndex(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
return index(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
}
}

View File

@@ -1,17 +0,0 @@
import { type Field, fieldAffectsData } from 'payload/types'
export const getConfigIDType = (fields: Field[]): string => {
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')
if (idField) {
if (idField.type === 'number') {
return 'numeric'
}
if (idField.type === 'text') {
return 'varchar'
}
}
return 'integer'
}

View File

@@ -1,7 +1,13 @@
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'
export const parentIDColumnMap = {
import type { IDType } from '../types'
export const parentIDColumnMap: Record<
IDType,
typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
> = {
integer,
numeric,
uuid,
varchar,
}

View File

@@ -0,0 +1,33 @@
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'
import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
import { type Field, fieldAffectsData } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities'
import type { IDType, PostgresAdapter } from '../types'
type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
const idField = flattenTopLevelFields(fields).find(
(field) => fieldAffectsData(field) && field.name === 'id',
)
if (idField) {
if (idField.type === 'number') {
columns.id = numeric('id').primaryKey()
return 'numeric'
}
if (idField.type === 'text') {
columns.id = varchar('id').primaryKey()
return 'varchar'
}
}
if (adapter.idType === 'uuid') {
columns.id = uuid('id').defaultRandom().primaryKey()
return 'uuid'
}
columns.id = serial('id').primaryKey()
return 'integer'
}

View File

@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
import { relations } from 'drizzle-orm'
import {
PgNumericBuilder,
PgUUIDBuilder,
PgVarcharBuilder,
boolean,
index,
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
import { fieldAffectsData, optionIsObject } from 'payload/types'
import toSnakeCase from 'to-snake-case'
import type { GenericColumns, PostgresAdapter } from '../types'
import type { GenericColumns, IDType, PostgresAdapter } from '../types'
import { hasLocalesTable } from '../utilities/hasLocalesTable'
import { buildTable } from './build'
@@ -32,9 +33,9 @@ import { validateExistingBlockIsIdentical } from './validateExistingBlockIsIdent
type Args = {
adapter: PostgresAdapter
buildTexts: boolean
buildNumbers: boolean
buildRelationships: boolean
buildTexts: boolean
columnPrefix?: string
columns: Record<string, PgColumnBuilder>
disableNotNull: boolean
@@ -56,18 +57,18 @@ type Args = {
type Result = {
hasLocalizedField: boolean
hasLocalizedManyTextField: boolean
hasLocalizedManyNumberField: boolean
hasLocalizedManyTextField: boolean
hasLocalizedRelationshipField: boolean
hasManyTextField: 'index' | boolean
hasManyNumberField: 'index' | boolean
hasManyTextField: 'index' | boolean
}
export const traverseFields = ({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columnPrefix,
columns,
disableNotNull,
@@ -93,7 +94,8 @@ export const traverseFields = ({
let hasManyNumberField: 'index' | boolean = false
let hasLocalizedManyNumberField = false
let parentIDColType = 'integer'
let parentIDColType: IDType = 'integer'
if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'
@@ -122,7 +124,7 @@ export const traverseFields = ({
if (
(field.unique || field.index) &&
!['array', 'blocks', 'group', 'point', 'relationship', 'upload'].includes(field.type) &&
!(field.type === 'number' && field.hasMany === true)
!('hasMany' in field && field.hasMany === true)
) {
const unique = disableUnique !== true && field.unique
if (unique) {
@@ -132,9 +134,10 @@ export const traverseFields = ({
}
adapter.fieldConstraints[rootTableName][`${columnName}_idx`] = constraintValue
}
targetIndexes[`${field.name}Idx`] = createIndex({
targetIndexes[`${newTableName}_${field.name}Idx`] = createIndex({
name: fieldName,
columnName,
tableName: newTableName,
unique,
})
}
@@ -241,17 +244,18 @@ export const traverseFields = ({
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
orderIdx: (cols) => index('order_idx').on(cols.order),
parentIdx: (cols) => index('parent_idx').on(cols.parent),
orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order),
parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
}
if (field.localized) {
baseColumns.locale = adapter.enums.enum__locales('locale').notNull()
baseExtraConfig.localeIdx = (cols) => index('locale_idx').on(cols.locale)
baseExtraConfig.localeIdx = (cols) =>
index(`${selectTableName}_locale_idx`).on(cols.locale)
}
if (field.index) {
baseExtraConfig.value = (cols) => index('value_idx').on(cols.value)
baseExtraConfig.value = (cols) => index(`${selectTableName}_value_idx`).on(cols.value)
}
buildTable({
@@ -304,18 +308,19 @@ export const traverseFields = ({
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
_orderIdx: (cols) => index('_order_idx').on(cols._order),
_parentIDIdx: (cols) => index('_parent_id_idx').on(cols._parentID),
_orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order),
_parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
}
if (field.localized && adapter.payload.config.localization) {
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
baseExtraConfig._localeIdx = (cols) => index('_locale_idx').on(cols._locale)
baseExtraConfig._localeIdx = (cols) =>
index(`${arrayTableName}_locale_idx`).on(cols._locale)
}
const {
hasManyTextField: subHasManyTextField,
hasManyNumberField: subHasManyNumberField,
hasManyTextField: subHasManyTextField,
relationsToBuild: subRelationsToBuild,
} = buildTable({
adapter,
@@ -384,19 +389,20 @@ export const traverseFields = ({
string,
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
> = {
_orderIdx: (cols) => index('order_idx').on(cols._order),
_parentIDIdx: (cols) => index('parent_id_idx').on(cols._parentID),
_pathIdx: (cols) => index('path_idx').on(cols._path),
_orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order),
_parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID),
_pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
}
if (field.localized && adapter.payload.config.localization) {
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
baseExtraConfig._localeIdx = (cols) => index('locale_idx').on(cols._locale)
baseExtraConfig._localeIdx = (cols) =>
index(`${blockTableName}_locale_idx`).on(cols._locale)
}
const {
hasManyTextField: subHasManyTextField,
hasManyNumberField: subHasManyNumberField,
hasManyTextField: subHasManyTextField,
relationsToBuild: subRelationsToBuild,
} = buildTable({
adapter,
@@ -465,16 +471,16 @@ export const traverseFields = ({
if (!('name' in field)) {
const {
hasLocalizedField: groupHasLocalizedField,
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
hasManyTextField: groupHasManyTextField,
hasManyNumberField: groupHasManyNumberField,
hasManyTextField: groupHasManyTextField,
} = traverseFields({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columnPrefix,
columns,
disableNotNull,
@@ -507,16 +513,16 @@ export const traverseFields = ({
const {
hasLocalizedField: groupHasLocalizedField,
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
hasManyTextField: groupHasManyTextField,
hasManyNumberField: groupHasManyNumberField,
hasManyTextField: groupHasManyTextField,
} = traverseFields({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columnPrefix: `${columnName}_`,
columns,
disableNotNull: disableNotNullFromHere,
@@ -550,16 +556,16 @@ export const traverseFields = ({
const {
hasLocalizedField: tabHasLocalizedField,
hasLocalizedManyTextField: tabHasLocalizedManyTextField,
hasLocalizedManyNumberField: tabHasLocalizedManyNumberField,
hasLocalizedManyTextField: tabHasLocalizedManyTextField,
hasLocalizedRelationshipField: tabHasLocalizedRelationshipField,
hasManyTextField: tabHasManyTextField,
hasManyNumberField: tabHasManyNumberField,
hasManyTextField: tabHasManyTextField,
} = traverseFields({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columnPrefix,
columns,
disableNotNull: disableNotNullFromHere,
@@ -593,16 +599,16 @@ export const traverseFields = ({
const disableNotNullFromHere = Boolean(field.admin?.condition) || disableNotNull
const {
hasLocalizedField: rowHasLocalizedField,
hasLocalizedManyTextField: rowHasLocalizedManyTextField,
hasLocalizedManyNumberField: rowHasLocalizedManyNumberField,
hasLocalizedManyTextField: rowHasLocalizedManyTextField,
hasLocalizedRelationshipField: rowHasLocalizedRelationshipField,
hasManyTextField: rowHasManyTextField,
hasManyNumberField: rowHasManyNumberField,
hasManyTextField: rowHasManyTextField,
} = traverseFields({
adapter,
buildTexts,
buildNumbers,
buildRelationships,
buildTexts,
columnPrefix,
columns,
disableNotNull: disableNotNullFromHere,
@@ -663,10 +669,10 @@ export const traverseFields = ({
return {
hasLocalizedField,
hasLocalizedManyTextField,
hasLocalizedManyNumberField,
hasLocalizedManyTextField,
hasLocalizedRelationshipField,
hasManyTextField,
hasManyNumberField,
hasManyTextField,
}
}

View File

@@ -7,18 +7,28 @@ import type {
Relations,
} from 'drizzle-orm'
import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-postgres'
import type { PgColumn, PgEnum, PgTableWithColumns, PgTransaction } from 'drizzle-orm/pg-core'
import type {
PgColumn,
PgEnum,
PgSchema,
PgTableWithColumns,
PgTransaction,
} from 'drizzle-orm/pg-core'
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
import type { Payload } from 'payload'
import type { BaseDatabaseAdapter } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Pool, PoolConfig } from 'pg'
export type DrizzleDB = NodePgDatabase<Record<string, unknown>>
export type Args = {
idType?: 'serial' | 'uuid'
logger?: DrizzleConfig['logger']
migrationDir?: string
pool: PoolConfig
logger?: DrizzleConfig['logger']
push?: boolean
schemaName?: string
}
export type GenericColumn = PgColumn<
@@ -49,13 +59,21 @@ export type DrizzleTransaction = PgTransaction<
export type PostgresAdapter = BaseDatabaseAdapter & {
drizzle: DrizzleDB
logger: DrizzleConfig['logger']
enums: Record<string, GenericEnum>
/**
* An object keyed on each table name; each entry maps a constraint name to its dot-notation field name
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
idType: Args['idType']
logger: DrizzleConfig['logger']
pgSchema?: { table: PgTableFn } | PgSchema
pool: Pool
poolOptions: Args['pool']
push: boolean
relations: Record<string, GenericRelation>
schema: Record<string, GenericEnum | GenericRelation | GenericTable>
schemaName?: Args['schemaName']
sessions: {
[id: string]: {
db: DrizzleTransaction
@@ -63,18 +81,15 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
resolve: () => Promise<void>
}
}
tables: Record<string, GenericTable>
/**
* An object keyed on each table name; each entry maps a constraint name to its dot-notation field name
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
tables: Record<string, GenericTable | PgTableWithColumns<any>>
}
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter
export type MigrateUpArgs = { payload: Payload }
export type MigrateDownArgs = { payload: Payload }
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }
export type MigrateDownArgs = { payload: Payload; req?: Partial<PayloadRequest> }
declare module 'payload' {
export interface DatabaseAdapter
@@ -82,6 +97,7 @@ declare module 'payload' {
BaseDatabaseAdapter {
drizzle: DrizzleDB
enums: Record<string, GenericEnum>
fieldConstraints: Record<string, Record<string, string>>
pool: Pool
push: boolean
relations: Record<string, GenericRelation>
@@ -94,6 +110,5 @@ declare module 'payload' {
}
}
tables: Record<string, GenericTable>
fieldConstraints: Record<string, Record<string, string>>
}
}

View File
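
The `schemaName` option added to `Args` above lets the adapter create and use a Postgres schema other than `public`. A minimal sketch of how it might be configured, assuming a standard Payload setup; the connection string and schema name are placeholders:

import { postgresAdapter } from '@payloadcms/db-postgres'

// Passed to `db` in the Payload config
export const db = postgresAdapter({
  pool: { connectionString: process.env.DATABASE_URI },
  // New: the adapter's tables (including payload_migrations) are created under this schema instead of `public`
  schemaName: 'payload',
})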

@@ -36,11 +36,11 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
}
}
const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
const parentID = parentRows[parentRowIndex].id
// Add any sub arrays that need to be created
// We will call this recursively below
arrayRows.forEach((arrayRow) => {
arrayRows.forEach((arrayRow, i) => {
if (Object.keys(arrayRow.arrays).length > 0) {
rowsByTable[tableName].arrays.push(arrayRow.arrays)
}
@@ -53,6 +53,9 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
arrayRowLocaleData._parentID = arrayRow.row.id
arrayRowLocaleData._locale = arrayRowLocale
rowsByTable[tableName].locales.push(arrayRowLocaleData)
if (!arrayRow.row.id) {
arrayRowLocaleData._getParentID = (rows) => rows[i].id
}
})
})
})
@@ -61,12 +64,23 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
// Insert all corresponding arrays
// (one insert per array table)
for (const [tableName, row] of Object.entries(rowsByTable)) {
// the nested arrays need the ID for the parentID foreign key
let insertedRows: Args['parentRows']
if (row.rows.length > 0) {
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
}
// Insert locale rows
if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
if (!row.locales[0]._parentID) {
row.locales = row.locales.map((localeRow, i) => {
if (typeof localeRow._getParentID === 'function') {
localeRow._parentID = localeRow._getParentID(insertedRows)
delete localeRow._getParentID
}
return localeRow
})
}
await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
}
@@ -76,7 +90,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
adapter,
arrays: row.arrays,
db,
parentRows: row.rows,
parentRows: insertedRows,
})
}
}

View File
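
The `_getParentID` callback above exists because a locale row may be built before its parent array row has a database-generated ID: the parent's index is captured up front and resolved once `insert(...).returning()` yields the inserted rows. The same pattern in isolation (row shapes are simplified, not Payload types):

type InsertedRow = { id: number | string }
type LocaleRow = {
  _parentID?: number | string
  _getParentID?: (rows: InsertedRow[]) => number | string
}

// While building rows: remember which parent (by index) this locale row belongs to
const localeRow: LocaleRow = { _getParentID: (rows) => rows[1].id }

// After the parent insert returns rows with generated IDs, resolve the deferred parent ID
const insertedRows: InsertedRow[] = [{ id: 10 }, { id: 11 }]
if (typeof localeRow._getParentID === 'function') {
  localeRow._parentID = localeRow._getParentID(insertedRows)
  delete localeRow._getParentID
}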

@@ -1,13 +1,17 @@
import { sql } from 'drizzle-orm'
import type { DrizzleDB } from '../types'
import type { PostgresAdapter } from '../types'
export const createMigrationTable = async (db: DrizzleDB): Promise<void> => {
await db.execute(sql`CREATE TABLE IF NOT EXISTS "payload_migrations" (
export const createMigrationTable = async (adapter: PostgresAdapter): Promise<void> => {
const prependSchema = adapter.schemaName ? `"${adapter.schemaName}".` : ''
await adapter.drizzle.execute(
sql.raw(`CREATE TABLE IF NOT EXISTS ${prependSchema}"payload_migrations" (
"id" serial PRIMARY KEY NOT NULL,
"name" varchar,
"batch" numeric,
"updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
"created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
);`)
);`),
)
}

View File

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "2.8.2",
"version": "2.11.2",
"description": "Node, React and MongoDB Headless CMS and Application Framework",
"license": "MIT",
"main": "./dist/index.js",
@@ -59,7 +59,7 @@
"@faceless-ui/scroll-info": "1.3.0",
"@faceless-ui/window-info": "2.1.1",
"@monaco-editor/react": "4.5.1",
"@swc/core": "1.3.76",
"@swc/core": "1.3.107",
"@swc/register": "0.1.10",
"body-parser": "1.20.2",
"body-scroll-lock": "4.0.0-beta.0",
@@ -101,7 +101,6 @@
"jwt-decode": "3.1.2",
"md5": "2.3.0",
"method-override": "3.0.0",
"micro-memoize": "4.1.2",
"minimist": "1.2.8",
"mkdirp": "1.0.4",
"monaco-editor": "0.38.0",
@@ -193,7 +192,7 @@
"get-port": "5.1.1",
"mini-css-extract-plugin": "1.6.2",
"node-fetch": "2.6.12",
"nodemon": "3.0.1",
"nodemon": "3.0.3",
"object.assign": "4.1.4",
"object.entries": "1.1.6",
"passport-strategy": "1.0.0",

View File

@@ -24,11 +24,16 @@ export const Collapsible: React.FC<Props> = ({
}) => {
const [collapsedLocal, setCollapsedLocal] = useState(Boolean(initCollapsed))
const [hoveringToggle, setHoveringToggle] = useState(false)
const isNested = useCollapsible()
const { withinCollapsible } = useCollapsible()
const { t } = useTranslation('fields')
const collapsed = typeof collapsedFromProps === 'boolean' ? collapsedFromProps : collapsedLocal
const toggleCollapsible = React.useCallback(() => {
if (typeof onToggle === 'function') onToggle(!collapsed)
setCollapsedLocal(!collapsed)
}, [onToggle, collapsed])
return (
<div
className={[
@@ -36,14 +41,14 @@ export const Collapsible: React.FC<Props> = ({
className,
dragHandleProps && `${baseClass}--has-drag-handle`,
collapsed && `${baseClass}--collapsed`,
isNested && `${baseClass}--nested`,
withinCollapsible && `${baseClass}--nested`,
hoveringToggle && `${baseClass}--hovered`,
`${baseClass}--style-${collapsibleStyle}`,
]
.filter(Boolean)
.join(' ')}
>
<CollapsibleProvider>
<CollapsibleProvider collapsed={collapsed} toggle={toggleCollapsible}>
<div
className={`${baseClass}__toggle-wrap`}
onMouseEnter={() => setHoveringToggle(true)}
@@ -65,10 +70,7 @@ export const Collapsible: React.FC<Props> = ({
]
.filter(Boolean)
.join(' ')}
onClick={() => {
if (typeof onToggle === 'function') onToggle(!collapsed)
setCollapsedLocal(!collapsed)
}}
onClick={toggleCollapsible}
type="button"
>
<span>{t('toggleBlock')}</span>

View File

@@ -1,14 +1,35 @@
import React, { createContext, useContext } from 'react'
const Context = createContext(false)
type ContextType = {
collapsed: boolean
isVisible: boolean
toggle: () => void
withinCollapsible: boolean
}
const Context = createContext({
collapsed: false,
isVisible: true,
toggle: () => {},
withinCollapsible: true,
})
export const CollapsibleProvider: React.FC<{
children?: React.ReactNode
collapsed?: boolean
toggle: () => void
withinCollapsible?: boolean
}> = ({ children, withinCollapsible = true }) => {
return <Context.Provider value={withinCollapsible}>{children}</Context.Provider>
}> = ({ children, collapsed, toggle, withinCollapsible = true }) => {
const { collapsed: parentIsCollapsed, isVisible } = useCollapsible()
const contextValue = React.useMemo((): ContextType => {
return {
collapsed: Boolean(collapsed),
isVisible: isVisible && !parentIsCollapsed,
toggle,
withinCollapsible,
}
}, [collapsed, withinCollapsible, toggle, parentIsCollapsed, isVisible])
return <Context.Provider value={contextValue}>{children}</Context.Provider>
}
export const useCollapsible = (): boolean => useContext(Context)
export default Context
export const useCollapsible = (): ContextType => useContext(Context)

View File
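
Because `useCollapsible` now returns an object rather than a bare boolean, consumers destructure the named flags, as the `Group` and `Tabs` fields further below do with `withinCollapsible`. A hypothetical consumer, with the import path, component name, and class names assumed:

import React from 'react'
import { useCollapsible } from '../Collapsible/provider' // assumed path

export const NestedIndicator: React.FC = () => {
  const { collapsed, isVisible, toggle, withinCollapsible } = useCollapsible()

  // Avoid rendering work while an ancestor collapsible is closed
  if (!isVisible) return null

  return (
    <button onClick={toggle} type="button">
      {withinCollapsible && collapsed ? 'Expand section' : 'Collapse section'}
    </button>
  )
}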

@@ -64,7 +64,7 @@ const DeleteDocument: React.FC<Props> = (props) => {
if (res.status < 400) {
setDeleting(false)
toggleModal(modalSlug)
toast.success(t('titleDeleted', { label: getTranslation(singular, i18n), title }))
toast.success(json.message || t('titleDeleted', { label: getTranslation(singular, i18n), title }))
return history.push(`${admin}/collections/${slug}`)
}

View File

@@ -33,7 +33,7 @@ export const DocumentControls: React.FC<{
id?: string
isAccountView?: boolean
isEditing?: boolean
permissions?: CollectionPermission | GlobalPermission | null
permissions?: CollectionPermission | GlobalPermission
}> = (props) => {
const {
id,

View File

@@ -52,21 +52,9 @@ const Content: React.FC<DocumentDrawerProps> = ({
const { id, docPermissions, getDocPreferences } = useDocumentInfo()
// The component definition could come from multiple places in the config
// we need to cascade into the proper component from the top-down
// 1. "components.Edit"
// 2. "components.Edit.Default"
// 3. "components.Edit.Default.Component"
const CustomEditView =
typeof Edit === 'function'
? Edit
: typeof Edit === 'object' && typeof Edit.Default === 'function'
? Edit.Default
: typeof Edit?.Default === 'object' &&
'Component' in Edit.Default &&
typeof Edit.Default.Component === 'function'
? Edit.Default.Component
: undefined
// If they are replacing the entire edit view, use that.
// Else let the DefaultEdit determine what to render.
const CustomEditView = typeof Edit === 'function' ? Edit : undefined
const [fields, setFields] = useState(() => formatFields(collectionConfig, true))

View File

@@ -20,7 +20,7 @@ export const getCustomViews = (args: {
? collection?.admin?.components?.views?.Edit
: undefined
const defaultViewKeys = Object.keys(defaultCollectionViews)
const defaultViewKeys = Object.keys(defaultCollectionViews())
customViews = Object.entries(collectionViewsConfig || {}).reduce((prev, [key, view]) => {
if (defaultViewKeys.includes(key)) {
@@ -38,7 +38,7 @@ export const getCustomViews = (args: {
? global?.admin?.components?.views?.Edit
: undefined
const defaultViewKeys = Object.keys(defaultGlobalViews)
const defaultViewKeys = Object.keys(defaultGlobalViews())
customViews = Object.entries(globalViewsConfig || {}).reduce((prev, [key, view]) => {
if (defaultViewKeys.includes(key)) {

View File

@@ -13,6 +13,7 @@ import { fieldTypes } from '../../forms/field-types'
import X from '../../icons/X'
import { useAuth } from '../../utilities/Auth'
import { useConfig } from '../../utilities/Config'
import { DocumentInfoProvider } from '../../utilities/DocumentInfo'
import { OperationContext } from '../../utilities/OperationProvider'
import { SelectAllStatus, useSelection } from '../../views/collections/List/SelectionProvider'
import { Drawer, DrawerToggler } from '../Drawer'
@@ -120,53 +121,55 @@ const EditMany: React.FC<Props> = (props) => {
{t('edit')}
</DrawerToggler>
<Drawer header={null} slug={drawerSlug}>
<OperationContext.Provider value="update">
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
<div className={`${baseClass}__main`}>
<div className={`${baseClass}__header`}>
<h2 className={`${baseClass}__header__title`}>
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
</h2>
<button
aria-label={t('close')}
className={`${baseClass}__header__close`}
id={`close-drawer__${drawerSlug}`}
onClick={() => closeModal(drawerSlug)}
type="button"
>
<X />
</button>
</div>
<FieldSelect fields={fields} setSelected={setSelected} />
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
<div className={`${baseClass}__sidebar-wrap`}>
<div className={`${baseClass}__sidebar`}>
<div className={`${baseClass}__sidebar-sticky-wrap`}>
<div className={`${baseClass}__document-actions`}>
{collection.versions ? (
<React.Fragment>
<Publish
<DocumentInfoProvider collection={collection}>
<OperationContext.Provider value="update">
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
<div className={`${baseClass}__main`}>
<div className={`${baseClass}__header`}>
<h2 className={`${baseClass}__header__title`}>
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
</h2>
<button
aria-label={t('close')}
className={`${baseClass}__header__close`}
id={`close-drawer__${drawerSlug}`}
onClick={() => closeModal(drawerSlug)}
type="button"
>
<X />
</button>
</div>
<FieldSelect fields={fields} setSelected={setSelected} />
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
<div className={`${baseClass}__sidebar-wrap`}>
<div className={`${baseClass}__sidebar`}>
<div className={`${baseClass}__sidebar-sticky-wrap`}>
<div className={`${baseClass}__document-actions`}>
{collection.versions ? (
<React.Fragment>
<Publish
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
disabled={selected.length === 0}
/>
<SaveDraft
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
disabled={selected.length === 0}
/>
</React.Fragment>
) : (
<Submit
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
disabled={selected.length === 0}
/>
<SaveDraft
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
disabled={selected.length === 0}
/>
</React.Fragment>
) : (
<Submit
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
disabled={selected.length === 0}
/>
)}
)}
</div>
</div>
</div>
</div>
</div>
</div>
</Form>
</OperationContext.Provider>
</Form>
</OperationContext.Provider>
</DocumentInfoProvider>
</Drawer>
</div>
)

View File

@@ -133,9 +133,10 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
const moreThanOneAvailableCollection = enabledCollectionConfigs.length > 1
useEffect(() => {
const { slug, admin: { listSearchableFields } = {} } = selectedCollectionConfig
const { slug, admin: { listSearchableFields } = {}, versions } = selectedCollectionConfig
const params: {
cacheBust?: number
draft?: string
limit?: number
page?: number
search?: string
@@ -172,6 +173,7 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
if (sort) params.sort = sort
if (cacheBust) params.cacheBust = cacheBust
if (copyOfWhere) params.where = copyOfWhere
if (versions?.drafts) params.draft = 'true'
setParams(params)
}, [

View File

@@ -12,42 +12,83 @@ import { fieldAffectsData, fieldHasSubFields, tabHasName } from '../../../../../
import getValueWithDefault from '../../../../../fields/getDefaultValue'
import { iterateFields } from './iterateFields'
type Args = {
export type AddFieldStatePromiseArgs = {
/**
* if all parents are localized, then the field is localized
*/
anyParentLocalized?: boolean
config: SanitizedConfig
data: Data
field: NonPresentationalField
/**
* You can use this to filter down to only `localized` fields that require translation (type: text, textarea, etc.). Another plugin might want to look for only `point` type fields to do some GIS function. With the filter function you can go in like a surgeon.
*/
filter?: (args: AddFieldStatePromiseArgs) => boolean
/**
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
*/
forceFullValue?: boolean
fullData: Data
id: number | string
/**
* Whether the field schema should be included in the state
*/
includeSchema?: boolean
locale: string
/**
* Whether to omit parent fields in the state. @default false
*/
omitParents?: boolean
operation: 'create' | 'update'
passesCondition: boolean
path: string
preferences: {
[key: string]: unknown
}
/**
* Whether to skip checking the field's condition. @default false
*/
skipConditionChecks?: boolean
/**
* Whether to skip validating the field. @default false
*/
skipValidation?: boolean
state: Fields
t: TFunction
user: User
}
export const addFieldStatePromise = async ({
id,
config,
data,
field,
fullData,
locale,
operation,
passesCondition,
path,
preferences,
state,
t,
user,
}: Args): Promise<void> => {
/**
* Flattens the fields schema and fields data.
* The output is the field path (e.g. array.0.name) mapped to a FormField object.
*/
export const addFieldStatePromise = async (args: AddFieldStatePromiseArgs): Promise<void> => {
const {
id,
anyParentLocalized = false,
config,
data,
field,
filter,
forceFullValue = false,
fullData,
includeSchema = false,
locale,
omitParents = false,
operation,
passesCondition,
path,
preferences,
skipConditionChecks = false,
skipValidation = false,
state,
t,
user,
} = args
if (fieldAffectsData(field)) {
const fieldState: FormField = {
condition: field.admin?.condition,
fieldSchema: includeSchema ? field : undefined,
initialValue: undefined,
passesCondition,
valid: true,
@@ -66,9 +107,9 @@ export const addFieldStatePromise = async ({
data[field.name] = valueWithDefault
}
let validationResult: boolean | string = true
let validationResult: string | true = true
if (typeof fieldState.validate === 'function') {
if (typeof fieldState.validate === 'function' && !skipValidation) {
validationResult = await fieldState.validate(data?.[field.name], {
...field,
id,
@@ -96,24 +137,36 @@ export const addFieldStatePromise = async ({
const rowPath = `${path}${field.name}.${i}.`
row.id = row?.id || new ObjectID().toHexString()
state[`${rowPath}id`] = {
initialValue: row.id,
valid: true,
value: row.id,
if (!omitParents && (!filter || filter(args))) {
state[`${rowPath}id`] = {
fieldSchema: includeSchema
? field.fields.find((field) => 'name' in field && field.name === 'id')
: undefined,
initialValue: row.id,
valid: true,
value: row.id,
}
}
acc.promises.push(
iterateFields({
id,
anyParentLocalized: field.localized || anyParentLocalized,
config,
data: row,
fields: field.fields,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
parentPassesCondition: passesCondition,
path: rowPath,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,
@@ -146,8 +199,8 @@ export const addFieldStatePromise = async ({
fieldState.value = null
fieldState.initialValue = null
} else {
fieldState.value = arrayValue.length
fieldState.initialValue = arrayValue.length
fieldState.value = forceFullValue ? arrayValue : arrayValue.length
fieldState.initialValue = forceFullValue ? arrayValue : arrayValue.length
if (arrayValue.length > 0) {
fieldState.disableFormData = true
@@ -157,7 +210,9 @@ export const addFieldStatePromise = async ({
fieldState.rows = rowMetadata
// Add field to state
state[`${path}${field.name}`] = fieldState
if (!omitParents && (!filter || filter(args))) {
state[`${path}${field.name}`] = fieldState
}
break
}
@@ -173,36 +228,60 @@ export const addFieldStatePromise = async ({
if (block) {
row.id = row?.id || new ObjectID().toHexString()
state[`${rowPath}id`] = {
initialValue: row.id,
valid: true,
value: row.id,
}
if (!omitParents && (!filter || filter(args))) {
state[`${rowPath}id`] = {
fieldSchema: includeSchema
? block.fields.find(
(blockField) => 'name' in blockField && blockField.name === 'id',
)
: undefined,
initialValue: row.id,
valid: true,
value: row.id,
}
state[`${rowPath}blockType`] = {
initialValue: row.blockType,
valid: true,
value: row.blockType,
}
state[`${rowPath}blockType`] = {
fieldSchema: includeSchema
? block.fields.find(
(blockField) => 'name' in blockField && blockField.name === 'blockType',
)
: undefined,
initialValue: row.blockType,
valid: true,
value: row.blockType,
}
state[`${rowPath}blockName`] = {
initialValue: row.blockName,
valid: true,
value: row.blockName,
state[`${rowPath}blockName`] = {
fieldSchema: includeSchema
? block.fields.find(
(blockField) => 'name' in blockField && blockField.name === 'blockName',
)
: undefined,
initialValue: row.blockName,
valid: true,
value: row.blockName,
}
}
acc.promises.push(
iterateFields({
id,
anyParentLocalized: field.localized || anyParentLocalized,
config,
data: row,
fields: block.fields,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
parentPassesCondition: passesCondition,
path: rowPath,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,
@@ -237,8 +316,8 @@ export const addFieldStatePromise = async ({
fieldState.value = null
fieldState.initialValue = null
} else {
fieldState.value = blocksValue.length
fieldState.initialValue = blocksValue.length
fieldState.value = forceFullValue ? blocksValue : blocksValue.length
fieldState.initialValue = forceFullValue ? blocksValue : blocksValue.length
if (blocksValue.length > 0) {
fieldState.disableFormData = true
@@ -248,7 +327,9 @@ export const addFieldStatePromise = async ({
fieldState.rows = rowMetadata
// Add field to state
state[`${path}${field.name}`] = fieldState
if (!omitParents && (!filter || filter(args))) {
state[`${path}${field.name}`] = fieldState
}
break
}
@@ -256,15 +337,22 @@ export const addFieldStatePromise = async ({
case 'group': {
await iterateFields({
id,
anyParentLocalized: field.localized || anyParentLocalized,
config,
data: data?.[field.name] || {},
fields: field.fields,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
parentPassesCondition: passesCondition,
path: `${path}${field.name}.`,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,
@@ -324,7 +412,9 @@ export const addFieldStatePromise = async ({
fieldState.initialValue = relationshipValue
}
state[`${path}${field.name}`] = fieldState
if (!filter || filter(args)) {
state[`${path}${field.name}`] = fieldState
}
break
}
@@ -337,7 +427,9 @@ export const addFieldStatePromise = async ({
fieldState.value = relationshipValue
fieldState.initialValue = relationshipValue
state[`${path}${field.name}`] = fieldState
if (!filter || filter(args)) {
state[`${path}${field.name}`] = fieldState
}
break
}
@@ -347,7 +439,9 @@ export const addFieldStatePromise = async ({
fieldState.initialValue = valueWithDefault
// Add field to state
state[`${path}${field.name}`] = fieldState
if (!filter || filter(args)) {
state[`${path}${field.name}`] = fieldState
}
break
}
@@ -356,15 +450,22 @@ export const addFieldStatePromise = async ({
// Handle field types that do not use names (row, etc)
await iterateFields({
id,
anyParentLocalized: field.localized || anyParentLocalized,
config,
data,
fields: field.fields,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
parentPassesCondition: passesCondition,
path,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,
@@ -373,15 +474,22 @@ export const addFieldStatePromise = async ({
const promises = field.tabs.map((tab) =>
iterateFields({
id,
anyParentLocalized: tab.localized || anyParentLocalized,
config,
data: tabHasName(tab) ? data?.[tab.name] : data,
fields: tab.fields,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
parentPassesCondition: passesCondition,
path: tabHasName(tab) ? `${path}${tab.name}.` : path,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,

View File

@@ -4,65 +4,123 @@ import type { User } from '../../../../../auth'
import type { SanitizedConfig } from '../../../../../config/types'
import type { Field as FieldSchema } from '../../../../../fields/config/types'
import type { Data, Fields } from '../types'
import type { AddFieldStatePromiseArgs } from './addFieldStatePromise'
import { fieldIsPresentationalOnly } from '../../../../../fields/config/types'
import { addFieldStatePromise } from './addFieldStatePromise'
type Args = {
config: SanitizedConfig
/**
* if any parent is localized, then the field is localized. @default false
*/
anyParentLocalized?: boolean
/**
* config is only needed for validation
*/
config?: SanitizedConfig
data: Data
fields: FieldSchema[]
filter?: (args: AddFieldStatePromiseArgs) => boolean
/**
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
*/
forceFullValue?: boolean
fullData: Data
id: number | string
id?: number | string
/**
* Whether the field schema should be included in the state. @default false
*/
includeSchema?: boolean
/**
* operation is only needed for checking field conditions
*/
locale: string
/**
* Whether to omit parent fields in the state. @default false
*/
omitParents?: boolean
/**
* operation is only needed for validation
*/
operation: 'create' | 'update'
parentPassesCondition: boolean
path: string
preferences: {
parentPassesCondition?: boolean
/**
* The initial path of the field. @default ''
*/
path?: string
preferences?: {
[key: string]: unknown
}
state: Fields
/**
* Whether to skip checking the field's condition. @default false
*/
skipConditionChecks?: boolean
/**
* Whether to skip validating the field. @default false
*/
skipValidation?: boolean
state?: Fields
t: TFunction
user: User
}
/**
* Flattens the fields schema and fields data
*/
export const iterateFields = async ({
id,
anyParentLocalized = false,
config,
data,
fields,
filter,
forceFullValue = false,
fullData,
includeSchema = false,
locale,
omitParents = false,
operation,
parentPassesCondition,
parentPassesCondition = true,
path = '',
preferences,
state,
skipConditionChecks = false,
skipValidation = false,
state = {},
t,
user,
}: Args): Promise<void> => {
const promises = []
fields.forEach((field) => {
const initialData = data
if (!fieldIsPresentationalOnly(field) && !field?.admin?.disabled) {
const passesCondition = Boolean(
(field?.admin?.condition
? Boolean(field.admin.condition(fullData || {}, initialData || {}, { user }))
: true) && parentPassesCondition,
)
let passesCondition = true
if (!skipConditionChecks) {
passesCondition = Boolean(
(field?.admin?.condition
? Boolean(field.admin.condition(fullData || {}, data || {}, { user }))
: true) && parentPassesCondition,
)
}
promises.push(
addFieldStatePromise({
id,
anyParentLocalized,
config,
data,
field,
filter,
forceFullValue,
fullData,
includeSchema,
locale,
omitParents,
operation,
passesCondition,
path,
preferences,
skipConditionChecks,
skipValidation,
state,
t,
user,

View File
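
With most arguments to `iterateFields` now optional and defaulted, a plugin could flatten only the fields it cares about. A sketch under the assumption that the caller already has the sanitized `fields`, the document data (`doc`), a `t` function, and a `user` in scope, inside an async function; the import path is inferred from the relative imports above and should be treated as an assumption:

// Path assumed from the relative imports in the diff above
import { iterateFields } from 'payload/dist/admin/components/forms/Form/buildStateFromSchema/iterateFields'

const state = {}

await iterateFields({
  data: doc, // the document being flattened
  fields: collectionConfig.fields,
  // Only keep localized, translatable fields in the resulting state
  filter: ({ field }) =>
    'localized' in field && Boolean(field.localized) && ['text', 'textarea'].includes(field.type),
  forceFullValue: true, // keep full array/block values instead of their lengths
  fullData: doc,
  includeSchema: true, // attach the field config to each FormField
  locale: 'en',
  omitParents: true, // drop the row `id` / `blockType` bookkeeping entries
  operation: 'update',
  skipConditionChecks: true,
  skipValidation: true,
  state,
  t,
  user,
})
// `state` now maps paths such as 'meta.title' to FormField objects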

@@ -2,11 +2,22 @@ import { unflatten as flatleyUnflatten } from 'flatley'
import type { Data, Fields } from './types'
const reduceFieldsToValues = (fields: Fields, unflatten?: boolean): Data => {
/**
* Reduce flattened form fields (Fields) to a map of field paths to their respective values, instead of the full FormField objects
*
* @param unflatten - if true, also unflattens the data so it matches the original data structure
* @param ignoreDisableFormData - if true, also includes fields that have `disableFormData` set to true, for example array or blocks fields.
*
*/
const reduceFieldsToValues = (
fields: Fields,
unflatten?: boolean,
ignoreDisableFormData?: boolean,
): Data => {
const data = {}
Object.keys(fields).forEach((key) => {
if (!fields[key].disableFormData) {
if (ignoreDisableFormData === true || !fields[key].disableFormData) {
data[key] = fields[key].value
}
})

View File
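
An illustration of the documented parameters, using the helper from the diff above with simplified FormField entries (only the properties relevant here):

const fields = {
  title: { initialValue: 'Hello', valid: true, value: 'Hello' },
  'meta.description': { initialValue: 'World', valid: true, value: 'World' },
  blocks: { disableFormData: true, initialValue: 2, valid: true, value: 2 },
}

reduceFieldsToValues(fields, false)
// { title: 'Hello', 'meta.description': 'World' }   (blocks skipped because of disableFormData)

reduceFieldsToValues(fields, true)
// { title: 'Hello', meta: { description: 'World' } }   (unflattened)

reduceFieldsToValues(fields, true, true)
// { title: 'Hello', meta: { description: 'World' }, blocks: 2 }   (disableFormData ignored)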

@@ -20,6 +20,7 @@ export type FormField = {
condition?: Condition
disableFormData?: boolean
errorMessage?: string
fieldSchema?: FieldConfig
initialValue: unknown
passesCondition?: boolean
rows?: Row[]

View File

@@ -33,7 +33,7 @@ const Group: React.FC<Props> = (props) => {
permissions,
} = props
const isWithinCollapsible = useCollapsible()
const { withinCollapsible } = useCollapsible()
const isWithinGroup = useGroup()
const isWithinRow = useRow()
const isWithinTab = useTabs()
@@ -43,7 +43,7 @@ const Group: React.FC<Props> = (props) => {
const groupHasErrors = submitted && errorCount > 0
const path = pathFromProps || name
const isTopLevel = !(isWithinCollapsible || isWithinGroup || isWithinRow)
const isTopLevel = !(withinCollapsible || isWithinGroup || isWithinRow)
return (
<div
@@ -51,7 +51,7 @@ const Group: React.FC<Props> = (props) => {
fieldBaseClass,
baseClass,
isTopLevel && `${baseClass}--top-level`,
isWithinCollapsible && `${baseClass}--within-collapsible`,
withinCollapsible && `${baseClass}--within-collapsible`,
isWithinGroup && `${baseClass}--within-group`,
isWithinRow && `${baseClass}--within-row`,
isWithinTab && `${baseClass}--within-tab`,

View File

@@ -151,7 +151,7 @@ const NumberField: React.FC<Props> = (props) => {
if (isOverHasMany) {
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
}
return t('general:noOptions')
return null
}}
numberOnly
onChange={handleHasManyChange}
@@ -170,7 +170,7 @@ const NumberField: React.FC<Props> = (props) => {
onChange={handleChange}
onWheel={(e) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
// @ts-expect-error
e.target.blur()
}}
placeholder={getTranslation(placeholder, i18n)}

View File

@@ -9,7 +9,7 @@ const reduceToIDs = (options) =>
return [...ids, ...reduceToIDs(option.options)]
}
return [...ids, option.value]
return [...ids, { id: option.value, relationTo: option.relationTo }]
}, [])
const sortOptions = (options: Option[]): Option[] =>
@@ -63,10 +63,12 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
const optionsToAddTo = newOptions.find(
(optionGroup) => optionGroup.label === collection.labels.plural,
)
const newSubOptions = docs.reduce((docSubOptions, doc) => {
if (loadedIDs.indexOf(doc.id) === -1) {
loadedIDs.push(doc.id)
if (
loadedIDs.filter((item) => item.id === doc.id && item.relationTo === relation).length ===
0
) {
loadedIDs.push({ id: doc.id, relationTo: relation })
const docTitle = formatUseAsTitle({
collection,
@@ -89,7 +91,10 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
}, [])
ids.forEach((id) => {
if (!loadedIDs.includes(id)) {
if (
loadedIDs.filter((item) => item.id === id && item.relationTo === relation).length === 0
) {
loadedIDs.push({ id, relationTo: relation })
newSubOptions.push({
label: `${i18n.t('general:untitled')} - ID: ${id}`,
relationTo: relation,

View File

@@ -29,9 +29,14 @@ type RichTextAdapterBase<
}) => Promise<void> | null
outputSchema?: ({
field,
interfaceNameDefinitions,
isRequired,
}: {
field: RichTextField<Value, AdapterProps, ExtraFieldProperties>
/**
* Allows you to define new top-level interfaces that can be re-used in the output schema.
*/
interfaceNameDefinitions: Map<string, JSONSchema4>
isRequired: boolean
}) => JSONSchema4
populationPromise?: (data: {

View File
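
A sketch of an adapter `outputSchema` using the new `interfaceNameDefinitions` map to register a reusable top-level definition; the node shape is purely illustrative, not a real adapter's schema:

import type { JSONSchema4 } from 'json-schema'

const outputSchema = ({
  interfaceNameDefinitions,
  isRequired,
}: {
  interfaceNameDefinitions: Map<string, JSONSchema4>
  isRequired: boolean
}): JSONSchema4 => {
  // Registered once, re-usable from any rich text field via $ref
  interfaceNameDefinitions.set('RichTextNode', {
    additionalProperties: true,
    type: 'object',
  })

  return {
    items: { $ref: '#/definitions/RichTextNode' },
    type: isRequired ? 'array' : ['array', 'null'],
  }
}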

@@ -83,7 +83,7 @@ const TabsField: React.FC<Props> = (props) => {
const { preferencesKey } = useDocumentInfo()
const { i18n } = useTranslation()
const isWithinCollapsible = useCollapsible()
const { withinCollapsible } = useCollapsible()
const [activeTabIndex, setActiveTabIndex] = useState<number>(0)
const tabsPrefKey = `tabs-${indexPath}`
@@ -138,7 +138,7 @@ const TabsField: React.FC<Props> = (props) => {
fieldBaseClass,
className,
baseClass,
isWithinCollapsible && `${baseClass}--within-collapsible`,
withinCollapsible && `${baseClass}--within-collapsible`,
]
.filter(Boolean)
.join(' ')}

View File

@@ -110,7 +110,7 @@ const TextInput: React.FC<TextInputProps> = (props) => {
if (isOverHasMany) {
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
}
return t('general:noOptions')
return null
}}
onChange={onChange}
options={[]}

View File

@@ -137,6 +137,7 @@ const UploadInput: React.FC<UploadInputProps> = (props) => {
fieldBaseClass,
baseClass,
className,
`field-${path.replace(/\./g, '__')}`,
showError && 'error',
readOnly && 'read-only',
]

View File

@@ -40,7 +40,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
const [publishedDoc, setPublishedDoc] = useState<TypeWithID & TypeWithTimestamps>(null)
const [versions, setVersions] = useState<PaginatedDocs<Version>>(null)
const [unpublishedVersions, setUnpublishedVersions] = useState<PaginatedDocs<Version>>(null)
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(null)
const baseURL = `${serverURL}${api}`
let slug: string
@@ -62,6 +61,10 @@ export const DocumentInfoProvider: React.FC<Props> = ({
}
}
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(
permissions[pluralType][slug],
)
const getVersions = useCallback(async () => {
let versionFetchURL
let publishedFetchURL
@@ -215,14 +218,14 @@ export const DocumentInfoProvider: React.FC<Props> = ({
'Accept-Language': i18n.language,
},
})
const json = await res.json()
setDocPermissions(json)
} else {
// fallback to permissions from the entity type
// (i.e. create has no id)
setDocPermissions(permissions[pluralType][slug])
try {
const json = await res.json()
setDocPermissions(json)
} catch (e) {
console.error('Unable to fetch document permissions', e)
}
}
}, [serverURL, api, pluralType, slug, id, permissions, i18n.language, code])
}, [serverURL, api, pluralType, slug, id, i18n.language, code])
const getDocPreferences = useCallback(async () => {
return getPreference<DocumentPreferences>(preferencesKey)
@@ -262,6 +265,7 @@ export const DocumentInfoProvider: React.FC<Props> = ({
const value: ContextType = {
id,
slug,
collection,
docPermissions,
getDocPermissions,
@@ -271,7 +275,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
preferencesKey,
publishedDoc,
setDocFieldPreferences,
slug,
unpublishedVersions,
versions,
}

View File

@@ -12,7 +12,7 @@ import type { TypeWithVersion } from '../../../../versions/types'
export type Version = TypeWithVersion<any>
export type DocumentPermissions = CollectionPermission | GlobalPermission | null
export type DocumentPermissions = CollectionPermission | GlobalPermission
export type ContextType = {
collection?: SanitizedCollectionConfig

View File

@@ -17,9 +17,9 @@ export type globalViewType =
| 'Version'
| 'Versions'
export const defaultGlobalViews: {
export const defaultGlobalViews = (): {
[key in globalViewType]: React.ComponentType<any>
} = {
} => ({
API,
Default: DefaultGlobalEdit,
LivePreview: LivePreviewView,
@@ -27,7 +27,7 @@ export const defaultGlobalViews: {
Relationships: null,
Version: VersionView,
Versions: VersionsView,
}
})
export const CustomGlobalComponent = (
args: GlobalEditViewProps & {
@@ -43,18 +43,14 @@ export const CustomGlobalComponent = (
// For example, the Edit view:
// 1. Edit?.Default
// 2. Edit?.Default?.Component
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
const Component =
typeof Edit === 'object' && typeof Edit[view] === 'function'
? Edit[view]
: typeof Edit === 'object' &&
typeof Edit?.[view] === 'object' &&
// @ts-ignore
typeof Edit[view].Component === 'function'
? // @ts-ignore
Edit[view].Component
: defaultGlobalViews[view]
? Edit[view].Component
: defaultGlobalViews()[view]
if (Component) {
return <Component {...args} />

View File

@@ -17,9 +17,9 @@ export type collectionViewType =
| 'Version'
| 'Versions'
export const defaultCollectionViews: {
export const defaultCollectionViews = (): {
[key in collectionViewType]: React.ComponentType<any>
} = {
} => ({
API,
Default: DefaultCollectionEdit,
LivePreview: LivePreviewView,
@@ -27,7 +27,7 @@ export const defaultCollectionViews: {
Relationships: null,
Version: VersionView,
Versions: VersionsView,
}
})
export const CustomCollectionComponent = (
args: CollectionEditViewProps & {
@@ -43,18 +43,15 @@ export const CustomCollectionComponent = (
// For example, the Edit view:
// 1. Edit?.Default
// 2. Edit?.Default?.Component
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
const Component =
typeof Edit === 'object' && typeof Edit[view] === 'function'
? Edit[view]
: typeof Edit === 'object' &&
typeof Edit?.[view] === 'object' &&
// @ts-ignore
typeof Edit[view].Component === 'function'
? // @ts-ignore
Edit[view].Component
: defaultCollectionViews[view]
? Edit[view].Component
: defaultCollectionViews()[view]
if (Component) {
return <Component {...args} />

View File

@@ -74,21 +74,22 @@ const DefaultCell: React.FC<Props> = (props) => {
if (collection.upload && fieldAffectsData(field) && field.name === 'filename') {
CellComponent = cellComponents.File
} else {
return (
<WrapElement {...wrapElementProps}>
{(cellData === '' || typeof cellData === 'undefined') &&
'label' in field &&
t('noLabel', {
if (!cellData && 'label' in field) {
return (
<WrapElement {...wrapElementProps}>
{t('noLabel', {
label: getTranslation(
typeof field.label === 'function' ? 'data' : field.label || 'data',
i18n,
),
})}
{typeof cellData === 'string' && cellData}
{typeof cellData === 'number' && cellData}
{typeof cellData === 'object' && JSON.stringify(cellData)}
</WrapElement>
)
</WrapElement>
)
} else if (typeof cellData === 'string' || typeof cellData === 'number') {
return <WrapElement {...wrapElementProps}>{cellData}</WrapElement>
} else if (typeof cellData === 'object') {
return <WrapElement {...wrapElementProps}>{JSON.stringify(cellData)}</WrapElement>
}
}
}

View File

@@ -7,13 +7,14 @@ import { extractTranslations } from '../../translations/extractTranslations'
const labels = extractTranslations(['authentication:enableAPIKey', 'authentication:apiKey'])
const encryptKey: FieldHook = ({ req, value }) =>
value ? req.payload.encrypt(value as string) : undefined
value ? req.payload.encrypt(value as string) : null
const decryptKey: FieldHook = ({ req, value }) =>
value ? req.payload.decrypt(value as string) : undefined
export default [
{
name: 'enableAPIKey',
type: 'checkbox',
admin: {
components: {
Field: () => null,
@@ -21,10 +22,10 @@ export default [
},
defaultValue: false,
label: labels['authentication:enableAPIKey'],
type: 'checkbox',
},
{
name: 'apiKey',
type: 'text',
admin: {
components: {
Field: () => null,
@@ -35,10 +36,10 @@ export default [
beforeChange: [encryptKey],
},
label: labels['authentication:apiKey'],
type: 'text',
},
{
name: 'apiKeyIndex',
type: 'text',
admin: {
disabled: true,
},
@@ -59,6 +60,5 @@ export default [
},
],
},
type: 'text',
},
] as Field[]

View File
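
The `encryptKey` change above returns `null` instead of `undefined` when no value is present; the apparent rationale is that an `undefined` hook result leaves the previously stored encrypted key in place, while `null` is persisted and therefore clears it. A minimal hook sketch of that distinction (hook name is illustrative):

import type { FieldHook } from 'payload/types'

// Clears the stored value when the incoming value is empty,
// instead of silently keeping the old one.
const clearWhenEmpty: FieldHook = ({ value }) => (value ? value : null)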

@@ -29,37 +29,38 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {
let args = incomingArgs
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
await priorHook
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'forgotPassword',
})) || args
}, Promise.resolve())
const {
collection: { config: collectionConfig },
data,
disableEmail,
expiration,
req: {
payload: { config, emailOptions, sendEmail: email },
payload,
t,
},
req,
} = args
try {
const shouldCommit = await initTransaction(req)
const shouldCommit = await initTransaction(args.req)
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
await priorHook
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'forgotPassword',
req: args.req,
})) || args
}, Promise.resolve())
const {
collection: { config: collectionConfig },
data,
disableEmail,
expiration,
req: {
payload: { config, emailOptions, sendEmail: email },
payload,
t,
},
req,
} = args
// /////////////////////////////////////
// Forget password
@@ -159,7 +160,7 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {
return token
} catch (error: unknown) {
await killTransaction(req)
await killTransaction(args.req)
throw error
}
}

View File

@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import type { Result } from '../forgotPassword'
import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import forgotPassword from '../forgotPassword'
export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -24,15 +22,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
payload: Payload,
options: Options<T>,
): Promise<Result> {
const {
collection: collectionSlug,
context,
data,
disableEmail,
expiration,
req = {} as PayloadRequest,
} = options
setRequestContext(req, context)
const { collection: collectionSlug, data, disableEmail, expiration } = options
const collection = payload.collections[collectionSlug]
@@ -44,12 +34,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
)
}
req.payloadAPI = req.payloadAPI || 'local'
req.payload = payload
req.i18n = i18nInit(payload.config.i18n)
if (!req.t) req.t = req.i18n.t
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
const req = createLocalReq(options, payload)
return forgotPassword({
collection,

View File

@@ -5,10 +5,8 @@ import type { GeneratedTypes } from '../../../index'
import type { Payload } from '../../../payload'
import type { Result } from '../login'
import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import login from '../login'
export type Options<TSlug extends keyof GeneratedTypes['collections']> = {
@@ -33,25 +31,14 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
): Promise<Result & { user: GeneratedTypes['collections'][TSlug] }> {
const {
collection: collectionSlug,
context,
data,
depth,
fallbackLocale: fallbackLocaleArg = options?.req?.fallbackLocale,
locale: localeArg = null,
overrideAccess = true,
req = {} as PayloadRequest,
res,
showHiddenFields,
} = options
setRequestContext(req, context)
const collection = payload.collections[collectionSlug]
const localizationConfig = payload?.config?.localization
const defaultLocale = localizationConfig ? localizationConfig.defaultLocale : null
const locale = localeArg || req?.locale || defaultLocale
const fallbackLocale = localizationConfig
? localizationConfig.locales.find(({ code }) => locale === code)?.fallbackLocale
: null
if (!collection) {
throw new APIError(
@@ -59,12 +46,7 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
)
}
req.payloadAPI = req.payloadAPI || 'local'
req.payload = payload
req.i18n = i18nInit(payload.config.i18n)
if (!req.t) req.t = req.i18n.t
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
const req = createLocalReq(options, payload)
const args = {
collection,
@@ -76,12 +58,6 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
showHiddenFields,
}
if (locale) args.req.locale = locale
if (fallbackLocale) {
args.req.fallbackLocale =
typeof fallbackLocaleArg !== 'undefined' ? fallbackLocaleArg : fallbackLocale || defaultLocale
}
return login<TSlug>(args)
}

View File

@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import type { Result } from '../resetPassword'
import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import resetPassword from '../resetPassword'
export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -24,15 +22,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>
payload: Payload,
options: Options<T>,
): Promise<Result> {
const {
collection: collectionSlug,
context,
data,
overrideAccess,
req = {} as PayloadRequest,
} = options
setRequestContext(req, context)
const { collection: collectionSlug, data, overrideAccess } = options
const collection = payload.collections[collectionSlug]
@@ -44,12 +34,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>
)
}
req.payload = payload
req.payloadAPI = req.payloadAPI || 'local'
req.i18n = i18nInit(payload.config.i18n)
if (!req.t) req.t = req.i18n.t
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
const req = createLocalReq(options, payload)
return resetPassword({
collection,

View File

@@ -2,10 +2,8 @@ import type { GeneratedTypes, RequestContext } from '../../../'
import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import unlock from '../unlock'
export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -22,14 +20,7 @@ async function localUnlock<T extends keyof GeneratedTypes['collections']>(
payload: Payload,
options: Options<T>,
): Promise<boolean> {
const {
collection: collectionSlug,
context,
data,
overrideAccess = true,
req = {} as PayloadRequest,
} = options
setRequestContext(req, context)
const { collection: collectionSlug, data, overrideAccess = true } = options
const collection = payload.collections[collectionSlug]
@@ -39,12 +30,7 @@ async function localUnlock<T extends keyof GeneratedTypes['collections']>(
)
}
req.payload = payload
req.payloadAPI = req.payloadAPI || 'local'
req.i18n = i18nInit(payload.config.i18n)
if (!req.t) req.t = req.i18n.t
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
const req = createLocalReq(options, payload)
return unlock({
collection,

View File

@@ -3,8 +3,7 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import verifyEmail from '../verifyEmail'
export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -18,8 +17,7 @@ async function localVerifyEmail<T extends keyof GeneratedTypes['collections']>(
payload: Payload,
options: Options<T>,
): Promise<boolean> {
const { collection: collectionSlug, context, req = {} as PayloadRequest, token } = options
setRequestContext(req, context)
const { collection: collectionSlug, token } = options
const collection = payload.collections[collectionSlug]
@@ -29,9 +27,7 @@ async function localVerifyEmail<T extends keyof GeneratedTypes['collections']>(
)
}
req.payload = payload
req.payloadAPI = req.payloadAPI || 'local'
req.i18n = i18nInit(payload.config.i18n)
const req = createLocalReq(options, payload)
return verifyEmail({
collection,

View File
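
The local operations above (forgotPassword, login, resetPassword, unlock, and verifyEmail) all replace their hand-rolled request setup with `createLocalReq`. Judging from the removed lines, a simplified sketch of what such a utility consolidates could look like the following; this is not the actual Payload implementation, and the relative import paths assume it lives in `src/utilities/`:

import type { RequestContext } from '../' // assumed path to the package index
import type { PayloadRequest } from '../express/types'
import type { Payload } from '../payload'

import { getDataLoader } from '../collections/dataloader'
import { setRequestContext } from '../express/setRequestContext'
import { i18nInit } from '../translations/init'

type LocalReqOptions = { context?: RequestContext; req?: PayloadRequest }

export const createLocalReq = (options: LocalReqOptions, payload: Payload): PayloadRequest => {
  const req = (options.req || {}) as PayloadRequest

  // The same defaults the individual local operations used to set by hand
  req.payload = payload
  req.payloadAPI = req.payloadAPI || 'local'
  req.i18n = req.i18n || i18nInit(payload.config.i18n)
  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)

  setRequestContext(req, options.context)
  return req
}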

@@ -18,8 +18,8 @@ import sanitizeInternalFields from '../../utilities/sanitizeInternalFields'
import isLocked from '../isLocked'
import { authenticateLocalStrategy } from '../strategies/local/authenticate'
import { incrementLoginAttempts } from '../strategies/local/incrementLoginAttempts'
import { resetLoginAttempts } from '../strategies/local/resetLoginAttempts'
import { getFieldsToSign } from './getFieldsToSign'
import unlock from './unlock'
export type Result = {
exp?: number
@@ -45,37 +45,40 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
): Promise<Result & { user: GeneratedTypes['collections'][TSlug] }> {
let args = incomingArgs
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
await priorHook
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'login',
})) || args
}, Promise.resolve())
const {
collection: { config: collectionConfig },
data,
depth,
overrideAccess,
req,
req: {
payload,
payload: { config, secret },
},
showHiddenFields,
} = args
try {
const shouldCommit = await initTransaction(req)
const shouldCommit = await initTransaction(args.req)
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
await priorHook
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'login',
req: args.req,
})) || args
}, Promise.resolve())
const {
collection: { config: collectionConfig },
data,
depth,
overrideAccess,
req,
req: {
fallbackLocale,
locale,
payload,
payload: { config, secret },
},
showHiddenFields,
} = args
// /////////////////////////////////////
// Login
@@ -115,16 +118,16 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
})
}
if (shouldCommit) await commitTransaction(req)
throw new AuthenticationError(req.t)
}
if (maxLoginAttemptsEnabled) {
await unlock({
collection: {
config: collectionConfig,
},
data,
overrideAccess: true,
await resetLoginAttempts({
collection: collectionConfig,
doc: user,
payload: req.payload,
req,
})
}
@@ -195,7 +198,9 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
context: req.context,
depth,
doc: user,
fallbackLocale,
global: null,
locale,
overrideAccess,
req,
showHiddenFields,
@@ -262,7 +267,7 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
return result
} catch (error: unknown) {
await killTransaction(req)
await killTransaction(args.req)
throw error
}
}

View File
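
`forgotPassword` and `login` above, and `refresh` below, now all share the same shape: the transaction starts first, the `beforeOperation` hooks run inside the try (and receive `req`), and any thrown error rolls everything back. The skeleton of that pattern, with the operation body elided and the identifiers taken from the diffs above used as placeholders:

async function someAuthOperation(incomingArgs: Arguments): Promise<Result> {
  let args = incomingArgs

  try {
    const shouldCommit = await initTransaction(args.req)

    // beforeOperation - Collection (now inside the transaction, with `req` passed through)
    await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
      await priorHook
      args =
        (await hook({
          args,
          collection: args.collection?.config,
          context: args.req.context,
          operation: 'someOperation',
          req: args.req,
        })) || args
    }, Promise.resolve())

    // ...operation body producing `result`...

    if (shouldCommit) await commitTransaction(args.req)
    return result
  } catch (error: unknown) {
    // A failure anywhere, including in a hook, rolls the transaction back
    await killTransaction(args.req)
    throw error
  }
}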

@@ -9,7 +9,10 @@ import type { Document } from '../../types'
import { buildAfterOperation } from '../../collections/operations/utils'
import { Forbidden } from '../../errors'
import { commitTransaction } from '../../utilities/commitTransaction'
import getCookieExpiration from '../../utilities/getCookieExpiration'
import { initTransaction } from '../../utilities/initTransaction'
import { killTransaction } from '../../utilities/killTransaction'
import { getFieldsToSign } from './getFieldsToSign'
export type Result = {
@@ -28,120 +31,130 @@ export type Arguments = {
async function refresh(incomingArgs: Arguments): Promise<Result> {
let args = incomingArgs
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
try {
const shouldCommit = await initTransaction(args.req)
await args.collection.config.hooks.beforeOperation.reduce(
async (priorHook: BeforeOperationHook | Promise<void>, hook: BeforeOperationHook) => {
await priorHook
// /////////////////////////////////////
// beforeOperation - Collection
// /////////////////////////////////////
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'refresh',
})) || args
},
Promise.resolve(),
)
await args.collection.config.hooks.beforeOperation.reduce(
async (priorHook: BeforeOperationHook | Promise<void>, hook: BeforeOperationHook) => {
await priorHook
// /////////////////////////////////////
// Refresh
// /////////////////////////////////////
args =
(await hook({
args,
collection: args.collection?.config,
context: args.req.context,
operation: 'refresh',
req: args.req,
})) || args
},
Promise.resolve(),
)
const {
collection: { config: collectionConfig },
req: {
payload: { config, secret },
},
} = args
// /////////////////////////////////////
// Refresh
// /////////////////////////////////////
if (typeof args.token !== 'string' || !args.req.user) throw new Forbidden(args.req.t)
const {
collection: { config: collectionConfig },
req: {
payload: { config, secret },
},
} = args
const parsedURL = url.parse(args.req.url)
const isGraphQL = parsedURL.pathname === config.routes.graphQL
if (typeof args.token !== 'string' || !args.req.user) throw new Forbidden(args.req.t)
const user = await args.req.payload.findByID({
id: args.req.user.id,
collection: args.req.user.collection,
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
req: args.req,
})
const parsedURL = url.parse(args.req.url)
const isGraphQL = parsedURL.pathname === config.routes.graphQL
const fieldsToSign = getFieldsToSign({
collectionConfig,
email: user?.email as string,
user: args?.req?.user,
})
const user = await args.req.payload.findByID({
id: args.req.user.id,
collection: args.req.user.collection,
depth: isGraphQL ? 0 : args.collection.config.auth.depth,
req: args.req,
})
const refreshedToken = jwt.sign(fieldsToSign, secret, {
expiresIn: collectionConfig.auth.tokenExpiration,
})
const fieldsToSign = getFieldsToSign({
collectionConfig,
email: user?.email as string,
user: args?.req?.user,
})
const exp = (jwt.decode(refreshedToken) as Record<string, unknown>).exp as number
const refreshedToken = jwt.sign(fieldsToSign, secret, {
expiresIn: collectionConfig.auth.tokenExpiration,
})
if (args.res) {
const cookieOptions = {
domain: undefined,
expires: getCookieExpiration(collectionConfig.auth.tokenExpiration),
httpOnly: true,
path: '/',
sameSite: collectionConfig.auth.cookies.sameSite,
secure: collectionConfig.auth.cookies.secure,
const exp = (jwt.decode(refreshedToken) as Record<string, unknown>).exp as number
if (args.res) {
const cookieOptions = {
domain: undefined,
expires: getCookieExpiration(collectionConfig.auth.tokenExpiration),
httpOnly: true,
path: '/',
sameSite: collectionConfig.auth.cookies.sameSite,
secure: collectionConfig.auth.cookies.secure,
}
if (collectionConfig.auth.cookies.domain)
cookieOptions.domain = collectionConfig.auth.cookies.domain
args.res.cookie(`${config.cookiePrefix}-token`, refreshedToken, cookieOptions)
}
if (collectionConfig.auth.cookies.domain)
cookieOptions.domain = collectionConfig.auth.cookies.domain
let result: Result = {
exp,
refreshedToken,
user,
}
args.res.cookie(`${config.cookiePrefix}-token`, refreshedToken, cookieOptions)
// /////////////////////////////////////
// After Refresh - Collection
// /////////////////////////////////////
await collectionConfig.hooks.afterRefresh.reduce(async (priorHook, hook) => {
await priorHook
result =
(await hook({
collection: args.collection?.config,
context: args.req.context,
exp,
req: args.req,
res: args.res,
token: refreshedToken,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// afterOperation - Collection
// /////////////////////////////////////
result = await buildAfterOperation({
args,
collection: args.collection?.config,
operation: 'refresh',
result,
})
// /////////////////////////////////////
// Return results
// /////////////////////////////////////
if (collectionConfig.auth.removeTokenFromResponses) {
delete result.refreshedToken
}
if (shouldCommit) await commitTransaction(args.req)
return result
} catch (error: unknown) {
await killTransaction(args.req)
throw error
}
let result: Result = {
exp,
refreshedToken,
user,
}
// /////////////////////////////////////
// After Refresh - Collection
// /////////////////////////////////////
await collectionConfig.hooks.afterRefresh.reduce(async (priorHook, hook) => {
await priorHook
result =
(await hook({
collection: args.collection?.config,
context: args.req.context,
exp,
req: args.req,
res: args.res,
token: refreshedToken,
})) || result
}, Promise.resolve())
// /////////////////////////////////////
// afterOperation - Collection
// /////////////////////////////////////
result = await buildAfterOperation({
args,
collection: args.collection?.config,
operation: 'refresh',
result,
})
// /////////////////////////////////////
// Return results
// /////////////////////////////////////
if (collectionConfig.auth.removeTokenFromResponses) {
delete result.refreshedToken
}
return result
}
export default refresh

Some files were not shown because too many files have changed in this diff.