Compare commits

112 Commits

db-postgre...db-postgre

Commit SHAs in this comparison:

5d1cad3adb, e31f72da8e, 7aa058d604, 64e80d242e, e8f2ca484e, ceca5c4e97, ee13736288, 815bdfac0b, 7a7f0ed7e8, ad42d541b3,
32ed95e1ee, 70e57fef18, 0a07f607b9, 3918fc7c21, 13f71ac475, 07720e777a, efff47e400, 453ac218ea, d4b09bd9cd, dd67e03fc1,
548de80bee, 2c05fbbb5e, 9b54659818, e9f550406e, 98b87e2278, 5f3d0169be, 35c2a085ef, 1ac943ed5e, 25cee8bb10, 419aef452d,
ea52489126, e80c70acae, 70b0064d0b, 9636bf6efd, 8f4d0da4e0, f0f1dbdcb0, a895aee8b1, aa1dac08c1, b8cd1c6ba4, 6344464bc6,
5d4022f144, bf942fdfa6, d6c25783cf, 82e9d31127, 399e606b34, 0d18822062, 00fc0343da, 6323965c65, 6d6823c3e5, ca70298436,
4f565759f6, df39602758, 6ea6172afa, 486774796d, 1cd1c38764, f6d7da7510, cdc4cb971b, e0191b54e1, 2315781f18, a0a58e7fd2,
e1813fb884, da184d40ec, ca8675f89d, e8c6c9338d, 558534aff8, 29c901ba9b, f3876c2a39, c3a3942969, 23b135b963, e3c8105cc2,
2c71aaef75, 922fb9b7fa, 0740d5095e, b392d656fe, c0eef90cdc, db22cbdf21, 1e8a6b7899, 5d934ba02d, f651665f2f, 5d3659d48a,
47106d5a1a, afa2b942e0, 20ddd0de5b, 64f705c3c9, b30ea8aa6b, 471d2113a7, 8725d41164, 0bd81aa25a, 8c09ca9be5, 90d7ee3e65,
58bbd8c00f, 003ad065c3, 70715926a8, b3a6bfacf2, e1d9accb27, f2f55a84cc, eba53ba60a, f73d503fec, 6930c4e9f2, 3eb681e847,
cb4638cfa1, b40e9f85a2, e5a7907a72, 3f25d1ca84, d5720bea7b, 8ce15c8b07, 9f5efef78f, dfba5222f3, b99d24fcfa, 836ed77568,
1c5d5b07c8, da5f1f2240
.github/workflows/main.yml (vendored, 86 changed lines)

@@ -2,9 +2,9 @@ name: build

on:
  pull_request:
    types: [opened, reopened, synchronize]
    types: [ opened, reopened, synchronize ]
  push:
    branches: ['main']
    branches: [ 'main' ]

jobs:
  changes:

@@ -15,25 +15,25 @@ jobs:

      needs_build: ${{ steps.filter.outputs.needs_build }}
      templates: ${{ steps.filter.outputs.templates }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 25
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            needs_build:
              - '.github/workflows/**'
              - 'packages/**'
              - 'test/**'
              - 'pnpm-lock.yaml'
              - 'package.json'
            templates:
              - 'templates/**'
      - name: Log all filter results
        run: |
          echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
          echo "templates: ${{ steps.filter.outputs.templates }}"
      - uses: actions/checkout@v4
        with:
          fetch-depth: 25
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            needs_build:
              - '.github/workflows/**'
              - 'packages/**'
              - 'test/**'
              - 'pnpm-lock.yaml'
              - 'package.json'
            templates:
              - 'templates/**'
      - name: Log all filter results
        run: |
          echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
          echo "templates: ${{ steps.filter.outputs.templates }}"

  core-build:
    needs: changes

@@ -85,11 +85,15 @@ jobs:

    strategy:
      fail-fast: false
      matrix:
        database: [mongoose, postgres]
        database: [mongoose, postgres, postgres-custom-schema, postgres-uuid, supabase]
    env:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: payloadtests
      AWS_ENDPOINT_URL: http://127.0.0.1:4566
      AWS_ACCESS_KEY_ID: localstack
      AWS_SECRET_ACCESS_KEY: localstack
      AWS_REGION: us-east-1

    steps:
      - name: Use Node.js 18

@@ -109,6 +113,9 @@ jobs:

          path: ./*
          key: ${{ github.sha }}-${{ github.run_number }}

      - name: Start LocalStack
        run: pnpm docker:start

      - name: Start PostgreSQL
        uses: CasperWA/postgresql-action@v1.2
        with:

@@ -116,15 +123,40 @@ jobs:

          postgresql db: ${{ env.POSTGRES_DB }}
          postgresql user: ${{ env.POSTGRES_USER }}
          postgresql password: ${{ env.POSTGRES_PASSWORD }}
        if: matrix.database == 'postgres'
        if: startsWith(matrix.database, 'postgres')

      - name: Install Supabase CLI
        uses: supabase/setup-cli@v1
        with:
          version: latest
        if: matrix.database == 'supabase'

      - name: Initialize Supabase
        run: |
          supabase init
          supabase start
        if: matrix.database == 'supabase'

      - name: Wait for PostgreSQL
        run: sleep 30
        if: startsWith(matrix.database, 'postgres')

      - run: sleep 30
      - name: Configure PostgreSQL
        run: |
          psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE ROLE runner SUPERUSER LOGIN;"
          psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "SELECT version();"
          echo "POSTGRES_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" >> $GITHUB_ENV
        if: matrix.database == 'postgres'
        if: startsWith(matrix.database, 'postgres')

      - name: Configure PostgreSQL with custom schema
        run: |
          psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE SCHEMA custom;"
        if: matrix.database == 'postgres-custom-schema'

      - name: Configure Supabase
        run: |
          echo "POSTGRES_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres" >> $GITHUB_ENV
        if: matrix.database == 'supabase'

      - name: Component Tests
        run: pnpm test:components

@@ -142,7 +174,7 @@ jobs:

    strategy:
      fail-fast: false
      matrix:
        part: [1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8]
        part: [ 1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8 ]

    steps:
      - name: Use Node.js 18

@@ -290,7 +322,7 @@ jobs:

    strategy:
      fail-fast: false
      matrix:
        template: [blank, website, ecommerce]
        template: [ blank, website, ecommerce ]

    steps:
      - uses: actions/checkout@v4
.gitignore (vendored, 2 changed lines)

@@ -6,7 +6,9 @@ dist

test-results
.devcontainer
.localstack
/migrations
.localstack

# Created by https://www.toptal.com/developers/gitignore/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos,windows,webstorm,sublimetext,visualstudiocode

.idea/runConfigurations/Run_Dev_Fields.xml (generated, 2 changed lines)

@@ -1,5 +1,5 @@

<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
  <configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
    <method v="2" />
  </configuration>
</component>

.idea/runConfigurations/Run_Dev__community.xml (generated, 2 changed lines)

@@ -1,5 +1,5 @@

<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
  <configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
    <method v="2" />
  </configuration>
</component>
CHANGELOG.md (114 changed lines)

@@ -1,11 +1,120 @@

## [2.11.2](https://github.com/payloadcms/payload/compare/v2.11.1...v2.11.2) (2024-02-23)


### Features

* **db-postgres:** configurable custom schema to use ([#5047](https://github.com/payloadcms/payload/issues/5047)) ([e8f2ca4](https://github.com/payloadcms/payload/commit/e8f2ca484ee56cd7767d5111e46ebd24752ff8de))


### Bug Fixes

* Add Context Provider in EditMany Component ([#5005](https://github.com/payloadcms/payload/issues/5005)) ([70e57fe](https://github.com/payloadcms/payload/commit/70e57fef184f7fcf56344ea755465f246f2253a5))
* **db-mongodb:** unique sparse for not required fields ([#5114](https://github.com/payloadcms/payload/issues/5114)) ([815bdfa](https://github.com/payloadcms/payload/commit/815bdfac0b0afbff2a20e54d5aee64b90f6b3a77))
* **db-postgres:** set _parentID for array nested localized fields ([#5117](https://github.com/payloadcms/payload/issues/5117)) ([ceca5c4](https://github.com/payloadcms/payload/commit/ceca5c4e97f53f1346797a31b6abfc0375e98215))
* disabling API Key does not remove the key ([#5145](https://github.com/payloadcms/payload/issues/5145)) ([7a7f0ed](https://github.com/payloadcms/payload/commit/7a7f0ed7e8132253be607c111c160163b84bd770))
* handle thrown errors in config-level afterError hook ([#5147](https://github.com/payloadcms/payload/issues/5147)) ([32ed95e](https://github.com/payloadcms/payload/commit/32ed95e1ee87409db234f1b7bd6d2e462fd9ed5d))
* only replace the drawer content with full edit component if it exists ([#5144](https://github.com/payloadcms/payload/issues/5144)) ([0a07f60](https://github.com/payloadcms/payload/commit/0a07f607b9fb1217ad956cd05b2a84a4042a19ca))
* transaction error from access endpoint ([#5156](https://github.com/payloadcms/payload/issues/5156)) ([ad42d54](https://github.com/payloadcms/payload/commit/ad42d541b342ed56463b81cee6d6307df6f06d7f))

## [2.11.1](https://github.com/payloadcms/payload/compare/v2.11.0...v2.11.1) (2024-02-16)


### Features

* **db-postgres:** adds idType to use uuid or serial id columns ([#3864](https://github.com/payloadcms/payload/issues/3864)) ([d6c2578](https://github.com/payloadcms/payload/commit/d6c25783cfa97983bf9db27ceb5ccd39a62c62f1))
* **db-postgres:** reconnect after disconnection from database ([#5086](https://github.com/payloadcms/payload/issues/5086)) ([bf942fd](https://github.com/payloadcms/payload/commit/bf942fdfa6ea9c26cf05295cc9db646bf31fa622))
* **plugin-search:** add req to beforeSync args for transactions ([#5068](https://github.com/payloadcms/payload/issues/5068)) ([98b87e2](https://github.com/payloadcms/payload/commit/98b87e22782c0a788f79326f22be05a6b176ad74))
* **richtext-lexical:** add justify aligment to AlignFeature ([#4035](https://github.com/payloadcms/payload/issues/4035)) ([#4868](https://github.com/payloadcms/payload/issues/4868)) ([6d6823c](https://github.com/payloadcms/payload/commit/6d6823c3e5609a58eeeeb8d043945a762f9463df))
* **richtext-lexical:** AddBlock handle for all nodes, even if they aren't empty paragraphs ([#5063](https://github.com/payloadcms/payload/issues/5063)) ([00fc034](https://github.com/payloadcms/payload/commit/00fc0343dabf184d5bab418d47c403b3ad11698f))
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground ([#5066](https://github.com/payloadcms/payload/issues/5066)) ([0d18822](https://github.com/payloadcms/payload/commit/0d18822062275c1826c8e2c3da2571a2b3483310))


### Bug Fixes

* **db-mongodb:** find versions pagination ([#5091](https://github.com/payloadcms/payload/issues/5091)) ([5d4022f](https://github.com/payloadcms/payload/commit/5d4022f1445e2809c01cb1dd599280f0a56cdc6e))
* **db-postgres:** query using blockType ([#5044](https://github.com/payloadcms/payload/issues/5044)) ([35c2a08](https://github.com/payloadcms/payload/commit/35c2a085efa6d5ad59779960874bc9728a17e3a0))
* filterOptions errors cause transaction to abort ([#5079](https://github.com/payloadcms/payload/issues/5079)) ([5f3d016](https://github.com/payloadcms/payload/commit/5f3d0169bee21e1c0963dbd7ede9fe5f1c46a5a5))
* **plugin-form-builder:** hooks do not respect transactions ([#5069](https://github.com/payloadcms/payload/issues/5069)) ([82e9d31](https://github.com/payloadcms/payload/commit/82e9d31127c8df83c5bed92a5ffdab76d331900f))
* remove collection findByID caching ([#5034](https://github.com/payloadcms/payload/issues/5034)) ([1ac943e](https://github.com/payloadcms/payload/commit/1ac943ed5e8416883b863147fdf3c23380955559))
* **richtext-lexical:** do not remove adjacent paragraph node when inserting certain nodes in empty editor ([#5061](https://github.com/payloadcms/payload/issues/5061)) ([6323965](https://github.com/payloadcms/payload/commit/6323965c652ea68dffeb716957b124d165b9ce96))
* **uploads:** account for serverURL when retrieving external file ([#5102](https://github.com/payloadcms/payload/issues/5102)) ([25cee8b](https://github.com/payloadcms/payload/commit/25cee8bb102bf80b3a4bfb4b4e46712722cc7f0d))


### ⚠ BREAKING CHANGES: @payloadcms/richtext-lexical

* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)

- You HAVE to make sure that any versions of the lexical packages (IF you have any installed) match the lexical version which richtext-lexical uses: v0.13.1. If you do not do this, you may be plagued by React useContext / "cannot find active editor state" errors
- Updates to lexical's API, e.g. the removal of INTERNAL_isPointSelection, could be breaking depending on your code. Please consult the [lexical changelog](https://github.com/facebook/lexical/blob/main/CHANGELOG.md).
## [2.11.0](https://github.com/payloadcms/payload/compare/v2.10.1...v2.11.0) (2024-02-09)


### Features

* exposes collapsible provider with more functionality ([#5043](https://github.com/payloadcms/payload/issues/5043)) ([df39602](https://github.com/payloadcms/payload/commit/df39602758ae8dc3765bb48e51f7a657babfa559))

## [2.10.1](https://github.com/payloadcms/payload/compare/v2.10.0...v2.10.1) (2024-02-09)


### Bug Fixes

* clearable cells handle null values ([#5038](https://github.com/payloadcms/payload/issues/5038)) ([f6d7da7](https://github.com/payloadcms/payload/commit/f6d7da751039df25066b51bb91d6453e1a4efd82))
* **db-mongodb:** handle null values with exists ([#5037](https://github.com/payloadcms/payload/issues/5037)) ([cdc4cb9](https://github.com/payloadcms/payload/commit/cdc4cb971b9180ba2ed09741f5af1a3c18292828))
* **db-postgres:** handle nested docs with drafts ([#5012](https://github.com/payloadcms/payload/issues/5012)) ([da184d4](https://github.com/payloadcms/payload/commit/da184d40ece74bffb224002eb5df8f6987d65043))
* ensures docs with the same id are shown in relationship field select ([#4859](https://github.com/payloadcms/payload/issues/4859)) ([e1813fb](https://github.com/payloadcms/payload/commit/e1813fb884e0dc84203fcbab87527a99a4d3a5d7))
* query relationships by explicit id field ([#5022](https://github.com/payloadcms/payload/issues/5022)) ([a0a58e7](https://github.com/payloadcms/payload/commit/a0a58e7fd20dff54d210c968f4d5defd67441bdd))
* **richtext-lexical:** make editor reactive to initialValue changes ([#5010](https://github.com/payloadcms/payload/issues/5010)) ([2315781](https://github.com/payloadcms/payload/commit/2315781f1891ddde4b4c5f2f0cfa1c17af85b7a9))

## [2.10.0](https://github.com/payloadcms/payload/compare/v2.9.0...v2.10.0) (2024-02-06)


### Features

* add more options to addFieldStatePromise so that it can be used for field flattening ([#4799](https://github.com/payloadcms/payload/issues/4799)) ([8725d41](https://github.com/payloadcms/payload/commit/8725d411645bb0270376e235669f46be2227ecc0))
* extend transactions to cover after and beforeOperation hooks ([#4960](https://github.com/payloadcms/payload/issues/4960)) ([1e8a6b7](https://github.com/payloadcms/payload/commit/1e8a6b7899f7b1e6451cc4d777602208478b483c))
* previousValue and previousSiblingDoc args added to beforeChange field hooks ([#4958](https://github.com/payloadcms/payload/issues/4958)) ([5d934ba](https://github.com/payloadcms/payload/commit/5d934ba02d07d98f781ce983228858ee5ce5c226))
* re-use existing logger instance passed to payload.init ([#3124](https://github.com/payloadcms/payload/issues/3124)) ([471d211](https://github.com/payloadcms/payload/commit/471d2113a790dc0d54b2f8ed84e6899310efd600))
* **richtext-lexical:** Blocks: generate type definitions for blocks fields ([#4529](https://github.com/payloadcms/payload/issues/4529)) ([90d7ee3](https://github.com/payloadcms/payload/commit/90d7ee3e6535d51290fc734b284ff3811dbda1f8))
* use deletion success message from server if provided ([#4966](https://github.com/payloadcms/payload/issues/4966)) ([e3c8105](https://github.com/payloadcms/payload/commit/e3c8105cc2ed6fdf8007d97cd7b5556fc71ed724))


### Bug Fixes

* **db-postgres:** filtering relationships with drafts enabled ([#4998](https://github.com/payloadcms/payload/issues/4998)) ([c3a3942](https://github.com/payloadcms/payload/commit/c3a39429697e9d335e9be199e7caafb82eb26219))
* **db-postgres:** handle schema changes with supabase ([#4968](https://github.com/payloadcms/payload/issues/4968)) ([5d3659d](https://github.com/payloadcms/payload/commit/5d3659d48ad8bbf5d96fbcd80434d2287cab97e0))
* **db-postgres:** indexes not created for non unique field names ([#4967](https://github.com/payloadcms/payload/issues/4967)) ([64f705c](https://github.com/payloadcms/payload/commit/64f705c3c94148972f67e8175e718015760d6430))
* **db-postgres:** indexes not creating for relationships, arrays, hasmany and blocks ([#4976](https://github.com/payloadcms/payload/issues/4976)) ([47106d5](https://github.com/payloadcms/payload/commit/47106d5a1af2ebd073fbbc6e474174c3d3835e5c))
* **db-postgres:** localized field sort count ([#4997](https://github.com/payloadcms/payload/issues/4997)) ([f3876c2](https://github.com/payloadcms/payload/commit/f3876c2a39efe19a1864213306725aadcc14f130))
* ensures docPermissions fallback to collection permissions on create ([#4969](https://github.com/payloadcms/payload/issues/4969)) ([afa2b94](https://github.com/payloadcms/payload/commit/afa2b942e0aad90c55744ae13e0ffe1cefa4585d))
* **migrations:** safely create migration file when no name passed ([#4995](https://github.com/payloadcms/payload/issues/4995)) ([0740d50](https://github.com/payloadcms/payload/commit/0740d5095ee1aef13e4e37f6b174d529f0f2d993))
* **plugin-seo:** tabbedUI with email field causes duplicate field ([#4944](https://github.com/payloadcms/payload/issues/4944)) ([db22cbd](https://github.com/payloadcms/payload/commit/db22cbdf21a39ed0604ab96c57ca4242eac82ce7))

## [2.9.0](https://github.com/payloadcms/payload/compare/v2.8.2...v2.9.0) (2024-01-26)


### Features

* forceAcceptWarning migration arg added to accept prompts ([#4874](https://github.com/payloadcms/payload/issues/4874)) ([eba53ba](https://github.com/payloadcms/payload/commit/eba53ba60afd7c5d37389377ed06a9b556058d49))

### Bug Fixes

* afterLogin hook write conflicts ([#4904](https://github.com/payloadcms/payload/issues/4904)) ([3eb681e](https://github.com/payloadcms/payload/commit/3eb681e847e9c55eaaa69c22bea4f4e66c7eac36))
* **db-postgres:** migrate down error ([#4861](https://github.com/payloadcms/payload/issues/4861)) ([dfba522](https://github.com/payloadcms/payload/commit/dfba5222f3abf3f236dc9212a28e1aec7d7214d5))
* **db-postgres:** query unset relation ([#4862](https://github.com/payloadcms/payload/issues/4862)) ([8ce15c8](https://github.com/payloadcms/payload/commit/8ce15c8b07800397a50dcf790c263ed5b3cfad53))
* migrate down missing filter for latest batch ([#4860](https://github.com/payloadcms/payload/issues/4860)) ([b99d24f](https://github.com/payloadcms/payload/commit/b99d24fcfa698c493ea01c41621201abe18fabe3))
* **plugin-cloud-storage:** slow get file performance large collections ([#4927](https://github.com/payloadcms/payload/issues/4927)) ([f73d503](https://github.com/payloadcms/payload/commit/f73d503fecdfa5cefdc26ab9aad60b00563f881e))
* remove No Options dropdown from hasMany fields ([#4899](https://github.com/payloadcms/payload/issues/4899)) ([e5a7907](https://github.com/payloadcms/payload/commit/e5a7907a72c1371447ac2f71fce213ed22246092))
* upload input drawer does not show draft versions ([#4903](https://github.com/payloadcms/payload/issues/4903)) ([6930c4e](https://github.com/payloadcms/payload/commit/6930c4e9f2200853121391ad8f8df48ea66c40a4))

## [2.8.2](https://github.com/payloadcms/payload/compare/v2.8.1...v2.8.2) (2024-01-16)


### Features

* **db-postgres:** support drizzle logging config ([#4809](https://github.com/payloadcms/payload/issues/4809)) ([371353f](https://github.com/payloadcms/payload/commit/371353f1535fbab4ebd9f56fc14fd10a30eec289))
* **plugin-form-builder:** add validation for form ID when creating a submission
* **plugin-seo:** allow field and interface overrides
* **plugin-form-builder:** add validation for form ID when creating a submission ([#4730](https://github.com/payloadcms/payload/pull/4730))
* **plugin-seo:** add support for interfaceName and fieldOverrides ([#4695](https://github.com/payloadcms/payload/pull/4695))

### Bug Fixes

@@ -14,6 +123,7 @@

* **db-postgres:** Remove duplicate keys from response ([#4747](https://github.com/payloadcms/payload/issues/4747)) ([eb9e771](https://github.com/payloadcms/payload/commit/eb9e771a9ca03636486d36654f215b73435574cb))
* **db-postgres:** validateExistingBlockIsIdentical with arrays ([3b88adc](https://github.com/payloadcms/payload/commit/3b88adc7d0594af63ce190c40c9ee3905df67a31))
* **db-postgres:** validateExistingBlockIsIdentical with other tables ([0647c87](https://github.com/payloadcms/payload/commit/0647c870f15dc1b122734b678c2abeb6f56377d4))
* **plugin-seo:** fix missing spread operator in URL generator function ([#4723](https://github.com/payloadcms/payload/pull/4723))
* removes max-width from field-types class & correctly sets it on uploads ([#4829](https://github.com/payloadcms/payload/issues/4829)) ([ee5390a](https://github.com/payloadcms/payload/commit/ee5390aaca37a4154cde8392b60f091ec3e5175c))

## [2.8.1](https://github.com/payloadcms/payload/compare/v2.8.0...v2.8.1) (2024-01-12)
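The `db-postgres: adds idType to use uuid or serial id columns` feature listed under 2.11.1 above introduces a new adapter option. A minimal sketch of opting into UUID primary keys, assuming a `POSTGRES_URL` connection string:

```ts
import { postgresAdapter } from '@payloadcms/db-postgres'

// Sketch only: the `idType` option name comes from the 2.11.1 changelog entry above.
// 'uuid' switches id columns to UUIDs; 'serial' keeps the default integer ids.
export const db = postgresAdapter({
  pool: { connectionString: process.env.POSTGRES_URL }, // placeholder env var
  idType: 'uuid',
})
```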
@@ -635,6 +635,37 @@ export const CustomArrayManager = () => {

  ]}
/>

### useCollapsible

The `useCollapsible` hook allows you to control parent collapsibles:

| Property | Description |
|-------------------------|--------------------------------------------------------------------------------------------------------------|
| **`collapsed`** | State of the collapsible. `true` if open, `false` if collapsed |
| **`isVisible`** | If nested, determine if the nearest collapsible is visible. `true` if no parent is closed, `false` otherwise |
| **`toggle`** | Toggles the state of the nearest collapsible |
| **`withinCollapsible`** | Determine when you are within another collapsible |

**Example:**

```tsx
import React from 'react'

import { useCollapsible } from 'payload/components/utilities'

const CustomComponent: React.FC = () => {
  const { collapsed, toggle } = useCollapsible()
  return (
    <div>
      <p className="field-type">I am {collapsed ? 'closed' : 'open'}</p>
      <button onClick={toggle} type="button">
        Toggle
      </button>
    </div>
  )
}
```

### useDocumentInfo

The `useDocumentInfo` hook provides lots of information about the document currently being edited, including the following:

@@ -774,8 +805,8 @@ const MyComponent: React.FC = () => {

  return (
    <>
      <span>The current theme is {theme} and autoMode is {autoMode}</span>
      <button
        type="button"
      <button
        type="button"
        onClick={() => setTheme(prev => prev === "light" ? "dark" : "light")}
      >
        Toggle theme
@@ -2,7 +2,7 @@

title: Postgres
label: Postgres
order: 50
desc: Payload supports Postgres through an officially supported Drizzle database adapter.
desc: Payload supports Postgres through an officially supported Drizzle database adapter.
keywords: Postgres, documentation, typescript, Content Management System, cms, headless, javascript, node, react, express
---

@@ -37,11 +37,12 @@ export default buildConfig({

### Options

| Option | Description |
| ----------------- | ----------------- |
| `pool` | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`. |
| `push` | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored. |

| Option | Description |
|----------------|----------------|
| `pool` | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`. |
| `push` | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored. |
| `schemaName` | A string for the postgres schema to use, defaults to 'public'. |
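A minimal sketch of how these options might be passed to the adapter; the `POSTGRES_URL` environment variable, the migrations path, and the `custom` schema name are placeholders:

```ts
import path from 'path'

import { postgresAdapter } from '@payloadcms/db-postgres'

// Sketch of the options documented in the table above.
export const db = postgresAdapter({
  pool: {
    connectionString: process.env.POSTGRES_URL, // placeholder env var
  },
  push: false, // opt out of Drizzle's development-mode `db push`
  migrationDir: path.resolve(__dirname, 'migrations'), // placeholder directory
  schemaName: 'custom', // the new option; defaults to 'public'
})
```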

### Access to Drizzle

@@ -65,7 +66,7 @@ In addition to exposing Drizzle directly, all of the tables, Drizzle relations,

Drizzle exposes two ways to work locally in development mode.

The first is [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push), which automatically pushes changes you make to your Payload config (and therefore, Drizzle schema) to your database so you don't have to manually migrate every time you change your Payload config. This only works in development mode, and should not be mixed with manually running [`migrate`](/docs/database/migrations) commands.
The first is [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push), which automatically pushes changes you make to your Payload config (and therefore, Drizzle schema) to your database so you don't have to manually migrate every time you change your Payload config. This only works in development mode, and should not be mixed with manually running [`migrate`](/docs/database/migrations) commands.

You will be warned if any changes that you make will entail data loss while in development mode. Push is enabled by default, but you can opt out if you'd like.

@@ -77,11 +78,11 @@ Migrations are extremely powerful thanks to the seamless way that Payload and Dr

1. You are building your Payload config locally, with a local database used for testing.
1. You have left the default setting of `push` enabled, so every time you change your Payload config (add or remove fields, collections, etc.), Drizzle will automatically push changes to your local DB.
1. Once you're done with your changes, or have completed a feature, you can run `npm run payload migrate:create`.
1. Once you're done with your changes, or have completed a feature, you can run `npm run payload migrate:create`.
1. Payload and Drizzle will look for any existing migrations, and automatically generate all SQL changes necessary to convert your schema from its prior state into the state of your current Payload config, and store the resulting DDL in a newly created migration.
1. Once you're ready to go to production, you will be able to run `npm run payload migrate` against your production database, which will apply any new migrations that have not yet run.
1. Now your production database is in sync with your Payload config!

<Banner type="warning">
  Warning: do not mix "push" and migrations with your local development database. If you use "push" locally, and then try to migrate, Payload will throw a warning, telling you that these two methods are not meant to be used interchangeably.
</Banner>
</Banner>

@@ -28,7 +28,7 @@ This field uses the `monaco-react` editor syntax highlighting.

| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |

@@ -46,6 +46,7 @@ export const Page: CollectionConfig = {

- [Date](/docs/fields/date) - date / time field that saves a timestamp
- [Email](/docs/fields/email) - validates the entry is a properly formatted email
- [Group](/docs/fields/group) - nest fields within an object
- [JSON](/docs/fields/json) - saves actual JSON in the database
- [Number](/docs/fields/number) - field that enforces that its value be a number
- [Point](/docs/fields/point) - geometric coordinates for location data
- [Radio](/docs/fields/radio) - radio button group, allowing only one value to be selected

@@ -38,7 +38,7 @@ caption="Admin panel screenshot of a Relationship field"

| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |
| **`access`** | Provide field-based access control to denote what users can see and do with this field's data. [More](/docs/fields/overview#field-level-access-control) |

@@ -75,6 +75,7 @@ import { CollectionBeforeOperationHook } from 'payload/types'

const beforeOperationHook: CollectionBeforeOperationHook = async ({
  args, // original arguments passed into the operation
  operation, // name of the operation
  req, // full express request
}) => {
  return args // return modified operation arguments as necessary
}

@@ -209,6 +210,7 @@ import { CollectionAfterOperationHook } from 'payload/types'

const afterOperationHook: CollectionAfterOperationHook = async ({
  args, // arguments passed into the operation
  operation, // name of the operation
  req, // full express request
  result, // the result of the operation, before modifications
}) => {
  return result // return modified result as necessary
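The `req` argument shown in both snippets above is what ties hook logic into the surrounding transaction, the same mechanism the transaction-related changelog entries refer to. A hedged sketch, using a hypothetical `audit-entries` collection:

```ts
import { CollectionAfterOperationHook } from 'payload/types'

// Sketch only: forwarding `req` into a Local API call keeps the extra write
// inside the same transaction as the operation that triggered the hook.
const recordAudit: CollectionAfterOperationHook = async ({ operation, req, result }) => {
  if (operation === 'create') {
    await req.payload.create({
      req, // re-use the request so this write joins the surrounding transaction
      collection: 'audit-entries', // hypothetical collection
      data: { operation },
    })
  }

  return result // always return the (optionally modified) result
}
```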
@@ -6,7 +6,8 @@ desc: Hooks can be added to any fields, and optionally modify the return value o

keywords: hooks, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
---

Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up functionalities to be easily reusable across your projects.
Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up
functionalities to be easily reusable across your projects.

**Example use cases include:**

@@ -46,7 +47,8 @@ const ExampleField: Field = {

## Arguments and return values

All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on which field hook you are utilizing.
All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on
which field hook you are utilizing.

<Banner type="success">
  <strong>Tip:</strong>

@@ -69,10 +71,10 @@ Field Hooks receive one `args` argument that contains the following properties:

| **`operation`** | A string relating to which operation the field type is currently executing within. Useful within `beforeValidate`, `beforeChange`, and `afterChange` hooks to differentiate between `create` and `update` operations. |
| **`originalDoc`** | The full original document in `update` operations. In the `afterChange` hook, this is the resulting document of the operation. |
| **`previousDoc`** | The document before changes were applied, only in `afterChange` hooks. |
| **`previousSiblingDoc`** | The sibling data from the previous document in `afterChange` hook. |
| **`previousSiblingDoc`** | The sibling data of the document before changes being applied, only in `beforeChange` and `afterChange` hook. |
| **`req`** | The Express `request` object. It is mocked for Local API operations. |
| **`value`** | The value of the field. |
| **`previousValue`** | The previous value of the field, before changes were applied, only in `afterChange` hooks. |
| **`previousValue`** | The previous value of the field, before changes, only in `beforeChange` and `afterChange` hooks. |
| **`context`** | Context passed to this hook. More info can be found under [Context](/docs/hooks/context) |
| **`field`** | The field which the hook is running against. |
| **`collection`** | The collection which the field belongs to. If the field belongs to a global, this will be null. |
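To illustrate the `previousValue` and `previousSiblingDoc` arguments described above, here is a short sketch of an `afterChange` hook on a hypothetical `status` field; the field and its logging behaviour are assumptions, not part of this diff:

```ts
import { Field } from 'payload/types'

// Hypothetical field used only to demonstrate `previousValue`.
// `previousSiblingDoc` is available in the same way for sibling data.
const statusField: Field = {
  name: 'status',
  type: 'select',
  options: ['draft', 'published'],
  hooks: {
    afterChange: [
      ({ value, previousValue, req }) => {
        // Only log when the stored value actually changed
        if (previousValue !== value) {
          req.payload.logger.info(`status changed from ${String(previousValue)} to ${String(value)}`)
        }
        return value
      },
    ],
  },
}
```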
@@ -80,7 +82,8 @@ Field Hooks receive one `args` argument that contains the following properties:

#### Return value

All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may optionally return the value that should be used within the field.
All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may
optionally return the value that should be used within the field.

<Banner type="warning">
  <strong>Important</strong>

@@ -92,11 +95,14 @@ All field hooks can optionally modify the return value of the field before the o

## Examples of Field Hooks

To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the true potential of field-level hooks lies in their adaptability to a wide array of use cases.
To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the
flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the
true potential of field-level hooks lies in their adaptability to a wide array of use cases.

### beforeValidate

Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes validation.
Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes
validation.

```ts
import { Field } from 'payload/types'

@@ -113,11 +119,15 @@ const usernameField: Field = {

}
```

In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is stored in a consistent format in the database.
In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of
the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is
stored in a consistent format in the database.

### beforeChange

Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage, you can be confident that the field data that will be saved to the document is valid in accordance to your field validations.
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage,
you can be confident that the field data that will be saved to the document is valid in accordance to your field
validations.

```ts
import { Field } from 'payload/types'

@@ -136,11 +146,14 @@ const emailField: Field = {

}
```

In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs additional validation or transformation on the email field value. This allows for operation-specific logic to be applied to the field.
In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs
additional validation or transformation on the email field value. This allows for operation-specific logic to be applied
to the field.

### afterChange

The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful for post-processing or triggering side effects based on the new value of the field.
The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful
for post-processing or triggering side effects based on the new value of the field.

```ts
import { Field } from 'payload/types'

@@ -165,11 +178,15 @@ const membershipStatusField: Field = {

}
```

In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or notifying them about changes in their membership benefits.
In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their
membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it
logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or
notifying them about changes in their membership benefits.

### afterRead

The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or transforming the field data for output.
The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or
transforming the field data for output.

```ts
import { Field } from 'payload/types'

@@ -186,8 +203,9 @@ const dateField: Field = {

}
```

Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more user-friendly.

Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format
using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more
user-friendly.

## TypeScript
@@ -36,7 +36,7 @@ If your Hook simply performs a side-effect, such as updating a CRM, it might be

#### Server-only execution

Payload Hooks do not have any effect within the Payload Admin panel. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
Payload Hooks are only triggered on the server. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's client-side code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
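A rough sketch of the aliasing approach linked above, with hypothetical file paths; the function would be passed as `admin.webpack` in the Payload config:

```ts
import path from 'path'
import type { Configuration } from 'webpack'

// Hypothetical paths: a hook module with server-only imports, and an empty stub
// that replaces it inside the Admin bundle.
const serverOnlyFile = path.resolve(__dirname, 'hooks/syncToCRM.ts')
const mockModule = path.resolve(__dirname, 'mocks/emptyModule.ts')

export const adminWebpack = (config: Configuration): Configuration => ({
  ...config,
  resolve: {
    ...config.resolve,
    alias: {
      ...((config.resolve?.alias as Record<string, string>) ?? {}),
      [serverOnlyFile]: mockModule, // the Admin bundle never loads the real hook file
    },
  },
})
```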
## Hook Types

@@ -98,6 +98,13 @@ On boot, a seed script is included to scaffold a basic database for you to use a

> NOTICE: seeding the database is destructive because it drops your current database to populate a fresh one from the seed template. Only run this command if you are starting a new project or can afford to lose your current data.

### Conflicting routes

>In a monorepo when routes are bootstrapped to the same host, they can conflict with Payload's own routes if they have the same name. In our template we've named the Nextjs API routes to `next` to avoid this conflict.
>
>This can happen with any other routes conflicting with Payload such as `admin` and we recommend using different names for custom routes.
>Alternatively you can also rename Payload's own routes via the [configuration](https://payloadcms.com/docs/configuration/overview).

## Production

To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:

@@ -1,5 +0,0 @@

import { NextResponse } from 'next/server'

export async function GET(): Promise<NextResponse> {
  return NextResponse.json({ success: true })
}

@@ -1,5 +0,0 @@

import { NextResponse } from 'next/server'

export async function POST(): Promise<NextResponse> {
  return NextResponse.json({ success: true })
}

examples/custom-server/src/app/next/test-get/route.ts (new file, 10 lines)

@@ -0,0 +1,10 @@

import { NextResponse } from 'next/server'

/**
 * The Next.js API routes can conflict with Payload's own routes if they share the same path
 * To avoid this you can customise the path of Payload or the API route of Nextjs as we've done here
 * See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
 * */
export async function GET(): Promise<NextResponse> {
  return NextResponse.json({ success: true })
}

examples/custom-server/src/app/next/test-post/route.ts (new file, 10 lines)

@@ -0,0 +1,10 @@

import { NextResponse } from 'next/server'

/**
 * The Next.js API routes can conflict with Payload's own routes if they share the same path
 * To avoid this you can customise the path of Payload or the API route of Nextjs as we've done here
 * See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
 * */
export async function POST(): Promise<NextResponse> {
  return NextResponse.json({ success: true })
}
examples/hierarchy/.env.example (new file, 2 lines)

@@ -0,0 +1,2 @@

DATABASE_URI=mongodb://127.0.0.1/payload-template-blank
PAYLOAD_SECRET=YOUR_SECRET_HERE

examples/hierarchy/.gitignore (vendored, new file, 6 lines)

@@ -0,0 +1,6 @@

build
dist
/media
node_modules
.DS_Store
.env

examples/hierarchy/.prettierrc.js (new file, 8 lines)

@@ -0,0 +1,8 @@

module.exports = {
  printWidth: 100,
  parser: 'typescript',
  semi: false,
  singleQuote: true,
  trailingComma: 'all',
  arrowParens: 'avoid',
}
examples/hierarchy/README.md (new file, 58 lines)

@@ -0,0 +1,58 @@

# Payload Hierarchy Example

This example demonstrates how to achieve a virtual hierarchy between documents in your [Payload](https://github.com/payloadcms/payload) application.

## Quick Start

To spin up the project locally, follow these steps:

1. First clone the repo
1. Then `cd YOUR_PROJECT_REPO && cp .env.example .env`
1. Next `yarn && yarn dev` (or `docker-compose up`, see [Docker](#docker))
1. Now `open http://localhost:3000/admin` to access the admin panel
1. Create your first admin user using the form on the page

That's it! Changes made in `./src` will be reflected in your app.

## How it works

This example achieves parent/child relationships between your documents through the use of virtual fields. When you query a document with the `?children=true` query param, an afterRead hook is used to populate the documents within its own tree.

For more information on how virtual fields work, see the [Official Virtual Fields Example](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields).
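A quick sketch of that query from a client; the host and document ID are placeholders:

```ts
// Ask the afterRead hook to populate the virtual `children` field.
const fetchEntityWithChildren = async (id: string) => {
  const res = await fetch(`http://localhost:3000/api/entities/${id}?children=true`)
  const entity = await res.json()

  // `children` holds the related entities and people found by the hook
  console.log(entity.children)
  return entity
}
```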
### Collections

See the [Collections](https://payloadcms.com/docs/configuration/collections) docs for details on how to extend any of this functionality.

- #### Users

  The `users` collection is a default payload users collection.

- #### Entities

  The `entities` collection can define a parent as any other entity. It has a virtual field that will also populate children when it is called via the API using a query `children=true`. See [Virtual Fields](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields) for more details on how virtual fields work.

  The virtual field retrieves __all__ children which includes other entities and people.

- #### People

  The `people` collection is a collection that can define an array of parent entities. It also has an allocation field. This is for demonstrating attaching data to a parent-child relationship.

## Development

To spin up this example locally, follow the [Quick Start](#quick-start).

## Production

To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:

1. First invoke the `payload build` script by running `yarn build` or `npm run build` in your project root. This creates a `./build` directory with a production-ready admin bundle.
1. Then run `yarn serve` or `npm run serve` to run Node in production and serve Payload from the `./build` directory.

### Deployment

The easiest way to deploy your project is to use [Payload Cloud](https://payloadcms.com/new/import), a one-click hosting solution to deploy production-ready instances of your Payload apps directly from your GitHub repo. You can also deploy your app manually, check out the [deployment documentation](https://payloadcms.com/docs/production/deployment) for full details.

## Questions

If you have any issues or questions, reach out to us on [Discord](https://discord.com/invite/payload) or start a [GitHub discussion](https://github.com/payloadcms/payload/discussions).
examples/hierarchy/nodemon.json (new file, 6 lines)

@@ -0,0 +1,6 @@

{
  "$schema": "https://json.schemastore.org/nodemon.json",
  "ext": "ts",
  "exec": "ts-node src/server.ts -- -I",
  "stdin": false
}

examples/hierarchy/package.json (new file, 35 lines)

@@ -0,0 +1,35 @@

{
  "name": "hierarchy",
  "description": "A hierarchy example with Payload",
  "version": "1.0.0",
  "main": "dist/server.js",
  "license": "MIT",
  "scripts": {
    "dev": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts nodemon",
    "build:payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload build",
    "build:server": "tsc",
    "build": "yarn copyfiles && yarn build:payload && yarn build:server",
    "serve": "cross-env PAYLOAD_CONFIG_PATH=dist/payload.config.js NODE_ENV=production node dist/server.js",
    "copyfiles": "copyfiles -u 1 \"src/**/*.{html,css,scss,ttf,woff,woff2,eot,svg,jpg,png}\" dist/",
    "generate:types": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types",
    "generate:graphQLSchema": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:graphQLSchema",
    "payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload"
  },
  "dependencies": {
    "@payloadcms/bundler-webpack": "^1.0.0",
    "@payloadcms/db-mongodb": "^1.0.0",
    "@payloadcms/plugin-cloud": "^3.0.0",
    "@payloadcms/richtext-slate": "^1.0.0",
    "cross-env": "^7.0.3",
    "dotenv": "^8.2.0",
    "express": "^4.17.1",
    "payload": "^2.0.0"
  },
  "devDependencies": {
    "@types/express": "^4.17.9",
    "copyfiles": "^2.4.1",
    "nodemon": "^2.0.6",
    "ts-node": "^9.1.1",
    "typescript": "^4.8.4"
  }
}
examples/hierarchy/src/collections/Entities.ts (new file, 79 lines)

@@ -0,0 +1,79 @@

import { CollectionConfig } from 'payload/types'

export const Entities: CollectionConfig = {
  slug: 'entities',
  admin: {
    useAsTitle: 'name',
  },
  fields: [
    {
      name: 'name',
      type: 'text',
      required: true,
    },
    // - This field is populated by setting the query parameter 'children=true'
    // - This is a virtual field used to track a child relationship
    // - Only relationship information is returned by this field
    // - Data beyond relationships is not stored in this field
    {
      name: 'children',
      type: 'relationship',
      relationTo: ['entities', 'people'],
      access: {
        create: () => false,
        update: () => false,
      },
      hooks: {
        afterRead: [
          async ({ data, req }) => {
            const { id } = data

            if (!req.query.children) return

            const people = await req.payload.find({
              req,
              collection: 'people',
              where: {
                'parents.parent': { equals: id },
              },
              limit: 0,
              depth: 0,
              pagination: false,
            })

            const entities = await req.payload.find({
              req,
              collection: 'entities',
              where: {
                parent: { equals: id },
              },
              limit: 0,
              depth: 0,
              pagination: false,
            })

            return [
              ...entities.docs.map(entity => {
                return {
                  relationTo: 'entity',
                  value: entity,
                }
              }),
              ...people.docs.map(person => {
                return {
                  relationTo: 'people',
                  value: person,
                }
              }),
            ]
          },
        ],
      },
    },
    {
      name: 'parent',
      type: 'relationship',
      relationTo: 'entities',
    },
  ],
}

examples/hierarchy/src/collections/People.ts (new file, 32 lines)

@@ -0,0 +1,32 @@

import { CollectionConfig } from 'payload/types'

export const People: CollectionConfig = {
  slug: 'people',
  admin: {
    useAsTitle: 'name',
  },
  fields: [
    {
      name: 'name',
      type: 'text',
      required: true,
    },
    {
      name: 'parents',
      type: 'array',
      fields: [
        {
          name: 'parent',
          type: 'relationship',
          relationTo: 'entities',
        },
        {
          name: 'allocation',
          type: 'number',
          min: 0,
          max: 100,
        },
      ],
    },
  ],
}

examples/hierarchy/src/collections/Users.ts (new file, 15 lines)

@@ -0,0 +1,15 @@

import { CollectionConfig } from 'payload/types'

const Users: CollectionConfig = {
  slug: 'users',
  auth: true,
  admin: {
    useAsTitle: 'email',
  },
  fields: [
    // Email added by default
    // Add more fields as needed
  ],
}

export default Users
30  examples/hierarchy/src/payload.config.ts  Normal file
@@ -0,0 +1,30 @@
import path from 'path'

import { payloadCloud } from '@payloadcms/plugin-cloud'
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { webpackBundler } from '@payloadcms/bundler-webpack'
import { slateEditor } from '@payloadcms/richtext-slate'
import { buildConfig } from 'payload/config'

import Users from './collections/Users'
import { Entities } from './collections/Entities'
import { People } from './collections/People'

export default buildConfig({
  admin: {
    user: Users.slug,
    bundler: webpackBundler(),
  },
  editor: slateEditor({}),
  collections: [Users, Entities, People],
  typescript: {
    outputFile: path.resolve(__dirname, 'payload-types.ts'),
  },
  graphQL: {
    schemaOutputFile: path.resolve(__dirname, 'generated-schema.graphql'),
  },
  plugins: [payloadCloud()],
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
  }),
})
27  examples/hierarchy/src/server.ts  Normal file
@@ -0,0 +1,27 @@
import express from 'express'
import payload from 'payload'

require('dotenv').config()
const app = express()

// Redirect root to Admin panel
app.get('/', (_, res) => {
  res.redirect('/admin')
})

const start = async () => {
  // Initialize Payload
  await payload.init({
    secret: process.env.PAYLOAD_SECRET,
    express: app,
    onInit: async () => {
      payload.logger.info(`Payload Admin URL: ${payload.getAdminURL()}`)
    },
  })

  // Add your own express routes here

  app.listen(3000)
}

start()
22  examples/hierarchy/tsconfig.json  Normal file
@@ -0,0 +1,22 @@
{
  "compilerOptions": {
    "target": "es5",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "strict": false,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "outDir": "./dist",
    "rootDir": "./src",
    "jsx": "react",
    "paths": {
      "payload/generated-types": ["./src/payload-types.ts"]
    }
  },
  "include": ["src"],
  "exclude": ["node_modules", "dist", "build"],
  "ts-node": {
    "transpileOnly": true,
    "swc": true
  }
}
7896  examples/hierarchy/yarn.lock  Normal file
File diff suppressed because it is too large
@@ -2,26 +2,27 @@ import type { AfterLoginHook } from 'payload/dist/collections/config/types'
|
||||
|
||||
export const recordLastLoggedInTenant: AfterLoginHook = async ({ req, user }) => {
|
||||
try {
|
||||
const relatedOrg = await req.payload.find({
|
||||
collection: 'tenants',
|
||||
where: {
|
||||
'domains.domain': {
|
||||
in: [req.headers.host],
|
||||
},
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
})
|
||||
|
||||
if (relatedOrg.docs.length > 0) {
|
||||
await req.payload.update({
|
||||
id: user.id,
|
||||
collection: 'users',
|
||||
data: {
|
||||
lastLoggedInTenant: relatedOrg.docs[0].id,
|
||||
const relatedOrg = await req.payload
|
||||
.find({
|
||||
collection: 'tenants',
|
||||
where: {
|
||||
'domains.domain': {
|
||||
in: [req.headers.host],
|
||||
},
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
})
|
||||
}
|
||||
?.then(res => res.docs?.[0])
|
||||
|
||||
await req.payload.update({
|
||||
id: user.id,
|
||||
collection: 'users',
|
||||
data: {
|
||||
lastLoggedInTenant: relatedOrg?.id || null,
|
||||
},
|
||||
req,
|
||||
})
|
||||
} catch (err: unknown) {
|
||||
req.payload.logger.error(`Error recording last logged in tenant for user ${user.id}: ${err}`)
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ export const isSuperOrTenantAdmin = async (args: { req: PayloadRequest }): Promi
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
req,
|
||||
})
|
||||
|
||||
// if this tenant does not exist, deny access
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
26
package.json
26
package.json
@@ -15,9 +15,13 @@
|
||||
"dev:generate-graphql-schema": "ts-node -T ./test/generateGraphQLSchema.ts",
|
||||
"dev:generate-types": "ts-node -T ./test/generateTypes.ts",
|
||||
"dev:postgres": "pnpm --filter payload run dev:postgres",
|
||||
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
|
||||
"docker:start": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
|
||||
"docker:stop": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml down",
|
||||
"fix": "eslint \"packages/**/*.ts\" --fix",
|
||||
"lint": "eslint \"packages/**/*.ts\"",
|
||||
"lint-staged": "lint-staged",
|
||||
"prepare": "husky install",
|
||||
"pretest": "pnpm build",
|
||||
"reinstall": "pnpm clean:unix && pnpm install",
|
||||
"script:list-packages": "tsx ./scripts/list-packages.ts",
|
||||
@@ -29,10 +33,10 @@
|
||||
"test:e2e:headed": "cross-env DISABLE_LOGGING=true playwright test --headed",
|
||||
"test:int:postgres": "cross-env PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
|
||||
"test:int": "cross-env DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
|
||||
"translateNewKeys": "pnpm --filter payload run translateNewKeys",
|
||||
"prepare": "husky install"
|
||||
"translateNewKeys": "pnpm --filter payload run translateNewKeys"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@aws-sdk/client-s3": "^3.142.0",
|
||||
"@payloadcms/eslint-config": "workspace:*",
|
||||
"@playwright/test": "1.40.1",
|
||||
"@swc/cli": "^0.1.62",
|
||||
@@ -64,7 +68,7 @@
|
||||
"copyfiles": "2.4.1",
|
||||
"cross-env": "7.0.3",
|
||||
"dotenv": "8.6.0",
|
||||
"drizzle-orm": "0.28.5",
|
||||
"drizzle-orm": "0.29.3",
|
||||
"express": "4.18.2",
|
||||
"form-data": "3.0.1",
|
||||
"fs-extra": "10.1.0",
|
||||
@@ -77,12 +81,12 @@
|
||||
"jest": "29.7.0",
|
||||
"jest-environment-jsdom": "29.7.0",
|
||||
"jwt-decode": "3.1.2",
|
||||
"lexical": "0.12.5",
|
||||
"lexical": "0.13.1",
|
||||
"lint-staged": "^14.0.1",
|
||||
"minimist": "1.2.8",
|
||||
"mongodb-memory-server": "^9",
|
||||
"node-fetch": "2.6.12",
|
||||
"nodemon": "3.0.2",
|
||||
"nodemon": "3.0.3",
|
||||
"prettier": "^3.0.3",
|
||||
"prompts": "2.4.2",
|
||||
"qs": "6.11.2",
|
||||
@@ -94,7 +98,7 @@
|
||||
"slash": "3.0.0",
|
||||
"slate": "0.91.4",
|
||||
"tempfile": "^3.0.0",
|
||||
"ts-node": "10.9.1",
|
||||
"ts-node": "10.9.2",
|
||||
"turbo": "^1.11.1",
|
||||
"typescript": "5.2.2",
|
||||
"uuid": "^9.0.1"
|
||||
@@ -104,6 +108,16 @@
|
||||
"react-i18next": "11.18.6",
|
||||
"react-router-dom": "5.3.4"
|
||||
},
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"copyfiles": "$copyfiles",
|
||||
"cross-env": "$cross-env",
|
||||
"dotenv": "$dotenv",
|
||||
"drizzle-orm": "$drizzle-orm",
|
||||
"ts-node": "$ts-node",
|
||||
"typescript": "$typescript"
|
||||
}
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14",
|
||||
"pnpm": ">=8"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "1.3.2",
|
||||
"version": "1.4.3",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"repository": "https://github.com/payloadcms/payload",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -29,15 +29,18 @@ export const connect: Connect = async function connect(this: MongooseAdapter, pa
|
||||
urlToConnect = process.env.PAYLOAD_TEST_MONGO_URL
|
||||
} else {
|
||||
connectionOptions.dbName = 'payloadmemory'
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server')
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server')
|
||||
const getPort = require('get-port')
|
||||
|
||||
const port = await getPort()
|
||||
this.mongoMemoryServer = await MongoMemoryServer.create({
|
||||
this.mongoMemoryServer = await MongoMemoryReplSet.create({
|
||||
instance: {
|
||||
dbName: 'payloadmemory',
|
||||
port,
|
||||
},
|
||||
replSet: {
|
||||
count: 3,
|
||||
},
|
||||
})
|
||||
|
||||
urlToConnect = this.mongoMemoryServer.getUri()
|
||||
|
||||
@@ -32,7 +32,7 @@ export const createMigration: CreateMigration = async function createMigration({
|
||||
|
||||
// Check for predefined migration.
|
||||
// Either passed in via --file or prefixed with @payloadcms/db-mongodb/
|
||||
if (file || migrationName.startsWith('@payloadcms/db-mongodb/')) {
|
||||
if (file || migrationName?.startsWith('@payloadcms/db-mongodb/')) {
|
||||
if (!file) file = migrationName
|
||||
|
||||
const predefinedMigrationName = file.replace('@payloadcms/db-mongodb/', '')
|
||||
@@ -59,8 +59,8 @@ export const createMigration: CreateMigration = async function createMigration({
|
||||
|
||||
const timestamp = `${formattedDate}_${formattedTime}`
|
||||
|
||||
const formattedName = migrationName.replace(/\W/g, '_')
|
||||
const fileName = `${timestamp}_${formattedName}.ts`
|
||||
const formattedName = migrationName?.replace(/\W/g, '_')
|
||||
const fileName = migrationName ? `${timestamp}_${formattedName}.ts` : `${timestamp}_migration.ts`
|
||||
const filePath = `${dir}/${fileName}`
|
||||
fs.writeFileSync(filePath, migrationFileContent)
|
||||
payload.logger.info({ msg: `Migration created at ${filePath}` })
|
||||
|
||||
@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
|
||||
lean: true,
|
||||
leanWithId: true,
|
||||
limit,
|
||||
offset: skip || 0,
|
||||
options,
|
||||
page,
|
||||
pagination,
|
||||
|
||||
@@ -11,25 +11,30 @@ import type { MongooseAdapter } from '.'
|
||||
/**
|
||||
* Drop the current database and run all migrate up functions
|
||||
*/
|
||||
export async function migrateFresh(this: MongooseAdapter): Promise<void> {
|
||||
export async function migrateFresh(
|
||||
this: MongooseAdapter,
|
||||
{ forceAcceptWarning = false }: { forceAcceptWarning?: boolean },
|
||||
): Promise<void> {
|
||||
const { payload } = this
|
||||
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
if (!forceAcceptWarning) {
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
},
|
||||
)
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
payload.logger.info({
|
||||
|
||||
@@ -14,8 +14,10 @@ import type {
|
||||
DateField,
|
||||
EmailField,
|
||||
Field,
|
||||
FieldAffectingData,
|
||||
GroupField,
|
||||
JSONField,
|
||||
NonPresentationalField,
|
||||
NumberField,
|
||||
PointField,
|
||||
RadioField,
|
||||
@@ -23,12 +25,12 @@ import type {
|
||||
RichTextField,
|
||||
RowField,
|
||||
SelectField,
|
||||
Tab,
|
||||
TabsField,
|
||||
TextField,
|
||||
TextareaField,
|
||||
UploadField,
|
||||
} from 'payload/types'
|
||||
import type { FieldAffectingData, NonPresentationalField, Tab, UnnamedTab } from 'payload/types'
|
||||
|
||||
import { Schema } from 'mongoose'
|
||||
import {
|
||||
@@ -61,7 +63,15 @@ const formatBaseSchema = (field: FieldAffectingData, buildSchemaOptions: BuildSc
|
||||
unique: (!disableUnique && field.unique) || false,
|
||||
}
|
||||
|
||||
if (schema.unique && (field.localized || draftsEnabled)) {
|
||||
if (
|
||||
schema.unique &&
|
||||
(field.localized ||
|
||||
draftsEnabled ||
|
||||
(fieldAffectsData(field) &&
|
||||
field.type !== 'group' &&
|
||||
field.type !== 'tab' &&
|
||||
field.required !== true))
|
||||
) {
|
||||
schema.sparse = true
|
||||
}
|
||||
|
||||
@@ -79,7 +89,6 @@ const localizeSchema = (
|
||||
) => {
|
||||
if (fieldIsLocalized(entity) && localization && Array.isArray(localization.locales)) {
|
||||
return {
|
||||
localized: true,
|
||||
type: localization.localeCodes.reduce(
|
||||
(localeSchema, locale) => ({
|
||||
...localeSchema,
|
||||
@@ -89,6 +98,7 @@ const localizeSchema = (
|
||||
_id: false,
|
||||
},
|
||||
),
|
||||
localized: true,
|
||||
}
|
||||
}
|
||||
return schema
|
||||
@@ -140,7 +150,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
) => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
default: undefined,
|
||||
type: [
|
||||
buildSchema(config, field.fields, {
|
||||
allowIDField: true,
|
||||
@@ -153,6 +162,7 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
},
|
||||
}),
|
||||
],
|
||||
default: undefined,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
@@ -166,8 +176,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const fieldSchema = {
|
||||
default: undefined,
|
||||
type: [new Schema({}, { _id: false, discriminatorKey: 'blockType' })],
|
||||
default: undefined,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
@@ -187,12 +197,12 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
if (field.localized && config.localization) {
|
||||
config.localization.localeCodes.forEach((localeCode) => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
// @ts-expect-error Possible incorrect typing in mongoose types, this works
|
||||
schema.path(`${field.name}.${localeCode}`).discriminator(blockItem.slug, blockSchema)
|
||||
})
|
||||
} else {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore Possible incorrect typing in mongoose types, this works
|
||||
// @ts-expect-error Possible incorrect typing in mongoose types, this works
|
||||
schema.path(field.name).discriminator(blockItem.slug, blockSchema)
|
||||
}
|
||||
})
|
||||
@@ -325,14 +335,14 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
buildSchemaOptions: BuildSchemaOptions,
|
||||
): void => {
|
||||
const baseSchema: SchemaTypeOptions<unknown> = {
|
||||
type: {
|
||||
type: String,
|
||||
enum: ['Point'],
|
||||
},
|
||||
coordinates: {
|
||||
type: [Number],
|
||||
default: field.defaultValue || undefined,
|
||||
required: false,
|
||||
type: [Number],
|
||||
},
|
||||
type: {
|
||||
enum: ['Point'],
|
||||
type: String,
|
||||
},
|
||||
}
|
||||
if (buildSchemaOptions.disableUnique && field.unique && field.localized) {
|
||||
@@ -366,11 +376,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: String,
|
||||
enum: field.options.map((option) => {
|
||||
if (typeof option === 'object') return option.value
|
||||
return option
|
||||
}),
|
||||
type: String,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
@@ -388,7 +398,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
|
||||
if (field.localized && config.localization) {
|
||||
schemaToReturn = {
|
||||
localized: true,
|
||||
type: config.localization.localeCodes.reduce((locales, locale) => {
|
||||
let localeSchema: { [key: string]: any } = {}
|
||||
|
||||
@@ -396,56 +405,57 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
_id: false,
|
||||
relationTo: { enum: field.relationTo, type: String },
|
||||
type: Schema.Types.Mixed,
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
value: {
|
||||
refPath: `${field.name}.${locale}.relationTo`,
|
||||
type: Schema.Types.Mixed,
|
||||
refPath: `${field.name}.${locale}.relationTo`,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
localeSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
ref: field.relationTo,
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...locales,
|
||||
[locale]: field.hasMany ? { default: undefined, type: [localeSchema] } : localeSchema,
|
||||
[locale]: field.hasMany ? { type: [localeSchema], default: undefined } : localeSchema,
|
||||
}
|
||||
}, {}),
|
||||
localized: true,
|
||||
}
|
||||
} else if (hasManyRelations) {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
_id: false,
|
||||
relationTo: { enum: field.relationTo, type: String },
|
||||
type: Schema.Types.Mixed,
|
||||
relationTo: { type: String, enum: field.relationTo },
|
||||
value: {
|
||||
refPath: `${field.name}.relationTo`,
|
||||
type: Schema.Types.Mixed,
|
||||
refPath: `${field.name}.relationTo`,
|
||||
},
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
default: undefined,
|
||||
type: [schemaToReturn],
|
||||
default: undefined,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
schemaToReturn = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
ref: field.relationTo,
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
}
|
||||
|
||||
if (field.hasMany) {
|
||||
schemaToReturn = {
|
||||
default: undefined,
|
||||
type: [schemaToReturn],
|
||||
default: undefined,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -488,11 +498,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
type: String,
|
||||
enum: field.options.map((option) => {
|
||||
if (typeof option === 'object') return option.value
|
||||
return option
|
||||
}),
|
||||
type: String,
|
||||
}
|
||||
|
||||
if (buildSchemaOptions.draftsEnabled || !field.required) {
|
||||
@@ -576,8 +586,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
|
||||
): void => {
|
||||
const baseSchema = {
|
||||
...formatBaseSchema(field, buildSchemaOptions),
|
||||
ref: field.relationTo,
|
||||
type: Schema.Types.Mixed,
|
||||
ref: field.relationTo,
|
||||
}
|
||||
|
||||
schema.add({
|
||||
|
||||
@@ -77,6 +77,7 @@ export const sanitizeQueryValue = ({
|
||||
// Object equality requires the value to be the first key in the object that is being queried.
|
||||
if (
|
||||
operator === 'equals' &&
|
||||
formattedValue &&
|
||||
typeof formattedValue === 'object' &&
|
||||
formattedValue.value &&
|
||||
formattedValue.relationTo
|
||||
@@ -156,6 +157,23 @@ export const sanitizeQueryValue = ({
|
||||
|
||||
if (operator === 'exists') {
|
||||
formattedValue = formattedValue === 'true' || formattedValue === true
|
||||
|
||||
// Clearable fields
|
||||
if (['relationship', 'select', 'upload'].includes(field.type)) {
|
||||
if (formattedValue) {
|
||||
return {
|
||||
rawQuery: {
|
||||
$and: [{ [path]: { $exists: true } }, { [path]: { $ne: null } }],
|
||||
},
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
rawQuery: {
|
||||
$or: [{ [path]: { $exists: false } }, { [path]: { $eq: null } }],
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { operator: formattedOperator, val: formattedValue }
|
||||
|
||||
@@ -17,7 +17,11 @@ export const rollbackTransaction: RollbackTransaction = async function rollbackT
|
||||
}
|
||||
|
||||
// the first call for rollback should be aborted and deleted causing any other operations with the same transaction to fail
|
||||
await this.sessions[id].abortTransaction()
|
||||
await this.sessions[id].endSession()
|
||||
try {
|
||||
await this.sessions[id].abortTransaction()
|
||||
await this.sessions[id].endSession()
|
||||
} catch (error) {
|
||||
// ignore the error as it is likely a race condition from multiple errors
|
||||
}
|
||||
delete this.sessions[id]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "0.4.0",
|
||||
"version": "0.7.0",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"repository": "https://github.com/payloadcms/payload",
|
||||
"license": "MIT",
|
||||
@@ -22,7 +22,7 @@
|
||||
"dependencies": {
|
||||
"@libsql/client": "^0.3.1",
|
||||
"console-table-printer": "2.11.2",
|
||||
"drizzle-kit": "0.20.5-608ae62",
|
||||
"drizzle-kit": "0.20.14-1f2c838",
|
||||
"drizzle-orm": "0.29.3",
|
||||
"pg": "8.11.3",
|
||||
"prompts": "2.4.2",
|
||||
|
||||
@@ -1,13 +1,51 @@
|
||||
import type { Payload } from 'payload'
|
||||
import type { Connect } from 'payload/database'
|
||||
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/node-postgres'
|
||||
import { numeric, pgTable, timestamp, varchar } from 'drizzle-orm/pg-core'
|
||||
import { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'
|
||||
import { Pool } from 'pg'
|
||||
import prompts from 'prompts'
|
||||
|
||||
import type { PostgresAdapter } from './types'
|
||||
|
||||
const connectWithReconnect = async function ({
|
||||
adapter,
|
||||
payload,
|
||||
reconnect = false,
|
||||
}: {
|
||||
adapter: PostgresAdapter
|
||||
payload: Payload
|
||||
reconnect?: boolean
|
||||
}) {
|
||||
let result
|
||||
|
||||
if (!reconnect) {
|
||||
result = await adapter.pool.connect()
|
||||
} else {
|
||||
try {
|
||||
result = await adapter.pool.connect()
|
||||
} catch (err) {
|
||||
setTimeout(() => {
|
||||
payload.logger.info('Reconnecting to postgres')
|
||||
void connectWithReconnect({ adapter, payload, reconnect: true })
|
||||
}, 1000)
|
||||
}
|
||||
}
|
||||
if (!result) {
|
||||
return
|
||||
}
|
||||
result.prependListener('error', (err) => {
|
||||
try {
|
||||
if (err.code === 'ECONNRESET') {
|
||||
void connectWithReconnect({ adapter, payload, reconnect: true })
|
||||
}
|
||||
} catch (err) {
|
||||
// swallow error
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
|
||||
this.schema = {
|
||||
...this.tables,
|
||||
@@ -17,14 +55,19 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
|
||||
|
||||
try {
|
||||
this.pool = new Pool(this.poolOptions)
|
||||
await this.pool.connect()
|
||||
await connectWithReconnect({ adapter: this, payload })
|
||||
|
||||
const logger = this.logger || false
|
||||
|
||||
this.drizzle = drizzle(this.pool, { schema: this.schema, logger })
|
||||
this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
|
||||
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
|
||||
this.payload.logger.info('---- DROPPING TABLES ----')
|
||||
await this.drizzle.execute(sql`drop schema public cascade;
|
||||
create schema public;`)
|
||||
this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
|
||||
await this.drizzle.execute(
|
||||
sql.raw(`
|
||||
drop schema if exists ${this.schemaName || 'public'} cascade;
|
||||
create schema ${this.schemaName || 'public'};
|
||||
`),
|
||||
)
|
||||
this.payload.logger.info('---- DROPPED TABLES ----')
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -81,7 +124,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
|
||||
await apply()
|
||||
|
||||
// Migration table def in order to use query using drizzle
|
||||
const migrationsSchema = pgTable('payload_migrations', {
|
||||
const migrationsSchema = this.pgSchema.table('payload_migrations', {
|
||||
name: varchar('name'),
|
||||
batch: numeric('batch'),
|
||||
created_at: timestamp('created_at'),
|
||||
|
||||
@@ -53,7 +53,7 @@ const getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({
|
||||
|
||||
export const createMigration: CreateMigration = async function createMigration(
|
||||
this: PostgresAdapter,
|
||||
{ migrationName, payload },
|
||||
{ forceAcceptWarning, migrationName, payload },
|
||||
) {
|
||||
const dir = payload.db.migrationDir
|
||||
if (!fs.existsSync(dir)) {
|
||||
@@ -95,7 +95,7 @@ export const createMigration: CreateMigration = async function createMigration(
|
||||
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
|
||||
|
||||
if (!sqlStatementsUp.length && !sqlStatementsDown.length) {
|
||||
if (!sqlStatementsUp.length && !sqlStatementsDown.length && !forceAcceptWarning) {
|
||||
const { confirm: shouldCreateBlankMigration } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
|
||||
@@ -158,7 +158,7 @@ export const findMany = async function find({
|
||||
query: db
|
||||
.select({
|
||||
count: sql<number>`count
|
||||
(*)`,
|
||||
(DISTINCT ${adapter.tables[tableName].id})`,
|
||||
})
|
||||
.from(table)
|
||||
.where(where),
|
||||
|
||||
@@ -78,7 +78,7 @@ export const traverseFields = ({
|
||||
with: {},
|
||||
}
|
||||
|
||||
const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`
|
||||
const arrayTableName = `${currentTableName}_${path}${toSnakeCase(field.name)}`
|
||||
|
||||
if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales
|
||||
currentArgs.with[`${path}${field.name}`] = withArray
|
||||
|
||||
@@ -42,7 +42,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
|
||||
export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
function adapter({ payload }: { payload: Payload }) {
|
||||
const migrationDir = findMigrationDir(args.migrationDir)
|
||||
|
||||
const idType = args.idType || 'serial'
|
||||
return createDatabaseAdapter<PostgresAdapter>({
|
||||
name: 'postgres',
|
||||
|
||||
@@ -50,12 +50,15 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
drizzle: undefined,
|
||||
enums: {},
|
||||
fieldConstraints: {},
|
||||
idType,
|
||||
logger: args.logger,
|
||||
pgSchema: undefined,
|
||||
pool: undefined,
|
||||
poolOptions: args.pool,
|
||||
push: args.push,
|
||||
relations: {},
|
||||
schema: {},
|
||||
schemaName: args.schemaName,
|
||||
sessions: {},
|
||||
tables: {},
|
||||
|
||||
@@ -68,7 +71,10 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
createGlobalVersion,
|
||||
createMigration,
|
||||
createVersion,
|
||||
defaultIDType: 'number',
|
||||
/**
|
||||
* This represents how a default ID is treated in Payload as were a field type
|
||||
*/
|
||||
defaultIDType: idType === 'serial' ? 'number' : 'text',
|
||||
deleteMany,
|
||||
deleteOne,
|
||||
deleteVersions,
|
||||
|
||||
@@ -2,16 +2,21 @@
|
||||
import type { Init } from 'payload/database'
|
||||
import type { SanitizedCollectionConfig } from 'payload/types'
|
||||
|
||||
import { pgEnum } from 'drizzle-orm/pg-core'
|
||||
import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
|
||||
import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { PostgresAdapter } from './types'
|
||||
|
||||
import { buildTable } from './schema/build'
|
||||
import { getConfigIDType } from './schema/getConfigIDType'
|
||||
|
||||
export const init: Init = async function init(this: PostgresAdapter) {
|
||||
if (this.schemaName) {
|
||||
this.pgSchema = pgSchema(this.schemaName)
|
||||
} else {
|
||||
this.pgSchema = { table: pgTable }
|
||||
}
|
||||
|
||||
if (this.payload.config.localization) {
|
||||
this.enums.enum__locales = pgEnum(
|
||||
'_locales',
|
||||
@@ -24,9 +29,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!collection?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
fields: collection.fields,
|
||||
@@ -38,13 +43,11 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
const versionsTableName = `_${tableName}_v`
|
||||
const versionFields = buildVersionCollectionFields(collection)
|
||||
|
||||
const versionsParentIDColType = getConfigIDType(collection.fields)
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!collection.versions?.drafts,
|
||||
disableUnique: true,
|
||||
fields: versionFields,
|
||||
@@ -59,9 +62,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!global?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
fields: global.fields,
|
||||
@@ -75,9 +78,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!global.versions?.drafts,
|
||||
disableUnique: true,
|
||||
fields: versionFields,
|
||||
|
||||
@@ -39,7 +39,7 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
|
||||
latestBatch = Number(migrationsInDB[0]?.batch)
|
||||
}
|
||||
} else {
|
||||
await createMigrationTable(this.drizzle)
|
||||
await createMigrationTable(this)
|
||||
}
|
||||
|
||||
if (migrationsInDB.find((m) => m.batch === -1)) {
|
||||
|
||||
@@ -37,7 +37,7 @@ export async function migrateDown(this: PostgresAdapter): Promise<void> {
|
||||
}
|
||||
|
||||
const start = Date.now()
|
||||
const req = {} as PayloadRequest
|
||||
const req = { payload } as PayloadRequest
|
||||
|
||||
try {
|
||||
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
|
||||
|
||||
@@ -14,33 +14,40 @@ import { parseError } from './utilities/parseError'
|
||||
/**
|
||||
* Drop the current database and run all migrate up functions
|
||||
*/
|
||||
export async function migrateFresh(this: PostgresAdapter): Promise<void> {
|
||||
export async function migrateFresh(
|
||||
this: PostgresAdapter,
|
||||
{ forceAcceptWarning = false },
|
||||
): Promise<void> {
|
||||
const { payload } = this
|
||||
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
if (forceAcceptWarning === false) {
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
},
|
||||
)
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
payload.logger.info({
|
||||
msg: `Dropping database.`,
|
||||
})
|
||||
|
||||
await this.drizzle.execute(sql`drop schema public cascade;
|
||||
create schema public;`)
|
||||
await this.drizzle.execute(
|
||||
sql.raw(`drop schema ${this.schemaName || 'public'} cascade;
|
||||
create schema ${this.schemaName || 'public'};`),
|
||||
)
|
||||
|
||||
const migrationFiles = await readMigrationFiles({ payload })
|
||||
payload.logger.debug({
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
|
||||
import type { Field, Where } from 'payload/types'
|
||||
|
||||
import { asc, desc } from 'drizzle-orm'
|
||||
@@ -12,7 +13,7 @@ export type BuildQueryJoins = Record<string, SQL>
|
||||
|
||||
export type BuildQueryJoinAliases = {
|
||||
condition: SQL
|
||||
table: GenericTable
|
||||
table: GenericTable | PgTableWithColumns<any>
|
||||
}[]
|
||||
|
||||
type BuildQueryArgs = {
|
||||
@@ -75,6 +76,7 @@ const buildQuery = async function buildQuery({
|
||||
pathSegments: sortPath.replace(/__/g, '.').split('.'),
|
||||
selectFields,
|
||||
tableName,
|
||||
value: sortPath,
|
||||
})
|
||||
orderBy.column = sortTable?.[sortTableColumnName]
|
||||
} catch (err) {
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
|
||||
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
|
||||
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'
|
||||
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { and, eq, like, sql } from 'drizzle-orm'
|
||||
import { alias } from 'drizzle-orm/pg-core'
|
||||
import { APIError } from 'payload/errors'
|
||||
import { fieldAffectsData, tabHasName } from 'payload/types'
|
||||
@@ -15,7 +16,7 @@ import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery'
|
||||
|
||||
type Constraint = {
|
||||
columnName: string
|
||||
table: GenericTable
|
||||
table: GenericTable | PgTableWithColumns<any>
|
||||
value: unknown
|
||||
}
|
||||
|
||||
@@ -26,12 +27,12 @@ type TableColumn = {
|
||||
getNotNullColumnByValue?: (val: unknown) => string
|
||||
pathSegments?: string[]
|
||||
rawColumn?: SQL
|
||||
table: GenericTable
|
||||
table: GenericTable | PgTableWithColumns<any>
|
||||
}
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
aliasTable?: GenericTable
|
||||
aliasTable?: GenericTable | PgTableWithColumns<any>
|
||||
collectionPath: string
|
||||
columnPrefix?: string
|
||||
constraintPath?: string
|
||||
@@ -44,6 +45,14 @@ type Args = {
|
||||
rootTableName?: string
|
||||
selectFields: Record<string, GenericColumn>
|
||||
tableName: string
|
||||
/**
|
||||
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
|
||||
*/
|
||||
tableNameSuffix?: string
|
||||
/**
|
||||
* The raw value of the query before sanitization
|
||||
*/
|
||||
value: unknown
|
||||
}
|
||||
/**
|
||||
* Transforms path to table and column name
|
||||
@@ -65,6 +74,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName: incomingRootTableName,
|
||||
selectFields,
|
||||
tableName,
|
||||
tableNameSuffix = '',
|
||||
value,
|
||||
}: Args): TableColumn => {
|
||||
const fieldPath = incomingSegments[0]
|
||||
let locale = incomingLocale
|
||||
@@ -83,8 +94,8 @@ export const getTableColumnFromPath = ({
|
||||
constraints,
|
||||
field: {
|
||||
name: 'id',
|
||||
type: 'number',
|
||||
},
|
||||
type: adapter.idType === 'uuid' ? 'text' : 'number',
|
||||
} as TextField | NumberField,
|
||||
table: adapter.tables[newTableName],
|
||||
}
|
||||
}
|
||||
@@ -125,6 +136,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
case 'tab': {
|
||||
@@ -134,7 +147,7 @@ export const getTableColumnFromPath = ({
|
||||
aliasTable,
|
||||
collectionPath,
|
||||
columnPrefix: `${columnPrefix}${field.name}_`,
|
||||
constraintPath,
|
||||
constraintPath: `${constraintPath}${field.name}.`,
|
||||
constraints,
|
||||
fields: field.fields,
|
||||
joinAliases,
|
||||
@@ -144,6 +157,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
return getTableColumnFromPath({
|
||||
@@ -161,6 +176,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -185,7 +202,7 @@ export const getTableColumnFromPath = ({
|
||||
aliasTable,
|
||||
collectionPath,
|
||||
columnPrefix: `${columnPrefix}${field.name}_`,
|
||||
constraintPath,
|
||||
constraintPath: `${constraintPath}${field.name}.`,
|
||||
constraints,
|
||||
fields: field.fields,
|
||||
joinAliases,
|
||||
@@ -195,11 +212,13 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
case 'array': {
|
||||
newTableName = `${tableName}_${toSnakeCase(field.name)}`
|
||||
newTableName = `${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
if (locale && field.localized && adapter.payload.config.localization) {
|
||||
joins[newTableName] = and(
|
||||
@@ -232,12 +251,39 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
let blockTableColumn: TableColumn
|
||||
let newTableName: string
|
||||
|
||||
// handle blockType queries
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
// find the block config using the value
|
||||
const blockTypes = Array.isArray(value) ? value : [value]
|
||||
blockTypes.forEach((blockType) => {
|
||||
const block = field.blocks.find((block) => block.slug === blockType)
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
joins[newTableName] = eq(
|
||||
adapter.tables[tableName].id,
|
||||
adapter.tables[newTableName]._parentID,
|
||||
)
|
||||
constraints.push({
|
||||
columnName: '_path',
|
||||
table: adapter.tables[newTableName],
|
||||
value: pathSegments[0],
|
||||
})
|
||||
})
|
||||
return {
|
||||
constraints,
|
||||
field,
|
||||
getNotNullColumnByValue: () => 'id',
|
||||
table: adapter.tables[tableName],
|
||||
}
|
||||
}
|
||||
|
||||
const hasBlockField = field.blocks.some((block) => {
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
@@ -258,6 +304,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields: blockSelectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
} catch (error) {
|
||||
// this is fine, not every block will have the field
|
||||
@@ -298,9 +345,6 @@ export const getTableColumnFromPath = ({
|
||||
table: blockTableColumn.table,
|
||||
}
|
||||
}
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
throw new APIError('Querying on blockType is not supported')
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
@@ -317,21 +361,15 @@ export const getTableColumnFromPath = ({
|
||||
|
||||
// Join in the relationships table
|
||||
joinAliases.push({
|
||||
condition: eq(
|
||||
(aliasTable || adapter.tables[rootTableName]).id,
|
||||
aliasRelationshipTable.parent,
|
||||
condition: and(
|
||||
eq((aliasTable || adapter.tables[rootTableName]).id, aliasRelationshipTable.parent),
|
||||
like(aliasRelationshipTable.path, `${constraintPath}${field.name}`),
|
||||
),
|
||||
table: aliasRelationshipTable,
|
||||
})
|
||||
|
||||
selectFields[`${relationTableName}.path`] = aliasRelationshipTable.path
|
||||
|
||||
constraints.push({
|
||||
columnName: 'path',
|
||||
table: aliasRelationshipTable,
|
||||
value: `${constraintPath}${field.name}`,
|
||||
})
|
||||
|
||||
let newAliasTable
|
||||
|
||||
if (typeof field.relationTo === 'string') {
|
||||
@@ -346,7 +384,7 @@ export const getTableColumnFromPath = ({
|
||||
table: newAliasTable,
|
||||
})
|
||||
|
||||
if (newCollectionPath === '') {
|
||||
if (newCollectionPath === '' || newCollectionPath === 'id') {
|
||||
return {
|
||||
columnName: `${field.relationTo}ID`,
|
||||
constraints,
|
||||
@@ -394,6 +432,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName: newTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -428,7 +467,7 @@ export const getTableColumnFromPath = ({
|
||||
columnName: `${columnPrefix}${field.name}`,
|
||||
constraints,
|
||||
field,
|
||||
pathSegments: pathSegments,
|
||||
pathSegments,
|
||||
table: targetTable,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,11 +63,7 @@ export async function parseParams({
|
||||
where: condition,
|
||||
})
|
||||
if (builtConditions.length > 0) {
|
||||
if (result) {
|
||||
result = operatorMap[conditionOperator](result, ...builtConditions)
|
||||
} else {
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
} else {
|
||||
// It's a path - and there can be multiple comparisons on a single path.
|
||||
@@ -77,6 +73,7 @@ export async function parseParams({
|
||||
if (typeof pathOperators === 'object') {
|
||||
for (const operator of Object.keys(pathOperators)) {
|
||||
if (validOperators.includes(operator as Operator)) {
|
||||
const val = where[relationOrPath][operator]
|
||||
const {
|
||||
columnName,
|
||||
constraints: queryConstraints,
|
||||
@@ -95,10 +92,9 @@ export async function parseParams({
|
||||
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
|
||||
selectFields,
|
||||
tableName,
|
||||
value: val,
|
||||
})
|
||||
|
||||
const val = where[relationOrPath][operator]
|
||||
|
||||
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
|
||||
if (typeof value === 'string' && value.indexOf('%') > -1) {
|
||||
constraints.push(operatorMap.like(constraintTable[col], value))
|
||||
@@ -169,6 +165,7 @@ export async function parseParams({
|
||||
}
|
||||
|
||||
const sanitizedQueryValue = sanitizeQueryValue({
|
||||
adapter,
|
||||
field,
|
||||
operator,
|
||||
relationOrPath,
|
||||
@@ -207,6 +204,16 @@ export async function parseParams({
|
||||
break
|
||||
}
|
||||
|
||||
if (operator === 'equals' && queryValue === null) {
|
||||
constraints.push(isNull(rawColumn || table[columnName]))
|
||||
break
|
||||
}
|
||||
|
||||
if (operator === 'not_equals' && queryValue === null) {
|
||||
constraints.push(isNotNull(rawColumn || table[columnName]))
|
||||
break
|
||||
}
|
||||
|
||||
constraints.push(
|
||||
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
|
||||
)
|
||||
|
||||
@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
|
||||
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
|
||||
import { createArrayFromCommaDelineated } from 'payload/utilities'
|
||||
|
||||
import type { PostgresAdapter } from '../types'
|
||||
|
||||
type SanitizeQueryValueArgs = {
|
||||
adapter: PostgresAdapter
|
||||
field: Field | TabAsField
|
||||
operator: string
|
||||
relationOrPath: string
|
||||
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
|
||||
}
|
||||
|
||||
export const sanitizeQueryValue = ({
|
||||
adapter,
|
||||
field,
|
||||
operator: operatorArg,
|
||||
relationOrPath,
|
||||
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
|
||||
) {
|
||||
const allPossibleIDTypes: (number | string)[] = []
|
||||
formattedValue.forEach((val) => {
|
||||
if (typeof val === 'string') {
|
||||
if (adapter.idType !== 'uuid' && typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val, parseInt(val))
|
||||
} else if (typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val)
|
||||
} else {
|
||||
allPossibleIDTypes.push(val, String(val))
|
||||
}
|
||||
|
||||
@@ -1,35 +1,31 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { Relation } from 'drizzle-orm'
|
||||
import type { IndexBuilder, PgColumnBuilder, UniqueConstraintBuilder } from 'drizzle-orm/pg-core'
|
||||
import type {
|
||||
IndexBuilder,
|
||||
PgColumnBuilder,
|
||||
PgTableWithColumns,
|
||||
UniqueConstraintBuilder,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
index,
|
||||
integer,
|
||||
numeric,
|
||||
pgTable,
|
||||
serial,
|
||||
timestamp,
|
||||
unique,
|
||||
varchar,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import { index, integer, numeric, serial, timestamp, unique, varchar } from 'drizzle-orm/pg-core'
|
||||
import { fieldAffectsData } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { getConfigIDType } from './getConfigIDType'
|
||||
import { parentIDColumnMap } from './parentIDColumnMap'
|
||||
import { setColumnID } from './setColumnID'
|
||||
import { traverseFields } from './traverseFields'
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
baseColumns?: Record<string, PgColumnBuilder>
|
||||
baseExtraConfig?: Record<string, (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder>
|
||||
buildTexts?: boolean
|
||||
buildNumbers?: boolean
|
||||
buildRelationships?: boolean
|
||||
buildTexts?: boolean
|
||||
disableNotNull: boolean
|
||||
disableUnique: boolean
|
||||
fields: Field[]
|
||||
@@ -42,8 +38,8 @@ type Args = {
|
||||
}
|
||||
|
||||
type Result = {
|
||||
hasManyTextField: 'index' | boolean
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
relationsToBuild: Map<string, string>
|
||||
}
|
||||
|
||||
@@ -51,9 +47,9 @@ export const buildTable = ({
|
||||
adapter,
|
||||
baseColumns = {},
|
||||
baseExtraConfig = {},
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
disableNotNull,
|
||||
disableUnique = false,
|
||||
fields,
|
||||
@@ -77,39 +73,32 @@ export const buildTable = ({
|
||||
|
||||
const localesColumns: Record<string, PgColumnBuilder> = {}
|
||||
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {}
|
||||
let localesTable: GenericTable
|
||||
let textsTable: GenericTable
|
||||
let numbersTable: GenericTable
|
||||
let localesTable: GenericTable | PgTableWithColumns<any>
|
||||
let textsTable: GenericTable | PgTableWithColumns<any>
|
||||
let numbersTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Relationships to the base collection
|
||||
const relationships: Set<string> = rootRelationships || new Set()
|
||||
|
||||
let relationshipsTable: GenericTable
|
||||
let relationshipsTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Drizzle relations
|
||||
const relationsToBuild: Map<string, string> = new Map()
|
||||
|
||||
const idColType = getConfigIDType(fields)
|
||||
const idColType: IDType = setColumnID({ adapter, columns, fields })
|
||||
|
||||
const idColTypeMap = {
|
||||
integer: serial,
|
||||
numeric,
|
||||
varchar,
|
||||
}
|
||||
|
||||
columns.id = idColTypeMap[idColType]('id').primaryKey()
|
||||
;({
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyTextField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columns,
|
||||
disableNotNull,
|
||||
disableUnique,
|
||||
@@ -143,7 +132,7 @@ export const buildTable = ({
|
||||
.notNull()
|
||||
}
|
||||
|
||||
const table = pgTable(tableName, columns, (cols) => {
|
||||
const table = adapter.pgSchema.table(tableName, columns, (cols) => {
|
||||
const extraConfig = Object.entries(baseExtraConfig).reduce((config, [key, func]) => {
|
||||
config[key] = func(cols)
|
||||
return config
|
||||
@@ -165,7 +154,7 @@ export const buildTable = ({
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull()
|
||||
|
||||
localesTable = pgTable(localeTableName, localesColumns, (cols) => {
|
||||
localesTable = adapter.pgSchema.table(localeTableName, localesColumns, (cols) => {
|
||||
return Object.entries(localesIndexes).reduce(
|
||||
(acc, [colName, func]) => {
|
||||
acc[colName] = func(cols)
|
||||
@@ -196,29 +185,29 @@ export const buildTable = ({
|
||||
const textsTableName = `${rootTableName}_texts`
|
||||
const columns: Record<string, PgColumnBuilder> = {
|
||||
id: serial('id').primaryKey(),
|
||||
text: varchar('text'),
|
||||
order: integer('order').notNull(),
|
||||
parent: parentIDColumnMap[idColType]('parent_id')
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull(),
|
||||
path: varchar('path').notNull(),
|
||||
text: varchar('text'),
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
textsTable = pgTable(textsTableName, columns, (cols) => {
|
||||
textsTable = adapter.pgSchema.table(textsTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
|
||||
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyTextField === 'index') {
|
||||
indexes.text_idx = index('text_idx').on(cols.text)
|
||||
indexes.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
|
||||
indexes.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
|
||||
}
|
||||
|
||||
return indexes
|
||||
@@ -252,17 +241,20 @@ export const buildTable = ({
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
numbersTable = pgTable(numbersTableName, columns, (cols) => {
|
||||
numbersTable = adapter.pgSchema.table(numbersTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
|
||||
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyNumberField === 'index') {
|
||||
indexes.numberIdx = index('number_idx').on(cols.number)
|
||||
indexes.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyNumberField) {
|
||||
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
|
||||
indexes.localeParent = index(`${numbersTableName}_locale_parent`).on(
|
||||
cols.locale,
|
||||
cols.parent,
|
||||
)
|
||||
}
|
||||
|
||||
return indexes
|
||||
@@ -297,7 +289,7 @@ export const buildTable = ({
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const formattedRelationTo = toSnakeCase(relationTo)
|
||||
let colType = 'integer'
|
||||
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
|
||||
const relatedCollectionCustomID = adapter.payload.collections[
|
||||
relationTo
|
||||
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
@@ -311,19 +303,23 @@ export const buildTable = ({
|
||||
|
||||
const relationshipsTableName = `${tableName}_rels`
|
||||
|
||||
relationshipsTable = pgTable(relationshipsTableName, relationshipColumns, (cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index('order_idx').on(cols.order),
|
||||
parentIdx: index('parent_idx').on(cols.parent),
|
||||
pathIdx: index('path_idx').on(cols.path),
|
||||
}
|
||||
relationshipsTable = adapter.pgSchema.table(
|
||||
relationshipsTableName,
|
||||
relationshipColumns,
|
||||
(cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
|
||||
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
|
||||
}
|
||||
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index('locale_idx').on(cols.locale)
|
||||
}
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
adapter.tables[relationshipsTableName] = relationshipsTable
|
||||
|
||||
@@ -381,5 +377,5 @@ export const buildTable = ({
|
||||
|
||||
adapter.relations[`relations_${tableName}`] = tableRelations
|
||||
|
||||
return { hasManyTextField, hasManyNumberField, relationsToBuild }
|
||||
return { hasManyNumberField, hasManyTextField, relationsToBuild }
|
||||
}
|
||||
|
||||
@@ -6,10 +6,11 @@ import type { GenericColumn } from '../types'
|
||||
type CreateIndexArgs = {
|
||||
columnName: string
|
||||
name: string | string[]
|
||||
tableName: string
|
||||
unique?: boolean
|
||||
}
|
||||
|
||||
export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
export const createIndex = ({ name, columnName, tableName, unique }: CreateIndexArgs) => {
|
||||
return (table: { [x: string]: GenericColumn }) => {
|
||||
let columns
|
||||
if (Array.isArray(name)) {
|
||||
@@ -20,7 +21,8 @@ export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
} else {
|
||||
columns = [table[name]]
|
||||
}
|
||||
if (unique) return uniqueIndex(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
if (unique)
|
||||
return uniqueIndex(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import { type Field, fieldAffectsData } from 'payload/types'
|
||||
|
||||
export const getConfigIDType = (fields: Field[]): string => {
|
||||
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
|
||||
if (idField) {
|
||||
if (idField.type === 'number') {
|
||||
return 'numeric'
|
||||
}
|
||||
|
||||
if (idField.type === 'text') {
|
||||
return 'varchar'
|
||||
}
|
||||
}
|
||||
|
||||
return 'integer'
|
||||
}
|
||||
@@ -1,7 +1,13 @@
|
||||
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
|
||||
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'
|
||||
|
||||
export const parentIDColumnMap = {
|
||||
import type { IDType } from '../types'
|
||||
|
||||
export const parentIDColumnMap: Record<
|
||||
IDType,
|
||||
typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
|
||||
> = {
|
||||
integer,
|
||||
numeric,
|
||||
uuid,
|
||||
varchar,
|
||||
}
|
||||
|
||||
33  packages/db-postgres/src/schema/setColumnID.ts  Normal file
@@ -0,0 +1,33 @@
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'

import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
import { type Field, fieldAffectsData } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities'

import type { IDType, PostgresAdapter } from '../types'

type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
  const idField = flattenTopLevelFields(fields).find(
    (field) => fieldAffectsData(field) && field.name === 'id',
  )
  if (idField) {
    if (idField.type === 'number') {
      columns.id = numeric('id').primaryKey()
      return 'numeric'
    }

    if (idField.type === 'text') {
      columns.id = varchar('id').primaryKey()
      return 'varchar'
    }
  }

  if (adapter.idType === 'uuid') {
    columns.id = uuid('id').defaultRandom().primaryKey()
    return 'uuid'
  }

  columns.id = serial('id').primaryKey()
  return 'integer'
}
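For reference, a minimal sketch (not part of this diff) of how the adapter options that setColumnID and pgSchema read might be supplied; the connection string is a placeholder assumption:

import { postgresAdapter } from '@payloadcms/db-postgres'

// Hypothetical adapter config: idType 'uuid' makes setColumnID emit uuid('id').defaultRandom()
// primary keys instead of serial ones, and schemaName routes table creation through
// adapter.pgSchema.table(...) into the named Postgres schema.
export const db = postgresAdapter({
  pool: { connectionString: process.env.DATABASE_URI },
  idType: 'uuid',
  schemaName: 'payload',
})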
@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
import { relations } from 'drizzle-orm'
import {
  PgNumericBuilder,
  PgUUIDBuilder,
  PgVarcharBuilder,
  boolean,
  index,
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
import { fieldAffectsData, optionIsObject } from 'payload/types'
import toSnakeCase from 'to-snake-case'

import type { GenericColumns, PostgresAdapter } from '../types'
import type { GenericColumns, IDType, PostgresAdapter } from '../types'

import { hasLocalesTable } from '../utilities/hasLocalesTable'
import { buildTable } from './build'
@@ -32,9 +33,9 @@ import { validateExistingBlockIsIdentical } from './validateExistingBlockIsIdent

type Args = {
  adapter: PostgresAdapter
  buildTexts: boolean
  buildNumbers: boolean
  buildRelationships: boolean
  buildTexts: boolean
  columnPrefix?: string
  columns: Record<string, PgColumnBuilder>
  disableNotNull: boolean
@@ -56,18 +57,18 @@ type Args = {

type Result = {
  hasLocalizedField: boolean
  hasLocalizedManyTextField: boolean
  hasLocalizedManyNumberField: boolean
  hasLocalizedManyTextField: boolean
  hasLocalizedRelationshipField: boolean
  hasManyTextField: 'index' | boolean
  hasManyNumberField: 'index' | boolean
  hasManyTextField: 'index' | boolean
}

export const traverseFields = ({
  adapter,
  buildTexts,
  buildNumbers,
  buildRelationships,
  buildTexts,
  columnPrefix,
  columns,
  disableNotNull,
@@ -93,7 +94,8 @@ export const traverseFields = ({
  let hasManyNumberField: 'index' | boolean = false
  let hasLocalizedManyNumberField = false

  let parentIDColType = 'integer'
  let parentIDColType: IDType = 'integer'
  if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
  if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
  if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'

@@ -122,7 +124,7 @@ export const traverseFields = ({
    if (
      (field.unique || field.index) &&
      !['array', 'blocks', 'group', 'point', 'relationship', 'upload'].includes(field.type) &&
      !(field.type === 'number' && field.hasMany === true)
      !('hasMany' in field && field.hasMany === true)
    ) {
      const unique = disableUnique !== true && field.unique
      if (unique) {
@@ -132,9 +134,10 @@ export const traverseFields = ({
        }
        adapter.fieldConstraints[rootTableName][`${columnName}_idx`] = constraintValue
      }
      targetIndexes[`${field.name}Idx`] = createIndex({
      targetIndexes[`${newTableName}_${field.name}Idx`] = createIndex({
        name: fieldName,
        columnName,
        tableName: newTableName,
        unique,
      })
    }
@@ -241,17 +244,18 @@ export const traverseFields = ({
          string,
          (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
        > = {
          orderIdx: (cols) => index('order_idx').on(cols.order),
          parentIdx: (cols) => index('parent_idx').on(cols.parent),
          orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order),
          parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
        }

        if (field.localized) {
          baseColumns.locale = adapter.enums.enum__locales('locale').notNull()
          baseExtraConfig.localeIdx = (cols) => index('locale_idx').on(cols.locale)
          baseExtraConfig.localeIdx = (cols) =>
            index(`${selectTableName}_locale_idx`).on(cols.locale)
        }

        if (field.index) {
          baseExtraConfig.value = (cols) => index('value_idx').on(cols.value)
          baseExtraConfig.value = (cols) => index(`${selectTableName}_value_idx`).on(cols.value)
        }

        buildTable({
@@ -304,18 +308,19 @@ export const traverseFields = ({
          string,
          (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
        > = {
          _orderIdx: (cols) => index('_order_idx').on(cols._order),
          _parentIDIdx: (cols) => index('_parent_id_idx').on(cols._parentID),
          _orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order),
          _parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
        }

        if (field.localized && adapter.payload.config.localization) {
          baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
          baseExtraConfig._localeIdx = (cols) => index('_locale_idx').on(cols._locale)
          baseExtraConfig._localeIdx = (cols) =>
            index(`${arrayTableName}_locale_idx`).on(cols._locale)
        }

        const {
          hasManyTextField: subHasManyTextField,
          hasManyNumberField: subHasManyNumberField,
          hasManyTextField: subHasManyTextField,
          relationsToBuild: subRelationsToBuild,
        } = buildTable({
          adapter,
@@ -384,19 +389,20 @@ export const traverseFields = ({
          string,
          (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
        > = {
          _orderIdx: (cols) => index('order_idx').on(cols._order),
          _parentIDIdx: (cols) => index('parent_id_idx').on(cols._parentID),
          _pathIdx: (cols) => index('path_idx').on(cols._path),
          _orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order),
          _parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID),
          _pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
        }

        if (field.localized && adapter.payload.config.localization) {
          baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
          baseExtraConfig._localeIdx = (cols) => index('locale_idx').on(cols._locale)
          baseExtraConfig._localeIdx = (cols) =>
            index(`${blockTableName}_locale_idx`).on(cols._locale)
        }

        const {
          hasManyTextField: subHasManyTextField,
          hasManyNumberField: subHasManyNumberField,
          hasManyTextField: subHasManyTextField,
          relationsToBuild: subRelationsToBuild,
        } = buildTable({
          adapter,
@@ -465,16 +471,16 @@ export const traverseFields = ({
      if (!('name' in field)) {
        const {
          hasLocalizedField: groupHasLocalizedField,
          hasLocalizedManyTextField: groupHasLocalizedManyTextField,
          hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
          hasLocalizedManyTextField: groupHasLocalizedManyTextField,
          hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
          hasManyTextField: groupHasManyTextField,
          hasManyNumberField: groupHasManyNumberField,
          hasManyTextField: groupHasManyTextField,
        } = traverseFields({
          adapter,
          buildTexts,
          buildNumbers,
          buildRelationships,
          buildTexts,
          columnPrefix,
          columns,
          disableNotNull,
@@ -507,16 +513,16 @@ export const traverseFields = ({

      const {
        hasLocalizedField: groupHasLocalizedField,
        hasLocalizedManyTextField: groupHasLocalizedManyTextField,
        hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
        hasLocalizedManyTextField: groupHasLocalizedManyTextField,
        hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
        hasManyTextField: groupHasManyTextField,
        hasManyNumberField: groupHasManyNumberField,
        hasManyTextField: groupHasManyTextField,
      } = traverseFields({
        adapter,
        buildTexts,
        buildNumbers,
        buildRelationships,
        buildTexts,
        columnPrefix: `${columnName}_`,
        columns,
        disableNotNull: disableNotNullFromHere,
@@ -550,16 +556,16 @@ export const traverseFields = ({

      const {
        hasLocalizedField: tabHasLocalizedField,
        hasLocalizedManyTextField: tabHasLocalizedManyTextField,
        hasLocalizedManyNumberField: tabHasLocalizedManyNumberField,
        hasLocalizedManyTextField: tabHasLocalizedManyTextField,
        hasLocalizedRelationshipField: tabHasLocalizedRelationshipField,
        hasManyTextField: tabHasManyTextField,
        hasManyNumberField: tabHasManyNumberField,
        hasManyTextField: tabHasManyTextField,
      } = traverseFields({
        adapter,
        buildTexts,
        buildNumbers,
        buildRelationships,
        buildTexts,
        columnPrefix,
        columns,
        disableNotNull: disableNotNullFromHere,
@@ -593,16 +599,16 @@ export const traverseFields = ({
      const disableNotNullFromHere = Boolean(field.admin?.condition) || disableNotNull
      const {
        hasLocalizedField: rowHasLocalizedField,
        hasLocalizedManyTextField: rowHasLocalizedManyTextField,
        hasLocalizedManyNumberField: rowHasLocalizedManyNumberField,
        hasLocalizedManyTextField: rowHasLocalizedManyTextField,
        hasLocalizedRelationshipField: rowHasLocalizedRelationshipField,
        hasManyTextField: rowHasManyTextField,
        hasManyNumberField: rowHasManyNumberField,
        hasManyTextField: rowHasManyTextField,
      } = traverseFields({
        adapter,
        buildTexts,
        buildNumbers,
        buildRelationships,
        buildTexts,
        columnPrefix,
        columns,
        disableNotNull: disableNotNullFromHere,
@@ -663,10 +669,10 @@ export const traverseFields = ({

  return {
    hasLocalizedField,
    hasLocalizedManyTextField,
    hasLocalizedManyNumberField,
    hasLocalizedManyTextField,
    hasLocalizedRelationshipField,
    hasManyTextField,
    hasManyNumberField,
    hasManyTextField,
  }
}

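One piece of the hunk above worth calling out: instead of threading an ID type through extra arguments, `traverseFields` now infers `parentIDColType` from the builder instance already assigned to `columns.id`. A hedged, stand-alone sketch of that check (the builder classes are imported as in the hunk; treating them as part of drizzle-orm's public surface is an assumption taken from the diff):

// Sketch: deriving the parent ID column type from the built `id` column.
import {
  PgNumericBuilder,
  PgUUIDBuilder,
  PgVarcharBuilder,
  type PgColumnBuilder,
} from 'drizzle-orm/pg-core'

type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'

export const detectParentIDColType = (idColumn: PgColumnBuilder): IDType => {
  // Default to 'integer' (serial) unless the builder says otherwise.
  let parentIDColType: IDType = 'integer'
  if (idColumn instanceof PgUUIDBuilder) parentIDColType = 'uuid'
  if (idColumn instanceof PgNumericBuilder) parentIDColType = 'numeric'
  if (idColumn instanceof PgVarcharBuilder) parentIDColType = 'varchar'
  return parentIDColType
}
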
@@ -7,18 +7,28 @@ import type {
  Relations,
} from 'drizzle-orm'
import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-postgres'
import type { PgColumn, PgEnum, PgTableWithColumns, PgTransaction } from 'drizzle-orm/pg-core'
import type {
  PgColumn,
  PgEnum,
  PgSchema,
  PgTableWithColumns,
  PgTransaction,
} from 'drizzle-orm/pg-core'
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
import type { Payload } from 'payload'
import type { BaseDatabaseAdapter } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Pool, PoolConfig } from 'pg'

export type DrizzleDB = NodePgDatabase<Record<string, unknown>>

export type Args = {
  idType?: 'serial' | 'uuid'
  logger?: DrizzleConfig['logger']
  migrationDir?: string
  pool: PoolConfig
  logger?: DrizzleConfig['logger']
  push?: boolean
  schemaName?: string
}

export type GenericColumn = PgColumn<
@@ -49,13 +59,21 @@ export type DrizzleTransaction = PgTransaction<

export type PostgresAdapter = BaseDatabaseAdapter & {
  drizzle: DrizzleDB
  logger: DrizzleConfig['logger']
  enums: Record<string, GenericEnum>
  /**
   * An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name
   * Used for returning properly formed errors from unique fields
   */
  fieldConstraints: Record<string, Record<string, string>>
  idType: Args['idType']
  logger: DrizzleConfig['logger']
  pgSchema?: { table: PgTableFn } | PgSchema
  pool: Pool
  poolOptions: Args['pool']
  push: boolean
  relations: Record<string, GenericRelation>
  schema: Record<string, GenericEnum | GenericRelation | GenericTable>
  schemaName?: Args['schemaName']
  sessions: {
    [id: string]: {
      db: DrizzleTransaction
@@ -63,18 +81,15 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
      resolve: () => Promise<void>
    }
  }
  tables: Record<string, GenericTable>
  /**
   * An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name
   * Used for returning properly formed errors from unique fields
   */
  fieldConstraints: Record<string, Record<string, string>>
  tables: Record<string, GenericTable | PgTableWithColumns<any>>
}

export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'

export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter

export type MigrateUpArgs = { payload: Payload }
export type MigrateDownArgs = { payload: Payload }
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }
export type MigrateDownArgs = { payload: Payload; req?: Partial<PayloadRequest> }

declare module 'payload' {
  export interface DatabaseAdapter
@@ -82,6 +97,7 @@ declare module 'payload' {
    BaseDatabaseAdapter {
    drizzle: DrizzleDB
    enums: Record<string, GenericEnum>
    fieldConstraints: Record<string, Record<string, string>>
    pool: Pool
    push: boolean
    relations: Record<string, GenericRelation>
@@ -94,6 +110,5 @@ declare module 'payload' {
      }
    }
    tables: Record<string, GenericTable>
    fieldConstraints: Record<string, Record<string, string>>
  }
}

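Taken together, the new `Args` fields give the adapter two user-facing options, `idType` and `schemaName`, alongside the existing `pool`, `push`, and `migrationDir`, and migrations can now receive a partial `req` for transaction support. A hedged configuration sketch; the option names come from the `Args` type above, the values are only illustrative:

// Sketch: wiring the new options when registering the adapter.
import { postgresAdapter } from '@payloadcms/db-postgres'

export const db = postgresAdapter({
  pool: { connectionString: process.env.DATABASE_URI },
  idType: 'uuid', // uuid primary keys instead of serial integers
  schemaName: 'payload', // keep Payload's tables inside a dedicated Postgres schema
  push: false,
})
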
@@ -36,11 +36,11 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
      }
    }

    const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
    const parentID = parentRows[parentRowIndex].id

    // Add any sub arrays that need to be created
    // We will call this recursively below
    arrayRows.forEach((arrayRow) => {
    arrayRows.forEach((arrayRow, i) => {
      if (Object.keys(arrayRow.arrays).length > 0) {
        rowsByTable[tableName].arrays.push(arrayRow.arrays)
      }
@@ -53,6 +53,9 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
          arrayRowLocaleData._parentID = arrayRow.row.id
          arrayRowLocaleData._locale = arrayRowLocale
          rowsByTable[tableName].locales.push(arrayRowLocaleData)
          if (!arrayRow.row.id) {
            arrayRowLocaleData._getParentID = (rows) => rows[i].id
          }
        })
      })
    })
@@ -61,12 +64,23 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
  // Insert all corresponding arrays
  // (one insert per array table)
  for (const [tableName, row] of Object.entries(rowsByTable)) {
    // the nested arrays need the ID for the parentID foreign key
    let insertedRows: Args['parentRows']
    if (row.rows.length > 0) {
      await db.insert(adapter.tables[tableName]).values(row.rows).returning()
      insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
    }

    // Insert locale rows
    if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
      if (!row.locales[0]._parentID) {
        row.locales = row.locales.map((localeRow, i) => {
          if (typeof localeRow._getParentID === 'function') {
            localeRow._parentID = localeRow._getParentID(insertedRows)
            delete localeRow._getParentID
          }
          return localeRow
        })
      }
      await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
    }

@@ -76,7 +90,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
      adapter,
      arrays: row.arrays,
      db,
      parentRows: row.rows,
      parentRows: insertedRows,
    })
  }
}

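The interesting part of this hunk is that locale rows can be assembled before their parent array rows have database-generated IDs, so a `_getParentID` callback is stored and resolved from the `returning()` result once the parent insert completes. A stripped-down sketch of that pattern with plain objects (no Drizzle involved; the row shapes here are illustrative):

// Sketch of deferred parent-ID resolution, independent of the adapter code.
type LocaleRow = {
  _locale: string
  _parentID?: number | string
  _getParentID?: (inserted: Array<{ id: number | string }>) => number | string
}

const localeRows: LocaleRow[] = [
  // Parent row 0 has no ID yet, so defer the lookup until after the insert.
  { _locale: 'en', _getParentID: (inserted) => inserted[0].id },
]

// Stand-in for what `db.insert(...).values(...).returning()` gives back.
const insertedParentRows = [{ id: 42 }]

const resolved = localeRows.map((row) => {
  if (typeof row._getParentID === 'function') {
    row._parentID = row._getParentID(insertedParentRows)
    delete row._getParentID
  }
  return row
})

console.log(resolved) // [{ _locale: 'en', _parentID: 42 }]
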
@@ -1,13 +1,17 @@
import { sql } from 'drizzle-orm'

import type { DrizzleDB } from '../types'
import type { PostgresAdapter } from '../types'

export const createMigrationTable = async (db: DrizzleDB): Promise<void> => {
  await db.execute(sql`CREATE TABLE IF NOT EXISTS "payload_migrations" (
export const createMigrationTable = async (adapter: PostgresAdapter): Promise<void> => {
  const prependSchema = adapter.schemaName ? `"${adapter.schemaName}".` : ''

  await adapter.drizzle.execute(
    sql.raw(`CREATE TABLE IF NOT EXISTS ${prependSchema}"payload_migrations" (
  "id" serial PRIMARY KEY NOT NULL,
  "name" varchar,
  "batch" numeric,
  "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
  "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
);`)
);`),
  )
}

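With `schemaName` set, the migrations table is created inside that schema; otherwise the statement falls back to the default search path. A quick illustration of the string the new code builds:

// Illustration of the prefix logic above (values are examples).
const prependSchema = (schemaName?: string) => (schemaName ? `"${schemaName}".` : '')

console.log(`CREATE TABLE IF NOT EXISTS ${prependSchema('payload')}"payload_migrations" (...)`)
// CREATE TABLE IF NOT EXISTS "payload"."payload_migrations" (...)
console.log(`CREATE TABLE IF NOT EXISTS ${prependSchema()}"payload_migrations" (...)`)
// CREATE TABLE IF NOT EXISTS "payload_migrations" (...)
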
@@ -1,6 +1,6 @@
{
  "name": "payload",
  "version": "2.8.2",
  "version": "2.11.2",
  "description": "Node, React and MongoDB Headless CMS and Application Framework",
  "license": "MIT",
  "main": "./dist/index.js",
@@ -59,7 +59,7 @@
    "@faceless-ui/scroll-info": "1.3.0",
    "@faceless-ui/window-info": "2.1.1",
    "@monaco-editor/react": "4.5.1",
    "@swc/core": "1.3.76",
    "@swc/core": "1.3.107",
    "@swc/register": "0.1.10",
    "body-parser": "1.20.2",
    "body-scroll-lock": "4.0.0-beta.0",
@@ -101,7 +101,6 @@
    "jwt-decode": "3.1.2",
    "md5": "2.3.0",
    "method-override": "3.0.0",
    "micro-memoize": "4.1.2",
    "minimist": "1.2.8",
    "mkdirp": "1.0.4",
    "monaco-editor": "0.38.0",
@@ -193,7 +192,7 @@
    "get-port": "5.1.1",
    "mini-css-extract-plugin": "1.6.2",
    "node-fetch": "2.6.12",
    "nodemon": "3.0.1",
    "nodemon": "3.0.3",
    "object.assign": "4.1.4",
    "object.entries": "1.1.6",
    "passport-strategy": "1.0.0",

@@ -24,11 +24,16 @@ export const Collapsible: React.FC<Props> = ({
|
||||
}) => {
|
||||
const [collapsedLocal, setCollapsedLocal] = useState(Boolean(initCollapsed))
|
||||
const [hoveringToggle, setHoveringToggle] = useState(false)
|
||||
const isNested = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const { t } = useTranslation('fields')
|
||||
|
||||
const collapsed = typeof collapsedFromProps === 'boolean' ? collapsedFromProps : collapsedLocal
|
||||
|
||||
const toggleCollapsible = React.useCallback(() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}, [onToggle, collapsed])
|
||||
|
||||
return (
|
||||
<div
|
||||
className={[
|
||||
@@ -36,14 +41,14 @@ export const Collapsible: React.FC<Props> = ({
|
||||
className,
|
||||
dragHandleProps && `${baseClass}--has-drag-handle`,
|
||||
collapsed && `${baseClass}--collapsed`,
|
||||
isNested && `${baseClass}--nested`,
|
||||
withinCollapsible && `${baseClass}--nested`,
|
||||
hoveringToggle && `${baseClass}--hovered`,
|
||||
`${baseClass}--style-${collapsibleStyle}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
>
|
||||
<CollapsibleProvider>
|
||||
<CollapsibleProvider collapsed={collapsed} toggle={toggleCollapsible}>
|
||||
<div
|
||||
className={`${baseClass}__toggle-wrap`}
|
||||
onMouseEnter={() => setHoveringToggle(true)}
|
||||
@@ -65,10 +70,7 @@ export const Collapsible: React.FC<Props> = ({
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
onClick={() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}}
|
||||
onClick={toggleCollapsible}
|
||||
type="button"
|
||||
>
|
||||
<span>{t('toggleBlock')}</span>
|
||||
|
||||
@@ -1,14 +1,35 @@
|
||||
import React, { createContext, useContext } from 'react'
|
||||
|
||||
const Context = createContext(false)
|
||||
type ContextType = {
|
||||
collapsed: boolean
|
||||
isVisible: boolean
|
||||
toggle: () => void
|
||||
withinCollapsible: boolean
|
||||
}
|
||||
const Context = createContext({
|
||||
collapsed: false,
|
||||
isVisible: true,
|
||||
toggle: () => {},
|
||||
withinCollapsible: true,
|
||||
})
|
||||
|
||||
export const CollapsibleProvider: React.FC<{
|
||||
children?: React.ReactNode
|
||||
collapsed?: boolean
|
||||
toggle: () => void
|
||||
withinCollapsible?: boolean
|
||||
}> = ({ children, withinCollapsible = true }) => {
|
||||
return <Context.Provider value={withinCollapsible}>{children}</Context.Provider>
|
||||
}> = ({ children, collapsed, toggle, withinCollapsible = true }) => {
|
||||
const { collapsed: parentIsCollapsed, isVisible } = useCollapsible()
|
||||
|
||||
const contextValue = React.useMemo((): ContextType => {
|
||||
return {
|
||||
collapsed: Boolean(collapsed),
|
||||
isVisible: isVisible && !parentIsCollapsed,
|
||||
toggle,
|
||||
withinCollapsible,
|
||||
}
|
||||
}, [collapsed, withinCollapsible, toggle, parentIsCollapsed, isVisible])
|
||||
return <Context.Provider value={contextValue}>{children}</Context.Provider>
|
||||
}
|
||||
|
||||
export const useCollapsible = (): boolean => useContext(Context)
|
||||
|
||||
export default Context
|
||||
export const useCollapsible = (): ContextType => useContext(Context)
|
||||
|
||||
@@ -64,7 +64,7 @@ const DeleteDocument: React.FC<Props> = (props) => {
|
||||
if (res.status < 400) {
|
||||
setDeleting(false)
|
||||
toggleModal(modalSlug)
|
||||
toast.success(t('titleDeleted', { label: getTranslation(singular, i18n), title }))
|
||||
toast.success(json.message || t('titleDeleted', { label: getTranslation(singular, i18n), title }))
|
||||
return history.push(`${admin}/collections/${slug}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ export const DocumentControls: React.FC<{
|
||||
id?: string
|
||||
isAccountView?: boolean
|
||||
isEditing?: boolean
|
||||
permissions?: CollectionPermission | GlobalPermission | null
|
||||
permissions?: CollectionPermission | GlobalPermission
|
||||
}> = (props) => {
|
||||
const {
|
||||
id,
|
||||
|
||||
@@ -52,21 +52,9 @@ const Content: React.FC<DocumentDrawerProps> = ({
|
||||
|
||||
const { id, docPermissions, getDocPreferences } = useDocumentInfo()
|
||||
|
||||
// The component definition could come from multiple places in the config
|
||||
// we need to cascade into the proper component from the top-down
|
||||
// 1. "components.Edit"
|
||||
// 2. "components.Edit.Default"
|
||||
// 3. "components.Edit.Default.Component"
|
||||
const CustomEditView =
|
||||
typeof Edit === 'function'
|
||||
? Edit
|
||||
: typeof Edit === 'object' && typeof Edit.Default === 'function'
|
||||
? Edit.Default
|
||||
: typeof Edit?.Default === 'object' &&
|
||||
'Component' in Edit.Default &&
|
||||
typeof Edit.Default.Component === 'function'
|
||||
? Edit.Default.Component
|
||||
: undefined
|
||||
// If they are replacing the entire edit view, use that.
|
||||
// Else let the DefaultEdit determine what to render.
|
||||
const CustomEditView = typeof Edit === 'function' ? Edit : undefined
|
||||
|
||||
const [fields, setFields] = useState(() => formatFields(collectionConfig, true))
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ export const getCustomViews = (args: {
|
||||
? collection?.admin?.components?.views?.Edit
|
||||
: undefined
|
||||
|
||||
const defaultViewKeys = Object.keys(defaultCollectionViews)
|
||||
const defaultViewKeys = Object.keys(defaultCollectionViews())
|
||||
|
||||
customViews = Object.entries(collectionViewsConfig || {}).reduce((prev, [key, view]) => {
|
||||
if (defaultViewKeys.includes(key)) {
|
||||
@@ -38,7 +38,7 @@ export const getCustomViews = (args: {
|
||||
? global?.admin?.components?.views?.Edit
|
||||
: undefined
|
||||
|
||||
const defaultViewKeys = Object.keys(defaultGlobalViews)
|
||||
const defaultViewKeys = Object.keys(defaultGlobalViews())
|
||||
|
||||
customViews = Object.entries(globalViewsConfig || {}).reduce((prev, [key, view]) => {
|
||||
if (defaultViewKeys.includes(key)) {
|
||||
|
||||
@@ -13,6 +13,7 @@ import { fieldTypes } from '../../forms/field-types'
|
||||
import X from '../../icons/X'
|
||||
import { useAuth } from '../../utilities/Auth'
|
||||
import { useConfig } from '../../utilities/Config'
|
||||
import { DocumentInfoProvider } from '../../utilities/DocumentInfo'
|
||||
import { OperationContext } from '../../utilities/OperationProvider'
|
||||
import { SelectAllStatus, useSelection } from '../../views/collections/List/SelectionProvider'
|
||||
import { Drawer, DrawerToggler } from '../Drawer'
|
||||
@@ -120,53 +121,55 @@ const EditMany: React.FC<Props> = (props) => {
|
||||
{t('edit')}
|
||||
</DrawerToggler>
|
||||
<Drawer header={null} slug={drawerSlug}>
|
||||
<OperationContext.Provider value="update">
|
||||
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
|
||||
<div className={`${baseClass}__main`}>
|
||||
<div className={`${baseClass}__header`}>
|
||||
<h2 className={`${baseClass}__header__title`}>
|
||||
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
|
||||
</h2>
|
||||
<button
|
||||
aria-label={t('close')}
|
||||
className={`${baseClass}__header__close`}
|
||||
id={`close-drawer__${drawerSlug}`}
|
||||
onClick={() => closeModal(drawerSlug)}
|
||||
type="button"
|
||||
>
|
||||
<X />
|
||||
</button>
|
||||
</div>
|
||||
<FieldSelect fields={fields} setSelected={setSelected} />
|
||||
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
|
||||
<div className={`${baseClass}__sidebar-wrap`}>
|
||||
<div className={`${baseClass}__sidebar`}>
|
||||
<div className={`${baseClass}__sidebar-sticky-wrap`}>
|
||||
<div className={`${baseClass}__document-actions`}>
|
||||
{collection.versions ? (
|
||||
<React.Fragment>
|
||||
<Publish
|
||||
<DocumentInfoProvider collection={collection}>
|
||||
<OperationContext.Provider value="update">
|
||||
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
|
||||
<div className={`${baseClass}__main`}>
|
||||
<div className={`${baseClass}__header`}>
|
||||
<h2 className={`${baseClass}__header__title`}>
|
||||
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
|
||||
</h2>
|
||||
<button
|
||||
aria-label={t('close')}
|
||||
className={`${baseClass}__header__close`}
|
||||
id={`close-drawer__${drawerSlug}`}
|
||||
onClick={() => closeModal(drawerSlug)}
|
||||
type="button"
|
||||
>
|
||||
<X />
|
||||
</button>
|
||||
</div>
|
||||
<FieldSelect fields={fields} setSelected={setSelected} />
|
||||
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
|
||||
<div className={`${baseClass}__sidebar-wrap`}>
|
||||
<div className={`${baseClass}__sidebar`}>
|
||||
<div className={`${baseClass}__sidebar-sticky-wrap`}>
|
||||
<div className={`${baseClass}__document-actions`}>
|
||||
{collection.versions ? (
|
||||
<React.Fragment>
|
||||
<Publish
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
<SaveDraft
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
</React.Fragment>
|
||||
) : (
|
||||
<Submit
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
<SaveDraft
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
</React.Fragment>
|
||||
) : (
|
||||
<Submit
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
)}
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Form>
|
||||
</OperationContext.Provider>
|
||||
</Form>
|
||||
</OperationContext.Provider>
|
||||
</DocumentInfoProvider>
|
||||
</Drawer>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -133,9 +133,10 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
|
||||
const moreThanOneAvailableCollection = enabledCollectionConfigs.length > 1
|
||||
|
||||
useEffect(() => {
|
||||
const { slug, admin: { listSearchableFields } = {} } = selectedCollectionConfig
|
||||
const { slug, admin: { listSearchableFields } = {}, versions } = selectedCollectionConfig
|
||||
const params: {
|
||||
cacheBust?: number
|
||||
draft?: string
|
||||
limit?: number
|
||||
page?: number
|
||||
search?: string
|
||||
@@ -172,6 +173,7 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
|
||||
if (sort) params.sort = sort
|
||||
if (cacheBust) params.cacheBust = cacheBust
|
||||
if (copyOfWhere) params.where = copyOfWhere
|
||||
if (versions?.drafts) params.draft = 'true'
|
||||
|
||||
setParams(params)
|
||||
}, [
|
||||
|
||||
@@ -12,42 +12,83 @@ import { fieldAffectsData, fieldHasSubFields, tabHasName } from '../../../../../
|
||||
import getValueWithDefault from '../../../../../fields/getDefaultValue'
|
||||
import { iterateFields } from './iterateFields'
|
||||
|
||||
type Args = {
|
||||
export type AddFieldStatePromiseArgs = {
|
||||
/**
|
||||
* if all parents are localized, then the field is localized
|
||||
*/
|
||||
anyParentLocalized?: boolean
|
||||
config: SanitizedConfig
|
||||
data: Data
|
||||
field: NonPresentationalField
|
||||
/**
|
||||
* You can use this to filter down to only `localized` fields that require transalation (type: text, textarea, etc.). Another plugin might want to look for only `point` type fields to do some GIS function. With the filter function you can go in like a surgeon.
|
||||
*/
|
||||
filter?: (args: AddFieldStatePromiseArgs) => boolean
|
||||
/**
|
||||
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
|
||||
*/
|
||||
forceFullValue?: boolean
|
||||
fullData: Data
|
||||
id: number | string
|
||||
/**
|
||||
* Whether the field schema should be included in the state
|
||||
*/
|
||||
includeSchema?: boolean
|
||||
locale: string
|
||||
/**
|
||||
* Whether to omit parent fields in the state. @default false
|
||||
*/
|
||||
omitParents?: boolean
|
||||
operation: 'create' | 'update'
|
||||
passesCondition: boolean
|
||||
path: string
|
||||
preferences: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
/**
|
||||
* Whether to skip checking the field's condition. @default false
|
||||
*/
|
||||
skipConditionChecks?: boolean
|
||||
/**
|
||||
* Whether to skip validating the field. @default false
|
||||
*/
|
||||
skipValidation?: boolean
|
||||
state: Fields
|
||||
t: TFunction
|
||||
user: User
|
||||
}
|
||||
|
||||
export const addFieldStatePromise = async ({
|
||||
id,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
fullData,
|
||||
locale,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
/**
|
||||
* Flattens the fields schema and fields data.
|
||||
* The output is the field path (e.g. array.0.name) mapped to a FormField object.
|
||||
*/
|
||||
export const addFieldStatePromise = async (args: AddFieldStatePromiseArgs): Promise<void> => {
|
||||
const {
|
||||
id,
|
||||
anyParentLocalized = false,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
filter,
|
||||
forceFullValue = false,
|
||||
fullData,
|
||||
includeSchema = false,
|
||||
locale,
|
||||
omitParents = false,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks = false,
|
||||
skipValidation = false,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
} = args
|
||||
if (fieldAffectsData(field)) {
|
||||
const fieldState: FormField = {
|
||||
condition: field.admin?.condition,
|
||||
fieldSchema: includeSchema ? field : undefined,
|
||||
initialValue: undefined,
|
||||
passesCondition,
|
||||
valid: true,
|
||||
@@ -66,9 +107,9 @@ export const addFieldStatePromise = async ({
|
||||
data[field.name] = valueWithDefault
|
||||
}
|
||||
|
||||
let validationResult: boolean | string = true
|
||||
let validationResult: string | true = true
|
||||
|
||||
if (typeof fieldState.validate === 'function') {
|
||||
if (typeof fieldState.validate === 'function' && !skipValidation) {
|
||||
validationResult = await fieldState.validate(data?.[field.name], {
|
||||
...field,
|
||||
id,
|
||||
@@ -96,24 +137,36 @@ export const addFieldStatePromise = async ({
|
||||
const rowPath = `${path}${field.name}.${i}.`
|
||||
row.id = row?.id || new ObjectID().toHexString()
|
||||
|
||||
state[`${rowPath}id`] = {
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${rowPath}id`] = {
|
||||
fieldSchema: includeSchema
|
||||
? field.fields.find((field) => 'name' in field && field.name === 'id')
|
||||
: undefined,
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
}
|
||||
|
||||
acc.promises.push(
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: row,
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: rowPath,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -146,8 +199,8 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = null
|
||||
fieldState.initialValue = null
|
||||
} else {
|
||||
fieldState.value = arrayValue.length
|
||||
fieldState.initialValue = arrayValue.length
|
||||
fieldState.value = forceFullValue ? arrayValue : arrayValue.length
|
||||
fieldState.initialValue = forceFullValue ? arrayValue : arrayValue.length
|
||||
|
||||
if (arrayValue.length > 0) {
|
||||
fieldState.disableFormData = true
|
||||
@@ -157,7 +210,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.rows = rowMetadata
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -173,36 +228,60 @@ export const addFieldStatePromise = async ({
|
||||
if (block) {
|
||||
row.id = row?.id || new ObjectID().toHexString()
|
||||
|
||||
state[`${rowPath}id`] = {
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${rowPath}id`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'id',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
|
||||
state[`${rowPath}blockType`] = {
|
||||
initialValue: row.blockType,
|
||||
valid: true,
|
||||
value: row.blockType,
|
||||
}
|
||||
state[`${rowPath}blockType`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'blockType',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.blockType,
|
||||
valid: true,
|
||||
value: row.blockType,
|
||||
}
|
||||
|
||||
state[`${rowPath}blockName`] = {
|
||||
initialValue: row.blockName,
|
||||
valid: true,
|
||||
value: row.blockName,
|
||||
state[`${rowPath}blockName`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'blockName',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.blockName,
|
||||
valid: true,
|
||||
value: row.blockName,
|
||||
}
|
||||
}
|
||||
|
||||
acc.promises.push(
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: row,
|
||||
fields: block.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: rowPath,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -237,8 +316,8 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = null
|
||||
fieldState.initialValue = null
|
||||
} else {
|
||||
fieldState.value = blocksValue.length
|
||||
fieldState.initialValue = blocksValue.length
|
||||
fieldState.value = forceFullValue ? blocksValue : blocksValue.length
|
||||
fieldState.initialValue = forceFullValue ? blocksValue : blocksValue.length
|
||||
|
||||
if (blocksValue.length > 0) {
|
||||
fieldState.disableFormData = true
|
||||
@@ -248,7 +327,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.rows = rowMetadata
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -256,15 +337,22 @@ export const addFieldStatePromise = async ({
|
||||
case 'group': {
|
||||
await iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: data?.[field.name] || {},
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: `${path}${field.name}.`,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -324,7 +412,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.initialValue = relationshipValue
|
||||
}
|
||||
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -337,7 +427,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = relationshipValue
|
||||
fieldState.initialValue = relationshipValue
|
||||
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -347,7 +439,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.initialValue = valueWithDefault
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -356,15 +450,22 @@ export const addFieldStatePromise = async ({
|
||||
// Handle field types that do not use names (row, etc)
|
||||
await iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data,
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -373,15 +474,22 @@ export const addFieldStatePromise = async ({
|
||||
const promises = field.tabs.map((tab) =>
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: tab.localized || anyParentLocalized,
|
||||
config,
|
||||
data: tabHasName(tab) ? data?.[tab.name] : data,
|
||||
fields: tab.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: tabHasName(tab) ? `${path}${tab.name}.` : path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
|
||||
@@ -4,65 +4,123 @@ import type { User } from '../../../../../auth'
|
||||
import type { SanitizedConfig } from '../../../../../config/types'
|
||||
import type { Field as FieldSchema } from '../../../../../fields/config/types'
|
||||
import type { Data, Fields } from '../types'
|
||||
import type { AddFieldStatePromiseArgs } from './addFieldStatePromise'
|
||||
|
||||
import { fieldIsPresentationalOnly } from '../../../../../fields/config/types'
|
||||
import { addFieldStatePromise } from './addFieldStatePromise'
|
||||
|
||||
type Args = {
|
||||
config: SanitizedConfig
|
||||
/**
|
||||
* if any parents is localized, then the field is localized. @default false
|
||||
*/
|
||||
anyParentLocalized?: boolean
|
||||
/**
|
||||
* config is only needed for validation
|
||||
*/
|
||||
config?: SanitizedConfig
|
||||
data: Data
|
||||
fields: FieldSchema[]
|
||||
filter?: (args: AddFieldStatePromiseArgs) => boolean
|
||||
/**
|
||||
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
|
||||
*/
|
||||
forceFullValue?: boolean
|
||||
fullData: Data
|
||||
id: number | string
|
||||
id?: number | string
|
||||
/**
|
||||
* Whether the field schema should be included in the state. @default false
|
||||
*/
|
||||
includeSchema?: boolean
|
||||
|
||||
/**
|
||||
* operation is only needed for checking field conditions
|
||||
*/
|
||||
locale: string
|
||||
/**
|
||||
* Whether to omit parent fields in the state. @default false
|
||||
*/
|
||||
omitParents?: boolean
|
||||
/**
|
||||
* operation is only needed for validation
|
||||
*/
|
||||
operation: 'create' | 'update'
|
||||
parentPassesCondition: boolean
|
||||
path: string
|
||||
preferences: {
|
||||
parentPassesCondition?: boolean
|
||||
/**
|
||||
* The initial path of the field. @default ''
|
||||
*/
|
||||
path?: string
|
||||
preferences?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
state: Fields
|
||||
/**
|
||||
* Whether to skip checking the field's condition. @default false
|
||||
*/
|
||||
skipConditionChecks?: boolean
|
||||
/**
|
||||
* Whether to skip validating the field. @default false
|
||||
*/
|
||||
skipValidation?: boolean
|
||||
state?: Fields
|
||||
t: TFunction
|
||||
user: User
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens the fields schema and fields data
|
||||
*/
|
||||
export const iterateFields = async ({
|
||||
id,
|
||||
anyParentLocalized = false,
|
||||
config,
|
||||
data,
|
||||
fields,
|
||||
filter,
|
||||
forceFullValue = false,
|
||||
fullData,
|
||||
includeSchema = false,
|
||||
locale,
|
||||
omitParents = false,
|
||||
operation,
|
||||
parentPassesCondition,
|
||||
parentPassesCondition = true,
|
||||
path = '',
|
||||
preferences,
|
||||
state,
|
||||
skipConditionChecks = false,
|
||||
skipValidation = false,
|
||||
state = {},
|
||||
t,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
const promises = []
|
||||
fields.forEach((field) => {
|
||||
const initialData = data
|
||||
if (!fieldIsPresentationalOnly(field) && !field?.admin?.disabled) {
|
||||
const passesCondition = Boolean(
|
||||
(field?.admin?.condition
|
||||
? Boolean(field.admin.condition(fullData || {}, initialData || {}, { user }))
|
||||
: true) && parentPassesCondition,
|
||||
)
|
||||
let passesCondition = true
|
||||
if (!skipConditionChecks) {
|
||||
passesCondition = Boolean(
|
||||
(field?.admin?.condition
|
||||
? Boolean(field.admin.condition(fullData || {}, data || {}, { user }))
|
||||
: true) && parentPassesCondition,
|
||||
)
|
||||
}
|
||||
|
||||
promises.push(
|
||||
addFieldStatePromise({
|
||||
id,
|
||||
anyParentLocalized,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
|
||||
@@ -2,11 +2,22 @@ import { unflatten as flatleyUnflatten } from 'flatley'
|
||||
|
||||
import type { Data, Fields } from './types'
|
||||
|
||||
const reduceFieldsToValues = (fields: Fields, unflatten?: boolean): Data => {
|
||||
/**
|
||||
* Reduce flattened form fields (Fields) to just map to the respective values instead of the full FormField object
|
||||
*
|
||||
* @param unflatten This also unflattens the data if `unflatten` is true. The unflattened data should match the original data structure
|
||||
* @param ignoreDisableFormData - if true, will include fields that have `disableFormData` set to true, for example, blocks or arrays fields.
|
||||
*
|
||||
*/
|
||||
const reduceFieldsToValues = (
|
||||
fields: Fields,
|
||||
unflatten?: boolean,
|
||||
ignoreDisableFormData?: boolean,
|
||||
): Data => {
|
||||
const data = {}
|
||||
|
||||
Object.keys(fields).forEach((key) => {
|
||||
if (!fields[key].disableFormData) {
|
||||
if (ignoreDisableFormData === true || !fields[key].disableFormData) {
|
||||
data[key] = fields[key].value
|
||||
}
|
||||
})
|
||||
|
||||
@@ -20,6 +20,7 @@ export type FormField = {
|
||||
condition?: Condition
|
||||
disableFormData?: boolean
|
||||
errorMessage?: string
|
||||
fieldSchema?: FieldConfig
|
||||
initialValue: unknown
|
||||
passesCondition?: boolean
|
||||
rows?: Row[]
|
||||
|
||||
@@ -33,7 +33,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
permissions,
|
||||
} = props
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const isWithinGroup = useGroup()
|
||||
const isWithinRow = useRow()
|
||||
const isWithinTab = useTabs()
|
||||
@@ -43,7 +43,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
const groupHasErrors = submitted && errorCount > 0
|
||||
|
||||
const path = pathFromProps || name
|
||||
const isTopLevel = !(isWithinCollapsible || isWithinGroup || isWithinRow)
|
||||
const isTopLevel = !(withinCollapsible || isWithinGroup || isWithinRow)
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -51,7 +51,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
baseClass,
|
||||
isTopLevel && `${baseClass}--top-level`,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
isWithinGroup && `${baseClass}--within-group`,
|
||||
isWithinRow && `${baseClass}--within-row`,
|
||||
isWithinTab && `${baseClass}--within-tab`,
|
||||
|
||||
@@ -151,7 +151,7 @@ const NumberField: React.FC<Props> = (props) => {
|
||||
if (isOverHasMany) {
|
||||
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
|
||||
}
|
||||
return t('general:noOptions')
|
||||
return null
|
||||
}}
|
||||
numberOnly
|
||||
onChange={handleHasManyChange}
|
||||
@@ -170,7 +170,7 @@ const NumberField: React.FC<Props> = (props) => {
|
||||
onChange={handleChange}
|
||||
onWheel={(e) => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
// @ts-expect-error
|
||||
e.target.blur()
|
||||
}}
|
||||
placeholder={getTranslation(placeholder, i18n)}
|
||||
|
||||
@@ -9,7 +9,7 @@ const reduceToIDs = (options) =>
|
||||
return [...ids, ...reduceToIDs(option.options)]
|
||||
}
|
||||
|
||||
return [...ids, option.value]
|
||||
return [...ids, { id: option.value, relationTo: option.relationTo }]
|
||||
}, [])
|
||||
|
||||
const sortOptions = (options: Option[]): Option[] =>
|
||||
@@ -63,10 +63,12 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
|
||||
const optionsToAddTo = newOptions.find(
|
||||
(optionGroup) => optionGroup.label === collection.labels.plural,
|
||||
)
|
||||
|
||||
const newSubOptions = docs.reduce((docSubOptions, doc) => {
|
||||
if (loadedIDs.indexOf(doc.id) === -1) {
|
||||
loadedIDs.push(doc.id)
|
||||
if (
|
||||
loadedIDs.filter((item) => item.id === doc.id && item.relationTo === relation).length ===
|
||||
0
|
||||
) {
|
||||
loadedIDs.push({ id: doc.id, relationTo: relation })
|
||||
|
||||
const docTitle = formatUseAsTitle({
|
||||
collection,
|
||||
@@ -89,7 +91,10 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
|
||||
}, [])
|
||||
|
||||
ids.forEach((id) => {
|
||||
if (!loadedIDs.includes(id)) {
|
||||
if (
|
||||
loadedIDs.filter((item) => item.id === id && item.relationTo === relation).length === 0
|
||||
) {
|
||||
loadedIDs.push({ id, relationTo: relation })
|
||||
newSubOptions.push({
|
||||
label: `${i18n.t('general:untitled')} - ID: ${id}`,
|
||||
relationTo: relation,
|
||||
|
||||
@@ -29,9 +29,14 @@ type RichTextAdapterBase<
|
||||
}) => Promise<void> | null
|
||||
outputSchema?: ({
|
||||
field,
|
||||
interfaceNameDefinitions,
|
||||
isRequired,
|
||||
}: {
|
||||
field: RichTextField<Value, AdapterProps, ExtraFieldProperties>
|
||||
/**
|
||||
* Allows you to define new top-level interfaces that can be re-used in the output schema.
|
||||
*/
|
||||
interfaceNameDefinitions: Map<string, JSONSchema4>
|
||||
isRequired: boolean
|
||||
}) => JSONSchema4
|
||||
populationPromise?: (data: {
|
||||
|
||||
@@ -83,7 +83,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
const { preferencesKey } = useDocumentInfo()
|
||||
const { i18n } = useTranslation()
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const [activeTabIndex, setActiveTabIndex] = useState<number>(0)
|
||||
const tabsPrefKey = `tabs-${indexPath}`
|
||||
|
||||
@@ -138,7 +138,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
className,
|
||||
baseClass,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
|
||||
@@ -110,7 +110,7 @@ const TextInput: React.FC<TextInputProps> = (props) => {
|
||||
if (isOverHasMany) {
|
||||
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
|
||||
}
|
||||
return t('general:noOptions')
|
||||
return null
|
||||
}}
|
||||
onChange={onChange}
|
||||
options={[]}
|
||||
|
||||
@@ -137,6 +137,7 @@ const UploadInput: React.FC<UploadInputProps> = (props) => {
|
||||
fieldBaseClass,
|
||||
baseClass,
|
||||
className,
|
||||
`field-${path.replace(/\./g, '__')}`,
|
||||
showError && 'error',
|
||||
readOnly && 'read-only',
|
||||
]
|
||||
|
||||
@@ -40,7 +40,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
const [publishedDoc, setPublishedDoc] = useState<TypeWithID & TypeWithTimestamps>(null)
|
||||
const [versions, setVersions] = useState<PaginatedDocs<Version>>(null)
|
||||
const [unpublishedVersions, setUnpublishedVersions] = useState<PaginatedDocs<Version>>(null)
|
||||
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(null)
|
||||
|
||||
const baseURL = `${serverURL}${api}`
|
||||
let slug: string
|
||||
@@ -62,6 +61,10 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
}
|
||||
}
|
||||
|
||||
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(
|
||||
permissions[pluralType][slug],
|
||||
)
|
||||
|
||||
const getVersions = useCallback(async () => {
|
||||
let versionFetchURL
|
||||
let publishedFetchURL
|
||||
@@ -215,14 +218,14 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
'Accept-Language': i18n.language,
|
||||
},
|
||||
})
|
||||
const json = await res.json()
|
||||
setDocPermissions(json)
|
||||
} else {
|
||||
// fallback to permissions from the entity type
|
||||
// (i.e. create has no id)
|
||||
setDocPermissions(permissions[pluralType][slug])
|
||||
try {
|
||||
const json = await res.json()
|
||||
setDocPermissions(json)
|
||||
} catch (e) {
|
||||
console.error('Unable to fetch document permissions', e)
|
||||
}
|
||||
}
|
||||
}, [serverURL, api, pluralType, slug, id, permissions, i18n.language, code])
|
||||
}, [serverURL, api, pluralType, slug, id, i18n.language, code])
|
||||
|
||||
const getDocPreferences = useCallback(async () => {
|
||||
return getPreference<DocumentPreferences>(preferencesKey)
|
||||
@@ -262,6 +265,7 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
|
||||
const value: ContextType = {
|
||||
id,
|
||||
slug,
|
||||
collection,
|
||||
docPermissions,
|
||||
getDocPermissions,
|
||||
@@ -271,7 +275,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
preferencesKey,
|
||||
publishedDoc,
|
||||
setDocFieldPreferences,
|
||||
slug,
|
||||
unpublishedVersions,
|
||||
versions,
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ import type { TypeWithVersion } from '../../../../versions/types'
|
||||
|
||||
export type Version = TypeWithVersion<any>
|
||||
|
||||
export type DocumentPermissions = CollectionPermission | GlobalPermission | null
|
||||
export type DocumentPermissions = CollectionPermission | GlobalPermission
|
||||
|
||||
export type ContextType = {
|
||||
collection?: SanitizedCollectionConfig
|
||||
|
||||
@@ -17,9 +17,9 @@ export type globalViewType =
|
||||
| 'Version'
|
||||
| 'Versions'
|
||||
|
||||
export const defaultGlobalViews: {
|
||||
export const defaultGlobalViews = (): {
|
||||
[key in globalViewType]: React.ComponentType<any>
|
||||
} = {
|
||||
} => ({
|
||||
API,
|
||||
Default: DefaultGlobalEdit,
|
||||
LivePreview: LivePreviewView,
|
||||
@@ -27,7 +27,7 @@ export const defaultGlobalViews: {
|
||||
Relationships: null,
|
||||
Version: VersionView,
|
||||
Versions: VersionsView,
|
||||
}
|
||||
})
|
||||
|
||||
export const CustomGlobalComponent = (
|
||||
args: GlobalEditViewProps & {
|
||||
@@ -43,18 +43,14 @@ export const CustomGlobalComponent = (
|
||||
// For example, the Edit view:
|
||||
// 1. Edit?.Default
|
||||
// 2. Edit?.Default?.Component
|
||||
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
|
||||
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
|
||||
const Component =
|
||||
typeof Edit === 'object' && typeof Edit[view] === 'function'
|
||||
? Edit[view]
|
||||
: typeof Edit === 'object' &&
|
||||
typeof Edit?.[view] === 'object' &&
|
||||
// @ts-ignore
|
||||
typeof Edit[view].Component === 'function'
|
||||
? // @ts-ignore
|
||||
Edit[view].Component
|
||||
: defaultGlobalViews[view]
|
||||
? Edit[view].Component
|
||||
: defaultGlobalViews()[view]
|
||||
|
||||
if (Component) {
|
||||
return <Component {...args} />
|
||||
|
||||
@@ -17,9 +17,9 @@ export type collectionViewType =
|
||||
| 'Version'
|
||||
| 'Versions'
|
||||
|
||||
export const defaultCollectionViews: {
|
||||
export const defaultCollectionViews = (): {
|
||||
[key in collectionViewType]: React.ComponentType<any>
|
||||
} = {
|
||||
} => ({
|
||||
API,
|
||||
Default: DefaultCollectionEdit,
|
||||
LivePreview: LivePreviewView,
|
||||
@@ -27,7 +27,7 @@ export const defaultCollectionViews: {
|
||||
Relationships: null,
|
||||
Version: VersionView,
|
||||
Versions: VersionsView,
|
||||
}
|
||||
})
|
||||
|
||||
export const CustomCollectionComponent = (
|
||||
args: CollectionEditViewProps & {
|
||||
@@ -43,18 +43,15 @@ export const CustomCollectionComponent = (
|
||||
// For example, the Edit view:
|
||||
// 1. Edit?.Default
|
||||
// 2. Edit?.Default?.Component
|
||||
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
|
||||
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
|
||||
|
||||
const Component =
|
||||
typeof Edit === 'object' && typeof Edit[view] === 'function'
|
||||
? Edit[view]
|
||||
: typeof Edit === 'object' &&
|
||||
typeof Edit?.[view] === 'object' &&
|
||||
// @ts-ignore
|
||||
typeof Edit[view].Component === 'function'
|
||||
? // @ts-ignore
|
||||
Edit[view].Component
|
||||
: defaultCollectionViews[view]
|
||||
? Edit[view].Component
|
||||
: defaultCollectionViews()[view]
|
||||
|
||||
if (Component) {
|
||||
return <Component {...args} />
|
||||
|
||||
@@ -74,21 +74,22 @@ const DefaultCell: React.FC<Props> = (props) => {
|
||||
if (collection.upload && fieldAffectsData(field) && field.name === 'filename') {
|
||||
CellComponent = cellComponents.File
|
||||
} else {
|
||||
return (
|
||||
<WrapElement {...wrapElementProps}>
|
||||
{(cellData === '' || typeof cellData === 'undefined') &&
|
||||
'label' in field &&
|
||||
t('noLabel', {
|
||||
if (!cellData && 'label' in field) {
|
||||
return (
|
||||
<WrapElement {...wrapElementProps}>
|
||||
{t('noLabel', {
|
||||
label: getTranslation(
|
||||
typeof field.label === 'function' ? 'data' : field.label || 'data',
|
||||
i18n,
|
||||
),
|
||||
})}
|
||||
{typeof cellData === 'string' && cellData}
|
||||
{typeof cellData === 'number' && cellData}
|
||||
{typeof cellData === 'object' && JSON.stringify(cellData)}
|
||||
</WrapElement>
|
||||
)
|
||||
</WrapElement>
|
||||
)
|
||||
} else if (typeof cellData === 'string' || typeof cellData === 'number') {
|
||||
return <WrapElement {...wrapElementProps}>{cellData}</WrapElement>
|
||||
} else if (typeof cellData === 'object') {
|
||||
return <WrapElement {...wrapElementProps}>{JSON.stringify(cellData)}</WrapElement>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,13 +7,14 @@ import { extractTranslations } from '../../translations/extractTranslations'
const labels = extractTranslations(['authentication:enableAPIKey', 'authentication:apiKey'])

const encryptKey: FieldHook = ({ req, value }) =>
  value ? req.payload.encrypt(value as string) : undefined
  value ? req.payload.encrypt(value as string) : null
const decryptKey: FieldHook = ({ req, value }) =>
  value ? req.payload.decrypt(value as string) : undefined

export default [
  {
    name: 'enableAPIKey',
    type: 'checkbox',
    admin: {
      components: {
        Field: () => null,
@@ -21,10 +22,10 @@ export default [
    },
    defaultValue: false,
    label: labels['authentication:enableAPIKey'],
    type: 'checkbox',
  },
  {
    name: 'apiKey',
    type: 'text',
    admin: {
      components: {
        Field: () => null,
@@ -35,10 +36,10 @@ export default [
      beforeChange: [encryptKey],
    },
    label: labels['authentication:apiKey'],
    type: 'text',
  },
  {
    name: 'apiKeyIndex',
    type: 'text',
    admin: {
      disabled: true,
    },
@@ -59,6 +60,5 @@ export default [
      },
    ],
  },
    type: 'text',
  },
] as Field[]

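The only behavioral change in this file is `encryptKey` returning `null` instead of `undefined` when there is no value; `decryptKey` still returns `undefined`. A minimal sketch of the hook pair with a hypothetical `FieldHook` shape; the real types, and the exact null-vs-undefined semantics Payload applies to hook return values, are not spelled out in this diff.

```ts
// Sketch only: hypothetical FieldHook shape, simplified from the diff.
type HookArgs = {
  req: { payload: { encrypt: (v: string) => string; decrypt: (v: string) => string } }
  value?: unknown
}
type FieldHook = (args: HookArgs) => unknown

const encryptKey: FieldHook = ({ req, value }) =>
  value ? req.payload.encrypt(value as string) : null // changed: empty values now map to null

const decryptKey: FieldHook = ({ req, value }) =>
  value ? req.payload.decrypt(value as string) : undefined // unchanged
```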
@@ -29,37 +29,38 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {

  let args = incomingArgs

  // /////////////////////////////////////
  // beforeOperation - Collection
  // /////////////////////////////////////

  await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
    await priorHook

    args =
      (await hook({
        args,
        collection: args.collection?.config,
        context: args.req.context,
        operation: 'forgotPassword',
      })) || args
  }, Promise.resolve())

  const {
    collection: { config: collectionConfig },
    data,
    disableEmail,
    expiration,
    req: {
      payload: { config, emailOptions, sendEmail: email },
      payload,
      t,
    },
    req,
  } = args

  try {
    const shouldCommit = await initTransaction(req)
    const shouldCommit = await initTransaction(args.req)

    // /////////////////////////////////////
    // beforeOperation - Collection
    // /////////////////////////////////////

    await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
      await priorHook

      args =
        (await hook({
          args,
          collection: args.collection?.config,
          context: args.req.context,
          operation: 'forgotPassword',
          req: args.req,
        })) || args
    }, Promise.resolve())

    const {
      collection: { config: collectionConfig },
      data,
      disableEmail,
      expiration,
      req: {
        payload: { config, emailOptions, sendEmail: email },
        payload,
        t,
      },
      req,
    } = args

    // /////////////////////////////////////
    // Forget password
@@ -159,7 +160,7 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {

    return token
  } catch (error: unknown) {
    await killTransaction(req)
    await killTransaction(args.req)
    throw error
  }
}

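This hunk moves the `beforeOperation` hooks and the destructuring of `args` inside the `try` block, so a failure anywhere after `initTransaction` reaches `killTransaction`. A self-contained sketch of that shape; the helper names mirror the diff, but their bodies below are stand-ins, not Payload's implementations.

```ts
// Sketch only: the try/catch transaction shape the auth operations now follow.
type Req = { transactionID?: string }

async function initTransaction(req: Req): Promise<boolean> {
  if (req.transactionID) return false // a caller already opened a transaction
  req.transactionID = Math.random().toString(36).slice(2)
  return true
}

async function commitTransaction(req: Req): Promise<void> {
  delete req.transactionID
}

async function killTransaction(req: Req): Promise<void> {
  delete req.transactionID
}

async function runOperation<T>(req: Req, work: (req: Req) => Promise<T>): Promise<T> {
  try {
    const shouldCommit = await initTransaction(req)
    const result = await work(req) // hooks and the main operation both run inside the try
    if (shouldCommit) await commitTransaction(req)
    return result
  } catch (error: unknown) {
    await killTransaction(req) // anything opened above is rolled back on failure
    throw error
  }
}

// Example: runOperation({}, async () => 'ok')
```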
@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import type { Result } from '../forgotPassword'

import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import forgotPassword from '../forgotPassword'

export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -24,15 +22,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
  payload: Payload,
  options: Options<T>,
): Promise<Result> {
  const {
    collection: collectionSlug,
    context,
    data,
    disableEmail,
    expiration,
    req = {} as PayloadRequest,
  } = options
  setRequestContext(req, context)
  const { collection: collectionSlug, data, disableEmail, expiration } = options

  const collection = payload.collections[collectionSlug]

@@ -44,12 +34,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
    )
  }

  req.payloadAPI = req.payloadAPI || 'local'
  req.payload = payload
  req.i18n = i18nInit(payload.config.i18n)

  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
  const req = createLocalReq(options, payload)

  return forgotPassword({
    collection,

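Each Local API operation in this diff replaces its hand-rolled request setup with a shared `createLocalReq(options, payload)` call. The helper's implementation is not shown here; the sketch below is an inference from the lines it replaces (payloadAPI, payload, i18n, t, data loader, request context), so the names, signature, and details are assumptions.

```ts
// Hypothetical sketch of what createLocalReq might do, inferred from the removed lines.
// This is not Payload's actual implementation.
type AnyReq = Record<string, any>

function initI18nStub(_config: unknown) {
  return { t: (key: string) => key }
}

function dataLoaderStub(_req: AnyReq) {
  return {}
}

export function createLocalReqSketch(
  options: { context?: Record<string, unknown>; req?: AnyReq },
  payload: { config: { i18n?: unknown } } & Record<string, any>,
): AnyReq {
  const req: AnyReq = options.req || {}

  req.payloadAPI = req.payloadAPI || 'local' // mark the request as coming from the Local API
  req.payload = payload
  req.i18n = req.i18n || initI18nStub(payload.config.i18n)
  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = dataLoaderStub(req)
  if (options.context) req.context = { ...(req.context || {}), ...options.context }

  return req
}
```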
@@ -5,10 +5,8 @@ import type { GeneratedTypes } from '../../../index'
import type { Payload } from '../../../payload'
import type { Result } from '../login'

import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import login from '../login'

export type Options<TSlug extends keyof GeneratedTypes['collections']> = {
@@ -33,25 +31,14 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
): Promise<Result & { user: GeneratedTypes['collections'][TSlug] }> {
  const {
    collection: collectionSlug,
    context,
    data,
    depth,
    fallbackLocale: fallbackLocaleArg = options?.req?.fallbackLocale,
    locale: localeArg = null,
    overrideAccess = true,
    req = {} as PayloadRequest,
    res,
    showHiddenFields,
  } = options
  setRequestContext(req, context)

  const collection = payload.collections[collectionSlug]
  const localizationConfig = payload?.config?.localization
  const defaultLocale = localizationConfig ? localizationConfig.defaultLocale : null
  const locale = localeArg || req?.locale || defaultLocale
  const fallbackLocale = localizationConfig
    ? localizationConfig.locales.find(({ code }) => locale === code)?.fallbackLocale
    : null

  if (!collection) {
    throw new APIError(
@@ -59,12 +46,7 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
    )
  }

  req.payloadAPI = req.payloadAPI || 'local'
  req.payload = payload
  req.i18n = i18nInit(payload.config.i18n)

  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
  const req = createLocalReq(options, payload)

  const args = {
    collection,
@@ -76,12 +58,6 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
    showHiddenFields,
  }

  if (locale) args.req.locale = locale
  if (fallbackLocale) {
    args.req.fallbackLocale =
      typeof fallbackLocaleArg !== 'undefined' ? fallbackLocaleArg : fallbackLocale || defaultLocale
  }

  return login<TSlug>(args)
}

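For context, this file backs the Local API's login call, which is typically invoked roughly as below; the collection slug, credentials, and the narrowed `payload` type are placeholders for illustration, not taken from this diff.

```ts
// Hypothetical usage sketch of the local login operation refactored above.
type LoginCapablePayload = {
  login: (args: {
    collection: string
    data: { email: string; password: string }
  }) => Promise<{ exp?: number; token?: string; user: unknown }>
}

async function loginExample(payload: LoginCapablePayload) {
  const { token, user } = await payload.login({
    collection: 'users', // assumed slug
    data: { email: 'dev@example.com', password: 'secret' },
  })
  return { token, user }
}
```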
@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'
import type { Result } from '../resetPassword'

import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import resetPassword from '../resetPassword'

export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -24,15 +22,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>(
  payload: Payload,
  options: Options<T>,
): Promise<Result> {
  const {
    collection: collectionSlug,
    context,
    data,
    overrideAccess,
    req = {} as PayloadRequest,
  } = options

  setRequestContext(req, context)
  const { collection: collectionSlug, data, overrideAccess } = options

  const collection = payload.collections[collectionSlug]

@@ -44,12 +34,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>(
    )
  }

  req.payload = payload
  req.payloadAPI = req.payloadAPI || 'local'
  req.i18n = i18nInit(payload.config.i18n)

  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
  const req = createLocalReq(options, payload)

  return resetPassword({
    collection,

@@ -2,10 +2,8 @@ import type { GeneratedTypes, RequestContext } from '../../../'
import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'

import { getDataLoader } from '../../../collections/dataloader'
import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import unlock from '../unlock'

export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -22,14 +20,7 @@ async function localUnlock<T extends keyof GeneratedTypes['collections']>(
  payload: Payload,
  options: Options<T>,
): Promise<boolean> {
  const {
    collection: collectionSlug,
    context,
    data,
    overrideAccess = true,
    req = {} as PayloadRequest,
  } = options
  setRequestContext(req, context)
  const { collection: collectionSlug, data, overrideAccess = true } = options

  const collection = payload.collections[collectionSlug]

@@ -39,12 +30,7 @@ async function localUnlock<T extends keyof GeneratedTypes['collections']>(
    )
  }

  req.payload = payload
  req.payloadAPI = req.payloadAPI || 'local'
  req.i18n = i18nInit(payload.config.i18n)

  if (!req.t) req.t = req.i18n.t
  if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
  const req = createLocalReq(options, payload)

  return unlock({
    collection,

@@ -3,8 +3,7 @@ import type { PayloadRequest } from '../../../express/types'
import type { Payload } from '../../../payload'

import { APIError } from '../../../errors'
import { setRequestContext } from '../../../express/setRequestContext'
import { i18nInit } from '../../../translations/init'
import { createLocalReq } from '../../../utilities/createLocalReq'
import verifyEmail from '../verifyEmail'

export type Options<T extends keyof GeneratedTypes['collections']> = {
@@ -18,8 +17,7 @@ async function localVerifyEmail<T extends keyof GeneratedTypes['collections']>(
  payload: Payload,
  options: Options<T>,
): Promise<boolean> {
  const { collection: collectionSlug, context, req = {} as PayloadRequest, token } = options
  setRequestContext(req, context)
  const { collection: collectionSlug, token } = options

  const collection = payload.collections[collectionSlug]

@@ -29,9 +27,7 @@ async function localVerifyEmail<T extends keyof GeneratedTypes['collections']>(
    )
  }

  req.payload = payload
  req.payloadAPI = req.payloadAPI || 'local'
  req.i18n = i18nInit(payload.config.i18n)
  const req = createLocalReq(options, payload)

  return verifyEmail({
    collection,

@@ -18,8 +18,8 @@ import sanitizeInternalFields from '../../utilities/sanitizeInternalFields'
import isLocked from '../isLocked'
import { authenticateLocalStrategy } from '../strategies/local/authenticate'
import { incrementLoginAttempts } from '../strategies/local/incrementLoginAttempts'
import { resetLoginAttempts } from '../strategies/local/resetLoginAttempts'
import { getFieldsToSign } from './getFieldsToSign'
import unlock from './unlock'

export type Result = {
  exp?: number
@@ -45,37 +45,40 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
): Promise<Result & { user: GeneratedTypes['collections'][TSlug] }> {
  let args = incomingArgs

  // /////////////////////////////////////
  // beforeOperation - Collection
  // /////////////////////////////////////

  await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
    await priorHook

    args =
      (await hook({
        args,
        collection: args.collection?.config,
        context: args.req.context,
        operation: 'login',
      })) || args
  }, Promise.resolve())

  const {
    collection: { config: collectionConfig },
    data,
    depth,
    overrideAccess,
    req,
    req: {
      payload,
      payload: { config, secret },
    },
    showHiddenFields,
  } = args

  try {
    const shouldCommit = await initTransaction(req)
    const shouldCommit = await initTransaction(args.req)

    // /////////////////////////////////////
    // beforeOperation - Collection
    // /////////////////////////////////////

    await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
      await priorHook

      args =
        (await hook({
          args,
          collection: args.collection?.config,
          context: args.req.context,
          operation: 'login',
          req: args.req,
        })) || args
    }, Promise.resolve())

    const {
      collection: { config: collectionConfig },
      data,
      depth,
      overrideAccess,
      req,
      req: {
        fallbackLocale,
        locale,
        payload,
        payload: { config, secret },
      },
      showHiddenFields,
    } = args

    // /////////////////////////////////////
    // Login
@@ -115,16 +118,16 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
      })
    }

    if (shouldCommit) await commitTransaction(req)

    throw new AuthenticationError(req.t)
  }

  if (maxLoginAttemptsEnabled) {
    await unlock({
      collection: {
        config: collectionConfig,
      },
      data,
      overrideAccess: true,
    await resetLoginAttempts({
      collection: collectionConfig,
      doc: user,
      payload: req.payload,
      req,
    })
  }
@@ -195,7 +198,9 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
    context: req.context,
    depth,
    doc: user,
    fallbackLocale,
    global: null,
    locale,
    overrideAccess,
    req,
    showHiddenFields,
@@ -262,7 +267,7 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(

    return result
  } catch (error: unknown) {
    await killTransaction(req)
    await killTransaction(args.req)
    throw error
  }
}

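These operations run their collection hooks sequentially with the same `reduce` idiom: each iteration awaits the previous one, and a hook that returns a value replaces `args`. A standalone sketch of that idiom with a generic hook type in place of Payload's:

```ts
// Sketch only: the sequential-hook idiom used throughout the operations in this diff.
type Hook<TArgs> = (args: TArgs) => Promise<TArgs | undefined> | TArgs | undefined

async function runHooksInOrder<TArgs>(hooks: Hook<TArgs>[], initial: TArgs): Promise<TArgs> {
  let args = initial

  // Hooks run one at a time; `Promise.resolve()` seeds the chain, and each callback
  // awaits the accumulator (the prior hook) before invoking its own hook.
  await hooks.reduce(async (priorHook, hook) => {
    await priorHook
    args = (await hook(args)) || args
  }, Promise.resolve())

  return args
}
```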
@@ -9,7 +9,10 @@ import type { Document } from '../../types'

import { buildAfterOperation } from '../../collections/operations/utils'
import { Forbidden } from '../../errors'
import { commitTransaction } from '../../utilities/commitTransaction'
import getCookieExpiration from '../../utilities/getCookieExpiration'
import { initTransaction } from '../../utilities/initTransaction'
import { killTransaction } from '../../utilities/killTransaction'
import { getFieldsToSign } from './getFieldsToSign'

export type Result = {
@@ -28,120 +31,130 @@ export type Arguments = {
async function refresh(incomingArgs: Arguments): Promise<Result> {
  let args = incomingArgs

  // /////////////////////////////////////
  // beforeOperation - Collection
  // /////////////////////////////////////
  try {
    const shouldCommit = await initTransaction(args.req)

  await args.collection.config.hooks.beforeOperation.reduce(
    async (priorHook: BeforeOperationHook | Promise<void>, hook: BeforeOperationHook) => {
      await priorHook
    // /////////////////////////////////////
    // beforeOperation - Collection
    // /////////////////////////////////////

      args =
        (await hook({
          args,
          collection: args.collection?.config,
          context: args.req.context,
          operation: 'refresh',
        })) || args
    },
    Promise.resolve(),
  )
    await args.collection.config.hooks.beforeOperation.reduce(
      async (priorHook: BeforeOperationHook | Promise<void>, hook: BeforeOperationHook) => {
        await priorHook

  // /////////////////////////////////////
  // Refresh
  // /////////////////////////////////////
        args =
          (await hook({
            args,
            collection: args.collection?.config,
            context: args.req.context,
            operation: 'refresh',
            req: args.req,
          })) || args
      },
      Promise.resolve(),
    )

  const {
    collection: { config: collectionConfig },
    req: {
      payload: { config, secret },
    },
  } = args
    // /////////////////////////////////////
    // Refresh
    // /////////////////////////////////////

  if (typeof args.token !== 'string' || !args.req.user) throw new Forbidden(args.req.t)
    const {
      collection: { config: collectionConfig },
      req: {
        payload: { config, secret },
      },
    } = args

  const parsedURL = url.parse(args.req.url)
  const isGraphQL = parsedURL.pathname === config.routes.graphQL
    if (typeof args.token !== 'string' || !args.req.user) throw new Forbidden(args.req.t)

  const user = await args.req.payload.findByID({
    id: args.req.user.id,
    collection: args.req.user.collection,
    depth: isGraphQL ? 0 : args.collection.config.auth.depth,
    req: args.req,
  })
    const parsedURL = url.parse(args.req.url)
    const isGraphQL = parsedURL.pathname === config.routes.graphQL

  const fieldsToSign = getFieldsToSign({
    collectionConfig,
    email: user?.email as string,
    user: args?.req?.user,
  })
    const user = await args.req.payload.findByID({
      id: args.req.user.id,
      collection: args.req.user.collection,
      depth: isGraphQL ? 0 : args.collection.config.auth.depth,
      req: args.req,
    })

  const refreshedToken = jwt.sign(fieldsToSign, secret, {
    expiresIn: collectionConfig.auth.tokenExpiration,
  })
    const fieldsToSign = getFieldsToSign({
      collectionConfig,
      email: user?.email as string,
      user: args?.req?.user,
    })

  const exp = (jwt.decode(refreshedToken) as Record<string, unknown>).exp as number
    const refreshedToken = jwt.sign(fieldsToSign, secret, {
      expiresIn: collectionConfig.auth.tokenExpiration,
    })

  if (args.res) {
    const cookieOptions = {
      domain: undefined,
      expires: getCookieExpiration(collectionConfig.auth.tokenExpiration),
      httpOnly: true,
      path: '/',
      sameSite: collectionConfig.auth.cookies.sameSite,
      secure: collectionConfig.auth.cookies.secure,
    const exp = (jwt.decode(refreshedToken) as Record<string, unknown>).exp as number

    if (args.res) {
      const cookieOptions = {
        domain: undefined,
        expires: getCookieExpiration(collectionConfig.auth.tokenExpiration),
        httpOnly: true,
        path: '/',
        sameSite: collectionConfig.auth.cookies.sameSite,
        secure: collectionConfig.auth.cookies.secure,
      }

      if (collectionConfig.auth.cookies.domain)
        cookieOptions.domain = collectionConfig.auth.cookies.domain

      args.res.cookie(`${config.cookiePrefix}-token`, refreshedToken, cookieOptions)
    }

  if (collectionConfig.auth.cookies.domain)
    cookieOptions.domain = collectionConfig.auth.cookies.domain
    let result: Result = {
      exp,
      refreshedToken,
      user,
    }

  args.res.cookie(`${config.cookiePrefix}-token`, refreshedToken, cookieOptions)
    // /////////////////////////////////////
    // After Refresh - Collection
    // /////////////////////////////////////

    await collectionConfig.hooks.afterRefresh.reduce(async (priorHook, hook) => {
      await priorHook

      result =
        (await hook({
          collection: args.collection?.config,
          context: args.req.context,
          exp,
          req: args.req,
          res: args.res,
          token: refreshedToken,
        })) || result
    }, Promise.resolve())

    // /////////////////////////////////////
    // afterOperation - Collection
    // /////////////////////////////////////

    result = await buildAfterOperation({
      args,
      collection: args.collection?.config,
      operation: 'refresh',
      result,
    })

    // /////////////////////////////////////
    // Return results
    // /////////////////////////////////////

    if (collectionConfig.auth.removeTokenFromResponses) {
      delete result.refreshedToken
    }

    if (shouldCommit) await commitTransaction(args.req)

    return result
  } catch (error: unknown) {
    await killTransaction(args.req)
    throw error
  }

  let result: Result = {
    exp,
    refreshedToken,
    user,
  }

  // /////////////////////////////////////
  // After Refresh - Collection
  // /////////////////////////////////////

  await collectionConfig.hooks.afterRefresh.reduce(async (priorHook, hook) => {
    await priorHook

    result =
      (await hook({
        collection: args.collection?.config,
        context: args.req.context,
        exp,
        req: args.req,
        res: args.res,
        token: refreshedToken,
      })) || result
  }, Promise.resolve())

  // /////////////////////////////////////
  // afterOperation - Collection
  // /////////////////////////////////////

  result = await buildAfterOperation({
    args,
    collection: args.collection?.config,
    operation: 'refresh',
    result,
  })

  // /////////////////////////////////////
  // Return results
  // /////////////////////////////////////

  if (collectionConfig.auth.removeTokenFromResponses) {
    delete result.refreshedToken
  }

  return result
}

export default refresh

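Outside of the transaction changes, the refresh hunk keeps the same token mechanics: sign a new JWT from the fields to sign, read its `exp` back off the decoded token, and (when a response object is present) set it as a cookie. Isolated from Payload, the sign/decode step looks roughly like this, using the `jsonwebtoken` package the code already relies on; the field values in the example are placeholders.

```ts
// Sketch only: the sign/decode steps performed by the refresh operation above.
import jwt from 'jsonwebtoken'

function refreshTokenSketch(
  fieldsToSign: Record<string, unknown>,
  secret: string,
  tokenExpiration: number, // seconds, as in collectionConfig.auth.tokenExpiration
) {
  const refreshedToken = jwt.sign(fieldsToSign, secret, { expiresIn: tokenExpiration })

  // The operation reads the expiry back off the token it just signed.
  const exp = (jwt.decode(refreshedToken) as Record<string, unknown>).exp as number

  return { exp, refreshedToken }
}

// Example: refreshTokenSketch({ id: '123', collection: 'users' }, 'top-secret', 7200)
```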
Some files were not shown because too many files have changed in this diff.