Compare commits

119 Commits: db-postgre ... db-postgre

Commit SHAs:

548de80bee, 2c05fbbb5e, 9b54659818, e9f550406e, 98b87e2278, 5f3d0169be, 35c2a085ef, 1ac943ed5e, 25cee8bb10, 419aef452d,
ea52489126, e80c70acae, 70b0064d0b, 9636bf6efd, 8f4d0da4e0, f0f1dbdcb0, a895aee8b1, aa1dac08c1, b8cd1c6ba4, 6344464bc6,
5d4022f144, bf942fdfa6, d6c25783cf, 82e9d31127, 399e606b34, 0d18822062, 00fc0343da, 6323965c65, 6d6823c3e5, ca70298436,
4f565759f6, df39602758, 6ea6172afa, 486774796d, 1cd1c38764, f6d7da7510, cdc4cb971b, e0191b54e1, 2315781f18, a0a58e7fd2,
e1813fb884, da184d40ec, ca8675f89d, e8c6c9338d, 558534aff8, 29c901ba9b, f3876c2a39, c3a3942969, 23b135b963, e3c8105cc2,
2c71aaef75, 922fb9b7fa, 0740d5095e, b392d656fe, c0eef90cdc, db22cbdf21, 1e8a6b7899, 5d934ba02d, f651665f2f, 5d3659d48a,
47106d5a1a, afa2b942e0, 20ddd0de5b, 64f705c3c9, b30ea8aa6b, 471d2113a7, 8725d41164, 0bd81aa25a, 8c09ca9be5, 90d7ee3e65,
58bbd8c00f, 003ad065c3, 70715926a8, b3a6bfacf2, e1d9accb27, f2f55a84cc, eba53ba60a, f73d503fec, 6930c4e9f2, 3eb681e847,
cb4638cfa1, b40e9f85a2, e5a7907a72, 3f25d1ca84, d5720bea7b, 8ce15c8b07, 9f5efef78f, dfba5222f3, b99d24fcfa, 836ed77568,
1c5d5b07c8, da5f1f2240, c84c58c7b4, 1c1b8f3cec, 3f69f83180, 371353f153, a92c6334b6, eb9e771a9c, ee5390aaca, a861311c5a,
74c3fe1bb2, a2be50279e, 403eb06acf, f5c2cd74cc, a6a1963ec6, 0647c870f1, 3b88adc7d0, 82383a5b5f, f9dda628b2, 93eb0e4a31,
2e362f44f4, 775502b161, 84d75ce6ca, 175cf229c0, 2b731c1088, 6affa1c304, 57dc93da5d, 28d3f73c2a, 7eae86bcb3
.github/ISSUE_TEMPLATE/1.bug_report.yml (vendored, 2 changed lines)
@@ -1,6 +1,6 @@
|
||||
name: Bug Report
|
||||
description: Create a bug report for Payload
|
||||
labels: ['possible-bug']
|
||||
labels: ['[possible-bug]']
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
.github/workflows/main.yml (vendored, 81 changed lines)
@@ -2,9 +2,9 @@ name: build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize]
|
||||
types: [ opened, reopened, synchronize ]
|
||||
push:
|
||||
branches: ['main']
|
||||
branches: [ 'main' ]
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
@@ -15,25 +15,25 @@ jobs:
|
||||
needs_build: ${{ steps.filter.outputs.needs_build }}
|
||||
templates: ${{ steps.filter.outputs.templates }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 25
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
needs_build:
|
||||
- '.github/workflows/**'
|
||||
- 'packages/**'
|
||||
- 'test/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
- 'package.json'
|
||||
templates:
|
||||
- 'templates/**'
|
||||
- name: Log all filter results
|
||||
run: |
|
||||
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
|
||||
echo "templates: ${{ steps.filter.outputs.templates }}"
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 25
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
needs_build:
|
||||
- '.github/workflows/**'
|
||||
- 'packages/**'
|
||||
- 'test/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
- 'package.json'
|
||||
templates:
|
||||
- 'templates/**'
|
||||
- name: Log all filter results
|
||||
run: |
|
||||
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
|
||||
echo "templates: ${{ steps.filter.outputs.templates }}"
|
||||
|
||||
core-build:
|
||||
needs: changes
|
||||
@@ -85,11 +85,15 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
database: [mongoose, postgres]
|
||||
database: [ mongoose, postgres, postgres-uuid, supabase ]
|
||||
env:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_DB: payloadtests
|
||||
AWS_ENDPOINT_URL: http://127.0.0.1:4566
|
||||
AWS_ACCESS_KEY_ID: localstack
|
||||
AWS_SECRET_ACCESS_KEY: localstack
|
||||
AWS_REGION: us-east-1
|
||||
|
||||
steps:
|
||||
- name: Use Node.js 18
|
||||
@@ -109,6 +113,9 @@ jobs:
|
||||
path: ./*
|
||||
key: ${{ github.sha }}-${{ github.run_number }}
|
||||
|
||||
- name: Start LocalStack
|
||||
run: pnpm docker:start
|
||||
|
||||
- name: Start PostgreSQL
|
||||
uses: CasperWA/postgresql-action@v1.2
|
||||
with:
|
||||
@@ -116,15 +123,35 @@ jobs:
|
||||
postgresql db: ${{ env.POSTGRES_DB }}
|
||||
postgresql user: ${{ env.POSTGRES_USER }}
|
||||
postgresql password: ${{ env.POSTGRES_PASSWORD }}
|
||||
if: matrix.database == 'postgres'
|
||||
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
|
||||
|
||||
- name: Install Supabase CLI
|
||||
uses: supabase/setup-cli@v1
|
||||
with:
|
||||
version: latest
|
||||
if: matrix.database == 'supabase'
|
||||
|
||||
- name: Initialize Supabase
|
||||
run: |
|
||||
supabase init
|
||||
supabase start
|
||||
if: matrix.database == 'supabase'
|
||||
|
||||
- name: Wait for PostgreSQL
|
||||
run: sleep 30
|
||||
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
|
||||
|
||||
- run: sleep 30
|
||||
- name: Configure PostgreSQL
|
||||
run: |
|
||||
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE ROLE runner SUPERUSER LOGIN;"
|
||||
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "SELECT version();"
|
||||
echo "POSTGRES_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" >> $GITHUB_ENV
|
||||
if: matrix.database == 'postgres'
|
||||
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
|
||||
|
||||
- name: Configure Supabase
|
||||
run: |
|
||||
echo "POSTGRES_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres" >> $GITHUB_ENV
|
||||
if: matrix.database == 'supabase'
|
||||
|
||||
- name: Component Tests
|
||||
run: pnpm test:components
|
||||
@@ -142,7 +169,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
part: [1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8]
|
||||
part: [ 1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8 ]
|
||||
|
||||
steps:
|
||||
- name: Use Node.js 18
|
||||
@@ -290,7 +317,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
template: [blank, website, ecommerce]
|
||||
template: [ blank, website, ecommerce ]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
.gitignore (vendored, 2 changed lines)
@@ -6,7 +6,9 @@ dist
|
||||
|
||||
test-results
|
||||
.devcontainer
|
||||
.localstack
|
||||
/migrations
|
||||
.localstack
|
||||
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos,windows,webstorm,sublimetext,visualstudiocode
|
||||
|
||||
.idea/runConfigurations/Run_Dev_Fields.xml (generated, 2 changed lines)
@@ -1,5 +1,5 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
|
||||
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
||||
.idea/runConfigurations/Run_Dev__community.xml (generated, 2 changed lines)
@@ -1,5 +1,5 @@
|
||||
<component name="ProjectRunConfigurationManager">
|
||||
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
|
||||
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
|
||||
<method v="2" />
|
||||
</configuration>
|
||||
</component>
|
||||
CHANGELOG.md (113 changed lines)
@@ -1,3 +1,116 @@
|
||||
## [2.11.1](https://github.com/payloadcms/payload/compare/v2.11.0...v2.11.1) (2024-02-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
|
||||
|
||||
### Features
|
||||
|
||||
* **db-postgres:** adds idType to use uuid or serial id columns ([#3864](https://github.com/payloadcms/payload/issues/3864)) ([d6c2578](https://github.com/payloadcms/payload/commit/d6c25783cfa97983bf9db27ceb5ccd39a62c62f1))
|
||||
* **db-postgres:** reconnect after disconnection from database ([#5086](https://github.com/payloadcms/payload/issues/5086)) ([bf942fd](https://github.com/payloadcms/payload/commit/bf942fdfa6ea9c26cf05295cc9db646bf31fa622))
|
||||
* **plugin-search:** add req to beforeSync args for transactions ([#5068](https://github.com/payloadcms/payload/issues/5068)) ([98b87e2](https://github.com/payloadcms/payload/commit/98b87e22782c0a788f79326f22be05a6b176ad74))
|
||||
* **richtext-lexical:** add justify aligment to AlignFeature ([#4035](https://github.com/payloadcms/payload/issues/4035)) ([#4868](https://github.com/payloadcms/payload/issues/4868)) ([6d6823c](https://github.com/payloadcms/payload/commit/6d6823c3e5609a58eeeeb8d043945a762f9463df))
|
||||
* **richtext-lexical:** AddBlock handle for all nodes, even if they aren't empty paragraphs ([#5063](https://github.com/payloadcms/payload/issues/5063)) ([00fc034](https://github.com/payloadcms/payload/commit/00fc0343dabf184d5bab418d47c403b3ad11698f))
|
||||
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground ([#5066](https://github.com/payloadcms/payload/issues/5066)) ([0d18822](https://github.com/payloadcms/payload/commit/0d18822062275c1826c8e2c3da2571a2b3483310))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **db-mongodb:** find versions pagination ([#5091](https://github.com/payloadcms/payload/issues/5091)) ([5d4022f](https://github.com/payloadcms/payload/commit/5d4022f1445e2809c01cb1dd599280f0a56cdc6e))
|
||||
* **db-postgres:** query using blockType ([#5044](https://github.com/payloadcms/payload/issues/5044)) ([35c2a08](https://github.com/payloadcms/payload/commit/35c2a085efa6d5ad59779960874bc9728a17e3a0))
|
||||
* filterOptions errors cause transaction to abort ([#5079](https://github.com/payloadcms/payload/issues/5079)) ([5f3d016](https://github.com/payloadcms/payload/commit/5f3d0169bee21e1c0963dbd7ede9fe5f1c46a5a5))
|
||||
* **plugin-form-builder:** hooks do not respect transactions ([#5069](https://github.com/payloadcms/payload/issues/5069)) ([82e9d31](https://github.com/payloadcms/payload/commit/82e9d31127c8df83c5bed92a5ffdab76d331900f))
|
||||
* remove collection findByID caching ([#5034](https://github.com/payloadcms/payload/issues/5034)) ([1ac943e](https://github.com/payloadcms/payload/commit/1ac943ed5e8416883b863147fdf3c23380955559))
|
||||
* **richtext-lexical:** do not remove adjacent paragraph node when inserting certain nodes in empty editor ([#5061](https://github.com/payloadcms/payload/issues/5061)) ([6323965](https://github.com/payloadcms/payload/commit/6323965c652ea68dffeb716957b124d165b9ce96))
|
||||
* **uploads:** account for serverURL when retrieving external file ([#5102](https://github.com/payloadcms/payload/issues/5102)) ([25cee8b](https://github.com/payloadcms/payload/commit/25cee8bb102bf80b3a4bfb4b4e46712722cc7f0d))
|
||||
|
||||
## [2.11.0](https://github.com/payloadcms/payload/compare/v2.10.1...v2.11.0) (2024-02-09)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* exposes collapsible provider with more functionality ([#5043](https://github.com/payloadcms/payload/issues/5043)) ([df39602](https://github.com/payloadcms/payload/commit/df39602758ae8dc3765bb48e51f7a657babfa559))
|
||||
|
||||
## [2.10.1](https://github.com/payloadcms/payload/compare/v2.10.0...v2.10.1) (2024-02-09)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* clearable cells handle null values ([#5038](https://github.com/payloadcms/payload/issues/5038)) ([f6d7da7](https://github.com/payloadcms/payload/commit/f6d7da751039df25066b51bb91d6453e1a4efd82))
|
||||
* **db-mongodb:** handle null values with exists ([#5037](https://github.com/payloadcms/payload/issues/5037)) ([cdc4cb9](https://github.com/payloadcms/payload/commit/cdc4cb971b9180ba2ed09741f5af1a3c18292828))
|
||||
* **db-postgres:** handle nested docs with drafts ([#5012](https://github.com/payloadcms/payload/issues/5012)) ([da184d4](https://github.com/payloadcms/payload/commit/da184d40ece74bffb224002eb5df8f6987d65043))
|
||||
* ensures docs with the same id are shown in relationship field select ([#4859](https://github.com/payloadcms/payload/issues/4859)) ([e1813fb](https://github.com/payloadcms/payload/commit/e1813fb884e0dc84203fcbab87527a99a4d3a5d7))
|
||||
* query relationships by explicit id field ([#5022](https://github.com/payloadcms/payload/issues/5022)) ([a0a58e7](https://github.com/payloadcms/payload/commit/a0a58e7fd20dff54d210c968f4d5defd67441bdd))
|
||||
* **richtext-lexical:** make editor reactive to initialValue changes ([#5010](https://github.com/payloadcms/payload/issues/5010)) ([2315781](https://github.com/payloadcms/payload/commit/2315781f1891ddde4b4c5f2f0cfa1c17af85b7a9))
|
||||
|
||||
## [2.10.0](https://github.com/payloadcms/payload/compare/v2.9.0...v2.10.0) (2024-02-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add more options to addFieldStatePromise so that it can be used for field flattening ([#4799](https://github.com/payloadcms/payload/issues/4799)) ([8725d41](https://github.com/payloadcms/payload/commit/8725d411645bb0270376e235669f46be2227ecc0))
|
||||
* extend transactions to cover after and beforeOperation hooks ([#4960](https://github.com/payloadcms/payload/issues/4960)) ([1e8a6b7](https://github.com/payloadcms/payload/commit/1e8a6b7899f7b1e6451cc4d777602208478b483c))
|
||||
* previousValue and previousSiblingDoc args added to beforeChange field hooks ([#4958](https://github.com/payloadcms/payload/issues/4958)) ([5d934ba](https://github.com/payloadcms/payload/commit/5d934ba02d07d98f781ce983228858ee5ce5c226))
|
||||
* re-use existing logger instance passed to payload.init ([#3124](https://github.com/payloadcms/payload/issues/3124)) ([471d211](https://github.com/payloadcms/payload/commit/471d2113a790dc0d54b2f8ed84e6899310efd600))
|
||||
* **richtext-lexical:** Blocks: generate type definitions for blocks fields ([#4529](https://github.com/payloadcms/payload/issues/4529)) ([90d7ee3](https://github.com/payloadcms/payload/commit/90d7ee3e6535d51290fc734b284ff3811dbda1f8))
|
||||
* use deletion success message from server if provided ([#4966](https://github.com/payloadcms/payload/issues/4966)) ([e3c8105](https://github.com/payloadcms/payload/commit/e3c8105cc2ed6fdf8007d97cd7b5556fc71ed724))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **db-postgres:** filtering relationships with drafts enabled ([#4998](https://github.com/payloadcms/payload/issues/4998)) ([c3a3942](https://github.com/payloadcms/payload/commit/c3a39429697e9d335e9be199e7caafb82eb26219))
|
||||
* **db-postgres:** handle schema changes with supabase ([#4968](https://github.com/payloadcms/payload/issues/4968)) ([5d3659d](https://github.com/payloadcms/payload/commit/5d3659d48ad8bbf5d96fbcd80434d2287cab97e0))
|
||||
* **db-postgres:** indexes not created for non unique field names ([#4967](https://github.com/payloadcms/payload/issues/4967)) ([64f705c](https://github.com/payloadcms/payload/commit/64f705c3c94148972f67e8175e718015760d6430))
|
||||
* **db-postgres:** indexes not creating for relationships, arrays, hasmany and blocks ([#4976](https://github.com/payloadcms/payload/issues/4976)) ([47106d5](https://github.com/payloadcms/payload/commit/47106d5a1af2ebd073fbbc6e474174c3d3835e5c))
|
||||
* **db-postgres:** localized field sort count ([#4997](https://github.com/payloadcms/payload/issues/4997)) ([f3876c2](https://github.com/payloadcms/payload/commit/f3876c2a39efe19a1864213306725aadcc14f130))
|
||||
* ensures docPermissions fallback to collection permissions on create ([#4969](https://github.com/payloadcms/payload/issues/4969)) ([afa2b94](https://github.com/payloadcms/payload/commit/afa2b942e0aad90c55744ae13e0ffe1cefa4585d))
|
||||
* **migrations:** safely create migration file when no name passed ([#4995](https://github.com/payloadcms/payload/issues/4995)) ([0740d50](https://github.com/payloadcms/payload/commit/0740d5095ee1aef13e4e37f6b174d529f0f2d993))
|
||||
* **plugin-seo:** tabbedUI with email field causes duplicate field ([#4944](https://github.com/payloadcms/payload/issues/4944)) ([db22cbd](https://github.com/payloadcms/payload/commit/db22cbdf21a39ed0604ab96c57ca4242eac82ce7))
|
||||
|
||||
## [2.9.0](https://github.com/payloadcms/payload/compare/v2.8.2...v2.9.0) (2024-01-26)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* forceAcceptWarning migration arg added to accept prompts ([#4874](https://github.com/payloadcms/payload/issues/4874)) ([eba53ba](https://github.com/payloadcms/payload/commit/eba53ba60afd7c5d37389377ed06a9b556058d49))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* afterLogin hook write conflicts ([#4904](https://github.com/payloadcms/payload/issues/4904)) ([3eb681e](https://github.com/payloadcms/payload/commit/3eb681e847e9c55eaaa69c22bea4f4e66c7eac36))
|
||||
* **db-postgres:** migrate down error ([#4861](https://github.com/payloadcms/payload/issues/4861)) ([dfba522](https://github.com/payloadcms/payload/commit/dfba5222f3abf3f236dc9212a28e1aec7d7214d5))
|
||||
* **db-postgres:** query unset relation ([#4862](https://github.com/payloadcms/payload/issues/4862)) ([8ce15c8](https://github.com/payloadcms/payload/commit/8ce15c8b07800397a50dcf790c263ed5b3cfad53))
|
||||
* migrate down missing filter for latest batch ([#4860](https://github.com/payloadcms/payload/issues/4860)) ([b99d24f](https://github.com/payloadcms/payload/commit/b99d24fcfa698c493ea01c41621201abe18fabe3))
|
||||
* **plugin-cloud-storage:** slow get file performance large collections ([#4927](https://github.com/payloadcms/payload/issues/4927)) ([f73d503](https://github.com/payloadcms/payload/commit/f73d503fecdfa5cefdc26ab9aad60b00563f881e))
|
||||
* remove No Options dropdown from hasMany fields ([#4899](https://github.com/payloadcms/payload/issues/4899)) ([e5a7907](https://github.com/payloadcms/payload/commit/e5a7907a72c1371447ac2f71fce213ed22246092))
|
||||
* upload input drawer does not show draft versions ([#4903](https://github.com/payloadcms/payload/issues/4903)) ([6930c4e](https://github.com/payloadcms/payload/commit/6930c4e9f2200853121391ad8f8df48ea66c40a4))
|
||||
|
||||
## [2.8.2](https://github.com/payloadcms/payload/compare/v2.8.1...v2.8.2) (2024-01-16)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **db-postgres:** support drizzle logging config ([#4809](https://github.com/payloadcms/payload/issues/4809)) ([371353f](https://github.com/payloadcms/payload/commit/371353f1535fbab4ebd9f56fc14fd10a30eec289))
|
||||
* **plugin-form-builder:** add validation for form ID when creating a submission ([#4730](https://github.com/payloadcms/payload/pull/4730))
|
||||
* **plugin-seo:** add support for interfaceName and fieldOverrides ([#4695](https://github.com/payloadcms/payload/pull/4695))
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **db-mongodb:** mongodb versions creating duplicates ([#4825](https://github.com/payloadcms/payload/issues/4825)) ([a861311](https://github.com/payloadcms/payload/commit/a861311c5a98126700f98f9a2ab380782e754717))
|
||||
* **db-mongodb:** transactionOptions=false typeErrors ([82383a5](https://github.com/payloadcms/payload/commit/82383a5b5f52785115c0feb970da70e91971b7ca))
|
||||
* **db-postgres:** Remove duplicate keys from response ([#4747](https://github.com/payloadcms/payload/issues/4747)) ([eb9e771](https://github.com/payloadcms/payload/commit/eb9e771a9ca03636486d36654f215b73435574cb))
|
||||
* **db-postgres:** validateExistingBlockIsIdentical with arrays ([3b88adc](https://github.com/payloadcms/payload/commit/3b88adc7d0594af63ce190c40c9ee3905df67a31))
|
||||
* **db-postgres:** validateExistingBlockIsIdentical with other tables ([0647c87](https://github.com/payloadcms/payload/commit/0647c870f15dc1b122734b678c2abeb6f56377d4))
|
||||
* **plugin-seo:** fix missing spread operator in URL generator function ([#4723](https://github.com/payloadcms/payload/pull/4723))
|
||||
* removes max-width from field-types class & correctly sets it on uploads ([#4829](https://github.com/payloadcms/payload/issues/4829)) ([ee5390a](https://github.com/payloadcms/payload/commit/ee5390aaca37a4154cde8392b60f091ec3e5175c))
|
||||
|
||||
## [2.8.1](https://github.com/payloadcms/payload/compare/v2.8.0...v2.8.1) (2024-01-12)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* corrects config usage in build bin script ([#4796](https://github.com/payloadcms/payload/issues/4796)) ([775502b](https://github.com/payloadcms/payload/commit/775502b1616c1bd35a3044438e253a0e84219f99))
|
||||
|
||||
## [2.8.0](https://github.com/payloadcms/payload/compare/v2.7.0...v2.8.0) (2024-01-12)
|
||||
|
||||
|
||||
|
||||
@@ -635,6 +635,37 @@ export const CustomArrayManager = () => {
|
||||
]}
|
||||
/>
|
||||
|
||||
### useCollapsible
|
||||
|
||||
The `useCollapsible` hook allows you to control parent collapsibles:
|
||||
|
||||
| Property                | Description                                                                                              |
| ----------------------- | -------------------------------------------------------------------------------------------------------- |
| **`collapsed`**         | State of the nearest collapsible: `true` if it is collapsed, `false` if it is open                         |
| **`isVisible`**         | If nested, whether the nearest parent collapsible is visible: `true` if no parent is closed, `false` otherwise |
| **`toggle`**            | Toggles the state of the nearest collapsible                                                               |
| **`withinCollapsible`** | Whether the component is rendered within another collapsible                                               |

**Example:**
|
||||
|
||||
```tsx
|
||||
import React from 'react'
|
||||
|
||||
import { useCollapsible } from 'payload/components/utilities'
|
||||
|
||||
const CustomComponent: React.FC = () => {
|
||||
const { collapsed, toggle } = useCollapsible()
|
||||
return (
|
||||
<div>
|
||||
<p className="field-type">I am {collapsed ? 'closed' : 'open'}</p>
|
||||
<button onClick={toggle} type="button">
|
||||
Toggle
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
### useDocumentInfo
|
||||
|
||||
The `useDocumentInfo` hook provides lots of information about the document currently being edited, including the following:
|
||||
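A minimal usage sketch, assuming `id` and `collection` are among the properties the hook returns (the component name is illustrative):

```tsx
import React from 'react'

import { useDocumentInfo } from 'payload/components/utilities'

const DocumentInfoExample: React.FC = () => {
  // id is undefined until the document has been saved for the first time
  const { id, collection } = useDocumentInfo()

  return (
    <span>
      Editing {collection?.slug ?? 'document'} {id ?? '(unsaved)'}
    </span>
  )
}
```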
@@ -774,8 +805,8 @@ const MyComponent: React.FC = () => {
|
||||
return (
|
||||
<>
|
||||
<span>The current theme is {theme} and autoMode is {autoMode}</span>
|
||||
<button
|
||||
type="button"
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setTheme(prev => prev === "light" ? "dark" : "light")}
|
||||
>
|
||||
Toggle theme
|
||||
|
||||
@@ -28,7 +28,7 @@ This field uses the `monaco-react` editor syntax highlighting.
|
||||
| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
|
||||
| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
|
||||
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
|
||||
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
|
||||
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
|
||||
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
|
||||
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
|
||||
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |
|
||||
|
||||
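Putting a few of these options together, a minimal sketch of a code field definition (the field name and length limit are illustrative, not from this diff):

```ts
import { Field } from 'payload/types'

// An indexed code field with a simple length validation
const snippetField: Field = {
  name: 'snippet',
  type: 'code',
  label: 'Code Snippet',
  index: true,
  validate: (value: string) =>
    !value || value.length <= 10_000 ? true : 'Snippets are limited to 10,000 characters',
}
```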
@@ -46,6 +46,7 @@ export const Page: CollectionConfig = {
|
||||
- [Date](/docs/fields/date) - date / time field that saves a timestamp
|
||||
- [Email](/docs/fields/email) - validates the entry is a properly formatted email
|
||||
- [Group](/docs/fields/group) - nest fields within an object
|
||||
- [JSON](/docs/fields/json) - saves actual JSON in the database
|
||||
- [Number](/docs/fields/number) - field that enforces that its value be a number
|
||||
- [Point](/docs/fields/point) - geometric coordinates for location data
|
||||
- [Radio](/docs/fields/radio) - radio button group, allowing only one value to be selected
|
||||
|
||||
@@ -38,7 +38,7 @@ caption="Admin panel screenshot of a Relationship field"
|
||||
| **`label`** | Text used as a field label in the Admin panel or an object with keys for each language. |
|
||||
| **`unique`** | Enforce that each entry in the Collection has a unique value for this field. |
|
||||
| **`validate`** | Provide a custom validation function that will be executed on both the Admin panel and the backend. [More](/docs/fields/overview#validation) |
|
||||
| **`index`** | Build a an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
|
||||
| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. |
|
||||
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/config), include its data in the user JWT. |
|
||||
| **`hooks`** | Provide field-based hooks to control logic for this field. [More](/docs/fields/overview#field-level-hooks) |
|
||||
| **`access`** | Provide field-based access control to denote what users can see and do with this field's data. [More](/docs/fields/overview#field-level-access-control) |
|
||||
|
||||
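Likewise, a minimal sketch of a relationship field using a few of these options (the `categories` slug is illustrative):

```ts
import { Field } from 'payload/types'

// A required, indexed relationship to a single related document
const categoryField: Field = {
  name: 'category',
  type: 'relationship',
  relationTo: 'categories',
  index: true,
  required: true,
}
```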
@@ -75,6 +75,7 @@ import { CollectionBeforeOperationHook } from 'payload/types'
|
||||
const beforeOperationHook: CollectionBeforeOperationHook = async ({
|
||||
args, // original arguments passed into the operation
|
||||
operation, // name of the operation
|
||||
req, // full express request
|
||||
}) => {
|
||||
return args // return modified operation arguments as necessary
|
||||
}
|
||||
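A sketch of how the hook above is wired into a collection config (the `orders` slug is illustrative):

```ts
import { CollectionConfig } from 'payload/types'

export const Orders: CollectionConfig = {
  slug: 'orders',
  hooks: {
    // runs before the operation uses its arguments
    beforeOperation: [beforeOperationHook],
  },
  fields: [],
}
```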
@@ -209,6 +210,7 @@ import { CollectionAfterOperationHook } from 'payload/types'
|
||||
const afterOperationHook: CollectionAfterOperationHook = async ({
|
||||
args, // arguments passed into the operation
|
||||
operation, // name of the operation
|
||||
req, // full express request
|
||||
result, // the result of the operation, before modifications
|
||||
}) => {
|
||||
return result // return modified result as necessary
|
||||
|
||||
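As a rough sketch, an `afterOperation` hook can also reshape the result it receives, here stripping an illustrative `internalNotes` field from `find` results:

```ts
import { CollectionAfterOperationHook } from 'payload/types'

// Remove an internal field from paginated find results before they are returned
const stripInternalNotes: CollectionAfterOperationHook = async ({ operation, result }) => {
  if (operation === 'find' && 'docs' in result) {
    return {
      ...result,
      docs: result.docs.map(doc => ({ ...doc, internalNotes: undefined })),
    }
  }

  return result
}
```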
@@ -6,7 +6,8 @@ desc: Hooks can be added to any fields, and optionally modify the return value o
|
||||
keywords: hooks, fields, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, express
|
||||
---
|
||||
|
||||
Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up functionalities to be easily reusable across your projects.
|
||||
Field-level hooks offer incredible potential for encapsulating your logic. They help to isolate concerns and package up
|
||||
functionalities to be easily reusable across your projects.
|
||||
|
||||
**Example use cases include:**
|
||||
|
||||
@@ -46,7 +47,8 @@ const ExampleField: Field = {
|
||||
|
||||
## Arguments and return values
|
||||
|
||||
All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on which field hook you are utilizing.
|
||||
All field-level hooks are formatted to accept the same arguments, although some arguments may be `undefined` based on
|
||||
which field hook you are utilizing.
|
||||
|
||||
<Banner type="success">
|
||||
<strong>Tip:</strong>
|
||||
@@ -69,10 +71,10 @@ Field Hooks receive one `args` argument that contains the following properties:
|
||||
| **`operation`** | A string relating to which operation the field type is currently executing within. Useful within `beforeValidate`, `beforeChange`, and `afterChange` hooks to differentiate between `create` and `update` operations. |
|
||||
| **`originalDoc`** | The full original document in `update` operations. In the `afterChange` hook, this is the resulting document of the operation. |
|
||||
| **`previousDoc`** | The document before changes were applied, only in `afterChange` hooks. |
|
||||
| **`previousSiblingDoc`** | The sibling data from the previous document in `afterChange` hook. |
|
||||
| **`previousSiblingDoc`**   | The sibling data of the document before changes were applied, only in `beforeChange` and `afterChange` hooks. |
|
||||
| **`req`** | The Express `request` object. It is mocked for Local API operations. |
|
||||
| **`value`** | The value of the field. |
|
||||
| **`previousValue`** | The previous value of the field, before changes were applied, only in `afterChange` hooks. |
|
||||
| **`previousValue`** | The previous value of the field, before changes, only in `beforeChange` and `afterChange` hooks. |
|
||||
| **`context`** | Context passed to this hook. More info can be found under [Context](/docs/hooks/context) |
|
||||
| **`field`** | The field which the hook is running against. |
|
||||
| **`collection`** | The collection which the field belongs to. If the field belongs to a global, this will be null. |
|
||||
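A sketch of a `beforeChange` field hook that leans on several of these arguments (the logging behavior and fallback rule are illustrative):

```ts
import { FieldHook } from 'payload/types'

// Keep the stored value when an update clears the field, and log who made the change
const preserveOnClear: FieldHook = ({ value, previousValue, operation, req }) => {
  if (operation === 'update' && value === undefined) {
    return previousValue
  }

  req.payload.logger.info(`Field changed by user ${req.user?.id ?? 'anonymous'}`)
  return value
}
```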
@@ -80,7 +82,8 @@ Field Hooks receive one `args` argument that contains the following properties:
|
||||
|
||||
#### Return value
|
||||
|
||||
All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may optionally return the value that should be used within the field.
|
||||
All field hooks can optionally modify the return value of the field before the operation continues. Field Hooks may
|
||||
optionally return the value that should be used within the field.
|
||||
|
||||
<Banner type="warning">
|
||||
<strong>Important</strong>
|
||||
@@ -92,11 +95,14 @@ All field hooks can optionally modify the return value of the field before the o
|
||||
|
||||
## Examples of Field Hooks
|
||||
|
||||
To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the true potential of field-level hooks lies in their adaptability to a wide array of use cases.
|
||||
To better illustrate how field-level hooks can be applied, here are some specific examples. These demonstrate the
|
||||
flexibility and potential of field hooks in different contexts. Remember, these examples are just a starting point - the
|
||||
true potential of field-level hooks lies in their adaptability to a wide array of use cases.
|
||||
|
||||
### beforeValidate
|
||||
|
||||
Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes validation.
|
||||
Runs before the `update` operation. This hook allows you to pre-process or format field data before it undergoes
|
||||
validation.
|
||||
|
||||
```ts
|
||||
import { Field } from 'payload/types'
|
||||
@@ -113,11 +119,15 @@ const usernameField: Field = {
|
||||
}
|
||||
```
|
||||
|
||||
In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is stored in a consistent format in the database.
|
||||
In this example, the `beforeValidate` hook is used to process the `username` field. The hook takes the incoming value of
|
||||
the field and transforms it by trimming whitespace and converting it to lowercase. This ensures that the username is
|
||||
stored in a consistent format in the database.
|
||||
|
||||
### beforeChange
|
||||
|
||||
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage, you can be confident that the field data that will be saved to the document is valid in accordance to your field validations.
|
||||
Immediately following validation, `beforeChange` hooks will run within `create` and `update` operations. At this stage,
|
||||
you can be confident that the field data that will be saved to the document is valid in accordance with your field
|
||||
validations.
|
||||
|
||||
```ts
|
||||
import { Field } from 'payload/types'
|
||||
@@ -136,11 +146,14 @@ const emailField: Field = {
|
||||
}
|
||||
```
|
||||
|
||||
In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs additional validation or transformation on the email field value. This allows for operation-specific logic to be applied to the field.
|
||||
In the `emailField`, the `beforeChange` hook checks the `operation` type. If the operation is `create`, it performs
|
||||
additional validation or transformation on the email field value. This allows for operation-specific logic to be applied
|
||||
to the field.
|
||||
|
||||
### afterChange
|
||||
|
||||
The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful for post-processing or triggering side effects based on the new value of the field.
|
||||
The `afterChange` hook is executed after a field's value has been changed and saved in the database. This hook is useful
|
||||
for post-processing or triggering side effects based on the new value of the field.
|
||||
|
||||
```ts
|
||||
import { Field } from 'payload/types'
|
||||
@@ -165,11 +178,15 @@ const membershipStatusField: Field = {
|
||||
}
|
||||
```
|
||||
|
||||
In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or notifying them about changes in their membership benefits.
|
||||
In this example, the `afterChange` hook is used with a `membershipStatusField`, which allows users to select their
|
||||
membership level (Standard, Premium, VIP). The hook monitors changes in the membership status. When a change occurs, it
|
||||
logs the update and can be used to trigger further actions, such as tracking conversion from one tier to another or
|
||||
notifying them about changes in their membership benefits.
|
||||
|
||||
### afterRead
|
||||
|
||||
The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or transforming the field data for output.
|
||||
The `afterRead` hook is invoked after a field value is read from the database. This is ideal for formatting or
|
||||
transforming the field data for output.
|
||||
|
||||
```ts
|
||||
import { Field } from 'payload/types'
|
||||
@@ -186,8 +203,9 @@ const dateField: Field = {
|
||||
}
|
||||
```
|
||||
|
||||
Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more user-friendly.
|
||||
|
||||
Here, the `afterRead` hook for the `dateField` is used to format the date into a more readable format
|
||||
using `toLocaleDateString()`. This hook modifies the way the date is presented to the user, making it more
|
||||
user-friendly.
|
||||
|
||||
## TypeScript
|
||||
|
||||
|
||||
@@ -159,6 +159,39 @@ A function called by the search preview component to display the actual URL of y
|
||||
}
|
||||
```
|
||||
|
||||
#### `interfaceName`
|
||||
|
||||
Rename the meta group interface name that is generated for TypeScript and GraphQL.
|
||||
|
||||
```ts
|
||||
// payload.config.ts
|
||||
{
|
||||
// ...
|
||||
seoPlugin({
|
||||
interfaceName: 'customInterfaceNameSEO'
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
#### `fieldOverrides`
|
||||
|
||||
Pass any valid field props to the base fields: Title, Description or Image.
|
||||
|
||||
```ts
|
||||
// payload.config.ts
|
||||
seoPlugin({
|
||||
// ...
|
||||
fieldOverrides: {
|
||||
title: {
|
||||
required: true,
|
||||
},
|
||||
description: {
|
||||
localized: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## TypeScript
|
||||
|
||||
All types can be directly imported:
|
||||
|
||||
@@ -98,6 +98,13 @@ On boot, a seed script is included to scaffold a basic database for you to use a
|
||||
|
||||
> NOTICE: seeding the database is destructive because it drops your current database to populate a fresh one from the seed template. Only run this command if you are starting a new project or can afford to lose your current data.
|
||||
|
||||
### Conflicting routes
|
||||
|
||||
>In a monorepo, when routes are bootstrapped to the same host, they can conflict with Payload's own routes if they have the same name. In our template we've placed the Next.js API routes under `next` to avoid this conflict.
|
||||
>
|
||||
>This can happen with any other route that conflicts with Payload's, such as `admin`, so we recommend using different names for custom routes.
>Alternatively, you can rename Payload's own routes via the [configuration](https://payloadcms.com/docs/configuration/overview), as sketched below.
|
||||
|
||||
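A minimal sketch of renaming Payload's own routes in `payload.config.ts` (the paths shown are illustrative):

```ts
import { buildConfig } from 'payload/config'

export default buildConfig({
  // Move Payload off its default paths so they cannot collide with Next.js routes
  routes: {
    admin: '/payload-admin',
    api: '/payload-api',
  },
  // ...the rest of your config
})
```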
## Production
|
||||
|
||||
To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
export async function POST(): Promise<NextResponse> {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
examples/custom-server/src/app/next/test-get/route.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* The Next.js API routes can conflict with Payload's own routes if they share the same path
|
||||
* To avoid this you can customise the path of Payload or the API route of Next.js, as we've done here
|
||||
* See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
|
||||
* */
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
examples/custom-server/src/app/next/test-post/route.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* The Next.js API routes can conflict with Payload's own routes if they share the same path
|
||||
* To avoid this you can customise the path of Payload or the API route of Next.js, as we've done here
|
||||
* See readme: https://github.com/payloadcms/payload/tree/main/examples/custom-server#conflicting-routes
|
||||
* */
|
||||
export async function POST(): Promise<NextResponse> {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
examples/hierarchy/.env.example (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
DATABASE_URI=mongodb://127.0.0.1/payload-template-blank
|
||||
PAYLOAD_SECRET=YOUR_SECRET_HERE
|
||||
examples/hierarchy/.gitignore (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
|
||||
build
|
||||
dist
|
||||
/media
|
||||
node_modules
|
||||
.DS_Store
|
||||
.env
|
||||
examples/hierarchy/.prettierrc.js (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
module.exports = {
|
||||
printWidth: 100,
|
||||
parser: 'typescript',
|
||||
semi: false,
|
||||
singleQuote: true,
|
||||
trailingComma: 'all',
|
||||
arrowParens: 'avoid',
|
||||
}
|
||||
examples/hierarchy/README.md (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
# Payload Hierarchy Example
|
||||
|
||||
This example demonstrates how to achieve a virtual hierarchy between documents in your [Payload](https://github.com/payloadcms/payload) application.
|
||||
|
||||
## Quick Start
|
||||
|
||||
To spin up the project locally, follow these steps:
|
||||
|
||||
1. First clone the repo
|
||||
1. Then `cd YOUR_PROJECT_REPO && cp .env.example .env`
|
||||
1. Next `yarn && yarn dev` (or `docker-compose up`, see [Docker](#docker))
|
||||
1. Now `open http://localhost:3000/admin` to access the admin panel
|
||||
1. Create your first admin user using the form on the page
|
||||
|
||||
That's it! Changes made in `./src` will be reflected in your app.
|
||||
|
||||
## How it works
|
||||
|
||||
This example achieves parent/child relationships between your documents through the use of virtual fields. When you query a document with the `?children=true` query param, an `afterRead` hook populates the document's children, building out its place in the tree.
|
||||
|
||||
For more information on how virtual fields work, see the [Official Virtual Fields Example](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields).
|
||||
|
||||
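A quick sketch of the `children=true` query described above, against the local REST API (the document ID is illustrative):

```ts
// Fetch an entity and ask the afterRead hook to populate its children
const res = await fetch('http://localhost:3000/api/entities/REPLACE_WITH_ID?children=true')
const entity = await res.json()

// entity.children now holds the related entities and people
console.log(entity.children)
```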
### Collections
|
||||
|
||||
See the [Collections](https://payloadcms.com/docs/configuration/collections) docs for details on how to extend any of this functionality.
|
||||
|
||||
- #### Users
|
||||
|
||||
The `users` collection is a default payload users collection.
|
||||
|
||||
- #### Entities
|
||||
|
||||
Each document in the `entities` collection can define any other entity as its parent. It also has a virtual `children` field that is populated when a document is fetched via the API with the `children=true` query param. See [Virtual Fields](https://github.com/payloadcms/payload/tree/main/examples/virtual-fields) for more details on how virtual fields work.

The virtual field retrieves __all__ children, which includes both other entities and people.
|
||||
|
||||
- #### People
|
||||
|
||||
The `people` collection can define an array of parent entities. Each parent entry also carries an `allocation` field, demonstrating how to attach data to a parent-child relationship.
|
||||
|
||||
## Development
|
||||
|
||||
To spin up this example locally, follow the [Quick Start](#quick-start).
|
||||
|
||||
## Production
|
||||
|
||||
To run Payload in production, you need to build and serve the Admin panel. To do so, follow these steps:
|
||||
|
||||
1. First invoke the `payload build` script by running `yarn build` or `npm run build` in your project root. This creates a `./build` directory with a production-ready admin bundle.
|
||||
1. Then run `yarn serve` or `npm run serve` to run Node in production and serve Payload from the `./build` directory.
|
||||
|
||||
### Deployment
|
||||
|
||||
The easiest way to deploy your project is to use [Payload Cloud](https://payloadcms.com/new/import), a one-click hosting solution to deploy production-ready instances of your Payload apps directly from your GitHub repo. You can also deploy your app manually, check out the [deployment documentation](https://payloadcms.com/docs/production/deployment) for full details.
|
||||
|
||||
## Questions
|
||||
|
||||
If you have any issues or questions, reach out to us on [Discord](https://discord.com/invite/payload) or start a [GitHub discussion](https://github.com/payloadcms/payload/discussions).
|
||||
examples/hierarchy/nodemon.json (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/nodemon.json",
|
||||
"ext": "ts",
|
||||
"exec": "ts-node src/server.ts -- -I",
|
||||
"stdin": false
|
||||
}
|
||||
examples/hierarchy/package.json (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "hierarchy",
|
||||
"description": "A hierarchy example with Payload",
|
||||
"version": "1.0.0",
|
||||
"main": "dist/server.js",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"dev": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts nodemon",
|
||||
"build:payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload build",
|
||||
"build:server": "tsc",
|
||||
"build": "yarn copyfiles && yarn build:payload && yarn build:server",
|
||||
"serve": "cross-env PAYLOAD_CONFIG_PATH=dist/payload.config.js NODE_ENV=production node dist/server.js",
|
||||
"copyfiles": "copyfiles -u 1 \"src/**/*.{html,css,scss,ttf,woff,woff2,eot,svg,jpg,png}\" dist/",
|
||||
"generate:types": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types",
|
||||
"generate:graphQLSchema": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:graphQLSchema",
|
||||
"payload": "cross-env PAYLOAD_CONFIG_PATH=src/payload.config.ts payload"
|
||||
},
|
||||
"dependencies": {
|
||||
"@payloadcms/bundler-webpack": "^1.0.0",
|
||||
"@payloadcms/db-mongodb": "^1.0.0",
|
||||
"@payloadcms/plugin-cloud": "^3.0.0",
|
||||
"@payloadcms/richtext-slate": "^1.0.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"dotenv": "^8.2.0",
|
||||
"express": "^4.17.1",
|
||||
"payload": "^2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^4.17.9",
|
||||
"copyfiles": "^2.4.1",
|
||||
"nodemon": "^2.0.6",
|
||||
"ts-node": "^9.1.1",
|
||||
"typescript": "^4.8.4"
|
||||
}
|
||||
}
|
||||
examples/hierarchy/src/collections/Entities.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
import { CollectionConfig } from 'payload/types'
|
||||
|
||||
export const Entities: CollectionConfig = {
|
||||
slug: 'entities',
|
||||
admin: {
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
// - This field is populated by setting the query parameter 'children=true'
|
||||
// - This is a virtual field used to track a child relationship
|
||||
// - Only relationship information is returned by this field
|
||||
// - Data beyond relationships is not stored in this field
|
||||
{
|
||||
name: 'children',
|
||||
type: 'relationship',
|
||||
relationTo: ['entities', 'people'],
|
||||
access: {
|
||||
create: () => false,
|
||||
update: () => false,
|
||||
},
|
||||
hooks: {
|
||||
afterRead: [
|
||||
async ({ data, req }) => {
|
||||
const { id } = data
|
||||
|
||||
if (!req.query.children) return
|
||||
|
||||
const people = await req.payload.find({
|
||||
req,
|
||||
collection: 'people',
|
||||
where: {
|
||||
'parents.parent': { equals: id },
|
||||
},
|
||||
limit: 0,
|
||||
depth: 0,
|
||||
pagination: false,
|
||||
})
|
||||
|
||||
const entities = await req.payload.find({
|
||||
req,
|
||||
collection: 'entities',
|
||||
where: {
|
||||
parent: { equals: id },
|
||||
},
|
||||
limit: 0,
|
||||
depth: 0,
|
||||
pagination: false,
|
||||
})
|
||||
|
||||
return [
|
||||
...entities.docs.map(entity => {
|
||||
return {
|
||||
relationTo: 'entities',
|
||||
value: entity,
|
||||
}
|
||||
}),
|
||||
...people.docs.map(person => {
|
||||
return {
|
||||
relationTo: 'people',
|
||||
value: person,
|
||||
}
|
||||
}),
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'parent',
|
||||
type: 'relationship',
|
||||
relationTo: 'entities',
|
||||
},
|
||||
],
|
||||
}
|
||||
examples/hierarchy/src/collections/People.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
import { CollectionConfig } from 'payload/types'
|
||||
|
||||
export const People: CollectionConfig = {
|
||||
slug: 'people',
|
||||
admin: {
|
||||
useAsTitle: 'name',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'parents',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'parent',
|
||||
type: 'relationship',
|
||||
relationTo: 'entities',
|
||||
},
|
||||
{
|
||||
name: 'allocation',
|
||||
type: 'number',
|
||||
min: 0,
|
||||
max: 100,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
examples/hierarchy/src/collections/Users.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
import { CollectionConfig } from 'payload/types'
|
||||
|
||||
const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
admin: {
|
||||
useAsTitle: 'email',
|
||||
},
|
||||
fields: [
|
||||
// Email added by default
|
||||
// Add more fields as needed
|
||||
],
|
||||
}
|
||||
|
||||
export default Users
|
||||
examples/hierarchy/src/payload.config.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
import path from 'path'
|
||||
|
||||
import { payloadCloud } from '@payloadcms/plugin-cloud'
|
||||
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
||||
import { webpackBundler } from '@payloadcms/bundler-webpack'
|
||||
import { slateEditor } from '@payloadcms/richtext-slate'
|
||||
import { buildConfig } from 'payload/config'
|
||||
|
||||
import Users from './collections/Users'
|
||||
import { Entities } from './collections/Entities'
|
||||
import { People } from './collections/People'
|
||||
|
||||
export default buildConfig({
|
||||
admin: {
|
||||
user: Users.slug,
|
||||
bundler: webpackBundler(),
|
||||
},
|
||||
editor: slateEditor({}),
|
||||
collections: [Users, Entities, People],
|
||||
typescript: {
|
||||
outputFile: path.resolve(__dirname, 'payload-types.ts'),
|
||||
},
|
||||
graphQL: {
|
||||
schemaOutputFile: path.resolve(__dirname, 'generated-schema.graphql'),
|
||||
},
|
||||
plugins: [payloadCloud()],
|
||||
db: mongooseAdapter({
|
||||
url: process.env.DATABASE_URI,
|
||||
}),
|
||||
})
|
||||
examples/hierarchy/src/server.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
import express from 'express'
|
||||
import payload from 'payload'
|
||||
|
||||
require('dotenv').config()
|
||||
const app = express()
|
||||
|
||||
// Redirect root to Admin panel
|
||||
app.get('/', (_, res) => {
|
||||
res.redirect('/admin')
|
||||
})
|
||||
|
||||
const start = async () => {
|
||||
// Initialize Payload
|
||||
await payload.init({
|
||||
secret: process.env.PAYLOAD_SECRET,
|
||||
express: app,
|
||||
onInit: async () => {
|
||||
payload.logger.info(`Payload Admin URL: ${payload.getAdminURL()}`)
|
||||
},
|
||||
})
|
||||
|
||||
// Add your own express routes here
|
||||
|
||||
app.listen(3000)
|
||||
}
|
||||
|
||||
start()
|
||||
examples/hierarchy/tsconfig.json (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es5",
|
||||
"lib": ["dom", "dom.iterable", "esnext"],
|
||||
"allowJs": true,
|
||||
"strict": false,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"jsx": "react",
|
||||
"paths": {
|
||||
"payload/generated-types": ["./src/payload-types.ts"]
|
||||
}
|
||||
},
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules", "dist", "build"],
|
||||
"ts-node": {
|
||||
"transpileOnly": true,
|
||||
"swc": true
|
||||
}
|
||||
}
|
||||
examples/hierarchy/yarn.lock (new file, 7896 lines; file diff suppressed because it is too large)
@@ -2,26 +2,27 @@ import type { AfterLoginHook } from 'payload/dist/collections/config/types'
|
||||
|
||||
export const recordLastLoggedInTenant: AfterLoginHook = async ({ req, user }) => {
|
||||
try {
|
||||
const relatedOrg = await req.payload.find({
|
||||
collection: 'tenants',
|
||||
where: {
|
||||
'domains.domain': {
|
||||
in: [req.headers.host],
|
||||
},
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
})
|
||||
|
||||
if (relatedOrg.docs.length > 0) {
|
||||
await req.payload.update({
|
||||
id: user.id,
|
||||
collection: 'users',
|
||||
data: {
|
||||
lastLoggedInTenant: relatedOrg.docs[0].id,
|
||||
const relatedOrg = await req.payload
|
||||
.find({
|
||||
collection: 'tenants',
|
||||
where: {
|
||||
'domains.domain': {
|
||||
in: [req.headers.host],
|
||||
},
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
})
|
||||
}
|
||||
?.then(res => res.docs?.[0])
|
||||
|
||||
await req.payload.update({
|
||||
id: user.id,
|
||||
collection: 'users',
|
||||
data: {
|
||||
lastLoggedInTenant: relatedOrg?.id || null,
|
||||
},
|
||||
req,
|
||||
})
|
||||
} catch (err: unknown) {
|
||||
req.payload.logger.error(`Error recording last logged in tenant for user ${user.id}: ${err}`)
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ export const isSuperOrTenantAdmin = async (args: { req: PayloadRequest }): Promi
|
||||
},
|
||||
depth: 0,
|
||||
limit: 1,
|
||||
req,
|
||||
})
|
||||
|
||||
// if this tenant does not exist, deny access
|
||||
|
||||
File diff suppressed because it is too large
package.json (26 changed lines)
@@ -15,9 +15,13 @@
"dev:generate-graphql-schema": "ts-node -T ./test/generateGraphQLSchema.ts",
"dev:generate-types": "ts-node -T ./test/generateTypes.ts",
"dev:postgres": "pnpm --filter payload run dev:postgres",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
"docker:stop": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml down",
"fix": "eslint \"packages/**/*.ts\" --fix",
"lint": "eslint \"packages/**/*.ts\"",
"lint-staged": "lint-staged",
"prepare": "husky install",
"pretest": "pnpm build",
"reinstall": "pnpm clean:unix && pnpm install",
"script:list-packages": "tsx ./scripts/list-packages.ts",
@@ -29,10 +33,10 @@
"test:e2e:headed": "cross-env DISABLE_LOGGING=true playwright test --headed",
"test:int:postgres": "cross-env PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"test:int": "cross-env DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"translateNewKeys": "pnpm --filter payload run translateNewKeys",
"prepare": "husky install"
"translateNewKeys": "pnpm --filter payload run translateNewKeys"
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@payloadcms/eslint-config": "workspace:*",
"@playwright/test": "1.40.1",
"@swc/cli": "^0.1.62",
@@ -64,7 +68,7 @@
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"dotenv": "8.6.0",
"drizzle-orm": "0.28.5",
"drizzle-orm": "0.29.3",
"express": "4.18.2",
"form-data": "3.0.1",
"fs-extra": "10.1.0",
@@ -77,12 +81,12 @@
"jest": "29.7.0",
"jest-environment-jsdom": "29.7.0",
"jwt-decode": "3.1.2",
"lexical": "0.12.5",
"lexical": "0.13.1",
"lint-staged": "^14.0.1",
"minimist": "1.2.8",
"mongodb-memory-server": "^9",
"node-fetch": "2.6.12",
"nodemon": "3.0.2",
"nodemon": "3.0.3",
"prettier": "^3.0.3",
"prompts": "2.4.2",
"qs": "6.11.2",
@@ -94,7 +98,7 @@
"slash": "3.0.0",
"slate": "0.91.4",
"tempfile": "^3.0.0",
"ts-node": "10.9.1",
"ts-node": "10.9.2",
"turbo": "^1.11.1",
"typescript": "5.2.2",
"uuid": "^9.0.1"
@@ -104,6 +108,16 @@
"react-i18next": "11.18.6",
"react-router-dom": "5.3.4"
},
"pnpm": {
"overrides": {
"copyfiles": "$copyfiles",
"cross-env": "$cross-env",
"dotenv": "$dotenv",
"drizzle-orm": "$drizzle-orm",
"ts-node": "$ts-node",
"typescript": "$typescript"
}
},
"engines": {
"node": ">=14",
"pnpm": ">=8"
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "1.3.1",
"version": "1.4.2",
"description": "The officially supported MongoDB database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -29,15 +29,18 @@ export const connect: Connect = async function connect(this: MongooseAdapter, pa
urlToConnect = process.env.PAYLOAD_TEST_MONGO_URL
} else {
connectionOptions.dbName = 'payloadmemory'
const { MongoMemoryServer } = require('mongodb-memory-server')
const { MongoMemoryReplSet } = require('mongodb-memory-server')
const getPort = require('get-port')

const port = await getPort()
this.mongoMemoryServer = await MongoMemoryServer.create({
this.mongoMemoryServer = await MongoMemoryReplSet.create({
instance: {
dbName: 'payloadmemory',
port,
},
replSet: {
count: 3,
},
})

urlToConnect = this.mongoMemoryServer.getUri()
@@ -50,7 +53,7 @@ export const connect: Connect = async function connect(this: MongooseAdapter, pa

const client = this.connection.getClient()

if (!client.options.replicaSet || this.transactionOptions === false) {
if (!client.options.replicaSet) {
this.transactionOptions = false
this.beginTransaction = undefined
}
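The hunk above swaps the single in-memory `MongoMemoryServer` for a three-member `MongoMemoryReplSet`, since MongoDB only allows transactions against a replica set. A minimal standalone sketch of the same pattern, assuming `mongodb-memory-server` v9 (the version pinned in the root package.json); everything outside the `replSet: { count: 3 }` call is illustrative:

```ts
import { MongoMemoryReplSet } from 'mongodb-memory-server'

// Start a three-member in-memory replica set so transactions are available in tests.
const start = async (): Promise<void> => {
  const replSet = await MongoMemoryReplSet.create({ replSet: { count: 3 } })
  const uri = replSet.getUri() // pass this to mongoose.connect() or the adapter's url option

  console.log(`in-memory replica set running at ${uri}`)

  await replSet.stop()
}

void start()
```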
@@ -49,6 +49,7 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
],
},
{ $unset: { latest: 1 } },
options,
)

const result: Document = JSON.parse(JSON.stringify(doc))
@@ -32,7 +32,7 @@ export const createMigration: CreateMigration = async function createMigration({

// Check for predefined migration.
// Either passed in via --file or prefixed with @payloadcms/db-mongodb/
if (file || migrationName.startsWith('@payloadcms/db-mongodb/')) {
if (file || migrationName?.startsWith('@payloadcms/db-mongodb/')) {
if (!file) file = migrationName

const predefinedMigrationName = file.replace('@payloadcms/db-mongodb/', '')
@@ -59,8 +59,8 @@ export const createMigration: CreateMigration = async function createMigration({

const timestamp = `${formattedDate}_${formattedTime}`

const formattedName = migrationName.replace(/\W/g, '_')
const fileName = `${timestamp}_${formattedName}.ts`
const formattedName = migrationName?.replace(/\W/g, '_')
const fileName = migrationName ? `${timestamp}_${formattedName}.ts` : `${timestamp}_migration.ts`
const filePath = `${dir}/${fileName}`
fs.writeFileSync(filePath, migrationFileContent)
payload.logger.info({ msg: `Migration created at ${filePath}` })
@@ -57,6 +57,7 @@ export const createVersion: CreateVersion = async function createVersion(
],
},
{ $unset: { latest: 1 } },
options,
)

const result: Document = JSON.parse(JSON.stringify(doc))
@@ -63,6 +63,7 @@ export const find: Find = async function find(
paginationOptions.useCustomCountFn = () => {
return Promise.resolve(
Model.countDocuments(query, {
...options,
hint: { _id: 1 },
}),
)
@@ -82,6 +82,7 @@ export const findGlobalVersions: FindGlobalVersions = async function findGlobalV
paginationOptions.useCustomCountFn = () => {
return Promise.resolve(
Model.countDocuments(query, {
...options,
hint: { _id: 1 },
}),
)
@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
lean: true,
leanWithId: true,
limit,
offset: skip || 0,
options,
page,
pagination,
@@ -79,6 +78,7 @@ export const findVersions: FindVersions = async function findVersions(
paginationOptions.useCustomCountFn = () => {
return Promise.resolve(
Model.countDocuments(query, {
...options,
hint: { _id: 1 },
}),
)
@@ -93,18 +93,13 @@ export function mongooseAdapter({
connectOptions,
disableIndexHints = false,
migrationDir: migrationDirArg,
transactionOptions,
transactionOptions = {},
url,
}: Args): MongooseAdapterResult {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(migrationDirArg)
let beginTransactionFunction = beginTransaction
mongoose.set('strictQuery', false)

if (transactionOptions === false) {
beginTransactionFunction = () => null
}

return createDatabaseAdapter<MongooseAdapter>({
name: 'mongoose',

@@ -122,7 +117,7 @@ export function mongooseAdapter({
versions: {},

// DatabaseAdapter
beginTransaction: beginTransactionFunction,
beginTransaction: transactionOptions ? beginTransaction : undefined,
commitTransaction,
connect,
create,
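After this change `transactionOptions` defaults to `{}`, and `beginTransaction` is only registered when the option is truthy, so passing `false` is how transactions are switched off. A hedged configuration sketch; the connection string is a placeholder:

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'

// Default behaviour: transactionOptions is {} and beginTransaction is wired up.
export const dbWithTransactions = mongooseAdapter({
  url: 'mongodb://127.0.0.1/payload', // placeholder connection string
})

// Opting out: transactionOptions === false leaves beginTransaction undefined,
// so Payload falls back to non-transactional writes.
export const dbWithoutTransactions = mongooseAdapter({
  transactionOptions: false,
  url: 'mongodb://127.0.0.1/payload',
})
```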
@@ -11,25 +11,30 @@ import type { MongooseAdapter } from '.'
/**
* Drop the current database and run all migrate up functions
*/
export async function migrateFresh(this: MongooseAdapter): Promise<void> {
export async function migrateFresh(
this: MongooseAdapter,
{ forceAcceptWarning = false }: { forceAcceptWarning?: boolean },
): Promise<void> {
const { payload } = this

const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
{
onCancel: () => {
process.exit(0)
if (!forceAcceptWarning) {
const { confirm: acceptWarning } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
},
},
)
{
onCancel: () => {
process.exit(0)
},
},
)

if (!acceptWarning) {
process.exit(0)
if (!acceptWarning) {
process.exit(0)
}
}

payload.logger.info({
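The new `forceAcceptWarning` argument lets callers bypass the confirmation prompt before the database is dropped. A sketch of how the method shown above might be driven from a maintenance script; the call through `payload.db` and the init options are assumptions for illustration, not part of this diff:

```ts
import payload from 'payload'

// Hypothetical script: re-create the database from migrations without the interactive prompt.
const run = async (): Promise<void> => {
  await payload.init({
    local: true,          // Local API only, no HTTP server
    secret: 'dev-secret', // placeholder
  })

  // forceAcceptWarning comes from the hunk above and skips the prompts() call.
  await payload.db.migrateFresh({ forceAcceptWarning: true })

  process.exit(0)
}

void run()
```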
@@ -77,6 +77,7 @@ export const sanitizeQueryValue = ({
// Object equality requires the value to be the first key in the object that is being queried.
if (
operator === 'equals' &&
formattedValue &&
typeof formattedValue === 'object' &&
formattedValue.value &&
formattedValue.relationTo
@@ -156,6 +157,23 @@ export const sanitizeQueryValue = ({

if (operator === 'exists') {
formattedValue = formattedValue === 'true' || formattedValue === true

// Clearable fields
if (['relationship', 'select', 'upload'].includes(field.type)) {
if (formattedValue) {
return {
rawQuery: {
$and: [{ [path]: { $exists: true } }, { [path]: { $ne: null } }],
},
}
} else {
return {
rawQuery: {
$or: [{ [path]: { $exists: false } }, { [path]: { $eq: null } }],
},
}
}
}
}

return { operator: formattedOperator, val: formattedValue }
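The new "clearable fields" branch makes `exists` treat a stored `null` the same as a missing key for relationship, select, and upload fields. A hedged Local API example; the `posts` collection and its optional `author` relationship are illustrative names, not part of this diff:

```ts
import payload from 'payload'

// Find documents whose optional relationship was cleared or never set.
// With the change above, exists: false matches both a missing key and an explicit null.
const findOrphanedPosts = async () => {
  const orphaned = await payload.find({
    collection: 'posts',         // illustrative collection slug
    where: {
      author: { exists: false }, // illustrative relationship field
    },
  })

  return orphaned.docs
}
```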
@@ -17,7 +17,11 @@ export const rollbackTransaction: RollbackTransaction = async function rollbackT
}

// the first call for rollback should be aborted and deleted causing any other operations with the same transaction to fail
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
try {
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
} catch (error) {
// ignore the error as it is likely a race condition from multiple errors
}
delete this.sessions[id]
}
@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "0.3.1",
"version": "0.6.0",
"description": "The officially supported Postgres database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -22,8 +22,8 @@
"dependencies": {
"@libsql/client": "^0.3.1",
"console-table-printer": "2.11.2",
"drizzle-kit": "0.19.13-e99bac1",
"drizzle-orm": "0.28.5",
"drizzle-kit": "0.20.14-1f2c838",
"drizzle-orm": "0.29.3",
"pg": "8.11.3",
"prompts": "2.4.2",
"to-snake-case": "1.0.0",
@@ -1,3 +1,4 @@
import type { Payload } from 'payload'
import type { Connect } from 'payload/database'

import { eq, sql } from 'drizzle-orm'
@@ -8,6 +9,43 @@ import prompts from 'prompts'

import type { PostgresAdapter } from './types'

const connectWithReconnect = async function ({
adapter,
payload,
reconnect = false,
}: {
adapter: PostgresAdapter
payload: Payload
reconnect?: boolean
}) {
let result

if (!reconnect) {
result = await adapter.pool.connect()
} else {
try {
result = await adapter.pool.connect()
} catch (err) {
setTimeout(() => {
payload.logger.info('Reconnecting to postgres')
void connectWithReconnect({ adapter, payload, reconnect: true })
}, 1000)
}
}
if (!result) {
return
}
result.prependListener('error', (err) => {
try {
if (err.code === 'ECONNRESET') {
void connectWithReconnect({ adapter, payload, reconnect: true })
}
} catch (err) {
// swallow error
}
})
}

export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
this.schema = {
...this.tables,
@@ -17,9 +55,11 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa

try {
this.pool = new Pool(this.poolOptions)
await this.pool.connect()
await connectWithReconnect({ adapter: this, payload })

this.drizzle = drizzle(this.pool, { schema: this.schema })
const logger = this.logger || false

this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING TABLES ----')
await this.drizzle.execute(sql`drop schema public cascade;
@@ -39,7 +79,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
)
return

const { pushSchema } = require('drizzle-kit/utils')
const { pushSchema } = require('drizzle-kit/payload')

// This will prompt if clarifications are needed for Drizzle to push new schema
const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(
@@ -59,9 +99,9 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
const { confirm: acceptWarnings } = await prompts(
{
name: 'confirm',
type: 'confirm',
initial: false,
message,
type: 'confirm',
},
{
onCancel: () => {
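Two behaviours change here: the pool is now attached through `connectWithReconnect`, which re-dials one second after an `ECONNRESET`, and Drizzle receives the adapter's new `logger` flag. A hedged sketch of configuring the adapter with the options this branch adds (`logger`, `idType`); the connection string is a placeholder and `logger: true` assumes Drizzle's boolean logger form:

```ts
import { postgresAdapter } from '@payloadcms/db-postgres'

// Adapter config exercising the options touched in this diff:
// - pool options go to pg.Pool and are connected via connectWithReconnect
// - logger: true is forwarded to drizzle() to enable query logging
// - idType: 'uuid' switches default IDs from serial integers to UUIDs
export const db = postgresAdapter({
  idType: 'uuid',
  logger: true,
  pool: {
    connectionString: 'postgres://127.0.0.1:5432/payload', // placeholder
  },
})
```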
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable no-restricted-syntax, no-await-in-loop */
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/utils'
|
||||
import type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'
|
||||
import type { CreateMigration } from 'payload/database'
|
||||
|
||||
import fs from 'fs'
|
||||
@@ -53,14 +53,14 @@ const getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({
|
||||
|
||||
export const createMigration: CreateMigration = async function createMigration(
|
||||
this: PostgresAdapter,
|
||||
{ migrationName, payload },
|
||||
{ forceAcceptWarning, migrationName, payload },
|
||||
) {
|
||||
const dir = payload.db.migrationDir
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir)
|
||||
}
|
||||
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/utils')
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')
|
||||
|
||||
const [yyymmdd, hhmmss] = new Date().toISOString().split('T')
|
||||
const formattedDate = yyymmdd.replace(/\D/g, '')
|
||||
@@ -95,13 +95,13 @@ export const createMigration: CreateMigration = async function createMigration(
|
||||
const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)
|
||||
|
||||
if (!sqlStatementsUp.length && !sqlStatementsDown.length) {
|
||||
if (!sqlStatementsUp.length && !sqlStatementsDown.length && !forceAcceptWarning) {
|
||||
const { confirm: shouldCreateBlankMigration } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: 'No schema changes detected. Would you like to create a blank migration file?',
|
||||
type: 'confirm',
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
|
||||
@@ -158,7 +158,7 @@ export const findMany = async function find({
|
||||
query: db
|
||||
.select({
|
||||
count: sql<number>`count
|
||||
(*)`,
|
||||
(DISTINCT ${adapter.tables[tableName].id})`,
|
||||
})
|
||||
.from(table)
|
||||
.where(where),
|
||||
|
||||
@@ -42,7 +42,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
|
||||
export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
function adapter({ payload }: { payload: Payload }) {
|
||||
const migrationDir = findMigrationDir(args.migrationDir)
|
||||
|
||||
const idType = args.idType || 'serial'
|
||||
return createDatabaseAdapter<PostgresAdapter>({
|
||||
name: 'postgres',
|
||||
|
||||
@@ -50,6 +50,8 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
drizzle: undefined,
|
||||
enums: {},
|
||||
fieldConstraints: {},
|
||||
idType,
|
||||
logger: args.logger,
|
||||
pool: undefined,
|
||||
poolOptions: args.pool,
|
||||
push: args.push,
|
||||
@@ -67,7 +69,10 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
|
||||
createGlobalVersion,
|
||||
createMigration,
|
||||
createVersion,
|
||||
defaultIDType: 'number',
|
||||
/**
|
||||
* This represents how a default ID is treated in Payload as were a field type
|
||||
*/
|
||||
defaultIDType: idType === 'serial' ? 'number' : 'text',
|
||||
deleteMany,
|
||||
deleteOne,
|
||||
deleteVersions,
|
||||
|
||||
@@ -9,7 +9,6 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { PostgresAdapter } from './types'
|
||||
|
||||
import { buildTable } from './schema/build'
|
||||
import { getConfigIDType } from './schema/getConfigIDType'
|
||||
|
||||
export const init: Init = async function init(this: PostgresAdapter) {
|
||||
if (this.payload.config.localization) {
|
||||
@@ -24,9 +23,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!collection?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
fields: collection.fields,
|
||||
@@ -38,13 +37,11 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
const versionsTableName = `_${tableName}_v`
|
||||
const versionFields = buildVersionCollectionFields(collection)
|
||||
|
||||
const versionsParentIDColType = getConfigIDType(collection.fields)
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!collection.versions?.drafts,
|
||||
disableUnique: true,
|
||||
fields: versionFields,
|
||||
@@ -59,9 +56,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!global?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
fields: global.fields,
|
||||
@@ -75,9 +72,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
|
||||
|
||||
buildTable({
|
||||
adapter: this,
|
||||
buildTexts: true,
|
||||
buildNumbers: true,
|
||||
buildRelationships: true,
|
||||
buildTexts: true,
|
||||
disableNotNull: !!global.versions?.drafts,
|
||||
disableUnique: true,
|
||||
fields: versionFields,
|
||||
|
||||
@@ -80,7 +80,7 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
|
||||
}
|
||||
|
||||
async function runMigrationFile(payload: Payload, migration: Migration, batch: number) {
|
||||
const { generateDrizzleJson } = require('drizzle-kit/utils')
|
||||
const { generateDrizzleJson } = require('drizzle-kit/payload')
|
||||
|
||||
const start = Date.now()
|
||||
const req = { payload } as PayloadRequest
|
||||
|
||||
@@ -37,7 +37,7 @@ export async function migrateDown(this: PostgresAdapter): Promise<void> {
|
||||
}
|
||||
|
||||
const start = Date.now()
|
||||
const req = {} as PayloadRequest
|
||||
const req = { payload } as PayloadRequest
|
||||
|
||||
try {
|
||||
payload.logger.info({ msg: `Migrating down: ${migrationFile.name}` })
|
||||
|
||||
@@ -14,25 +14,30 @@ import { parseError } from './utilities/parseError'
|
||||
/**
|
||||
* Drop the current database and run all migrate up functions
|
||||
*/
|
||||
export async function migrateFresh(this: PostgresAdapter): Promise<void> {
|
||||
export async function migrateFresh(
|
||||
this: PostgresAdapter,
|
||||
{ forceAcceptWarning = false },
|
||||
): Promise<void> {
|
||||
const { payload } = this
|
||||
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
if (forceAcceptWarning === false) {
|
||||
const { confirm: acceptWarning } = await prompts(
|
||||
{
|
||||
name: 'confirm',
|
||||
type: 'confirm',
|
||||
initial: false,
|
||||
message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
|
||||
},
|
||||
},
|
||||
)
|
||||
{
|
||||
onCancel: () => {
|
||||
process.exit(0)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
if (!acceptWarning) {
|
||||
process.exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
payload.logger.info({
|
||||
|
||||
@@ -75,6 +75,7 @@ const buildQuery = async function buildQuery({
|
||||
pathSegments: sortPath.replace(/__/g, '.').split('.'),
|
||||
selectFields,
|
||||
tableName,
|
||||
value: sortPath,
|
||||
})
|
||||
orderBy.column = sortTable?.[sortTableColumnName]
|
||||
} catch (err) {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
|
||||
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'
|
||||
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { and, eq, like, sql } from 'drizzle-orm'
|
||||
import { alias } from 'drizzle-orm/pg-core'
|
||||
import { APIError } from 'payload/errors'
|
||||
import { fieldAffectsData, tabHasName } from 'payload/types'
|
||||
@@ -44,6 +44,14 @@ type Args = {
|
||||
rootTableName?: string
|
||||
selectFields: Record<string, GenericColumn>
|
||||
tableName: string
|
||||
/**
|
||||
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
|
||||
*/
|
||||
tableNameSuffix?: string
|
||||
/**
|
||||
* The raw value of the query before sanitization
|
||||
*/
|
||||
value: unknown
|
||||
}
|
||||
/**
|
||||
* Transforms path to table and column name
|
||||
@@ -65,6 +73,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName: incomingRootTableName,
|
||||
selectFields,
|
||||
tableName,
|
||||
tableNameSuffix = '',
|
||||
value,
|
||||
}: Args): TableColumn => {
|
||||
const fieldPath = incomingSegments[0]
|
||||
let locale = incomingLocale
|
||||
@@ -83,8 +93,8 @@ export const getTableColumnFromPath = ({
|
||||
constraints,
|
||||
field: {
|
||||
name: 'id',
|
||||
type: 'number',
|
||||
},
|
||||
type: adapter.idType === 'uuid' ? 'text' : 'number',
|
||||
} as TextField | NumberField,
|
||||
table: adapter.tables[newTableName],
|
||||
}
|
||||
}
|
||||
@@ -125,6 +135,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
case 'tab': {
|
||||
@@ -134,7 +146,7 @@ export const getTableColumnFromPath = ({
|
||||
aliasTable,
|
||||
collectionPath,
|
||||
columnPrefix: `${columnPrefix}${field.name}_`,
|
||||
constraintPath,
|
||||
constraintPath: `${constraintPath}${field.name}.`,
|
||||
constraints,
|
||||
fields: field.fields,
|
||||
joinAliases,
|
||||
@@ -144,6 +156,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
return getTableColumnFromPath({
|
||||
@@ -161,6 +175,8 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -185,7 +201,7 @@ export const getTableColumnFromPath = ({
|
||||
aliasTable,
|
||||
collectionPath,
|
||||
columnPrefix: `${columnPrefix}${field.name}_`,
|
||||
constraintPath,
|
||||
constraintPath: `${constraintPath}${field.name}.`,
|
||||
constraints,
|
||||
fields: field.fields,
|
||||
joinAliases,
|
||||
@@ -195,11 +211,13 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
case 'array': {
|
||||
newTableName = `${tableName}_${toSnakeCase(field.name)}`
|
||||
newTableName = `${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
if (locale && field.localized && adapter.payload.config.localization) {
|
||||
joins[newTableName] = and(
|
||||
@@ -232,12 +250,39 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
let blockTableColumn: TableColumn
|
||||
let newTableName: string
|
||||
|
||||
// handle blockType queries
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
// find the block config using the value
|
||||
const blockTypes = Array.isArray(value) ? value : [value]
|
||||
blockTypes.forEach((blockType) => {
|
||||
const block = field.blocks.find((block) => block.slug === blockType)
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
joins[newTableName] = eq(
|
||||
adapter.tables[tableName].id,
|
||||
adapter.tables[newTableName]._parentID,
|
||||
)
|
||||
constraints.push({
|
||||
columnName: '_path',
|
||||
table: adapter.tables[newTableName],
|
||||
value: pathSegments[0],
|
||||
})
|
||||
})
|
||||
return {
|
||||
constraints,
|
||||
field,
|
||||
getNotNullColumnByValue: () => 'id',
|
||||
table: adapter.tables[tableName],
|
||||
}
|
||||
}
|
||||
|
||||
const hasBlockField = field.blocks.some((block) => {
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
@@ -258,6 +303,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields: blockSelectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
} catch (error) {
|
||||
// this is fine, not every block will have the field
|
||||
@@ -298,9 +344,6 @@ export const getTableColumnFromPath = ({
|
||||
table: blockTableColumn.table,
|
||||
}
|
||||
}
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
throw new APIError('Querying on blockType is not supported')
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
@@ -317,21 +360,15 @@ export const getTableColumnFromPath = ({
|
||||
|
||||
// Join in the relationships table
|
||||
joinAliases.push({
|
||||
condition: eq(
|
||||
(aliasTable || adapter.tables[rootTableName]).id,
|
||||
aliasRelationshipTable.parent,
|
||||
condition: and(
|
||||
eq((aliasTable || adapter.tables[rootTableName]).id, aliasRelationshipTable.parent),
|
||||
like(aliasRelationshipTable.path, `${constraintPath}${field.name}`),
|
||||
),
|
||||
table: aliasRelationshipTable,
|
||||
})
|
||||
|
||||
selectFields[`${relationTableName}.path`] = aliasRelationshipTable.path
|
||||
|
||||
constraints.push({
|
||||
columnName: 'path',
|
||||
table: aliasRelationshipTable,
|
||||
value: `${constraintPath}${field.name}`,
|
||||
})
|
||||
|
||||
let newAliasTable
|
||||
|
||||
if (typeof field.relationTo === 'string') {
|
||||
@@ -346,7 +383,7 @@ export const getTableColumnFromPath = ({
|
||||
table: newAliasTable,
|
||||
})
|
||||
|
||||
if (newCollectionPath === '') {
|
||||
if (newCollectionPath === '' || newCollectionPath === 'id') {
|
||||
return {
|
||||
columnName: `${field.relationTo}ID`,
|
||||
constraints,
|
||||
@@ -394,6 +431,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName: newTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -428,7 +466,7 @@ export const getTableColumnFromPath = ({
|
||||
columnName: `${columnPrefix}${field.name}`,
|
||||
constraints,
|
||||
field,
|
||||
pathSegments: pathSegments,
|
||||
pathSegments,
|
||||
table: targetTable,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,11 +63,7 @@ export async function parseParams({
|
||||
where: condition,
|
||||
})
|
||||
if (builtConditions.length > 0) {
|
||||
if (result) {
|
||||
result = operatorMap[conditionOperator](result, ...builtConditions)
|
||||
} else {
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
} else {
|
||||
// It's a path - and there can be multiple comparisons on a single path.
|
||||
@@ -77,6 +73,7 @@ export async function parseParams({
|
||||
if (typeof pathOperators === 'object') {
|
||||
for (const operator of Object.keys(pathOperators)) {
|
||||
if (validOperators.includes(operator as Operator)) {
|
||||
const val = where[relationOrPath][operator]
|
||||
const {
|
||||
columnName,
|
||||
constraints: queryConstraints,
|
||||
@@ -95,10 +92,9 @@ export async function parseParams({
|
||||
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
|
||||
selectFields,
|
||||
tableName,
|
||||
value: val,
|
||||
})
|
||||
|
||||
const val = where[relationOrPath][operator]
|
||||
|
||||
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
|
||||
if (typeof value === 'string' && value.indexOf('%') > -1) {
|
||||
constraints.push(operatorMap.like(constraintTable[col], value))
|
||||
@@ -169,6 +165,7 @@ export async function parseParams({
|
||||
}
|
||||
|
||||
const sanitizedQueryValue = sanitizeQueryValue({
|
||||
adapter,
|
||||
field,
|
||||
operator,
|
||||
relationOrPath,
|
||||
@@ -207,6 +204,16 @@ export async function parseParams({
|
||||
break
|
||||
}
|
||||
|
||||
if (operator === 'equals' && queryValue === null) {
|
||||
constraints.push(isNull(rawColumn || table[columnName]))
|
||||
break
|
||||
}
|
||||
|
||||
if (operator === 'not_equals' && queryValue === null) {
|
||||
constraints.push(isNotNull(rawColumn || table[columnName]))
|
||||
break
|
||||
}
|
||||
|
||||
constraints.push(
|
||||
operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
|
||||
)
|
||||
|
||||
@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
|
||||
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
|
||||
import { createArrayFromCommaDelineated } from 'payload/utilities'
|
||||
|
||||
import type { PostgresAdapter } from '../types'
|
||||
|
||||
type SanitizeQueryValueArgs = {
|
||||
adapter: PostgresAdapter
|
||||
field: Field | TabAsField
|
||||
operator: string
|
||||
relationOrPath: string
|
||||
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
|
||||
}
|
||||
|
||||
export const sanitizeQueryValue = ({
|
||||
adapter,
|
||||
field,
|
||||
operator: operatorArg,
|
||||
relationOrPath,
|
||||
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
|
||||
) {
|
||||
const allPossibleIDTypes: (number | string)[] = []
|
||||
formattedValue.forEach((val) => {
|
||||
if (typeof val === 'string') {
|
||||
if (adapter.idType !== 'uuid' && typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val, parseInt(val))
|
||||
} else if (typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val)
|
||||
} else {
|
||||
allPossibleIDTypes.push(val, String(val))
|
||||
}
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
// type GenerateMigration = (before: DrizzleSnapshotJSON, after: DrizzleSnapshotJSON) => string[]
|
||||
|
||||
// type GenerateDrizzleJSON = (schema: DrizzleSchemaExports) => DrizzleSnapshotJSON
|
||||
|
||||
// type PushDiff = (schema: DrizzleSchemaExports) => Promise<{ warnings: string[], apply: () => Promise<void> }>
|
||||
|
||||
// drizzle-kit@utils
|
||||
|
||||
import { drizzle } from 'drizzle-orm/node-postgres'
|
||||
import { Pool } from 'pg'
|
||||
|
||||
async function generateUsage() {
|
||||
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/utils')
|
||||
|
||||
// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
|
||||
const schema = await import('./data/users')
|
||||
// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
|
||||
const schemaAfter = await import('./data/users-after')
|
||||
|
||||
const drizzleJsonBefore = generateDrizzleJson(schema)
|
||||
const drizzleJsonAfter = generateDrizzleJson(schemaAfter)
|
||||
|
||||
const sqlStatements = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)
|
||||
|
||||
console.log(sqlStatements)
|
||||
}
|
||||
|
||||
async function pushUsage() {
|
||||
const { pushSchema } = require('drizzle-kit/utils')
|
||||
|
||||
// @ts-expect-error Just TypeScript being broken // TODO: Open TypeScript issue
|
||||
const schemaAfter = await import('./data/users-after')
|
||||
|
||||
const db = drizzle(new Pool({ connectionString: '' }))
|
||||
|
||||
const response = await pushSchema(schemaAfter, db)
|
||||
|
||||
console.log('\n')
|
||||
console.log('hasDataLoss: ', response.hasDataLoss)
|
||||
console.log('warnings: ', response.warnings)
|
||||
console.log('statements: ', response.statementsToExecute)
|
||||
|
||||
await response.apply()
|
||||
|
||||
process.exit(0)
|
||||
}
|
||||
@@ -17,19 +17,19 @@ import {
|
||||
import { fieldAffectsData } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { getConfigIDType } from './getConfigIDType'
|
||||
import { parentIDColumnMap } from './parentIDColumnMap'
|
||||
import { setColumnID } from './setColumnID'
|
||||
import { traverseFields } from './traverseFields'
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
baseColumns?: Record<string, PgColumnBuilder>
|
||||
baseExtraConfig?: Record<string, (cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder>
|
||||
buildTexts?: boolean
|
||||
buildNumbers?: boolean
|
||||
buildRelationships?: boolean
|
||||
buildTexts?: boolean
|
||||
disableNotNull: boolean
|
||||
disableUnique: boolean
|
||||
fields: Field[]
|
||||
@@ -42,8 +42,8 @@ type Args = {
|
||||
}
|
||||
|
||||
type Result = {
|
||||
hasManyTextField: 'index' | boolean
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
relationsToBuild: Map<string, string>
|
||||
}
|
||||
|
||||
@@ -51,9 +51,9 @@ export const buildTable = ({
|
||||
adapter,
|
||||
baseColumns = {},
|
||||
baseExtraConfig = {},
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
disableNotNull,
|
||||
disableUnique = false,
|
||||
fields,
|
||||
@@ -89,27 +89,20 @@ export const buildTable = ({
|
||||
// Drizzle relations
|
||||
const relationsToBuild: Map<string, string> = new Map()
|
||||
|
||||
const idColType = getConfigIDType(fields)
|
||||
const idColType: IDType = setColumnID({ adapter, columns, fields })
|
||||
|
||||
const idColTypeMap = {
|
||||
integer: serial,
|
||||
numeric,
|
||||
varchar,
|
||||
}
|
||||
|
||||
columns.id = idColTypeMap[idColType]('id').primaryKey()
|
||||
;({
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyTextField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columns,
|
||||
disableNotNull,
|
||||
disableUnique,
|
||||
@@ -196,12 +189,12 @@ export const buildTable = ({
|
||||
const textsTableName = `${rootTableName}_texts`
|
||||
const columns: Record<string, PgColumnBuilder> = {
|
||||
id: serial('id').primaryKey(),
|
||||
text: varchar('text'),
|
||||
order: integer('order').notNull(),
|
||||
parent: parentIDColumnMap[idColType]('parent_id')
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull(),
|
||||
path: varchar('path').notNull(),
|
||||
text: varchar('text'),
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
@@ -210,15 +203,15 @@ export const buildTable = ({
|
||||
|
||||
textsTable = pgTable(textsTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
|
||||
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyTextField === 'index') {
|
||||
indexes.text_idx = index('text_idx').on(cols.text)
|
||||
indexes.text_idx = index(`${textsTableName}_text_idx`).on(cols.text)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyTextField) {
|
||||
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
|
||||
indexes.localeParent = index(`${textsTableName}_locale_parent`).on(cols.locale, cols.parent)
|
||||
}
|
||||
|
||||
return indexes
|
||||
@@ -254,15 +247,18 @@ export const buildTable = ({
|
||||
|
||||
numbersTable = pgTable(numbersTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index('order_parent_idx').on(cols.order, cols.parent),
|
||||
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
|
||||
if (hasManyNumberField === 'index') {
|
||||
indexes.numberIdx = index('number_idx').on(cols.number)
|
||||
indexes.numberIdx = index(`${numbersTableName}_number_idx`).on(cols.number)
|
||||
}
|
||||
|
||||
if (hasLocalizedManyNumberField) {
|
||||
indexes.localeParent = index('locale_parent').on(cols.locale, cols.parent)
|
||||
indexes.localeParent = index(`${numbersTableName}_locale_parent`).on(
|
||||
cols.locale,
|
||||
cols.parent,
|
||||
)
|
||||
}
|
||||
|
||||
return indexes
|
||||
@@ -297,7 +293,7 @@ export const buildTable = ({
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const formattedRelationTo = toSnakeCase(relationTo)
|
||||
let colType = 'integer'
|
||||
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
|
||||
const relatedCollectionCustomID = adapter.payload.collections[
|
||||
relationTo
|
||||
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
@@ -313,13 +309,13 @@ export const buildTable = ({
|
||||
|
||||
relationshipsTable = pgTable(relationshipsTableName, relationshipColumns, (cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index('order_idx').on(cols.order),
|
||||
parentIdx: index('parent_idx').on(cols.parent),
|
||||
pathIdx: index('path_idx').on(cols.path),
|
||||
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
|
||||
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
|
||||
}
|
||||
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index('locale_idx').on(cols.locale)
|
||||
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
return result
|
||||
@@ -381,5 +377,5 @@ export const buildTable = ({
|
||||
|
||||
adapter.relations[`relations_${tableName}`] = tableRelations
|
||||
|
||||
return { hasManyTextField, hasManyNumberField, relationsToBuild }
|
||||
return { hasManyNumberField, hasManyTextField, relationsToBuild }
|
||||
}
|
||||
|
||||
@@ -6,10 +6,11 @@ import type { GenericColumn } from '../types'
|
||||
type CreateIndexArgs = {
|
||||
columnName: string
|
||||
name: string | string[]
|
||||
tableName: string
|
||||
unique?: boolean
|
||||
}
|
||||
|
||||
export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
export const createIndex = ({ name, columnName, tableName, unique }: CreateIndexArgs) => {
|
||||
return (table: { [x: string]: GenericColumn }) => {
|
||||
let columns
|
||||
if (Array.isArray(name)) {
|
||||
@@ -20,7 +21,8 @@ export const createIndex = ({ name, columnName, unique }: CreateIndexArgs) => {
|
||||
} else {
|
||||
columns = [table[name]]
|
||||
}
|
||||
if (unique) return uniqueIndex(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
if (unique)
|
||||
return uniqueIndex(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
return index(`${tableName}_${columnName}_idx`).on(columns[0], ...columns.slice(1))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import { type Field, fieldAffectsData } from 'payload/types'
|
||||
|
||||
export const getConfigIDType = (fields: Field[]): string => {
|
||||
const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
|
||||
if (idField) {
|
||||
if (idField.type === 'number') {
|
||||
return 'numeric'
|
||||
}
|
||||
|
||||
if (idField.type === 'text') {
|
||||
return 'varchar'
|
||||
}
|
||||
}
|
||||
|
||||
return 'integer'
|
||||
}
|
||||
@@ -1,7 +1,13 @@
|
||||
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
|
||||
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'
|
||||
|
||||
export const parentIDColumnMap = {
|
||||
import type { IDType } from '../types'
|
||||
|
||||
export const parentIDColumnMap: Record<
|
||||
IDType,
|
||||
typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
|
||||
> = {
|
||||
integer,
|
||||
numeric,
|
||||
uuid,
|
||||
varchar,
|
||||
}
|
||||
|
||||
packages/db-postgres/src/schema/setColumnID.ts (33 changes, new file)
@@ -0,0 +1,33 @@
|
||||
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'
|
||||
|
||||
import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
|
||||
import { type Field, fieldAffectsData } from 'payload/types'
|
||||
import { flattenTopLevelFields } from 'payload/utilities'
|
||||
|
||||
import type { IDType, PostgresAdapter } from '../types'
|
||||
|
||||
type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
|
||||
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
|
||||
const idField = flattenTopLevelFields(fields).find(
|
||||
(field) => fieldAffectsData(field) && field.name === 'id',
|
||||
)
|
||||
if (idField) {
|
||||
if (idField.type === 'number') {
|
||||
columns.id = numeric('id').primaryKey()
|
||||
return 'numeric'
|
||||
}
|
||||
|
||||
if (idField.type === 'text') {
|
||||
columns.id = varchar('id').primaryKey()
|
||||
return 'varchar'
|
||||
}
|
||||
}
|
||||
|
||||
if (adapter.idType === 'uuid') {
|
||||
columns.id = uuid('id').defaultRandom().primaryKey()
|
||||
return 'uuid'
|
||||
}
|
||||
|
||||
columns.id = serial('id').primaryKey()
|
||||
return 'integer'
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
PgNumericBuilder,
|
||||
PgUUIDBuilder,
|
||||
PgVarcharBuilder,
|
||||
boolean,
|
||||
index,
|
||||
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
|
||||
import { fieldAffectsData, optionIsObject } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { hasLocalesTable } from '../utilities/hasLocalesTable'
|
||||
import { buildTable } from './build'
|
||||
@@ -32,9 +33,9 @@ import { validateExistingBlockIsIdentical } from './validateExistingBlockIsIdent
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
buildTexts: boolean
|
||||
buildNumbers: boolean
|
||||
buildRelationships: boolean
|
||||
buildTexts: boolean
|
||||
columnPrefix?: string
|
||||
columns: Record<string, PgColumnBuilder>
|
||||
disableNotNull: boolean
|
||||
@@ -56,18 +57,18 @@ type Args = {
|
||||
|
||||
type Result = {
|
||||
hasLocalizedField: boolean
|
||||
hasLocalizedManyTextField: boolean
|
||||
hasLocalizedManyNumberField: boolean
|
||||
hasLocalizedManyTextField: boolean
|
||||
hasLocalizedRelationshipField: boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
hasManyNumberField: 'index' | boolean
|
||||
hasManyTextField: 'index' | boolean
|
||||
}
|
||||
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull,
|
||||
@@ -93,7 +94,8 @@ export const traverseFields = ({
|
||||
let hasManyNumberField: 'index' | boolean = false
|
||||
let hasLocalizedManyNumberField = false
|
||||
|
||||
let parentIDColType = 'integer'
|
||||
let parentIDColType: IDType = 'integer'
|
||||
if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
|
||||
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
|
||||
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'
|
||||
|
||||
@@ -122,7 +124,7 @@ export const traverseFields = ({
|
||||
if (
|
||||
(field.unique || field.index) &&
|
||||
!['array', 'blocks', 'group', 'point', 'relationship', 'upload'].includes(field.type) &&
|
||||
!(field.type === 'number' && field.hasMany === true)
|
||||
!('hasMany' in field && field.hasMany === true)
|
||||
) {
|
||||
const unique = disableUnique !== true && field.unique
|
||||
if (unique) {
|
||||
@@ -132,9 +134,10 @@ export const traverseFields = ({
|
||||
}
|
||||
adapter.fieldConstraints[rootTableName][`${columnName}_idx`] = constraintValue
|
||||
}
|
||||
targetIndexes[`${field.name}Idx`] = createIndex({
|
||||
targetIndexes[`${newTableName}_${field.name}Idx`] = createIndex({
|
||||
name: fieldName,
|
||||
columnName,
|
||||
tableName: newTableName,
|
||||
unique,
|
||||
})
|
||||
}
|
||||
@@ -241,17 +244,18 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
orderIdx: (cols) => index('order_idx').on(cols.order),
|
||||
parentIdx: (cols) => index('parent_idx').on(cols.parent),
|
||||
orderIdx: (cols) => index(`${selectTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: (cols) => index(`${selectTableName}_parent_idx`).on(cols.parent),
|
||||
}
|
||||
|
||||
if (field.localized) {
|
||||
baseColumns.locale = adapter.enums.enum__locales('locale').notNull()
|
||||
baseExtraConfig.localeIdx = (cols) => index('locale_idx').on(cols.locale)
|
||||
baseExtraConfig.localeIdx = (cols) =>
|
||||
index(`${selectTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
if (field.index) {
|
||||
baseExtraConfig.value = (cols) => index('value_idx').on(cols.value)
|
||||
baseExtraConfig.value = (cols) => index(`${selectTableName}_value_idx`).on(cols.value)
|
||||
}
|
||||
|
||||
buildTable({
|
||||
@@ -304,18 +308,19 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
_orderIdx: (cols) => index('_order_idx').on(cols._order),
|
||||
_parentIDIdx: (cols) => index('_parent_id_idx').on(cols._parentID),
|
||||
_orderIdx: (cols) => index(`${arrayTableName}_order_idx`).on(cols._order),
|
||||
_parentIDIdx: (cols) => index(`${arrayTableName}_parent_id_idx`).on(cols._parentID),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
|
||||
baseExtraConfig._localeIdx = (cols) => index('_locale_idx').on(cols._locale)
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${arrayTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const {
|
||||
hasManyTextField: subHasManyTextField,
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
} = buildTable({
|
||||
adapter,
|
||||
@@ -384,19 +389,20 @@ export const traverseFields = ({
|
||||
string,
|
||||
(cols: GenericColumns) => IndexBuilder | UniqueConstraintBuilder
|
||||
> = {
|
||||
_orderIdx: (cols) => index('order_idx').on(cols._order),
|
||||
_parentIDIdx: (cols) => index('parent_id_idx').on(cols._parentID),
|
||||
_pathIdx: (cols) => index('path_idx').on(cols._path),
|
||||
_orderIdx: (cols) => index(`${blockTableName}_order_idx`).on(cols._order),
|
||||
_parentIDIdx: (cols) => index(`${blockTableName}_parent_id_idx`).on(cols._parentID),
|
||||
_pathIdx: (cols) => index(`${blockTableName}_path_idx`).on(cols._path),
|
||||
}
|
||||
|
||||
if (field.localized && adapter.payload.config.localization) {
|
||||
baseColumns._locale = adapter.enums.enum__locales('_locale').notNull()
|
||||
baseExtraConfig._localeIdx = (cols) => index('locale_idx').on(cols._locale)
|
||||
baseExtraConfig._localeIdx = (cols) =>
|
||||
index(`${blockTableName}_locale_idx`).on(cols._locale)
|
||||
}
|
||||
|
||||
const {
|
||||
hasManyTextField: subHasManyTextField,
|
||||
hasManyNumberField: subHasManyNumberField,
|
||||
hasManyTextField: subHasManyTextField,
|
||||
relationsToBuild: subRelationsToBuild,
|
||||
} = buildTable({
|
||||
adapter,
|
||||
@@ -465,16 +471,16 @@ export const traverseFields = ({
|
||||
if (!('name' in field)) {
|
||||
const {
|
||||
hasLocalizedField: groupHasLocalizedField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
hasManyNumberField: groupHasManyNumberField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull,
|
||||
@@ -507,16 +513,16 @@ export const traverseFields = ({
|
||||
|
||||
const {
|
||||
hasLocalizedField: groupHasLocalizedField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField: groupHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: groupHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: groupHasLocalizedRelationshipField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
hasManyNumberField: groupHasManyNumberField,
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix: `${columnName}_`,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -550,16 +556,16 @@ export const traverseFields = ({
|
||||
|
||||
const {
|
||||
hasLocalizedField: tabHasLocalizedField,
|
||||
hasLocalizedManyTextField: tabHasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField: tabHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: tabHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: tabHasLocalizedRelationshipField,
|
||||
hasManyTextField: tabHasManyTextField,
|
||||
hasManyNumberField: tabHasManyNumberField,
|
||||
hasManyTextField: tabHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -593,16 +599,16 @@ export const traverseFields = ({
|
||||
const disableNotNullFromHere = Boolean(field.admin?.condition) || disableNotNull
|
||||
const {
|
||||
hasLocalizedField: rowHasLocalizedField,
|
||||
hasLocalizedManyTextField: rowHasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField: rowHasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField: rowHasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField: rowHasLocalizedRelationshipField,
|
||||
hasManyTextField: rowHasManyTextField,
|
||||
hasManyNumberField: rowHasManyNumberField,
|
||||
hasManyTextField: rowHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
buildTexts,
|
||||
buildNumbers,
|
||||
buildRelationships,
|
||||
buildTexts,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -663,10 +669,10 @@ export const traverseFields = ({
|
||||
|
||||
return {
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedManyNumberField,
|
||||
hasLocalizedManyTextField,
|
||||
hasLocalizedRelationshipField,
|
||||
hasManyTextField,
|
||||
hasManyNumberField,
|
||||
hasManyTextField,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,10 @@ const getFlattenedFieldNames = (fields: Field[], prefix: string = ''): string[]
|
||||
return fields.reduce((fieldsToUse, field) => {
|
||||
let fieldPrefix = prefix
|
||||
|
||||
if (field.type === 'blocks') {
|
||||
if (
|
||||
['array', 'blocks', 'relationship', 'upload'].includes(field.type) ||
|
||||
('hasMany' in field && field.hasMany === true)
|
||||
) {
|
||||
return fieldsToUse
|
||||
}
|
||||
|
||||
@@ -54,29 +57,27 @@ export const validateExistingBlockIsIdentical = ({
|
||||
rootTableName,
|
||||
table,
|
||||
}: Args): void => {
|
||||
if (table) {
|
||||
const fieldNames = getFlattenedFieldNames(block.fields)
|
||||
const fieldNames = getFlattenedFieldNames(block.fields)
|
||||
|
||||
const missingField =
|
||||
// ensure every field from the config is in the matching table
|
||||
fieldNames.find((name) => Object.keys(table).indexOf(name) === -1) ||
|
||||
// ensure every table column is matched for every field from the config
|
||||
Object.keys(table).find((fieldName) => {
|
||||
if (!['_locale', '_order', '_parentID', '_path', '_uuid'].includes(fieldName)) {
|
||||
return fieldNames.indexOf(fieldName) === -1
|
||||
}
|
||||
})
|
||||
const missingField =
|
||||
// ensure every field from the config is in the matching table
|
||||
fieldNames.find((name) => Object.keys(table).indexOf(name) === -1) ||
|
||||
// ensure every table column is matched for every field from the config
|
||||
Object.keys(table).find((fieldName) => {
|
||||
if (!['_locale', '_order', '_parentID', '_path', '_uuid'].includes(fieldName)) {
|
||||
return fieldNames.indexOf(fieldName) === -1
|
||||
}
|
||||
})
|
||||
|
||||
if (missingField) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One block includes the field ${missingField}, while the other block does not.`,
|
||||
)
|
||||
}
|
||||
if (missingField) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One block includes the field ${missingField}, while the other block does not.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (Boolean(localized) !== Boolean(table._locale)) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One is localized, but another is not. Block schemas of the same name must match exactly.`,
|
||||
)
|
||||
}
|
||||
if (Boolean(localized) !== Boolean(table._locale)) {
|
||||
throw new InvalidConfiguration(
|
||||
`The table ${rootTableName} has multiple blocks with slug ${block.slug}, but the schemas do not match. One is localized, but another is not. Block schemas of the same name must match exactly.`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ import { fieldAffectsData } from 'payload/types'
|
||||
import type { BlocksMap } from '../../utilities/createBlocksMap'
|
||||
|
||||
import { transformHasManyNumber } from './hasManyNumber'
|
||||
import { transformRelationship } from './relationship'
|
||||
import { transformHasManyText } from './hasManyText'
|
||||
import { transformRelationship } from './relationship'
|
||||
|
||||
type TraverseFieldsArgs = {
|
||||
/**
|
||||
@@ -35,10 +35,6 @@ type TraverseFieldsArgs = {
|
||||
* An array of Payload fields to traverse
|
||||
*/
|
||||
fields: (Field | TabAsField)[]
|
||||
/**
|
||||
* All hasMany text fields, as returned by Drizzle, keyed on an object by field path
|
||||
*/
|
||||
texts: Record<string, Record<string, unknown>[]>
|
||||
/**
|
||||
* All hasMany number fields, as returned by Drizzle, keyed on an object by field path
|
||||
*/
|
||||
@@ -55,6 +51,10 @@ type TraverseFieldsArgs = {
|
||||
* Data structure representing the nearest table from db
|
||||
*/
|
||||
table: Record<string, unknown>
|
||||
/**
|
||||
* All hasMany text fields, as returned by Drizzle, keyed on an object by field path
|
||||
*/
|
||||
texts: Record<string, Record<string, unknown>[]>
|
||||
}
|
||||
|
||||
// Traverse fields recursively, transforming data
|
||||
@@ -66,11 +66,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
deletions,
|
||||
fieldPrefix,
|
||||
fields,
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
}: TraverseFieldsArgs): T => {
|
||||
const sanitizedPath = path ? `${path}.` : path
|
||||
|
||||
@@ -83,11 +83,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
deletions,
|
||||
fieldPrefix,
|
||||
fields: field.tabs.map((tab) => ({ ...tab, type: 'tab' })),
|
||||
texts,
|
||||
numbers,
|
||||
path,
|
||||
relationships,
|
||||
table,
|
||||
texts,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -103,17 +103,22 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix,
fields: field.fields,
texts,
numbers,
path,
relationships,
table,
texts,
})
}

if (fieldAffectsData(field)) {
const fieldName = `${fieldPrefix || ''}${field.name}`
const fieldData = table[fieldName]

if (fieldPrefix) {
deletions.push(() => delete table[fieldName])
}

if (field.type === 'array') {
if (Array.isArray(fieldData)) {
if (field.localized) {
@@ -135,13 +140,17 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: '',
fields: field.fields,
texts,
numbers,
path: `${sanitizedPath}${field.name}.${row._order - 1}`,
relationships,
table: row,
texts,
})

if ('_order' in rowResult) {
delete rowResult._order
}

arrayResult[locale].push(rowResult)
}

@@ -153,6 +162,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
row.id = row._uuid
delete row._uuid
}

if ('_order' in row) {
delete row._order
}

return traverseFields<T>({
blocks,
config,
@@ -160,11 +174,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: '',
fields: field.fields,
texts,
numbers,
path: `${sanitizedPath}${field.name}.${i}`,
relationships,
table: row,
texts,
})
})
}
@@ -204,11 +218,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: '',
fields: block.fields,
texts,
numbers,
path: `${blockFieldPath}.${row._order - 1}`,
relationships,
table: row,
texts,
})

delete blockResult._order
@@ -235,11 +249,11 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: '',
fields: block.fields,
texts,
numbers,
path: `${blockFieldPath}.${i}`,
relationships,
table: row,
texts,
})
}

@@ -316,15 +330,15 @@ export const traverseFields = <T extends Record<string, unknown>>({
transformHasManyText({
field,
locale,
textRows: texts,
ref: result,
textRows: texts,
})
})
} else {
transformHasManyText({
field,
textRows: textPathMatch,
ref: result,
textRows: textPathMatch,
})
}

@@ -420,13 +434,16 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: groupFieldPrefix,
fields: field.fields,
texts,
numbers,
path: `${sanitizedPath}${field.name}`,
relationships,
table,
texts,
})
})
if ('_order' in ref) {
delete ref._order
}
} else {
const groupData = {}

@@ -437,12 +454,15 @@ export const traverseFields = <T extends Record<string, unknown>>({
deletions,
fieldPrefix: groupFieldPrefix,
fields: field.fields,
texts,
numbers,
path: `${sanitizedPath}${field.name}`,
relationships,
table,
texts,
})
if ('_order' in ref) {
delete ref._order
}
}

break

@@ -1,6 +1,7 @@
import type {
ColumnBaseConfig,
ColumnDataType,
DrizzleConfig,
ExtractTablesWithRelations,
Relation,
Relations,
@@ -9,11 +10,14 @@ import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-post
import type { PgColumn, PgEnum, PgTableWithColumns, PgTransaction } from 'drizzle-orm/pg-core'
import type { Payload } from 'payload'
import type { BaseDatabaseAdapter } from 'payload/database'
import type { PayloadRequest } from 'payload/types'
import type { Pool, PoolConfig } from 'pg'

export type DrizzleDB = NodePgDatabase<Record<string, unknown>>

export type Args = {
idType?: 'serial' | 'uuid'
logger?: DrizzleConfig['logger']
migrationDir?: string
pool: PoolConfig
push?: boolean
@@ -48,6 +52,13 @@ export type DrizzleTransaction = PgTransaction<
export type PostgresAdapter = BaseDatabaseAdapter & {
drizzle: DrizzleDB
enums: Record<string, GenericEnum>
/**
* An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
idType: Args['idType']
logger: DrizzleConfig['logger']
pool: Pool
poolOptions: Args['pool']
push: boolean
@@ -61,17 +72,14 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
}
}
tables: Record<string, GenericTable>
/**
* An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
}

export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'

export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter

export type MigrateUpArgs = { payload: Payload }
export type MigrateDownArgs = { payload: Payload }
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }
export type MigrateDownArgs = { payload: Payload; req?: Partial<PayloadRequest> }

declare module 'payload' {
export interface DatabaseAdapter
@@ -79,6 +87,7 @@ declare module 'payload' {
BaseDatabaseAdapter {
drizzle: DrizzleDB
enums: Record<string, GenericEnum>
fieldConstraints: Record<string, Record<string, string>>
pool: Pool
push: boolean
relations: Record<string, GenericRelation>
@@ -91,6 +100,5 @@ declare module 'payload' {
}
}
tables: Record<string, GenericTable>
fieldConstraints: Record<string, Record<string, string>>
}
}

@@ -36,7 +36,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
}
}

const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
const parentID = parentRows[parentRowIndex].id

// Add any sub arrays that need to be created
// We will call this recursively below
@@ -61,8 +61,10 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
// Insert all corresponding arrays
// (one insert per array table)
for (const [tableName, row] of Object.entries(rowsByTable)) {
// the nested arrays need the ID for the parentID foreign key
let insertedRows: Args['parentRows']
if (row.rows.length > 0) {
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
}

// Insert locale rows
@@ -76,7 +78,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
adapter,
arrays: row.arrays,
db,
parentRows: row.rows,
parentRows: insertedRows,
})
}
}

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "2.8.0",
"version": "2.11.1",
"description": "Node, React and MongoDB Headless CMS and Application Framework",
"license": "MIT",
"main": "./dist/index.js",
@@ -59,7 +59,7 @@
"@faceless-ui/scroll-info": "1.3.0",
"@faceless-ui/window-info": "2.1.1",
"@monaco-editor/react": "4.5.1",
"@swc/core": "1.3.76",
"@swc/core": "1.3.107",
"@swc/register": "0.1.10",
"body-parser": "1.20.2",
"body-scroll-lock": "4.0.0-beta.0",
@@ -101,7 +101,6 @@
"jwt-decode": "3.1.2",
"md5": "2.3.0",
"method-override": "3.0.0",
"micro-memoize": "4.1.2",
"minimist": "1.2.8",
"mkdirp": "1.0.4",
"monaco-editor": "0.38.0",
@@ -193,7 +192,7 @@
"get-port": "5.1.1",
"mini-css-extract-plugin": "1.6.2",
"node-fetch": "2.6.12",
"nodemon": "3.0.1",
"nodemon": "3.0.3",
"object.assign": "4.1.4",
"object.entries": "1.1.6",
"passport-strategy": "1.0.0",

@@ -24,11 +24,16 @@ export const Collapsible: React.FC<Props> = ({
|
||||
}) => {
|
||||
const [collapsedLocal, setCollapsedLocal] = useState(Boolean(initCollapsed))
|
||||
const [hoveringToggle, setHoveringToggle] = useState(false)
|
||||
const isNested = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const { t } = useTranslation('fields')
|
||||
|
||||
const collapsed = typeof collapsedFromProps === 'boolean' ? collapsedFromProps : collapsedLocal
|
||||
|
||||
const toggleCollapsible = React.useCallback(() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}, [onToggle, collapsed])
|
||||
|
||||
return (
|
||||
<div
|
||||
className={[
|
||||
@@ -36,14 +41,14 @@ export const Collapsible: React.FC<Props> = ({
|
||||
className,
|
||||
dragHandleProps && `${baseClass}--has-drag-handle`,
|
||||
collapsed && `${baseClass}--collapsed`,
|
||||
isNested && `${baseClass}--nested`,
|
||||
withinCollapsible && `${baseClass}--nested`,
|
||||
hoveringToggle && `${baseClass}--hovered`,
|
||||
`${baseClass}--style-${collapsibleStyle}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
>
|
||||
<CollapsibleProvider>
|
||||
<CollapsibleProvider collapsed={collapsed} toggle={toggleCollapsible}>
|
||||
<div
|
||||
className={`${baseClass}__toggle-wrap`}
|
||||
onMouseEnter={() => setHoveringToggle(true)}
|
||||
@@ -65,10 +70,7 @@ export const Collapsible: React.FC<Props> = ({
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
onClick={() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}}
|
||||
onClick={toggleCollapsible}
|
||||
type="button"
|
||||
>
|
||||
<span>{t('toggleBlock')}</span>
|
||||
|
||||
@@ -1,14 +1,35 @@
|
||||
import React, { createContext, useContext } from 'react'
|
||||
|
||||
const Context = createContext(false)
|
||||
type ContextType = {
|
||||
collapsed: boolean
|
||||
isVisible: boolean
|
||||
toggle: () => void
|
||||
withinCollapsible: boolean
|
||||
}
|
||||
const Context = createContext({
|
||||
collapsed: false,
|
||||
isVisible: true,
|
||||
toggle: () => {},
|
||||
withinCollapsible: true,
|
||||
})
|
||||
|
||||
export const CollapsibleProvider: React.FC<{
|
||||
children?: React.ReactNode
|
||||
collapsed?: boolean
|
||||
toggle: () => void
|
||||
withinCollapsible?: boolean
|
||||
}> = ({ children, withinCollapsible = true }) => {
|
||||
return <Context.Provider value={withinCollapsible}>{children}</Context.Provider>
|
||||
}> = ({ children, collapsed, toggle, withinCollapsible = true }) => {
|
||||
const { collapsed: parentIsCollapsed, isVisible } = useCollapsible()
|
||||
|
||||
const contextValue = React.useMemo((): ContextType => {
|
||||
return {
|
||||
collapsed: Boolean(collapsed),
|
||||
isVisible: isVisible && !parentIsCollapsed,
|
||||
toggle,
|
||||
withinCollapsible,
|
||||
}
|
||||
}, [collapsed, withinCollapsible, toggle, parentIsCollapsed, isVisible])
|
||||
return <Context.Provider value={contextValue}>{children}</Context.Provider>
|
||||
}
|
||||
|
||||
export const useCollapsible = (): boolean => useContext(Context)
|
||||
|
||||
export default Context
|
||||
export const useCollapsible = (): ContextType => useContext(Context)
|
||||
|
||||
@@ -64,7 +64,7 @@ const DeleteDocument: React.FC<Props> = (props) => {
|
||||
if (res.status < 400) {
|
||||
setDeleting(false)
|
||||
toggleModal(modalSlug)
|
||||
toast.success(t('titleDeleted', { label: getTranslation(singular, i18n), title }))
|
||||
toast.success(json.message || t('titleDeleted', { label: getTranslation(singular, i18n), title }))
|
||||
return history.push(`${admin}/collections/${slug}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ export const DocumentControls: React.FC<{
|
||||
id?: string
|
||||
isAccountView?: boolean
|
||||
isEditing?: boolean
|
||||
permissions?: CollectionPermission | GlobalPermission | null
|
||||
permissions?: CollectionPermission | GlobalPermission
|
||||
}> = (props) => {
|
||||
const {
|
||||
id,
|
||||
|
||||
@@ -20,7 +20,7 @@ export const getCustomViews = (args: {
|
||||
? collection?.admin?.components?.views?.Edit
|
||||
: undefined
|
||||
|
||||
const defaultViewKeys = Object.keys(defaultCollectionViews)
|
||||
const defaultViewKeys = Object.keys(defaultCollectionViews())
|
||||
|
||||
customViews = Object.entries(collectionViewsConfig || {}).reduce((prev, [key, view]) => {
|
||||
if (defaultViewKeys.includes(key)) {
|
||||
@@ -38,7 +38,7 @@ export const getCustomViews = (args: {
|
||||
? global?.admin?.components?.views?.Edit
|
||||
: undefined
|
||||
|
||||
const defaultViewKeys = Object.keys(defaultGlobalViews)
|
||||
const defaultViewKeys = Object.keys(defaultGlobalViews())
|
||||
|
||||
customViews = Object.entries(globalViewsConfig || {}).reduce((prev, [key, view]) => {
|
||||
if (defaultViewKeys.includes(key)) {
|
||||
|
||||
@@ -133,9 +133,10 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
|
||||
const moreThanOneAvailableCollection = enabledCollectionConfigs.length > 1
|
||||
|
||||
useEffect(() => {
|
||||
const { slug, admin: { listSearchableFields } = {} } = selectedCollectionConfig
|
||||
const { slug, admin: { listSearchableFields } = {}, versions } = selectedCollectionConfig
|
||||
const params: {
|
||||
cacheBust?: number
|
||||
draft?: string
|
||||
limit?: number
|
||||
page?: number
|
||||
search?: string
|
||||
@@ -172,6 +173,7 @@ export const ListDrawerContent: React.FC<ListDrawerProps> = ({
|
||||
if (sort) params.sort = sort
|
||||
if (cacheBust) params.cacheBust = cacheBust
|
||||
if (copyOfWhere) params.where = copyOfWhere
|
||||
if (versions?.drafts) params.draft = 'true'
|
||||
|
||||
setParams(params)
|
||||
}, [
|
||||
|
||||
@@ -12,42 +12,83 @@ import { fieldAffectsData, fieldHasSubFields, tabHasName } from '../../../../../
|
||||
import getValueWithDefault from '../../../../../fields/getDefaultValue'
|
||||
import { iterateFields } from './iterateFields'
|
||||
|
||||
type Args = {
|
||||
export type AddFieldStatePromiseArgs = {
|
||||
/**
|
||||
* if all parents are localized, then the field is localized
|
||||
*/
|
||||
anyParentLocalized?: boolean
|
||||
config: SanitizedConfig
|
||||
data: Data
|
||||
field: NonPresentationalField
|
||||
/**
|
||||
* You can use this to filter down to only `localized` fields that require transalation (type: text, textarea, etc.). Another plugin might want to look for only `point` type fields to do some GIS function. With the filter function you can go in like a surgeon.
|
||||
*/
|
||||
filter?: (args: AddFieldStatePromiseArgs) => boolean
|
||||
/**
|
||||
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
|
||||
*/
|
||||
forceFullValue?: boolean
|
||||
fullData: Data
|
||||
id: number | string
|
||||
/**
|
||||
* Whether the field schema should be included in the state
|
||||
*/
|
||||
includeSchema?: boolean
|
||||
locale: string
|
||||
/**
|
||||
* Whether to omit parent fields in the state. @default false
|
||||
*/
|
||||
omitParents?: boolean
|
||||
operation: 'create' | 'update'
|
||||
passesCondition: boolean
|
||||
path: string
|
||||
preferences: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
/**
|
||||
* Whether to skip checking the field's condition. @default false
|
||||
*/
|
||||
skipConditionChecks?: boolean
|
||||
/**
|
||||
* Whether to skip validating the field. @default false
|
||||
*/
|
||||
skipValidation?: boolean
|
||||
state: Fields
|
||||
t: TFunction
|
||||
user: User
|
||||
}
|
||||
|
||||
export const addFieldStatePromise = async ({
|
||||
id,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
fullData,
|
||||
locale,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
/**
|
||||
* Flattens the fields schema and fields data.
|
||||
* The output is the field path (e.g. array.0.name) mapped to a FormField object.
|
||||
*/
|
||||
export const addFieldStatePromise = async (args: AddFieldStatePromiseArgs): Promise<void> => {
|
||||
const {
|
||||
id,
|
||||
anyParentLocalized = false,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
filter,
|
||||
forceFullValue = false,
|
||||
fullData,
|
||||
includeSchema = false,
|
||||
locale,
|
||||
omitParents = false,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks = false,
|
||||
skipValidation = false,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
} = args
|
||||
if (fieldAffectsData(field)) {
|
||||
const fieldState: FormField = {
|
||||
condition: field.admin?.condition,
|
||||
fieldSchema: includeSchema ? field : undefined,
|
||||
initialValue: undefined,
|
||||
passesCondition,
|
||||
valid: true,
|
||||
@@ -66,9 +107,9 @@ export const addFieldStatePromise = async ({
|
||||
data[field.name] = valueWithDefault
|
||||
}
|
||||
|
||||
let validationResult: boolean | string = true
|
||||
let validationResult: string | true = true
|
||||
|
||||
if (typeof fieldState.validate === 'function') {
|
||||
if (typeof fieldState.validate === 'function' && !skipValidation) {
|
||||
validationResult = await fieldState.validate(data?.[field.name], {
|
||||
...field,
|
||||
id,
|
||||
@@ -96,24 +137,36 @@ export const addFieldStatePromise = async ({
|
||||
const rowPath = `${path}${field.name}.${i}.`
|
||||
row.id = row?.id || new ObjectID().toHexString()
|
||||
|
||||
state[`${rowPath}id`] = {
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${rowPath}id`] = {
|
||||
fieldSchema: includeSchema
|
||||
? field.fields.find((field) => 'name' in field && field.name === 'id')
|
||||
: undefined,
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
}
|
||||
|
||||
acc.promises.push(
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: row,
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: rowPath,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -146,8 +199,8 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = null
|
||||
fieldState.initialValue = null
|
||||
} else {
|
||||
fieldState.value = arrayValue.length
|
||||
fieldState.initialValue = arrayValue.length
|
||||
fieldState.value = forceFullValue ? arrayValue : arrayValue.length
|
||||
fieldState.initialValue = forceFullValue ? arrayValue : arrayValue.length
|
||||
|
||||
if (arrayValue.length > 0) {
|
||||
fieldState.disableFormData = true
|
||||
@@ -157,7 +210,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.rows = rowMetadata
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -173,36 +228,60 @@ export const addFieldStatePromise = async ({
|
||||
if (block) {
|
||||
row.id = row?.id || new ObjectID().toHexString()
|
||||
|
||||
state[`${rowPath}id`] = {
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${rowPath}id`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'id',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.id,
|
||||
valid: true,
|
||||
value: row.id,
|
||||
}
|
||||
|
||||
state[`${rowPath}blockType`] = {
|
||||
initialValue: row.blockType,
|
||||
valid: true,
|
||||
value: row.blockType,
|
||||
}
|
||||
state[`${rowPath}blockType`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'blockType',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.blockType,
|
||||
valid: true,
|
||||
value: row.blockType,
|
||||
}
|
||||
|
||||
state[`${rowPath}blockName`] = {
|
||||
initialValue: row.blockName,
|
||||
valid: true,
|
||||
value: row.blockName,
|
||||
state[`${rowPath}blockName`] = {
|
||||
fieldSchema: includeSchema
|
||||
? block.fields.find(
|
||||
(blockField) => 'name' in blockField && blockField.name === 'blockName',
|
||||
)
|
||||
: undefined,
|
||||
initialValue: row.blockName,
|
||||
valid: true,
|
||||
value: row.blockName,
|
||||
}
|
||||
}
|
||||
|
||||
acc.promises.push(
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: row,
|
||||
fields: block.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: rowPath,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -237,8 +316,8 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = null
|
||||
fieldState.initialValue = null
|
||||
} else {
|
||||
fieldState.value = blocksValue.length
|
||||
fieldState.initialValue = blocksValue.length
|
||||
fieldState.value = forceFullValue ? blocksValue : blocksValue.length
|
||||
fieldState.initialValue = forceFullValue ? blocksValue : blocksValue.length
|
||||
|
||||
if (blocksValue.length > 0) {
|
||||
fieldState.disableFormData = true
|
||||
@@ -248,7 +327,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.rows = rowMetadata
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!omitParents && (!filter || filter(args))) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -256,15 +337,22 @@ export const addFieldStatePromise = async ({
|
||||
case 'group': {
|
||||
await iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data: data?.[field.name] || {},
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: `${path}${field.name}.`,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -324,7 +412,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.initialValue = relationshipValue
|
||||
}
|
||||
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -337,7 +427,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.value = relationshipValue
|
||||
fieldState.initialValue = relationshipValue
|
||||
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -347,7 +439,9 @@ export const addFieldStatePromise = async ({
|
||||
fieldState.initialValue = valueWithDefault
|
||||
|
||||
// Add field to state
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
if (!filter || filter(args)) {
|
||||
state[`${path}${field.name}`] = fieldState
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
@@ -356,15 +450,22 @@ export const addFieldStatePromise = async ({
|
||||
// Handle field types that do not use names (row, etc)
|
||||
await iterateFields({
|
||||
id,
|
||||
anyParentLocalized: field.localized || anyParentLocalized,
|
||||
config,
|
||||
data,
|
||||
fields: field.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
@@ -373,15 +474,22 @@ export const addFieldStatePromise = async ({
|
||||
const promises = field.tabs.map((tab) =>
|
||||
iterateFields({
|
||||
id,
|
||||
anyParentLocalized: tab.localized || anyParentLocalized,
|
||||
config,
|
||||
data: tabHasName(tab) ? data?.[tab.name] : data,
|
||||
fields: tab.fields,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
parentPassesCondition: passesCondition,
|
||||
path: tabHasName(tab) ? `${path}${tab.name}.` : path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
|
||||
@@ -4,65 +4,123 @@ import type { User } from '../../../../../auth'
|
||||
import type { SanitizedConfig } from '../../../../../config/types'
|
||||
import type { Field as FieldSchema } from '../../../../../fields/config/types'
|
||||
import type { Data, Fields } from '../types'
|
||||
import type { AddFieldStatePromiseArgs } from './addFieldStatePromise'
|
||||
|
||||
import { fieldIsPresentationalOnly } from '../../../../../fields/config/types'
|
||||
import { addFieldStatePromise } from './addFieldStatePromise'
|
||||
|
||||
type Args = {
|
||||
config: SanitizedConfig
|
||||
/**
|
||||
* if any parents is localized, then the field is localized. @default false
|
||||
*/
|
||||
anyParentLocalized?: boolean
|
||||
/**
|
||||
* config is only needed for validation
|
||||
*/
|
||||
config?: SanitizedConfig
|
||||
data: Data
|
||||
fields: FieldSchema[]
|
||||
filter?: (args: AddFieldStatePromiseArgs) => boolean
|
||||
/**
|
||||
* Force the value of fields like arrays or blocks to be the full value instead of the length @default false
|
||||
*/
|
||||
forceFullValue?: boolean
|
||||
fullData: Data
|
||||
id: number | string
|
||||
id?: number | string
|
||||
/**
|
||||
* Whether the field schema should be included in the state. @default false
|
||||
*/
|
||||
includeSchema?: boolean
|
||||
|
||||
/**
|
||||
* operation is only needed for checking field conditions
|
||||
*/
|
||||
locale: string
|
||||
/**
|
||||
* Whether to omit parent fields in the state. @default false
|
||||
*/
|
||||
omitParents?: boolean
|
||||
/**
|
||||
* operation is only needed for validation
|
||||
*/
|
||||
operation: 'create' | 'update'
|
||||
parentPassesCondition: boolean
|
||||
path: string
|
||||
preferences: {
|
||||
parentPassesCondition?: boolean
|
||||
/**
|
||||
* The initial path of the field. @default ''
|
||||
*/
|
||||
path?: string
|
||||
preferences?: {
|
||||
[key: string]: unknown
|
||||
}
|
||||
state: Fields
|
||||
/**
|
||||
* Whether to skip checking the field's condition. @default false
|
||||
*/
|
||||
skipConditionChecks?: boolean
|
||||
/**
|
||||
* Whether to skip validating the field. @default false
|
||||
*/
|
||||
skipValidation?: boolean
|
||||
state?: Fields
|
||||
t: TFunction
|
||||
user: User
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens the fields schema and fields data
|
||||
*/
|
||||
export const iterateFields = async ({
|
||||
id,
|
||||
anyParentLocalized = false,
|
||||
config,
|
||||
data,
|
||||
fields,
|
||||
filter,
|
||||
forceFullValue = false,
|
||||
fullData,
|
||||
includeSchema = false,
|
||||
locale,
|
||||
omitParents = false,
|
||||
operation,
|
||||
parentPassesCondition,
|
||||
parentPassesCondition = true,
|
||||
path = '',
|
||||
preferences,
|
||||
state,
|
||||
skipConditionChecks = false,
|
||||
skipValidation = false,
|
||||
state = {},
|
||||
t,
|
||||
user,
|
||||
}: Args): Promise<void> => {
|
||||
const promises = []
|
||||
fields.forEach((field) => {
|
||||
const initialData = data
|
||||
if (!fieldIsPresentationalOnly(field) && !field?.admin?.disabled) {
|
||||
const passesCondition = Boolean(
|
||||
(field?.admin?.condition
|
||||
? Boolean(field.admin.condition(fullData || {}, initialData || {}, { user }))
|
||||
: true) && parentPassesCondition,
|
||||
)
|
||||
let passesCondition = true
|
||||
if (!skipConditionChecks) {
|
||||
passesCondition = Boolean(
|
||||
(field?.admin?.condition
|
||||
? Boolean(field.admin.condition(fullData || {}, data || {}, { user }))
|
||||
: true) && parentPassesCondition,
|
||||
)
|
||||
}
|
||||
|
||||
promises.push(
|
||||
addFieldStatePromise({
|
||||
id,
|
||||
anyParentLocalized,
|
||||
config,
|
||||
data,
|
||||
field,
|
||||
filter,
|
||||
forceFullValue,
|
||||
fullData,
|
||||
includeSchema,
|
||||
locale,
|
||||
omitParents,
|
||||
operation,
|
||||
passesCondition,
|
||||
path,
|
||||
preferences,
|
||||
skipConditionChecks,
|
||||
skipValidation,
|
||||
state,
|
||||
t,
|
||||
user,
|
||||
|
||||
@@ -2,11 +2,22 @@ import { unflatten as flatleyUnflatten } from 'flatley'
|
||||
|
||||
import type { Data, Fields } from './types'
|
||||
|
||||
const reduceFieldsToValues = (fields: Fields, unflatten?: boolean): Data => {
|
||||
/**
|
||||
* Reduce flattened form fields (Fields) to just map to the respective values instead of the full FormField object
|
||||
*
|
||||
* @param unflatten This also unflattens the data if `unflatten` is true. The unflattened data should match the original data structure
|
||||
* @param ignoreDisableFormData - if true, will include fields that have `disableFormData` set to true, for example, blocks or arrays fields.
|
||||
*
|
||||
*/
|
||||
const reduceFieldsToValues = (
|
||||
fields: Fields,
|
||||
unflatten?: boolean,
|
||||
ignoreDisableFormData?: boolean,
|
||||
): Data => {
|
||||
const data = {}
|
||||
|
||||
Object.keys(fields).forEach((key) => {
|
||||
if (!fields[key].disableFormData) {
|
||||
if (ignoreDisableFormData === true || !fields[key].disableFormData) {
|
||||
data[key] = fields[key].value
|
||||
}
|
||||
})
|
||||
|
||||
@@ -20,6 +20,7 @@ export type FormField = {
|
||||
condition?: Condition
|
||||
disableFormData?: boolean
|
||||
errorMessage?: string
|
||||
fieldSchema?: FieldConfig
|
||||
initialValue: unknown
|
||||
passesCondition?: boolean
|
||||
rows?: Row[]
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
|
||||
& > .field-type {
|
||||
margin-bottom: var(--spacing-field);
|
||||
max-width: 100%;
|
||||
|
||||
&[type='hidden'] {
|
||||
margin-bottom: 0;
|
||||
|
||||
@@ -33,7 +33,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
permissions,
|
||||
} = props
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const isWithinGroup = useGroup()
|
||||
const isWithinRow = useRow()
|
||||
const isWithinTab = useTabs()
|
||||
@@ -43,7 +43,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
const groupHasErrors = submitted && errorCount > 0
|
||||
|
||||
const path = pathFromProps || name
|
||||
const isTopLevel = !(isWithinCollapsible || isWithinGroup || isWithinRow)
|
||||
const isTopLevel = !(withinCollapsible || isWithinGroup || isWithinRow)
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -51,7 +51,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
baseClass,
|
||||
isTopLevel && `${baseClass}--top-level`,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
isWithinGroup && `${baseClass}--within-group`,
|
||||
isWithinRow && `${baseClass}--within-row`,
|
||||
isWithinTab && `${baseClass}--within-tab`,
|
||||
|
||||
@@ -151,7 +151,7 @@ const NumberField: React.FC<Props> = (props) => {
|
||||
if (isOverHasMany) {
|
||||
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
|
||||
}
|
||||
return t('general:noOptions')
|
||||
return null
|
||||
}}
|
||||
numberOnly
|
||||
onChange={handleHasManyChange}
|
||||
@@ -170,7 +170,7 @@ const NumberField: React.FC<Props> = (props) => {
|
||||
onChange={handleChange}
|
||||
onWheel={(e) => {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
// @ts-expect-error
|
||||
e.target.blur()
|
||||
}}
|
||||
placeholder={getTranslation(placeholder, i18n)}
|
||||
|
||||
@@ -9,7 +9,7 @@ const reduceToIDs = (options) =>
|
||||
return [...ids, ...reduceToIDs(option.options)]
|
||||
}
|
||||
|
||||
return [...ids, option.value]
|
||||
return [...ids, { id: option.value, relationTo: option.relationTo }]
|
||||
}, [])
|
||||
|
||||
const sortOptions = (options: Option[]): Option[] =>
|
||||
@@ -63,10 +63,12 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
|
||||
const optionsToAddTo = newOptions.find(
|
||||
(optionGroup) => optionGroup.label === collection.labels.plural,
|
||||
)
|
||||
|
||||
const newSubOptions = docs.reduce((docSubOptions, doc) => {
|
||||
if (loadedIDs.indexOf(doc.id) === -1) {
|
||||
loadedIDs.push(doc.id)
|
||||
if (
|
||||
loadedIDs.filter((item) => item.id === doc.id && item.relationTo === relation).length ===
|
||||
0
|
||||
) {
|
||||
loadedIDs.push({ id: doc.id, relationTo: relation })
|
||||
|
||||
const docTitle = formatUseAsTitle({
|
||||
collection,
|
||||
@@ -89,7 +91,10 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
|
||||
}, [])
|
||||
|
||||
ids.forEach((id) => {
|
||||
if (!loadedIDs.includes(id)) {
|
||||
if (
|
||||
loadedIDs.filter((item) => item.id === id && item.relationTo === relation).length === 0
|
||||
) {
|
||||
loadedIDs.push({ id, relationTo: relation })
|
||||
newSubOptions.push({
|
||||
label: `${i18n.t('general:untitled')} - ID: ${id}`,
|
||||
relationTo: relation,
|
||||
|
||||
@@ -29,9 +29,14 @@ type RichTextAdapterBase<
|
||||
}) => Promise<void> | null
|
||||
outputSchema?: ({
|
||||
field,
|
||||
interfaceNameDefinitions,
|
||||
isRequired,
|
||||
}: {
|
||||
field: RichTextField<Value, AdapterProps, ExtraFieldProperties>
|
||||
/**
|
||||
* Allows you to define new top-level interfaces that can be re-used in the output schema.
|
||||
*/
|
||||
interfaceNameDefinitions: Map<string, JSONSchema4>
|
||||
isRequired: boolean
|
||||
}) => JSONSchema4
|
||||
populationPromise?: (data: {
|
||||
|
||||
@@ -83,7 +83,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
const { preferencesKey } = useDocumentInfo()
|
||||
const { i18n } = useTranslation()
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const [activeTabIndex, setActiveTabIndex] = useState<number>(0)
|
||||
const tabsPrefKey = `tabs-${indexPath}`
|
||||
|
||||
@@ -138,7 +138,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
className,
|
||||
baseClass,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
|
||||
@@ -110,7 +110,7 @@ const TextInput: React.FC<TextInputProps> = (props) => {
|
||||
if (isOverHasMany) {
|
||||
return t('validation:limitReached', { max: maxRows, value: value.length + 1 })
|
||||
}
|
||||
return t('general:noOptions')
|
||||
return null
|
||||
}}
|
||||
onChange={onChange}
|
||||
options={[]}
|
||||
|
||||
@@ -137,6 +137,7 @@ const UploadInput: React.FC<UploadInputProps> = (props) => {
|
||||
fieldBaseClass,
|
||||
baseClass,
|
||||
className,
|
||||
`field-${path.replace(/\./g, '__')}`,
|
||||
showError && 'error',
|
||||
readOnly && 'read-only',
|
||||
]
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
.upload {
|
||||
position: relative;
|
||||
max-width: 100%;
|
||||
|
||||
&__wrap {
|
||||
background: var(--theme-elevation-50);
|
||||
|
||||
@@ -40,7 +40,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
const [publishedDoc, setPublishedDoc] = useState<TypeWithID & TypeWithTimestamps>(null)
|
||||
const [versions, setVersions] = useState<PaginatedDocs<Version>>(null)
|
||||
const [unpublishedVersions, setUnpublishedVersions] = useState<PaginatedDocs<Version>>(null)
|
||||
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(null)
|
||||
|
||||
const baseURL = `${serverURL}${api}`
|
||||
let slug: string
|
||||
@@ -62,6 +61,10 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
}
|
||||
}
|
||||
|
||||
const [docPermissions, setDocPermissions] = useState<DocumentPermissions>(
|
||||
permissions[pluralType][slug],
|
||||
)
|
||||
|
||||
const getVersions = useCallback(async () => {
|
||||
let versionFetchURL
|
||||
let publishedFetchURL
|
||||
@@ -215,14 +218,14 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
'Accept-Language': i18n.language,
|
||||
},
|
||||
})
|
||||
const json = await res.json()
|
||||
setDocPermissions(json)
|
||||
} else {
|
||||
// fallback to permissions from the entity type
|
||||
// (i.e. create has no id)
|
||||
setDocPermissions(permissions[pluralType][slug])
|
||||
try {
|
||||
const json = await res.json()
|
||||
setDocPermissions(json)
|
||||
} catch (e) {
|
||||
console.error('Unable to fetch document permissions', e)
|
||||
}
|
||||
}
|
||||
}, [serverURL, api, pluralType, slug, id, permissions, i18n.language, code])
|
||||
}, [serverURL, api, pluralType, slug, id, i18n.language, code])
|
||||
|
||||
const getDocPreferences = useCallback(async () => {
|
||||
return getPreference<DocumentPreferences>(preferencesKey)
|
||||
@@ -262,6 +265,7 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
|
||||
const value: ContextType = {
|
||||
id,
|
||||
slug,
|
||||
collection,
|
||||
docPermissions,
|
||||
getDocPermissions,
|
||||
@@ -271,7 +275,6 @@ export const DocumentInfoProvider: React.FC<Props> = ({
|
||||
preferencesKey,
|
||||
publishedDoc,
|
||||
setDocFieldPreferences,
|
||||
slug,
|
||||
unpublishedVersions,
|
||||
versions,
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ import type { TypeWithVersion } from '../../../../versions/types'
|
||||
|
||||
export type Version = TypeWithVersion<any>
|
||||
|
||||
export type DocumentPermissions = CollectionPermission | GlobalPermission | null
|
||||
export type DocumentPermissions = CollectionPermission | GlobalPermission
|
||||
|
||||
export type ContextType = {
|
||||
collection?: SanitizedCollectionConfig
|
||||
|
||||
@@ -17,9 +17,9 @@ export type globalViewType =
|
||||
| 'Version'
|
||||
| 'Versions'
|
||||
|
||||
export const defaultGlobalViews: {
|
||||
export const defaultGlobalViews = (): {
|
||||
[key in globalViewType]: React.ComponentType<any>
|
||||
} = {
|
||||
} => ({
|
||||
API,
|
||||
Default: DefaultGlobalEdit,
|
||||
LivePreview: LivePreviewView,
|
||||
@@ -27,7 +27,7 @@ export const defaultGlobalViews: {
|
||||
Relationships: null,
|
||||
Version: VersionView,
|
||||
Versions: VersionsView,
|
||||
}
|
||||
})
|
||||
|
||||
export const CustomGlobalComponent = (
|
||||
args: GlobalEditViewProps & {
|
||||
@@ -43,18 +43,14 @@ export const CustomGlobalComponent = (
|
||||
// For example, the Edit view:
|
||||
// 1. Edit?.Default
|
||||
// 2. Edit?.Default?.Component
|
||||
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
|
||||
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
|
||||
const Component =
|
||||
typeof Edit === 'object' && typeof Edit[view] === 'function'
|
||||
? Edit[view]
|
||||
: typeof Edit === 'object' &&
|
||||
typeof Edit?.[view] === 'object' &&
|
||||
// @ts-ignore
|
||||
typeof Edit[view].Component === 'function'
|
||||
? // @ts-ignore
|
||||
Edit[view].Component
|
||||
: defaultGlobalViews[view]
|
||||
? Edit[view].Component
|
||||
: defaultGlobalViews()[view]
|
||||
|
||||
if (Component) {
|
||||
return <Component {...args} />
|
||||
|
||||
@@ -17,9 +17,9 @@ export type collectionViewType =
|
||||
| 'Version'
|
||||
| 'Versions'
|
||||
|
||||
export const defaultCollectionViews: {
|
||||
export const defaultCollectionViews = (): {
|
||||
[key in collectionViewType]: React.ComponentType<any>
|
||||
} = {
|
||||
} => ({
|
||||
API,
|
||||
Default: DefaultCollectionEdit,
|
||||
LivePreview: LivePreviewView,
|
||||
@@ -27,7 +27,7 @@ export const defaultCollectionViews: {
|
||||
Relationships: null,
|
||||
Version: VersionView,
|
||||
Versions: VersionsView,
|
||||
}
|
||||
})
|
||||
|
||||
export const CustomCollectionComponent = (
|
||||
args: CollectionEditViewProps & {
|
||||
@@ -43,18 +43,15 @@ export const CustomCollectionComponent = (
|
||||
// For example, the Edit view:
|
||||
// 1. Edit?.Default
|
||||
// 2. Edit?.Default?.Component
|
||||
// TODO: Remove the `@ts-ignore` when a Typescript wizard arrives
|
||||
// For some reason `Component` does not exist on type `Edit[view]` no matter how narrow the type is
|
||||
|
||||
const Component =
|
||||
typeof Edit === 'object' && typeof Edit[view] === 'function'
|
||||
? Edit[view]
|
||||
: typeof Edit === 'object' &&
|
||||
typeof Edit?.[view] === 'object' &&
|
||||
// @ts-ignore
|
||||
typeof Edit[view].Component === 'function'
|
||||
? // @ts-ignore
|
||||
Edit[view].Component
|
||||
: defaultCollectionViews[view]
|
||||
? Edit[view].Component
|
||||
: defaultCollectionViews()[view]
|
||||
|
||||
if (Component) {
|
||||
return <Component {...args} />
|
||||
|
||||
@@ -74,21 +74,22 @@ const DefaultCell: React.FC<Props> = (props) => {
|
||||
if (collection.upload && fieldAffectsData(field) && field.name === 'filename') {
|
||||
CellComponent = cellComponents.File
|
||||
} else {
|
||||
return (
|
||||
<WrapElement {...wrapElementProps}>
|
||||
{(cellData === '' || typeof cellData === 'undefined') &&
|
||||
'label' in field &&
|
||||
t('noLabel', {
|
||||
if (!cellData && 'label' in field) {
|
||||
return (
|
||||
<WrapElement {...wrapElementProps}>
|
||||
{t('noLabel', {
|
||||
label: getTranslation(
|
||||
typeof field.label === 'function' ? 'data' : field.label || 'data',
|
||||
i18n,
|
||||
),
|
||||
})}
|
||||
{typeof cellData === 'string' && cellData}
|
||||
{typeof cellData === 'number' && cellData}
|
||||
{typeof cellData === 'object' && JSON.stringify(cellData)}
|
||||
</WrapElement>
|
||||
)
|
||||
</WrapElement>
|
||||
)
|
||||
} else if (typeof cellData === 'string' || typeof cellData === 'number') {
|
||||
return <WrapElement {...wrapElementProps}>{cellData}</WrapElement>
|
||||
} else if (typeof cellData === 'object') {
|
||||
return <WrapElement {...wrapElementProps}>{JSON.stringify(cellData)}</WrapElement>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -29,37 +29,38 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {
|
||||
|
||||
let args = incomingArgs
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
args =
|
||||
(await hook({
|
||||
args,
|
||||
collection: args.collection?.config,
|
||||
context: args.req.context,
|
||||
operation: 'forgotPassword',
|
||||
})) || args
|
||||
}, Promise.resolve())
|
||||
|
||||
const {
|
||||
collection: { config: collectionConfig },
|
||||
data,
|
||||
disableEmail,
|
||||
expiration,
|
||||
req: {
|
||||
payload: { config, emailOptions, sendEmail: email },
|
||||
payload,
|
||||
t,
|
||||
},
|
||||
req,
|
||||
} = args
|
||||
|
||||
try {
|
||||
const shouldCommit = await initTransaction(req)
|
||||
const shouldCommit = await initTransaction(args.req)
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeOperation - Collection
|
||||
// /////////////////////////////////////
|
||||
|
||||
await args.collection.config.hooks.beforeOperation.reduce(async (priorHook, hook) => {
|
||||
await priorHook
|
||||
|
||||
args =
|
||||
(await hook({
|
||||
args,
|
||||
collection: args.collection?.config,
|
||||
context: args.req.context,
|
||||
operation: 'forgotPassword',
|
||||
req: args.req,
|
||||
})) || args
|
||||
}, Promise.resolve())
|
||||
|
||||
const {
|
||||
collection: { config: collectionConfig },
|
||||
data,
|
||||
disableEmail,
|
||||
expiration,
|
||||
req: {
|
||||
payload: { config, emailOptions, sendEmail: email },
|
||||
payload,
|
||||
t,
|
||||
},
|
||||
req,
|
||||
} = args
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Forget password
|
||||
@@ -159,7 +160,7 @@ async function forgotPassword(incomingArgs: Arguments): Promise<null | string> {
|
||||
|
||||
return token
|
||||
} catch (error: unknown) {
|
||||
await killTransaction(req)
|
||||
await killTransaction(args.req)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
|
||||
import type { Payload } from '../../../payload'
|
||||
import type { Result } from '../forgotPassword'
|
||||
|
||||
import { getDataLoader } from '../../../collections/dataloader'
|
||||
import { APIError } from '../../../errors'
|
||||
import { setRequestContext } from '../../../express/setRequestContext'
|
||||
import { i18nInit } from '../../../translations/init'
|
||||
import { createLocalReq } from '../../../utilities/createLocalReq'
|
||||
import forgotPassword from '../forgotPassword'
|
||||
|
||||
export type Options<T extends keyof GeneratedTypes['collections']> = {
|
||||
@@ -24,15 +22,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
|
||||
payload: Payload,
|
||||
options: Options<T>,
|
||||
): Promise<Result> {
|
||||
const {
|
||||
collection: collectionSlug,
|
||||
context,
|
||||
data,
|
||||
disableEmail,
|
||||
expiration,
|
||||
req = {} as PayloadRequest,
|
||||
} = options
|
||||
setRequestContext(req, context)
|
||||
const { collection: collectionSlug, data, disableEmail, expiration } = options
|
||||
|
||||
const collection = payload.collections[collectionSlug]
|
||||
|
||||
@@ -44,12 +34,7 @@ async function localForgotPassword<T extends keyof GeneratedTypes['collections']
|
||||
)
|
||||
}
|
||||
|
||||
req.payloadAPI = req.payloadAPI || 'local'
|
||||
req.payload = payload
|
||||
req.i18n = i18nInit(payload.config.i18n)
|
||||
|
||||
if (!req.t) req.t = req.i18n.t
|
||||
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
|
||||
const req = createLocalReq(options, payload)
|
||||
|
||||
return forgotPassword({
|
||||
collection,
|
||||
|
||||
@@ -5,10 +5,8 @@ import type { GeneratedTypes } from '../../../index'
|
||||
import type { Payload } from '../../../payload'
|
||||
import type { Result } from '../login'
|
||||
|
||||
import { getDataLoader } from '../../../collections/dataloader'
|
||||
import { APIError } from '../../../errors'
|
||||
import { setRequestContext } from '../../../express/setRequestContext'
|
||||
import { i18nInit } from '../../../translations/init'
|
||||
import { createLocalReq } from '../../../utilities/createLocalReq'
|
||||
import login from '../login'
|
||||
|
||||
export type Options<TSlug extends keyof GeneratedTypes['collections']> = {
|
||||
@@ -33,25 +31,14 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
): Promise<Result & { user: GeneratedTypes['collections'][TSlug] }> {
|
||||
const {
|
||||
collection: collectionSlug,
|
||||
context,
|
||||
data,
|
||||
depth,
|
||||
fallbackLocale: fallbackLocaleArg = options?.req?.fallbackLocale,
|
||||
locale: localeArg = null,
|
||||
overrideAccess = true,
|
||||
req = {} as PayloadRequest,
|
||||
res,
|
||||
showHiddenFields,
|
||||
} = options
|
||||
setRequestContext(req, context)
|
||||
|
||||
const collection = payload.collections[collectionSlug]
|
||||
const localizationConfig = payload?.config?.localization
|
||||
const defaultLocale = localizationConfig ? localizationConfig.defaultLocale : null
|
||||
const locale = localeArg || req?.locale || defaultLocale
|
||||
const fallbackLocale = localizationConfig
|
||||
? localizationConfig.locales.find(({ code }) => locale === code)?.fallbackLocale
|
||||
: null
|
||||
|
||||
if (!collection) {
|
||||
throw new APIError(
|
||||
@@ -59,12 +46,7 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
)
|
||||
}
|
||||
|
||||
req.payloadAPI = req.payloadAPI || 'local'
|
||||
req.payload = payload
|
||||
req.i18n = i18nInit(payload.config.i18n)
|
||||
|
||||
if (!req.t) req.t = req.i18n.t
|
||||
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
|
||||
const req = createLocalReq(options, payload)
|
||||
|
||||
const args = {
|
||||
collection,
|
||||
@@ -76,12 +58,6 @@ async function localLogin<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
showHiddenFields,
|
||||
}
|
||||
|
||||
if (locale) args.req.locale = locale
|
||||
if (fallbackLocale) {
|
||||
args.req.fallbackLocale =
|
||||
typeof fallbackLocaleArg !== 'undefined' ? fallbackLocaleArg : fallbackLocale || defaultLocale
|
||||
}
|
||||
|
||||
return login<TSlug>(args)
|
||||
}
|
||||
|
||||
|
||||
@@ -3,10 +3,8 @@ import type { PayloadRequest } from '../../../express/types'
|
||||
import type { Payload } from '../../../payload'
|
||||
import type { Result } from '../resetPassword'
|
||||
|
||||
import { getDataLoader } from '../../../collections/dataloader'
|
||||
import { APIError } from '../../../errors'
|
||||
import { setRequestContext } from '../../../express/setRequestContext'
|
||||
import { i18nInit } from '../../../translations/init'
|
||||
import { createLocalReq } from '../../../utilities/createLocalReq'
|
||||
import resetPassword from '../resetPassword'
|
||||
|
||||
export type Options<T extends keyof GeneratedTypes['collections']> = {
|
||||
@@ -24,15 +22,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>
|
||||
payload: Payload,
|
||||
options: Options<T>,
|
||||
): Promise<Result> {
|
||||
const {
|
||||
collection: collectionSlug,
|
||||
context,
|
||||
data,
|
||||
overrideAccess,
|
||||
req = {} as PayloadRequest,
|
||||
} = options
|
||||
|
||||
setRequestContext(req, context)
|
||||
const { collection: collectionSlug, data, overrideAccess } = options
|
||||
|
||||
const collection = payload.collections[collectionSlug]
|
||||
|
||||
@@ -44,12 +34,7 @@ async function localResetPassword<T extends keyof GeneratedTypes['collections']>
|
||||
)
|
||||
}
|
||||
|
||||
req.payload = payload
|
||||
req.payloadAPI = req.payloadAPI || 'local'
|
||||
req.i18n = i18nInit(payload.config.i18n)
|
||||
|
||||
if (!req.t) req.t = req.i18n.t
|
||||
if (!req.payloadDataLoader) req.payloadDataLoader = getDataLoader(req)
|
||||
const req = createLocalReq(options, payload)
|
||||
|
||||
return resetPassword({
|
||||
collection,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.