Compare commits

70 Commits · db-postgre...mongodb/nu

501022991f
53600c7352
76c1b23729
8104fac5ed
59ef1263a5
813e3dbc98
12a23092e1
4a49beb552
80530c666e
f53e2df951
db58e2bab2
d88e97e123
bb72e506e6
b1f727fd6a
72af18229b
af52b526c8
60525623bf
0fba582926
5d1cad3adb
e31f72da8e
7aa058d604
64e80d242e
e8f2ca484e
ceca5c4e97
ee13736288
815bdfac0b
7a7f0ed7e8
ad42d541b3
32ed95e1ee
70e57fef18
0a07f607b9
3918fc7c21
13f71ac475
07720e777a
efff47e400
453ac218ea
d4b09bd9cd
dd67e03fc1
548de80bee
2c05fbbb5e
9b54659818
e9f550406e
98b87e2278
5f3d0169be
35c2a085ef
1ac943ed5e
25cee8bb10
419aef452d
ea52489126
e80c70acae
70b0064d0b
9636bf6efd
8f4d0da4e0
f0f1dbdcb0
a895aee8b1
aa1dac08c1
b8cd1c6ba4
6344464bc6
5d4022f144
bf942fdfa6
d6c25783cf
82e9d31127
399e606b34
0d18822062
00fc0343da
6323965c65
6d6823c3e5
ca70298436
4f565759f6
df39602758
66  .github/workflows/main.yml  (vendored)

@@ -2,9 +2,9 @@ name: build
 on:
   pull_request:
-    types: [opened, reopened, synchronize]
+    types: [ opened, reopened, synchronize ]
   push:
-    branches: ['main']
+    branches: [ 'main' ]

 jobs:
   changes:
@@ -15,25 +15,25 @@ jobs:
       needs_build: ${{ steps.filter.outputs.needs_build }}
       templates: ${{ steps.filter.outputs.templates }}
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 25
       - uses: dorny/paths-filter@v2
         id: filter
         with:
           filters: |
             needs_build:
               - '.github/workflows/**'
               - 'packages/**'
               - 'test/**'
               - 'pnpm-lock.yaml'
               - 'package.json'
             templates:
               - 'templates/**'
       - name: Log all filter results
         run: |
           echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
           echo "templates: ${{ steps.filter.outputs.templates }}"

   core-build:
     needs: changes
@@ -85,11 +85,15 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        database: [mongoose, postgres, supabase]
+        database: [mongoose, postgres, postgres-custom-schema, postgres-uuid, supabase]
     env:
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
       POSTGRES_DB: payloadtests
+      AWS_ENDPOINT_URL: http://127.0.0.1:4566
+      AWS_ACCESS_KEY_ID: localstack
+      AWS_SECRET_ACCESS_KEY: localstack
+      AWS_REGION: us-east-1

     steps:
       - name: Use Node.js 18
@@ -109,6 +113,9 @@ jobs:
           path: ./*
           key: ${{ github.sha }}-${{ github.run_number }}

+      - name: Start LocalStack
+        run: pnpm docker:start
+
       - name: Start PostgreSQL
         uses: CasperWA/postgresql-action@v1.2
         with:
@@ -116,7 +123,7 @@ jobs:
           postgresql db: ${{ env.POSTGRES_DB }}
           postgresql user: ${{ env.POSTGRES_USER }}
           postgresql password: ${{ env.POSTGRES_PASSWORD }}
-        if: matrix.database == 'postgres'
+        if: startsWith(matrix.database, 'postgres')

       - name: Install Supabase CLI
         uses: supabase/setup-cli@v1
@@ -132,14 +139,19 @@ jobs:

       - name: Wait for PostgreSQL
         run: sleep 30
-        if: matrix.database == 'postgres'
+        if: startsWith(matrix.database, 'postgres')

       - name: Configure PostgreSQL
         run: |
           psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE ROLE runner SUPERUSER LOGIN;"
           psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "SELECT version();"
           echo "POSTGRES_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" >> $GITHUB_ENV
-        if: matrix.database == 'postgres'
+        if: startsWith(matrix.database, 'postgres')

+      - name: Configure PostgreSQL with custom schema
+        run: |
+          psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE SCHEMA custom;"
+        if: matrix.database == 'postgres-custom-schema'
+
       - name: Configure Supabase
         run: |
@@ -162,7 +174,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        part: [1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8]
+        part: [ 1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8 ]

     steps:
       - name: Use Node.js 18
@@ -310,7 +322,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        template: [blank, website, ecommerce]
+        template: [ blank, website, ecommerce ]

     steps:
       - uses: actions/checkout@v4
2  .gitignore  (vendored)

@@ -6,7 +6,9 @@ dist

 test-results
 .devcontainer
 .localstack
 /migrations
 .localstack

 # Created by https://www.toptal.com/developers/gitignore/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
 # Edit at https://www.toptal.com/developers/gitignore?templates=node,macos,windows,webstorm,sublimetext,visualstudiocode
2  .idea/runConfigurations/Run_Dev_Fields.xml  (generated)

@@ -1,5 +1,5 @@
 <component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
+  <configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
     <method v="2" />
   </configuration>
 </component>
2  .idea/runConfigurations/Run_Dev__community.xml  (generated)

@@ -1,5 +1,5 @@
 <component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
+  <configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
     <method v="2" />
   </configuration>
 </component>
56  CHANGELOG.md

@@ -1,3 +1,59 @@
## [2.11.2](https://github.com/payloadcms/payload/compare/v2.11.1...v2.11.2) (2024-02-23)


### Features

* **db-postgres:** configurable custom schema to use ([#5047](https://github.com/payloadcms/payload/issues/5047)) ([e8f2ca4](https://github.com/payloadcms/payload/commit/e8f2ca484ee56cd7767d5111e46ebd24752ff8de))


### Bug Fixes

* Add Context Provider in EditMany Component ([#5005](https://github.com/payloadcms/payload/issues/5005)) ([70e57fe](https://github.com/payloadcms/payload/commit/70e57fef184f7fcf56344ea755465f246f2253a5))
* **db-mongodb:** unique sparse for not required fields ([#5114](https://github.com/payloadcms/payload/issues/5114)) ([815bdfa](https://github.com/payloadcms/payload/commit/815bdfac0b0afbff2a20e54d5aee64b90f6b3a77))
* **db-postgres:** set _parentID for array nested localized fields ([#5117](https://github.com/payloadcms/payload/issues/5117)) ([ceca5c4](https://github.com/payloadcms/payload/commit/ceca5c4e97f53f1346797a31b6abfc0375e98215))
* disabling API Key does not remove the key ([#5145](https://github.com/payloadcms/payload/issues/5145)) ([7a7f0ed](https://github.com/payloadcms/payload/commit/7a7f0ed7e8132253be607c111c160163b84bd770))
* handle thrown errors in config-level afterError hook ([#5147](https://github.com/payloadcms/payload/issues/5147)) ([32ed95e](https://github.com/payloadcms/payload/commit/32ed95e1ee87409db234f1b7bd6d2e462fd9ed5d))
* only replace the drawer content with full edit component if it exists ([#5144](https://github.com/payloadcms/payload/issues/5144)) ([0a07f60](https://github.com/payloadcms/payload/commit/0a07f607b9fb1217ad956cd05b2a84a4042a19ca))
* transaction error from access endpoint ([#5156](https://github.com/payloadcms/payload/issues/5156)) ([ad42d54](https://github.com/payloadcms/payload/commit/ad42d541b342ed56463b81cee6d6307df6f06d7f))

## [2.11.1](https://github.com/payloadcms/payload/compare/v2.11.0...v2.11.1) (2024-02-16)


### Features

* **db-postgres:** adds idType to use uuid or serial id columns ([#3864](https://github.com/payloadcms/payload/issues/3864)) ([d6c2578](https://github.com/payloadcms/payload/commit/d6c25783cfa97983bf9db27ceb5ccd39a62c62f1))
* **db-postgres:** reconnect after disconnection from database ([#5086](https://github.com/payloadcms/payload/issues/5086)) ([bf942fd](https://github.com/payloadcms/payload/commit/bf942fdfa6ea9c26cf05295cc9db646bf31fa622))
* **plugin-search:** add req to beforeSync args for transactions ([#5068](https://github.com/payloadcms/payload/issues/5068)) ([98b87e2](https://github.com/payloadcms/payload/commit/98b87e22782c0a788f79326f22be05a6b176ad74))
* **richtext-lexical:** add justify alignment to AlignFeature ([#4035](https://github.com/payloadcms/payload/issues/4035)) ([#4868](https://github.com/payloadcms/payload/issues/4868)) ([6d6823c](https://github.com/payloadcms/payload/commit/6d6823c3e5609a58eeeeb8d043945a762f9463df))
* **richtext-lexical:** AddBlock handle for all nodes, even if they aren't empty paragraphs ([#5063](https://github.com/payloadcms/payload/issues/5063)) ([00fc034](https://github.com/payloadcms/payload/commit/00fc0343dabf184d5bab418d47c403b3ad11698f))
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground ([#5066](https://github.com/payloadcms/payload/issues/5066)) ([0d18822](https://github.com/payloadcms/payload/commit/0d18822062275c1826c8e2c3da2571a2b3483310))


### Bug Fixes

* **db-mongodb:** find versions pagination ([#5091](https://github.com/payloadcms/payload/issues/5091)) ([5d4022f](https://github.com/payloadcms/payload/commit/5d4022f1445e2809c01cb1dd599280f0a56cdc6e))
* **db-postgres:** query using blockType ([#5044](https://github.com/payloadcms/payload/issues/5044)) ([35c2a08](https://github.com/payloadcms/payload/commit/35c2a085efa6d5ad59779960874bc9728a17e3a0))
* filterOptions errors cause transaction to abort ([#5079](https://github.com/payloadcms/payload/issues/5079)) ([5f3d016](https://github.com/payloadcms/payload/commit/5f3d0169bee21e1c0963dbd7ede9fe5f1c46a5a5))
* **plugin-form-builder:** hooks do not respect transactions ([#5069](https://github.com/payloadcms/payload/issues/5069)) ([82e9d31](https://github.com/payloadcms/payload/commit/82e9d31127c8df83c5bed92a5ffdab76d331900f))
* remove collection findByID caching ([#5034](https://github.com/payloadcms/payload/issues/5034)) ([1ac943e](https://github.com/payloadcms/payload/commit/1ac943ed5e8416883b863147fdf3c23380955559))
* **richtext-lexical:** do not remove adjacent paragraph node when inserting certain nodes in empty editor ([#5061](https://github.com/payloadcms/payload/issues/5061)) ([6323965](https://github.com/payloadcms/payload/commit/6323965c652ea68dffeb716957b124d165b9ce96))
* **uploads:** account for serverURL when retrieving external file ([#5102](https://github.com/payloadcms/payload/issues/5102)) ([25cee8b](https://github.com/payloadcms/payload/commit/25cee8bb102bf80b3a4bfb4b4e46712722cc7f0d))


### ⚠ BREAKING CHANGES: @payloadcms/richtext-lexical

* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)

- You HAVE to make sure that any versions of the lexical packages (IF you have any installed) match the lexical version which richtext-lexical uses: v0.13.1. If you do not do this, you may be plagued by React useContext / "cannot find active editor state" errors
- Updates to lexical's API, e.g. the removal of INTERNAL_isPointSelection, could be breaking depending on your code. Please consult the [lexical changelog](https://github.com/facebook/lexical/blob/main/CHANGELOG.md).

## [2.11.0](https://github.com/payloadcms/payload/compare/v2.10.1...v2.11.0) (2024-02-09)


### Features

* exposes collapsible provider with more functionality ([#5043](https://github.com/payloadcms/payload/issues/5043)) ([df39602](https://github.com/payloadcms/payload/commit/df39602758ae8dc3765bb48e51f7a657babfa559))

## [2.10.1](https://github.com/payloadcms/payload/compare/v2.10.0...v2.10.1) (2024-02-09)
@@ -635,6 +635,37 @@ export const CustomArrayManager = () => {
  ]}
/>

### useCollapsible

The `useCollapsible` hook allows you to control parent collapsibles:

| Property                | Description                                                                                                   |
|-------------------------|---------------------------------------------------------------------------------------------------------------|
| **`collapsed`**         | State of the collapsible. `true` if open, `false` if collapsed                                                 |
| **`isVisible`**         | If nested, determine if the nearest collapsible is visible. `true` if no parent is closed, `false` otherwise   |
| **`toggle`**            | Toggles the state of the nearest collapsible                                                                   |
| **`withinCollapsible`** | Determine whether you are within another collapsible                                                           |

**Example:**

```tsx
import React from 'react'

import { useCollapsible } from 'payload/components/utilities'

const CustomComponent: React.FC = () => {
  const { collapsed, toggle } = useCollapsible()
  return (
    <div>
      <p className="field-type">I am {collapsed ? 'closed' : 'open'}</p>
      <button onClick={toggle} type="button">
        Toggle
      </button>
    </div>
  )
}
```

### useDocumentInfo

The `useDocumentInfo` hook provides lots of information about the document currently being edited, including the following:

@@ -774,8 +805,8 @@ const MyComponent: React.FC = () => {
  return (
    <>
      <span>The current theme is {theme} and autoMode is {autoMode}</span>
      <button
        type="button"
        onClick={() => setTheme(prev => prev === "light" ? "dark" : "light")}
      >
        Toggle theme
@@ -2,7 +2,7 @@
title: Postgres
label: Postgres
order: 50
desc: Payload supports Postgres through an officially supported Drizzle database adapter.
keywords: Postgres, documentation, typescript, Content Management System, cms, headless, javascript, node, react, express
---

@@ -37,11 +37,12 @@ export default buildConfig({

### Options

| Option         | Description                                                                                                                                                                       |
|----------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `pool`         | [Pool connection options](https://orm.drizzle.team/docs/quick-postgresql/node-postgres) that will be passed to Drizzle and `node-postgres`.                                      |
| `push`         | Disable Drizzle's [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push) in development mode. By default, `push` is enabled for development mode only. |
| `migrationDir` | Customize the directory that migrations are stored.                                                                                                                              |
| `schemaName`   | A string for the postgres schema to use, defaults to 'public'.                                                                                                                   |
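For illustration, a minimal sketch of passing the new `schemaName` option to the adapter. The connection string and schema name below are placeholders, not part of this compare:

```ts
import { buildConfig } from 'payload/config'
import { postgresAdapter } from '@payloadcms/db-postgres'

export default buildConfig({
  db: postgresAdapter({
    // pool options are handed through to Drizzle / node-postgres
    pool: {
      connectionString: process.env.POSTGRES_URL, // placeholder
    },
    // create and query all Payload tables inside this schema instead of "public"
    schemaName: 'custom',
  }),
  // ...rest of the config (collections, globals, etc.)
})
```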
### Access to Drizzle

@@ -65,7 +66,7 @@ In addition to exposing Drizzle directly, all of the tables, Drizzle relations,

Drizzle exposes two ways to work locally in development mode.

The first is [`db push`](https://orm.drizzle.team/kit-docs/overview#prototyping-with-db-push), which automatically pushes changes you make to your Payload config (and therefore, Drizzle schema) to your database so you don't have to manually migrate every time you change your Payload config. This only works in development mode, and should not be mixed with manually running [`migrate`](/docs/database/migrations) commands.

You will be warned if any changes that you make will entail data loss while in development mode. Push is enabled by default, but you can opt out if you'd like.

@@ -77,11 +78,11 @@ Migrations are extremely powerful thanks to the seamless way that Payload and Dr

1. You are building your Payload config locally, with a local database used for testing.
1. You have left the default setting of `push` enabled, so every time you change your Payload config (add or remove fields, collections, etc.), Drizzle will automatically push changes to your local DB.
1. Once you're done with your changes, or have completed a feature, you can run `npm run payload migrate:create`.
1. Payload and Drizzle will look for any existing migrations, and automatically generate all SQL changes necessary to convert your schema from its prior state into the state of your current Payload config, and store the resulting DDL in a newly created migration.
1. Once you're ready to go to production, you will be able to run `npm run payload migrate` against your production database, which will apply any new migrations that have not yet run.
1. Now your production database is in sync with your Payload config!

<Banner type="warning">
  Warning: do not mix "push" and migrations with your local development database. If you use "push" locally, and then try to migrate, Payload will throw a warning, telling you that these two methods are not meant to be used interchangeably.
</Banner>
@@ -36,7 +36,7 @@ If your Hook simply performs a side-effect, such as updating a CRM, it might be

#### Server-only execution

-Payload Hooks do not have any effect within the Payload Admin panel. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
+Payload Hooks are only triggered on the server. You can safely [remove your hooks](/docs/admin/webpack#aliasing-server-only-modules) from your Admin panel's client-side code by customizing the Webpack config, which not only keeps your Admin bundles' filesize small but also ensures that any server-side only code does not cause problems within browser environments.
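As a sketch of the aliasing approach the updated paragraph points to (the module paths here are hypothetical, not from this compare), a server-only hooks module can be swapped for an empty stub in the Admin bundle via the Webpack config:

```ts
import path from 'path'
import { buildConfig } from 'payload/config'

// Hypothetical paths: 'collections/hooks/syncToCRM' contains server-only hook logic,
// and 'mocks/emptyObject.ts' simply does `export default {}`.
export default buildConfig({
  admin: {
    webpack: (config) => ({
      ...config,
      resolve: {
        ...config.resolve,
        alias: {
          ...config.resolve?.alias,
          [path.resolve(__dirname, 'collections/hooks/syncToCRM')]: path.resolve(
            __dirname,
            'mocks/emptyObject.ts',
          ),
        },
      },
    }),
  },
})
```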
## Hook Types
|
||||
@@ -4508,9 +4508,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4701,9 +4701,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4508,9 +4508,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4258,14 +4258,14 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^1.1.5:
|
||||
version "1.1.8"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.8.tgz#ae05948f6b075435ed3307acce04629da8cdbf48"
|
||||
integrity sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==
|
||||
version "1.1.9"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.9.tgz#8dfbcc99a754d07f425310b86a99546b1151e396"
|
||||
integrity sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -3924,9 +3924,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4347,9 +4347,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4508,9 +4508,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4625,9 +4625,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4599,9 +4599,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4513,9 +4513,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -4835,9 +4835,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
@@ -3748,9 +3748,9 @@ interpret@^2.2.0:
|
||||
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
|
||||
|
||||
ip@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da"
|
||||
integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.1.tgz#e8f3595d33a3ea66490204234b77636965307105"
|
||||
integrity sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
|
||||
24  package.json

@@ -15,9 +15,13 @@
     "dev:generate-graphql-schema": "ts-node -T ./test/generateGraphQLSchema.ts",
     "dev:generate-types": "ts-node -T ./test/generateTypes.ts",
     "dev:postgres": "pnpm --filter payload run dev:postgres",
+    "docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
+    "docker:start": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
+    "docker:stop": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml down",
     "fix": "eslint \"packages/**/*.ts\" --fix",
     "lint": "eslint \"packages/**/*.ts\"",
     "lint-staged": "lint-staged",
+    "prepare": "husky install",
     "pretest": "pnpm build",
     "reinstall": "pnpm clean:unix && pnpm install",
     "script:list-packages": "tsx ./scripts/list-packages.ts",
@@ -29,10 +33,10 @@
     "test:e2e:headed": "cross-env DISABLE_LOGGING=true playwright test --headed",
     "test:int:postgres": "cross-env PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
     "test:int": "cross-env DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
-    "translateNewKeys": "pnpm --filter payload run translateNewKeys",
-    "prepare": "husky install"
+    "translateNewKeys": "pnpm --filter payload run translateNewKeys"
   },
   "devDependencies": {
     "@aws-sdk/client-s3": "^3.142.0",
     "@payloadcms/eslint-config": "workspace:*",
     "@playwright/test": "1.40.1",
     "@swc/cli": "^0.1.62",
@@ -77,12 +81,12 @@
     "jest": "29.7.0",
     "jest-environment-jsdom": "29.7.0",
     "jwt-decode": "3.1.2",
-    "lexical": "0.12.5",
+    "lexical": "0.13.1",
     "lint-staged": "^14.0.1",
     "minimist": "1.2.8",
     "mongodb-memory-server": "^9",
     "node-fetch": "2.6.12",
-    "nodemon": "3.0.2",
+    "nodemon": "3.0.3",
     "prettier": "^3.0.3",
     "prompts": "2.4.2",
     "qs": "6.11.2",
@@ -106,12 +110,12 @@
   },
   "pnpm": {
     "overrides": {
-      "copyfiles": "2.4.1",
-      "cross-env": "7.0.3",
-      "dotenv": "8.6.0",
-      "drizzle-orm": "0.29.3",
-      "ts-node": "10.9.2",
-      "typescript": "5.2.2"
+      "copyfiles": "$copyfiles",
+      "cross-env": "$cross-env",
+      "dotenv": "$dotenv",
+      "drizzle-orm": "$drizzle-orm",
+      "ts-node": "$ts-node",
+      "typescript": "$typescript"
     }
   },
   "engines": {
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-mongodb",
-  "version": "1.4.1",
+  "version": "1.4.3",
   "description": "The officially supported MongoDB database adapter for Payload",
   "repository": "https://github.com/payloadcms/payload",
   "license": "MIT",
@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
     lean: true,
     leanWithId: true,
     limit,
-    offset: skip || 0,
     options,
     page,
     pagination,

@@ -49,6 +49,8 @@ export interface Args {
   /** Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false */
   disableIndexHints?: boolean
   migrationDir?: string
+  /** Set to true to evaluate null field values as existing */
+  nullFieldValuesNotExist?: boolean
   transactionOptions?: TransactionOptions | false
   /** The URL to connect to MongoDB or false to start payload and prevent connecting */
   url: false | string
@@ -93,6 +95,7 @@ export function mongooseAdapter({
   connectOptions,
   disableIndexHints = false,
   migrationDir: migrationDirArg,
+  nullFieldValuesNotExist = true,
   transactionOptions = {},
   url,
 }: Args): MongooseAdapterResult {
@@ -103,6 +106,8 @@ export function mongooseAdapter({
   return createDatabaseAdapter<MongooseAdapter>({
     name: 'mongoose',

+    nullFieldValuesNotExist,
+
     // Mongoose-specific
     autoPluralization,
     collections: {},
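For illustration, a minimal sketch of passing the new option through `mongooseAdapter`. The connection string is a placeholder, not from this compare:

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'

export const db = mongooseAdapter({
  url: process.env.DATABASE_URI, // placeholder connection string
  // defaults to true per the destructured default shown above
  nullFieldValuesNotExist: false,
})
```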
@@ -14,8 +14,10 @@ import type {
   DateField,
   EmailField,
   Field,
+  FieldAffectingData,
   GroupField,
   JSONField,
+  NonPresentationalField,
   NumberField,
   PointField,
   RadioField,
@@ -23,12 +25,12 @@ import type {
   RichTextField,
   RowField,
   SelectField,
+  Tab,
   TabsField,
   TextField,
   TextareaField,
   UploadField,
 } from 'payload/types'
-import type { FieldAffectingData, NonPresentationalField, Tab, UnnamedTab } from 'payload/types'

 import { Schema } from 'mongoose'
 import {
@@ -61,7 +63,15 @@ const formatBaseSchema = (field: FieldAffectingData, buildSchemaOptions: BuildSc
     unique: (!disableUnique && field.unique) || false,
   }

-  if (schema.unique && (field.localized || draftsEnabled)) {
+  if (
+    schema.unique &&
+    (field.localized ||
+      draftsEnabled ||
+      (fieldAffectsData(field) &&
+        field.type !== 'group' &&
+        field.type !== 'tab' &&
+        field.required !== true))
+  ) {
     schema.sparse = true
   }

@@ -79,7 +89,6 @@ const localizeSchema = (
 ) => {
   if (fieldIsLocalized(entity) && localization && Array.isArray(localization.locales)) {
     return {
-      localized: true,
       type: localization.localeCodes.reduce(
         (localeSchema, locale) => ({
           ...localeSchema,
@@ -89,6 +98,7 @@ const localizeSchema = (
           _id: false,
         },
       ),
+      localized: true,
     }
   }
   return schema
@@ -140,7 +150,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
   ) => {
     const baseSchema = {
       ...formatBaseSchema(field, buildSchemaOptions),
-      default: undefined,
       type: [
         buildSchema(config, field.fields, {
           allowIDField: true,
@@ -153,6 +162,7 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
           },
         }),
       ],
+      default: undefined,
     }

     schema.add({
@@ -166,8 +176,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
     buildSchemaOptions: BuildSchemaOptions,
   ): void => {
     const fieldSchema = {
-      default: undefined,
       type: [new Schema({}, { _id: false, discriminatorKey: 'blockType' })],
+      default: undefined,
     }

     schema.add({
@@ -187,12 +197,12 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
       if (field.localized && config.localization) {
         config.localization.localeCodes.forEach((localeCode) => {
           // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-          // @ts-ignore Possible incorrect typing in mongoose types, this works
+          // @ts-expect-error Possible incorrect typing in mongoose types, this works
           schema.path(`${field.name}.${localeCode}`).discriminator(blockItem.slug, blockSchema)
         })
       } else {
         // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-        // @ts-ignore Possible incorrect typing in mongoose types, this works
+        // @ts-expect-error Possible incorrect typing in mongoose types, this works
         schema.path(field.name).discriminator(blockItem.slug, blockSchema)
       }
     })
@@ -325,14 +335,14 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
     buildSchemaOptions: BuildSchemaOptions,
   ): void => {
     const baseSchema: SchemaTypeOptions<unknown> = {
-      type: {
-        type: String,
-        enum: ['Point'],
-      },
       coordinates: {
-        type: [Number],
         default: field.defaultValue || undefined,
         required: false,
+        type: [Number],
       },
+      type: {
+        enum: ['Point'],
+        type: String,
+      },
     }
     if (buildSchemaOptions.disableUnique && field.unique && field.localized) {
@@ -366,11 +376,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
   ): void => {
     const baseSchema = {
       ...formatBaseSchema(field, buildSchemaOptions),
-      type: String,
       enum: field.options.map((option) => {
         if (typeof option === 'object') return option.value
         return option
       }),
+      type: String,
     }

     schema.add({
@@ -388,7 +398,6 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {

     if (field.localized && config.localization) {
       schemaToReturn = {
-        localized: true,
         type: config.localization.localeCodes.reduce((locales, locale) => {
           let localeSchema: { [key: string]: any } = {}

@@ -396,56 +405,57 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
           localeSchema = {
             ...formatBaseSchema(field, buildSchemaOptions),
             _id: false,
-            relationTo: { enum: field.relationTo, type: String },
             type: Schema.Types.Mixed,
+            relationTo: { type: String, enum: field.relationTo },
             value: {
-              refPath: `${field.name}.${locale}.relationTo`,
               type: Schema.Types.Mixed,
+              refPath: `${field.name}.${locale}.relationTo`,
             },
           }
         } else {
           localeSchema = {
             ...formatBaseSchema(field, buildSchemaOptions),
-            ref: field.relationTo,
             type: Schema.Types.Mixed,
+            ref: field.relationTo,
           }
         }

         return {
           ...locales,
-          [locale]: field.hasMany ? { default: undefined, type: [localeSchema] } : localeSchema,
+          [locale]: field.hasMany ? { type: [localeSchema], default: undefined } : localeSchema,
         }
       }, {}),
+      localized: true,
     }
   } else if (hasManyRelations) {
     schemaToReturn = {
       ...formatBaseSchema(field, buildSchemaOptions),
       _id: false,
-      relationTo: { enum: field.relationTo, type: String },
       type: Schema.Types.Mixed,
+      relationTo: { type: String, enum: field.relationTo },
       value: {
-        refPath: `${field.name}.relationTo`,
         type: Schema.Types.Mixed,
+        refPath: `${field.name}.relationTo`,
       },
     }

     if (field.hasMany) {
       schemaToReturn = {
-        default: undefined,
         type: [schemaToReturn],
+        default: undefined,
       }
     }
   } else {
     schemaToReturn = {
       ...formatBaseSchema(field, buildSchemaOptions),
-      ref: field.relationTo,
       type: Schema.Types.Mixed,
+      ref: field.relationTo,
     }

     if (field.hasMany) {
       schemaToReturn = {
-        default: undefined,
         type: [schemaToReturn],
+        default: undefined,
       }
     }
   }
@@ -488,11 +498,11 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
   ): void => {
     const baseSchema = {
       ...formatBaseSchema(field, buildSchemaOptions),
-      type: String,
       enum: field.options.map((option) => {
         if (typeof option === 'object') return option.value
         return option
       }),
+      type: String,
     }

     if (buildSchemaOptions.draftsEnabled || !field.required) {
@@ -576,8 +586,8 @@ const fieldToSchemaMap: Record<string, FieldSchemaGenerator> = {
   ): void => {
     const baseSchema = {
       ...formatBaseSchema(field, buildSchemaOptions),
-      ref: field.relationTo,
       type: Schema.Types.Mixed,
+      ref: field.relationTo,
     }

     schema.add({
@@ -102,6 +102,7 @@ export async function buildSearchParam({
     hasCustomID,
     operator,
+    path,
     payload,
     val,
   })

@@ -1,3 +1,4 @@
+import type { Payload } from 'payload'
 import type { Field, TabAsField } from 'payload/types'

 import mongoose from 'mongoose'
@@ -8,6 +9,7 @@ type SanitizeQueryValueArgs = {
   hasCustomID: boolean
   operator: string
+  path: string
   payload: Payload
   val: any
 }

@@ -16,6 +18,7 @@ export const sanitizeQueryValue = ({
   hasCustomID,
   operator,
+  path,
   payload,
   val,
 }: SanitizeQueryValueArgs): {
   operator?: string
@@ -174,6 +177,14 @@ export const sanitizeQueryValue = ({
       }
     }
   }
+
+    if (!payload.db.nullFieldValuesNotExist) {
+      return {
+        rawQuery: {
+          $or: [{ [path]: { $exists: true } }, { [path]: { $eq: null } }],
+        },
+      }
+    }
  }

  return { operator: formattedOperator, val: formattedValue }
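For context, a sketch of what that added branch yields for an `exists`-style query; the field path `title` is only an illustrative placeholder:

```ts
// When the adapter is configured with nullFieldValuesNotExist: false,
// the query is resolved into a raw Mongo filter that also matches
// documents where the field is explicitly null:
const rawQuery = {
  $or: [{ title: { $exists: true } }, { title: { $eq: null } }],
}
```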
@@ -17,7 +17,11 @@ export const rollbackTransaction: RollbackTransaction = async function rollbackT
   }

   // the first call for rollback should be aborted and deleted causing any other operations with the same transaction to fail
-  await this.sessions[id].abortTransaction()
-  await this.sessions[id].endSession()
+  try {
+    await this.sessions[id].abortTransaction()
+    await this.sessions[id].endSession()
+  } catch (error) {
+    // ignore the error as it is likely a race condition from multiple errors
+  }
   delete this.sessions[id]
 }
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-postgres",
-  "version": "0.5.2",
+  "version": "0.7.0",
   "description": "The officially supported Postgres database adapter for Payload",
   "repository": "https://github.com/payloadcms/payload",
   "license": "MIT",
@@ -1,13 +1,51 @@
+import type { Payload } from 'payload'
 import type { Connect } from 'payload/database'

 import { eq, sql } from 'drizzle-orm'
 import { drizzle } from 'drizzle-orm/node-postgres'
-import { numeric, pgTable, timestamp, varchar } from 'drizzle-orm/pg-core'
+import { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'
 import { Pool } from 'pg'
 import prompts from 'prompts'

 import type { PostgresAdapter } from './types'

+const connectWithReconnect = async function ({
+  adapter,
+  payload,
+  reconnect = false,
+}: {
+  adapter: PostgresAdapter
+  payload: Payload
+  reconnect?: boolean
+}) {
+  let result
+
+  if (!reconnect) {
+    result = await adapter.pool.connect()
+  } else {
+    try {
+      result = await adapter.pool.connect()
+    } catch (err) {
+      setTimeout(() => {
+        payload.logger.info('Reconnecting to postgres')
+        void connectWithReconnect({ adapter, payload, reconnect: true })
+      }, 1000)
+    }
+  }
+  if (!result) {
+    return
+  }
+  result.prependListener('error', (err) => {
+    try {
+      if (err.code === 'ECONNRESET') {
+        void connectWithReconnect({ adapter, payload, reconnect: true })
+      }
+    } catch (err) {
+      // swallow error
+    }
+  })
+}
+
 export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
   this.schema = {
     ...this.tables,
@@ -17,14 +55,19 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa

   try {
     this.pool = new Pool(this.poolOptions)
-    await this.pool.connect()
+    await connectWithReconnect({ adapter: this, payload })

     const logger = this.logger || false

-    this.drizzle = drizzle(this.pool, { schema: this.schema, logger })
+    this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
     if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
-      this.payload.logger.info('---- DROPPING TABLES ----')
-      await this.drizzle.execute(sql`drop schema public cascade;
-      create schema public;`)
+      this.payload.logger.info(`---- DROPPING TABLES SCHEMA(${this.schemaName || 'public'}) ----`)
+      await this.drizzle.execute(
+        sql.raw(`
+        drop schema if exists ${this.schemaName || 'public'} cascade;
+        create schema ${this.schemaName || 'public'};
+      `),
+      )
       this.payload.logger.info('---- DROPPED TABLES ----')
     }
   } catch (err) {
@@ -81,7 +124,7 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
   await apply()

   // Migration table def in order to use query using drizzle
-  const migrationsSchema = pgTable('payload_migrations', {
+  const migrationsSchema = this.pgSchema.table('payload_migrations', {
     name: varchar('name'),
     batch: numeric('batch'),
     created_at: timestamp('created_at'),
@@ -78,7 +78,7 @@ export const traverseFields = ({
       with: {},
     }

-    const arrayTableName = `${currentTableName}_${toSnakeCase(field.name)}`
+    const arrayTableName = `${currentTableName}_${path}${toSnakeCase(field.name)}`

     if (adapter.tables[`${arrayTableName}_locales`]) withArray.with._locales = _locales
     currentArgs.with[`${path}${field.name}`] = withArray
@@ -42,7 +42,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
 export function postgresAdapter(args: Args): PostgresAdapterResult {
   function adapter({ payload }: { payload: Payload }) {
     const migrationDir = findMigrationDir(args.migrationDir)
+    const idType = args.idType || 'serial'
     return createDatabaseAdapter<PostgresAdapter>({
       name: 'postgres',
@@ -50,12 +50,15 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
       drizzle: undefined,
       enums: {},
       fieldConstraints: {},
+      idType,
       logger: args.logger,
+      pgSchema: undefined,
       pool: undefined,
       poolOptions: args.pool,
       push: args.push,
       relations: {},
       schema: {},
+      schemaName: args.schemaName,
       sessions: {},
       tables: {},
@@ -68,7 +71,10 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
       createGlobalVersion,
       createMigration,
       createVersion,
-      defaultIDType: 'number',
+      /**
+       * This represents how a default ID is treated in Payload as were a field type
+       */
+      defaultIDType: idType === 'serial' ? 'number' : 'text',
       deleteMany,
       deleteOne,
       deleteVersions,
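As an illustrative sketch of the `idType` option wired up above (the connection string is a placeholder, not part of this compare):

```ts
import { postgresAdapter } from '@payloadcms/db-postgres'

export const db = postgresAdapter({
  pool: { connectionString: process.env.POSTGRES_URL }, // placeholder
  // 'serial' (the default) keeps integer IDs; with 'uuid' the adapter
  // reports its defaultIDType as 'text', per the branch above
  idType: 'uuid',
})
```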
@@ -2,16 +2,21 @@
 import type { Init } from 'payload/database'
 import type { SanitizedCollectionConfig } from 'payload/types'

-import { pgEnum } from 'drizzle-orm/pg-core'
+import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'
 import { buildVersionCollectionFields, buildVersionGlobalFields } from 'payload/versions'
 import toSnakeCase from 'to-snake-case'

 import type { PostgresAdapter } from './types'

 import { buildTable } from './schema/build'
 import { getConfigIDType } from './schema/getConfigIDType'

 export const init: Init = async function init(this: PostgresAdapter) {
+  if (this.schemaName) {
+    this.pgSchema = pgSchema(this.schemaName)
+  } else {
+    this.pgSchema = { table: pgTable }
+  }
+
   if (this.payload.config.localization) {
     this.enums.enum__locales = pgEnum(
       '_locales',
@@ -24,9 +29,9 @@ export const init: Init = async function init(this: PostgresAdapter) {

     buildTable({
       adapter: this,
-      buildTexts: true,
       buildNumbers: true,
       buildRelationships: true,
+      buildTexts: true,
       disableNotNull: !!collection?.versions?.drafts,
       disableUnique: false,
       fields: collection.fields,
@@ -38,13 +43,11 @@ export const init: Init = async function init(this: PostgresAdapter) {
       const versionsTableName = `_${tableName}_v`
       const versionFields = buildVersionCollectionFields(collection)

-      const versionsParentIDColType = getConfigIDType(collection.fields)
-
       buildTable({
         adapter: this,
-        buildTexts: true,
         buildNumbers: true,
         buildRelationships: true,
+        buildTexts: true,
         disableNotNull: !!collection.versions?.drafts,
         disableUnique: true,
         fields: versionFields,
@@ -59,9 +62,9 @@ export const init: Init = async function init(this: PostgresAdapter) {

     buildTable({
       adapter: this,
-      buildTexts: true,
       buildNumbers: true,
       buildRelationships: true,
+      buildTexts: true,
       disableNotNull: !!global?.versions?.drafts,
       disableUnique: false,
       fields: global.fields,
@@ -75,9 +78,9 @@ export const init: Init = async function init(this: PostgresAdapter) {

     buildTable({
       adapter: this,
-      buildTexts: true,
       buildNumbers: true,
       buildRelationships: true,
+      buildTexts: true,
       disableNotNull: !!global.versions?.drafts,
       disableUnique: true,
       fields: versionFields,
@@ -39,7 +39,7 @@ export async function migrate(this: PostgresAdapter): Promise<void> {
     latestBatch = Number(migrationsInDB[0]?.batch)
   }
 } else {
-  await createMigrationTable(this.drizzle)
+  await createMigrationTable(this)
 }

 if (migrationsInDB.find((m) => m.batch === -1)) {

@@ -44,8 +44,10 @@ export async function migrateFresh(
     msg: `Dropping database.`,
   })

-  await this.drizzle.execute(sql`drop schema public cascade;
-  create schema public;`)
+  await this.drizzle.execute(
+    sql.raw(`drop schema ${this.schemaName || 'public'} cascade;
+  create schema ${this.schemaName || 'public'};`),
+  )

   const migrationFiles = await readMigrationFiles({ payload })
   payload.logger.debug({
@@ -1,4 +1,5 @@
 import type { SQL } from 'drizzle-orm'
+import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
 import type { Field, Where } from 'payload/types'

 import { asc, desc } from 'drizzle-orm'
@@ -12,7 +13,7 @@ export type BuildQueryJoins = Record<string, SQL>

 export type BuildQueryJoinAliases = {
   condition: SQL
-  table: GenericTable
+  table: GenericTable | PgTableWithColumns<any>
 }[]

 type BuildQueryArgs = {
@@ -75,6 +76,7 @@ const buildQuery = async function buildQuery({
         pathSegments: sortPath.replace(/__/g, '.').split('.'),
         selectFields,
         tableName,
+        value: sortPath,
       })
       orderBy.column = sortTable?.[sortTableColumnName]
     } catch (err) {
@@ -1,6 +1,7 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { SQL } from 'drizzle-orm'
|
||||
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
|
||||
import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
|
||||
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'
|
||||
|
||||
import { and, eq, like, sql } from 'drizzle-orm'
|
||||
import { alias } from 'drizzle-orm/pg-core'
|
||||
@@ -15,7 +16,7 @@ import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery'
|
||||
|
||||
type Constraint = {
|
||||
columnName: string
|
||||
table: GenericTable
|
||||
table: GenericTable | PgTableWithColumns<any>
|
||||
value: unknown
|
||||
}
|
||||
|
||||
@@ -26,12 +27,12 @@ type TableColumn = {
|
||||
getNotNullColumnByValue?: (val: unknown) => string
|
||||
pathSegments?: string[]
|
||||
rawColumn?: SQL
|
||||
table: GenericTable
|
||||
table: GenericTable | PgTableWithColumns<any>
|
||||
}
|
||||
|
||||
type Args = {
|
||||
adapter: PostgresAdapter
|
||||
aliasTable?: GenericTable
|
||||
aliasTable?: GenericTable | PgTableWithColumns<any>
|
||||
collectionPath: string
|
||||
columnPrefix?: string
|
||||
constraintPath?: string
|
||||
@@ -48,6 +49,10 @@ type Args = {
|
||||
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
|
||||
*/
|
||||
tableNameSuffix?: string
|
||||
/**
|
||||
* The raw value of the query before sanitization
|
||||
*/
|
||||
value: unknown
|
||||
}
|
||||
/**
|
||||
* Transforms path to table and column name
|
||||
@@ -70,6 +75,7 @@ export const getTableColumnFromPath = ({
|
||||
selectFields,
|
||||
tableName,
|
||||
tableNameSuffix = '',
|
||||
value,
|
||||
}: Args): TableColumn => {
|
||||
const fieldPath = incomingSegments[0]
|
||||
let locale = incomingLocale
|
||||
@@ -88,8 +94,8 @@ export const getTableColumnFromPath = ({
|
||||
constraints,
|
||||
field: {
|
||||
name: 'id',
|
||||
type: 'number',
|
||||
},
|
||||
type: adapter.idType === 'uuid' ? 'text' : 'number',
|
||||
} as TextField | NumberField,
|
||||
table: adapter.tables[newTableName],
|
||||
}
|
||||
}
|
||||
@@ -131,6 +137,7 @@ export const getTableColumnFromPath = ({
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
case 'tab': {
|
||||
@@ -151,6 +158,7 @@ export const getTableColumnFromPath = ({
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
return getTableColumnFromPath({
|
||||
@@ -169,6 +177,7 @@ export const getTableColumnFromPath = ({
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -204,6 +213,7 @@ export const getTableColumnFromPath = ({
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -241,12 +251,39 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
let blockTableColumn: TableColumn
|
||||
let newTableName: string
|
||||
|
||||
// handle blockType queries
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
// find the block config using the value
|
||||
const blockTypes = Array.isArray(value) ? value : [value]
|
||||
blockTypes.forEach((blockType) => {
|
||||
const block = field.blocks.find((block) => block.slug === blockType)
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
joins[newTableName] = eq(
|
||||
adapter.tables[tableName].id,
|
||||
adapter.tables[newTableName]._parentID,
|
||||
)
|
||||
constraints.push({
|
||||
columnName: '_path',
|
||||
table: adapter.tables[newTableName],
|
||||
value: pathSegments[0],
|
||||
})
|
||||
})
|
||||
return {
|
||||
constraints,
|
||||
field,
|
||||
getNotNullColumnByValue: () => 'id',
|
||||
table: adapter.tables[tableName],
|
||||
}
|
||||
}
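The blockType branch above leans on a per-block table naming convention. A tiny self-contained illustration of that convention follows; the helper and example values are hypothetical, while to-snake-case is the same dependency this file already imports.

// Hypothetical helper mirroring the `${tableName}_blocks_${toSnakeCase(block.slug)}` pattern above.
import toSnakeCase from 'to-snake-case'

const getBlockTableName = (tableName: string, blockSlug: string): string =>
  `${tableName}_blocks_${toSnakeCase(blockSlug)}`

// getBlockTableName('pages', 'callToAction') === 'pages_blocks_call_to_action'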
|
||||
|
||||
const hasBlockField = field.blocks.some((block) => {
|
||||
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
@@ -267,6 +304,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName,
|
||||
selectFields: blockSelectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
} catch (error) {
|
||||
// this is fine, not every block will have the field
|
||||
@@ -307,9 +345,6 @@ export const getTableColumnFromPath = ({
|
||||
table: blockTableColumn.table,
|
||||
}
|
||||
}
|
||||
if (pathSegments[1] === 'blockType') {
|
||||
throw new APIError('Querying on blockType is not supported')
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
@@ -397,6 +432,7 @@ export const getTableColumnFromPath = ({
|
||||
rootTableName: newTableName,
|
||||
selectFields,
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -63,11 +63,7 @@ export async function parseParams({
|
||||
where: condition,
|
||||
})
|
||||
if (builtConditions.length > 0) {
|
||||
if (result) {
|
||||
result = operatorMap[conditionOperator](result, ...builtConditions)
|
||||
} else {
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
result = operatorMap[conditionOperator](...builtConditions)
|
||||
}
|
||||
} else {
|
||||
// It's a path - and there can be multiple comparisons on a single path.
|
||||
@@ -77,6 +73,7 @@ export async function parseParams({
|
||||
if (typeof pathOperators === 'object') {
|
||||
for (const operator of Object.keys(pathOperators)) {
|
||||
if (validOperators.includes(operator as Operator)) {
|
||||
const val = where[relationOrPath][operator]
|
||||
const {
|
||||
columnName,
|
||||
constraints: queryConstraints,
|
||||
@@ -95,10 +92,9 @@ export async function parseParams({
|
||||
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
|
||||
selectFields,
|
||||
tableName,
|
||||
value: val,
|
||||
})
|
||||
|
||||
const val = where[relationOrPath][operator]
|
||||
|
||||
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
|
||||
if (typeof value === 'string' && value.indexOf('%') > -1) {
|
||||
constraints.push(operatorMap.like(constraintTable[col], value))
|
||||
@@ -169,6 +165,7 @@ export async function parseParams({
|
||||
}
|
||||
|
||||
const sanitizedQueryValue = sanitizeQueryValue({
|
||||
adapter,
|
||||
field,
|
||||
operator,
|
||||
relationOrPath,
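The operatorMap used throughout parseParams is not shown in this changeset; as a rough idea of its shape, a map from Payload-style where operators to drizzle-orm comparison helpers could look like the sketch below. The operator names and coverage are assumptions, not the adapter's actual map.

// Hedged sketch only; all imports are real drizzle-orm exports.
import { and, eq, gt, gte, like, lt, lte, ne, or } from 'drizzle-orm'

const exampleOperatorMap = {
  and,
  equals: eq,
  greater_than: gt,
  greater_than_equal: gte,
  less_than: lt,
  less_than_equal: lte,
  like,
  not_equals: ne,
  or,
}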
|
||||
|
||||
@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
|
||||
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
|
||||
import { createArrayFromCommaDelineated } from 'payload/utilities'
|
||||
|
||||
import type { PostgresAdapter } from '../types'
|
||||
|
||||
type SanitizeQueryValueArgs = {
|
||||
adapter: PostgresAdapter
|
||||
field: Field | TabAsField
|
||||
operator: string
|
||||
relationOrPath: string
|
||||
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
|
||||
}
|
||||
|
||||
export const sanitizeQueryValue = ({
|
||||
adapter,
|
||||
field,
|
||||
operator: operatorArg,
|
||||
relationOrPath,
|
||||
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
|
||||
) {
|
||||
const allPossibleIDTypes: (number | string)[] = []
|
||||
formattedValue.forEach((val) => {
|
||||
if (typeof val === 'string') {
|
||||
if (adapter.idType !== 'uuid' && typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val, parseInt(val))
|
||||
} else if (typeof val === 'string') {
|
||||
allPossibleIDTypes.push(val)
|
||||
} else {
|
||||
allPossibleIDTypes.push(val, String(val))
|
||||
}
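To make the branching above easier to follow in isolation, here is a minimal standalone sketch of the same idea: when the adapter is not using UUID ids, a string value may target either a text or a numeric id column, so both representations are collected. Names are illustrative, not the adapter's implementation.

// Minimal sketch of the ID-coercion rule shown above.
const collectPossibleIDs = (
  values: unknown[],
  idType: 'serial' | 'uuid' = 'serial',
): (number | string)[] => {
  const out: (number | string)[] = []
  values.forEach((val) => {
    if (typeof val === 'string') {
      // a string like "5" could match either a varchar id or an integer id
      if (idType !== 'uuid') out.push(val, parseInt(val, 10))
      else out.push(val)
    } else if (typeof val === 'number') {
      out.push(val, String(val))
    }
  })
  return out
}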
|
||||
|
||||
@@ -1,26 +1,22 @@
|
||||
/* eslint-disable no-param-reassign */
|
||||
import type { Relation } from 'drizzle-orm'
|
||||
import type { IndexBuilder, PgColumnBuilder, UniqueConstraintBuilder } from 'drizzle-orm/pg-core'
|
||||
import type {
|
||||
IndexBuilder,
|
||||
PgColumnBuilder,
|
||||
PgTableWithColumns,
|
||||
UniqueConstraintBuilder,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import type { Field } from 'payload/types'
|
||||
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
index,
|
||||
integer,
|
||||
numeric,
|
||||
pgTable,
|
||||
serial,
|
||||
timestamp,
|
||||
unique,
|
||||
varchar,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import { index, integer, numeric, serial, timestamp, unique, varchar } from 'drizzle-orm/pg-core'
|
||||
import { fieldAffectsData } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { getConfigIDType } from './getConfigIDType'
|
||||
import { parentIDColumnMap } from './parentIDColumnMap'
|
||||
import { setColumnID } from './setColumnID'
|
||||
import { traverseFields } from './traverseFields'
|
||||
|
||||
type Args = {
|
||||
@@ -77,27 +73,20 @@ export const buildTable = ({
|
||||
|
||||
const localesColumns: Record<string, PgColumnBuilder> = {}
|
||||
const localesIndexes: Record<string, (cols: GenericColumns) => IndexBuilder> = {}
|
||||
let localesTable: GenericTable
|
||||
let textsTable: GenericTable
|
||||
let numbersTable: GenericTable
|
||||
let localesTable: GenericTable | PgTableWithColumns<any>
|
||||
let textsTable: GenericTable | PgTableWithColumns<any>
|
||||
let numbersTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Relationships to the base collection
|
||||
const relationships: Set<string> = rootRelationships || new Set()
|
||||
|
||||
let relationshipsTable: GenericTable
|
||||
let relationshipsTable: GenericTable | PgTableWithColumns<any>
|
||||
|
||||
// Drizzle relations
|
||||
const relationsToBuild: Map<string, string> = new Map()
|
||||
|
||||
const idColType = getConfigIDType(fields)
|
||||
const idColType: IDType = setColumnID({ adapter, columns, fields })
|
||||
|
||||
const idColTypeMap = {
|
||||
integer: serial,
|
||||
numeric,
|
||||
varchar,
|
||||
}
|
||||
|
||||
columns.id = idColTypeMap[idColType]('id').primaryKey()
|
||||
;({
|
||||
hasLocalizedField,
|
||||
hasLocalizedManyNumberField,
|
||||
@@ -143,7 +132,7 @@ export const buildTable = ({
|
||||
.notNull()
|
||||
}
|
||||
|
||||
const table = pgTable(tableName, columns, (cols) => {
|
||||
const table = adapter.pgSchema.table(tableName, columns, (cols) => {
|
||||
const extraConfig = Object.entries(baseExtraConfig).reduce((config, [key, func]) => {
|
||||
config[key] = func(cols)
|
||||
return config
|
||||
@@ -165,7 +154,7 @@ export const buildTable = ({
|
||||
.references(() => table.id, { onDelete: 'cascade' })
|
||||
.notNull()
|
||||
|
||||
localesTable = pgTable(localeTableName, localesColumns, (cols) => {
|
||||
localesTable = adapter.pgSchema.table(localeTableName, localesColumns, (cols) => {
|
||||
return Object.entries(localesIndexes).reduce(
|
||||
(acc, [colName, func]) => {
|
||||
acc[colName] = func(cols)
|
||||
@@ -208,7 +197,7 @@ export const buildTable = ({
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
textsTable = pgTable(textsTableName, columns, (cols) => {
|
||||
textsTable = adapter.pgSchema.table(textsTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index(`${textsTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
@@ -252,7 +241,7 @@ export const buildTable = ({
|
||||
columns.locale = adapter.enums.enum__locales('locale')
|
||||
}
|
||||
|
||||
numbersTable = pgTable(numbersTableName, columns, (cols) => {
|
||||
numbersTable = adapter.pgSchema.table(numbersTableName, columns, (cols) => {
|
||||
const indexes: Record<string, IndexBuilder> = {
|
||||
orderParentIdx: index(`${numbersTableName}_order_parent_idx`).on(cols.order, cols.parent),
|
||||
}
|
||||
@@ -300,7 +289,7 @@ export const buildTable = ({
|
||||
|
||||
relationships.forEach((relationTo) => {
|
||||
const formattedRelationTo = toSnakeCase(relationTo)
|
||||
let colType = 'integer'
|
||||
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
|
||||
const relatedCollectionCustomID = adapter.payload.collections[
|
||||
relationTo
|
||||
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')
|
||||
@@ -314,19 +303,23 @@ export const buildTable = ({
|
||||
|
||||
const relationshipsTableName = `${tableName}_rels`
|
||||
|
||||
relationshipsTable = pgTable(relationshipsTableName, relationshipColumns, (cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
|
||||
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
|
||||
}
|
||||
relationshipsTable = adapter.pgSchema.table(
|
||||
relationshipsTableName,
|
||||
relationshipColumns,
|
||||
(cols) => {
|
||||
const result: Record<string, unknown> = {
|
||||
order: index(`${relationshipsTableName}_order_idx`).on(cols.order),
|
||||
parentIdx: index(`${relationshipsTableName}_parent_idx`).on(cols.parent),
|
||||
pathIdx: index(`${relationshipsTableName}_path_idx`).on(cols.path),
|
||||
}
|
||||
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
if (hasLocalizedRelationshipField) {
|
||||
result.localeIdx = index(`${relationshipsTableName}_locale_idx`).on(cols.locale)
|
||||
}
|
||||
|
||||
return result
|
||||
})
|
||||
return result
|
||||
},
|
||||
)
|
||||
|
||||
adapter.tables[relationshipsTableName] = relationshipsTable
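The repeated switch from pgTable(...) to adapter.pgSchema.table(...) above suggests the adapter carries a schema-aware table factory. A hedged sketch of how such a handle could be derived from the new schemaName option is below; the wiring is assumed, only pgSchema and pgTable are real drizzle-orm exports.

// Illustrative wiring, not the adapter's actual initialization code.
import { pgSchema, pgTable } from 'drizzle-orm/pg-core'

const resolvePgSchema = (schemaName?: string) =>
  schemaName ? pgSchema(schemaName) : { table: pgTable }

// resolvePgSchema('payload').table('posts', ...) creates "payload"."posts";
// without a schemaName, tables land in the default "public" schema.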
|
||||
|
||||
|
||||
@@ -1,17 +0,0 @@
import { type Field, fieldAffectsData } from 'payload/types'

export const getConfigIDType = (fields: Field[]): string => {
  const idField = fields.find((field) => fieldAffectsData(field) && field.name === 'id')

  if (idField) {
    if (idField.type === 'number') {
      return 'numeric'
    }

    if (idField.type === 'text') {
      return 'varchar'
    }
  }

  return 'integer'
}
|
||||
@@ -1,7 +1,13 @@
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'

export const parentIDColumnMap = {
import type { IDType } from '../types'

export const parentIDColumnMap: Record<
  IDType,
  typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
> = {
  integer,
  numeric,
  uuid,
  varchar,
}
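A short usage sketch for a column map like the one above: picking the matching drizzle-orm builder for a child table's reference back to its parent. The _parent_id column name and the .notNull() chain are illustrative assumptions.

// Hypothetical call site; only the map shape comes from the diff above.
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'

type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'

const parentIDColumnMap = { integer, numeric, uuid, varchar }

const buildParentIDColumn = (parentIDColType: IDType) =>
  parentIDColumnMap[parentIDColType]('_parent_id').notNull()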
|
||||
|
||||
33
packages/db-postgres/src/schema/setColumnID.ts
Normal file
@@ -0,0 +1,33 @@
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'

import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
import { type Field, fieldAffectsData } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities'

import type { IDType, PostgresAdapter } from '../types'

type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
  const idField = flattenTopLevelFields(fields).find(
    (field) => fieldAffectsData(field) && field.name === 'id',
  )
  if (idField) {
    if (idField.type === 'number') {
      columns.id = numeric('id').primaryKey()
      return 'numeric'
    }

    if (idField.type === 'text') {
      columns.id = varchar('id').primaryKey()
      return 'varchar'
    }
  }

  if (adapter.idType === 'uuid') {
    columns.id = uuid('id').defaultRandom().primaryKey()
    return 'uuid'
  }

  columns.id = serial('id').primaryKey()
  return 'integer'
}
|
||||
@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
|
||||
import { relations } from 'drizzle-orm'
|
||||
import {
|
||||
PgNumericBuilder,
|
||||
PgUUIDBuilder,
|
||||
PgVarcharBuilder,
|
||||
boolean,
|
||||
index,
|
||||
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
|
||||
import { fieldAffectsData, optionIsObject } from 'payload/types'
|
||||
import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { GenericColumns, PostgresAdapter } from '../types'
|
||||
import type { GenericColumns, IDType, PostgresAdapter } from '../types'
|
||||
|
||||
import { hasLocalesTable } from '../utilities/hasLocalesTable'
|
||||
import { buildTable } from './build'
|
||||
@@ -93,7 +94,8 @@ export const traverseFields = ({
|
||||
let hasManyNumberField: 'index' | boolean = false
|
||||
let hasLocalizedManyNumberField = false
|
||||
|
||||
let parentIDColType = 'integer'
|
||||
let parentIDColType: IDType = 'integer'
|
||||
if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
|
||||
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
|
||||
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'
|
||||
|
||||
|
||||
@@ -7,7 +7,14 @@ import type {
|
||||
Relations,
|
||||
} from 'drizzle-orm'
|
||||
import type { NodePgDatabase, NodePgQueryResultHKT } from 'drizzle-orm/node-postgres'
|
||||
import type { PgColumn, PgEnum, PgTableWithColumns, PgTransaction } from 'drizzle-orm/pg-core'
|
||||
import type {
|
||||
PgColumn,
|
||||
PgEnum,
|
||||
PgSchema,
|
||||
PgTableWithColumns,
|
||||
PgTransaction,
|
||||
} from 'drizzle-orm/pg-core'
|
||||
import type { PgTableFn } from 'drizzle-orm/pg-core/table'
|
||||
import type { Payload } from 'payload'
|
||||
import type { BaseDatabaseAdapter } from 'payload/database'
|
||||
import type { PayloadRequest } from 'payload/types'
|
||||
@@ -16,10 +23,12 @@ import type { Pool, PoolConfig } from 'pg'
|
||||
export type DrizzleDB = NodePgDatabase<Record<string, unknown>>
|
||||
|
||||
export type Args = {
  idType?: 'serial' | 'uuid'
  logger?: DrizzleConfig['logger']
  migrationDir?: string
  pool: PoolConfig
  push?: boolean
  schemaName?: string
}
|
||||
|
||||
export type GenericColumn = PgColumn<
|
||||
@@ -56,12 +65,15 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
|
||||
* Used for returning properly formed errors from unique fields
|
||||
*/
|
||||
fieldConstraints: Record<string, Record<string, string>>
|
||||
idType: Args['idType']
|
||||
logger: DrizzleConfig['logger']
|
||||
pgSchema?: { table: PgTableFn } | PgSchema
|
||||
pool: Pool
|
||||
poolOptions: Args['pool']
|
||||
push: boolean
|
||||
relations: Record<string, GenericRelation>
|
||||
schema: Record<string, GenericEnum | GenericRelation | GenericTable>
|
||||
schemaName?: Args['schemaName']
|
||||
sessions: {
|
||||
[id: string]: {
|
||||
db: DrizzleTransaction
|
||||
@@ -69,9 +81,11 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
|
||||
resolve: () => Promise<void>
|
||||
}
|
||||
}
|
||||
tables: Record<string, GenericTable>
|
||||
tables: Record<string, GenericTable | PgTableWithColumns<any>>
|
||||
}
|
||||
|
||||
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
|
||||
|
||||
export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter
|
||||
|
||||
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }
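Putting the new Args fields together, a configuration that enables UUID primary keys and a dedicated Postgres schema might look like the sketch below. The connection string and option values are placeholders, and this assumes the package's postgresAdapter factory as the entry point.

// Hedged configuration sketch based on the Args type above.
import { postgresAdapter } from '@payloadcms/db-postgres'

export const db = postgresAdapter({
  idType: 'uuid',
  pool: { connectionString: process.env.DATABASE_URI },
  push: false,
  schemaName: 'payload',
})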
|
||||
|
||||
@@ -36,11 +36,11 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
}
|
||||
}
|
||||
|
||||
const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
|
||||
const parentID = parentRows[parentRowIndex].id
|
||||
|
||||
// Add any sub arrays that need to be created
|
||||
// We will call this recursively below
|
||||
arrayRows.forEach((arrayRow) => {
|
||||
arrayRows.forEach((arrayRow, i) => {
|
||||
if (Object.keys(arrayRow.arrays).length > 0) {
|
||||
rowsByTable[tableName].arrays.push(arrayRow.arrays)
|
||||
}
|
||||
@@ -53,6 +53,9 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
arrayRowLocaleData._parentID = arrayRow.row.id
|
||||
arrayRowLocaleData._locale = arrayRowLocale
|
||||
rowsByTable[tableName].locales.push(arrayRowLocaleData)
|
||||
if (!arrayRow.row.id) {
|
||||
arrayRowLocaleData._getParentID = (rows) => rows[i].id
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -61,12 +64,23 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
// Insert all corresponding arrays
|
||||
// (one insert per array table)
|
||||
for (const [tableName, row] of Object.entries(rowsByTable)) {
|
||||
// the nested arrays need the ID for the parentID foreign key
|
||||
let insertedRows: Args['parentRows']
|
||||
if (row.rows.length > 0) {
|
||||
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
|
||||
insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
|
||||
}
|
||||
|
||||
// Insert locale rows
|
||||
if (adapter.tables[`${tableName}_locales`] && row.locales.length > 0) {
|
||||
if (!row.locales[0]._parentID) {
|
||||
row.locales = row.locales.map((localeRow, i) => {
|
||||
if (typeof localeRow._getParentID === 'function') {
|
||||
localeRow._parentID = localeRow._getParentID(insertedRows)
|
||||
delete localeRow._getParentID
|
||||
}
|
||||
return localeRow
|
||||
})
|
||||
}
|
||||
await db.insert(adapter.tables[`${tableName}_locales`]).values(row.locales).returning()
|
||||
}
|
||||
|
||||
@@ -76,7 +90,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
|
||||
adapter,
|
||||
arrays: row.arrays,
|
||||
db,
|
||||
parentRows: row.rows,
|
||||
parentRows: insertedRows,
|
||||
})
|
||||
}
|
||||
}
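The _getParentID callbacks introduced above implement a small deferred-lookup pattern: locale rows created before their parent array row has an id resolve the parent id later, from the rows returned by the insert. A standalone sketch of that pattern follows; names and shapes are illustrative.

// Minimal sketch of the deferred parent-id resolution used above.
type InsertedRow = { id: number | string }
type LocaleRow = {
  _getParentID?: (rows: InsertedRow[]) => number | string
  _parentID?: number | string
}

const resolveParentIDs = (localeRows: LocaleRow[], insertedRows: InsertedRow[]): LocaleRow[] =>
  localeRows.map((localeRow) => {
    if (typeof localeRow._getParentID === 'function') {
      localeRow._parentID = localeRow._getParentID(insertedRows)
      delete localeRow._getParentID
    }
    return localeRow
  })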
|
||||
|
||||
@@ -1,13 +1,17 @@
import { sql } from 'drizzle-orm'

import type { DrizzleDB } from '../types'
import type { PostgresAdapter } from '../types'

export const createMigrationTable = async (db: DrizzleDB): Promise<void> => {
  await db.execute(sql`CREATE TABLE IF NOT EXISTS "payload_migrations" (
export const createMigrationTable = async (adapter: PostgresAdapter): Promise<void> => {
  const prependSchema = adapter.schemaName ? `"${adapter.schemaName}".` : ''

  await adapter.drizzle.execute(
    sql.raw(`CREATE TABLE IF NOT EXISTS ${prependSchema}"payload_migrations" (
  "id" serial PRIMARY KEY NOT NULL,
  "name" varchar,
  "batch" numeric,
  "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
  "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
  );`)
  );`),
  )
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload",
|
||||
"version": "2.10.1",
|
||||
"version": "2.11.2",
|
||||
"description": "Node, React and MongoDB Headless CMS and Application Framework",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
@@ -101,7 +101,6 @@
|
||||
"jwt-decode": "3.1.2",
|
||||
"md5": "2.3.0",
|
||||
"method-override": "3.0.0",
|
||||
"micro-memoize": "4.1.2",
|
||||
"minimist": "1.2.8",
|
||||
"mkdirp": "1.0.4",
|
||||
"monaco-editor": "0.38.0",
|
||||
@@ -193,7 +192,7 @@
|
||||
"get-port": "5.1.1",
|
||||
"mini-css-extract-plugin": "1.6.2",
|
||||
"node-fetch": "2.6.12",
|
||||
"nodemon": "3.0.1",
|
||||
"nodemon": "3.0.3",
|
||||
"object.assign": "4.1.4",
|
||||
"object.entries": "1.1.6",
|
||||
"passport-strategy": "1.0.0",
|
||||
|
||||
@@ -24,11 +24,16 @@ export const Collapsible: React.FC<Props> = ({
|
||||
}) => {
|
||||
const [collapsedLocal, setCollapsedLocal] = useState(Boolean(initCollapsed))
|
||||
const [hoveringToggle, setHoveringToggle] = useState(false)
|
||||
const isNested = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const { t } = useTranslation('fields')
|
||||
|
||||
const collapsed = typeof collapsedFromProps === 'boolean' ? collapsedFromProps : collapsedLocal
|
||||
|
||||
const toggleCollapsible = React.useCallback(() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}, [onToggle, collapsed])
|
||||
|
||||
return (
|
||||
<div
|
||||
className={[
|
||||
@@ -36,14 +41,14 @@ export const Collapsible: React.FC<Props> = ({
|
||||
className,
|
||||
dragHandleProps && `${baseClass}--has-drag-handle`,
|
||||
collapsed && `${baseClass}--collapsed`,
|
||||
isNested && `${baseClass}--nested`,
|
||||
withinCollapsible && `${baseClass}--nested`,
|
||||
hoveringToggle && `${baseClass}--hovered`,
|
||||
`${baseClass}--style-${collapsibleStyle}`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
>
|
||||
<CollapsibleProvider>
|
||||
<CollapsibleProvider collapsed={collapsed} toggle={toggleCollapsible}>
|
||||
<div
|
||||
className={`${baseClass}__toggle-wrap`}
|
||||
onMouseEnter={() => setHoveringToggle(true)}
|
||||
@@ -65,10 +70,7 @@ export const Collapsible: React.FC<Props> = ({
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
onClick={() => {
|
||||
if (typeof onToggle === 'function') onToggle(!collapsed)
|
||||
setCollapsedLocal(!collapsed)
|
||||
}}
|
||||
onClick={toggleCollapsible}
|
||||
type="button"
|
||||
>
|
||||
<span>{t('toggleBlock')}</span>
|
||||
|
||||
@@ -1,14 +1,35 @@
import React, { createContext, useContext } from 'react'

const Context = createContext(false)
type ContextType = {
  collapsed: boolean
  isVisible: boolean
  toggle: () => void
  withinCollapsible: boolean
}
const Context = createContext({
  collapsed: false,
  isVisible: true,
  toggle: () => {},
  withinCollapsible: true,
})

export const CollapsibleProvider: React.FC<{
  children?: React.ReactNode
  collapsed?: boolean
  toggle: () => void
  withinCollapsible?: boolean
}> = ({ children, withinCollapsible = true }) => {
  return <Context.Provider value={withinCollapsible}>{children}</Context.Provider>
}> = ({ children, collapsed, toggle, withinCollapsible = true }) => {
  const { collapsed: parentIsCollapsed, isVisible } = useCollapsible()

  const contextValue = React.useMemo((): ContextType => {
    return {
      collapsed: Boolean(collapsed),
      isVisible: isVisible && !parentIsCollapsed,
      toggle,
      withinCollapsible,
    }
  }, [collapsed, withinCollapsible, toggle, parentIsCollapsed, isVisible])
  return <Context.Provider value={contextValue}>{children}</Context.Provider>
}

export const useCollapsible = (): boolean => useContext(Context)

export default Context
export const useCollapsible = (): ContextType => useContext(Context)
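As a rough idea of how the richer context above can be consumed, here is a hypothetical component that hides its body while an ancestor collapsible is closed and reuses the shared toggle. The import path and component name are illustrative; only the ContextType shape comes from the diff.

// Hypothetical consumer of the extended Collapsible context.
import React from 'react'

import { useCollapsible } from './provider'

export const CollapsibleSummary: React.FC<{ children?: React.ReactNode }> = ({ children }) => {
  const { collapsed, isVisible, toggle, withinCollapsible } = useCollapsible()

  // skip rendering while an ancestor collapsible keeps this subtree hidden
  if (withinCollapsible && !isVisible) return null

  return (
    <div>
      <button onClick={toggle} type="button">
        {collapsed ? 'Expand' : 'Collapse'}
      </button>
      {!collapsed && children}
    </div>
  )
}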
|
||||
|
||||
@@ -52,21 +52,9 @@ const Content: React.FC<DocumentDrawerProps> = ({
|
||||
|
||||
const { id, docPermissions, getDocPreferences } = useDocumentInfo()
|
||||
|
||||
// The component definition could come from multiple places in the config
|
||||
// we need to cascade into the proper component from the top-down
|
||||
// 1. "components.Edit"
|
||||
// 2. "components.Edit.Default"
|
||||
// 3. "components.Edit.Default.Component"
|
||||
const CustomEditView =
|
||||
typeof Edit === 'function'
|
||||
? Edit
|
||||
: typeof Edit === 'object' && typeof Edit.Default === 'function'
|
||||
? Edit.Default
|
||||
: typeof Edit?.Default === 'object' &&
|
||||
'Component' in Edit.Default &&
|
||||
typeof Edit.Default.Component === 'function'
|
||||
? Edit.Default.Component
|
||||
: undefined
|
||||
// If they are replacing the entire edit view, use that.
|
||||
// Else let the DefaultEdit determine what to render.
|
||||
const CustomEditView = typeof Edit === 'function' ? Edit : undefined
|
||||
|
||||
const [fields, setFields] = useState(() => formatFields(collectionConfig, true))
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ import { fieldTypes } from '../../forms/field-types'
|
||||
import X from '../../icons/X'
|
||||
import { useAuth } from '../../utilities/Auth'
|
||||
import { useConfig } from '../../utilities/Config'
|
||||
import { DocumentInfoProvider } from '../../utilities/DocumentInfo'
|
||||
import { OperationContext } from '../../utilities/OperationProvider'
|
||||
import { SelectAllStatus, useSelection } from '../../views/collections/List/SelectionProvider'
|
||||
import { Drawer, DrawerToggler } from '../Drawer'
|
||||
@@ -120,53 +121,55 @@ const EditMany: React.FC<Props> = (props) => {
|
||||
{t('edit')}
|
||||
</DrawerToggler>
|
||||
<Drawer header={null} slug={drawerSlug}>
|
||||
<OperationContext.Provider value="update">
|
||||
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
|
||||
<div className={`${baseClass}__main`}>
|
||||
<div className={`${baseClass}__header`}>
|
||||
<h2 className={`${baseClass}__header__title`}>
|
||||
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
|
||||
</h2>
|
||||
<button
|
||||
aria-label={t('close')}
|
||||
className={`${baseClass}__header__close`}
|
||||
id={`close-drawer__${drawerSlug}`}
|
||||
onClick={() => closeModal(drawerSlug)}
|
||||
type="button"
|
||||
>
|
||||
<X />
|
||||
</button>
|
||||
</div>
|
||||
<FieldSelect fields={fields} setSelected={setSelected} />
|
||||
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
|
||||
<div className={`${baseClass}__sidebar-wrap`}>
|
||||
<div className={`${baseClass}__sidebar`}>
|
||||
<div className={`${baseClass}__sidebar-sticky-wrap`}>
|
||||
<div className={`${baseClass}__document-actions`}>
|
||||
{collection.versions ? (
|
||||
<React.Fragment>
|
||||
<Publish
|
||||
<DocumentInfoProvider collection={collection}>
|
||||
<OperationContext.Provider value="update">
|
||||
<Form className={`${baseClass}__form`} onSuccess={onSuccess}>
|
||||
<div className={`${baseClass}__main`}>
|
||||
<div className={`${baseClass}__header`}>
|
||||
<h2 className={`${baseClass}__header__title`}>
|
||||
{t('editingLabel', { count, label: getTranslation(plural, i18n) })}
|
||||
</h2>
|
||||
<button
|
||||
aria-label={t('close')}
|
||||
className={`${baseClass}__header__close`}
|
||||
id={`close-drawer__${drawerSlug}`}
|
||||
onClick={() => closeModal(drawerSlug)}
|
||||
type="button"
|
||||
>
|
||||
<X />
|
||||
</button>
|
||||
</div>
|
||||
<FieldSelect fields={fields} setSelected={setSelected} />
|
||||
<RenderFields fieldSchema={selected} fieldTypes={fieldTypes} />
|
||||
<div className={`${baseClass}__sidebar-wrap`}>
|
||||
<div className={`${baseClass}__sidebar`}>
|
||||
<div className={`${baseClass}__sidebar-sticky-wrap`}>
|
||||
<div className={`${baseClass}__document-actions`}>
|
||||
{collection.versions ? (
|
||||
<React.Fragment>
|
||||
<Publish
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
<SaveDraft
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
</React.Fragment>
|
||||
) : (
|
||||
<Submit
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
<SaveDraft
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
</React.Fragment>
|
||||
) : (
|
||||
<Submit
|
||||
action={`${serverURL}${api}/${slug}${getQueryParams()}`}
|
||||
disabled={selected.length === 0}
|
||||
/>
|
||||
)}
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Form>
|
||||
</OperationContext.Provider>
|
||||
</Form>
|
||||
</OperationContext.Provider>
|
||||
</DocumentInfoProvider>
|
||||
</Drawer>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -33,7 +33,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
permissions,
|
||||
} = props
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const isWithinGroup = useGroup()
|
||||
const isWithinRow = useRow()
|
||||
const isWithinTab = useTabs()
|
||||
@@ -43,7 +43,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
const groupHasErrors = submitted && errorCount > 0
|
||||
|
||||
const path = pathFromProps || name
|
||||
const isTopLevel = !(isWithinCollapsible || isWithinGroup || isWithinRow)
|
||||
const isTopLevel = !(withinCollapsible || isWithinGroup || isWithinRow)
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -51,7 +51,7 @@ const Group: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
baseClass,
|
||||
isTopLevel && `${baseClass}--top-level`,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
isWithinGroup && `${baseClass}--within-group`,
|
||||
isWithinRow && `${baseClass}--within-row`,
|
||||
isWithinTab && `${baseClass}--within-tab`,
|
||||
|
||||
@@ -83,7 +83,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
const { preferencesKey } = useDocumentInfo()
|
||||
const { i18n } = useTranslation()
|
||||
|
||||
const isWithinCollapsible = useCollapsible()
|
||||
const { withinCollapsible } = useCollapsible()
|
||||
const [activeTabIndex, setActiveTabIndex] = useState<number>(0)
|
||||
const tabsPrefKey = `tabs-${indexPath}`
|
||||
|
||||
@@ -138,7 +138,7 @@ const TabsField: React.FC<Props> = (props) => {
|
||||
fieldBaseClass,
|
||||
className,
|
||||
baseClass,
|
||||
isWithinCollapsible && `${baseClass}--within-collapsible`,
|
||||
withinCollapsible && `${baseClass}--within-collapsible`,
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ')}
|
||||
|
||||
@@ -7,13 +7,14 @@ import { extractTranslations } from '../../translations/extractTranslations'
|
||||
const labels = extractTranslations(['authentication:enableAPIKey', 'authentication:apiKey'])
|
||||
|
||||
const encryptKey: FieldHook = ({ req, value }) =>
|
||||
value ? req.payload.encrypt(value as string) : undefined
|
||||
value ? req.payload.encrypt(value as string) : null
|
||||
const decryptKey: FieldHook = ({ req, value }) =>
|
||||
value ? req.payload.decrypt(value as string) : undefined
|
||||
|
||||
export default [
|
||||
{
|
||||
name: 'enableAPIKey',
|
||||
type: 'checkbox',
|
||||
admin: {
|
||||
components: {
|
||||
Field: () => null,
|
||||
@@ -21,10 +22,10 @@ export default [
|
||||
},
|
||||
defaultValue: false,
|
||||
label: labels['authentication:enableAPIKey'],
|
||||
type: 'checkbox',
|
||||
},
|
||||
{
|
||||
name: 'apiKey',
|
||||
type: 'text',
|
||||
admin: {
|
||||
components: {
|
||||
Field: () => null,
|
||||
@@ -35,10 +36,10 @@ export default [
|
||||
beforeChange: [encryptKey],
|
||||
},
|
||||
label: labels['authentication:apiKey'],
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'apiKeyIndex',
|
||||
type: 'text',
|
||||
admin: {
|
||||
disabled: true,
|
||||
},
|
||||
@@ -59,6 +60,5 @@ export default [
|
||||
},
|
||||
],
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
] as Field[]
|
||||
|
||||
@@ -72,6 +72,8 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
overrideAccess,
|
||||
req,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload,
|
||||
payload: { config, secret },
|
||||
},
|
||||
@@ -196,7 +198,9 @@ async function login<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: user,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -56,30 +56,33 @@ const args = minimist(process.argv.slice(2))
|
||||
const scriptIndex = args._.findIndex((x) => x === 'build')
|
||||
|
||||
const script = scriptIndex === -1 ? args._[0] : args._[scriptIndex]
|
||||
if (script) {
|
||||
if (script.startsWith('migrate')) {
|
||||
migrate(args).then(() => process.exit(0))
|
||||
} else {
|
||||
switch (script.toLowerCase()) {
|
||||
case 'build': {
|
||||
build()
|
||||
break
|
||||
}
|
||||
|
||||
if (script.startsWith('migrate')) {
|
||||
migrate(args).then(() => process.exit(0))
|
||||
} else {
|
||||
switch (script.toLowerCase()) {
|
||||
case 'build': {
|
||||
build()
|
||||
break
|
||||
case 'generate:types': {
|
||||
generateTypes()
|
||||
break
|
||||
}
|
||||
|
||||
case 'generate:graphqlschema': {
|
||||
generateGraphQLSchema()
|
||||
break
|
||||
}
|
||||
|
||||
default:
|
||||
console.log(`Unknown script "${script}".`)
|
||||
break
|
||||
}
|
||||
|
||||
case 'generate:types': {
|
||||
generateTypes()
|
||||
break
|
||||
}
|
||||
|
||||
case 'generate:graphqlschema': {
|
||||
generateGraphQLSchema()
|
||||
break
|
||||
}
|
||||
|
||||
default:
|
||||
console.log(`Unknown script "${script}".`)
|
||||
break
|
||||
}
|
||||
} else {
|
||||
console.error('No payload script specified. Did you mean to run `payload migrate`?')
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,7 +17,6 @@ import type {
|
||||
import executeAccess from '../../auth/executeAccess'
|
||||
import sendVerificationEmail from '../../auth/sendVerificationEmail'
|
||||
import { registerLocalStrategy } from '../../auth/strategies/local/register'
|
||||
import { fieldAffectsData } from '../../fields/config/types'
|
||||
import { afterChange } from '../../fields/hooks/afterChange'
|
||||
import { afterRead } from '../../fields/hooks/afterRead'
|
||||
import { beforeChange } from '../../fields/hooks/beforeChange'
|
||||
@@ -26,12 +25,12 @@ import { generateFileData } from '../../uploads/generateFileData'
|
||||
import { unlinkTempFiles } from '../../uploads/unlinkTempFiles'
|
||||
import { uploadFiles } from '../../uploads/uploadFiles'
|
||||
import { commitTransaction } from '../../utilities/commitTransaction'
|
||||
import flattenFields from '../../utilities/flattenTopLevelFields'
|
||||
import { initTransaction } from '../../utilities/initTransaction'
|
||||
import { killTransaction } from '../../utilities/killTransaction'
|
||||
import sanitizeInternalFields from '../../utilities/sanitizeInternalFields'
|
||||
import { saveVersion } from '../../versions/saveVersion'
|
||||
import { buildAfterOperation } from './utils'
|
||||
import flattenFields from '../../utilities/flattenTopLevelFields'
|
||||
|
||||
const unlinkFile = promisify(fs.unlink)
|
||||
|
||||
@@ -88,6 +87,8 @@ async function create<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
overrideAccess,
|
||||
overwriteExistingFiles = false,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload,
|
||||
payload: { config, emailOptions },
|
||||
},
|
||||
@@ -289,7 +290,9 @@ async function create<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -66,6 +66,7 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
depth,
|
||||
overrideAccess,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload: { config },
|
||||
payload,
|
||||
@@ -149,9 +150,9 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
if (collectionConfig.versions) {
|
||||
await deleteCollectionVersions({
|
||||
id,
|
||||
slug: collectionConfig.slug,
|
||||
payload,
|
||||
req,
|
||||
slug: collectionConfig.slug,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -178,7 +179,9 @@ async function deleteOperation<TSlug extends keyof GeneratedTypes['collections']
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result || doc,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -59,6 +59,8 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
depth,
|
||||
overrideAccess,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload: { config },
|
||||
payload,
|
||||
t,
|
||||
@@ -120,9 +122,9 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
if (collectionConfig.versions) {
|
||||
await deleteCollectionVersions({
|
||||
id,
|
||||
slug: collectionConfig.slug,
|
||||
payload,
|
||||
req,
|
||||
slug: collectionConfig.slug,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -156,7 +158,9 @@ async function deleteByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -68,7 +68,7 @@ async function find<T extends TypeWithID & Record<string, unknown>>(
|
||||
overrideAccess,
|
||||
page,
|
||||
pagination = true,
|
||||
req: { locale, payload },
|
||||
req: { fallbackLocale, locale, payload },
|
||||
req,
|
||||
showHiddenFields,
|
||||
sort,
|
||||
@@ -196,8 +196,10 @@ async function find<T extends TypeWithID & Record<string, unknown>>(
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
findMany: true,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
/* eslint-disable no-underscore-dangle */
|
||||
import memoize from 'micro-memoize'
|
||||
|
||||
import type { FindOneArgs } from '../../database/types'
|
||||
import type { PayloadRequest } from '../../express/types'
|
||||
import type { Collection, TypeWithID } from '../config/types'
|
||||
@@ -32,7 +30,6 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
|
||||
try {
|
||||
const shouldCommit = await initTransaction(args.req)
|
||||
const { transactionID } = args.req
|
||||
|
||||
// /////////////////////////////////////
|
||||
// beforeOperation - Collection
|
||||
@@ -59,7 +56,7 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
disableErrors,
|
||||
draft: draftEnabled = false,
|
||||
overrideAccess = false,
|
||||
req: { locale, t },
|
||||
req: { fallbackLocale, locale, t },
|
||||
req,
|
||||
showHiddenFields,
|
||||
} = args
|
||||
@@ -90,25 +87,7 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
|
||||
if (!findOneArgs.where.and[0].id) throw new NotFound(t)
|
||||
|
||||
if (!req.findByID) {
|
||||
req.findByID = { [transactionID]: {} }
|
||||
} else if (!req.findByID[transactionID]) {
|
||||
req.findByID[transactionID] = {}
|
||||
}
|
||||
|
||||
if (!req.findByID[transactionID][collectionConfig.slug]) {
|
||||
const nonMemoizedFindByID = async (query: FindOneArgs) => req.payload.db.findOne(query)
|
||||
|
||||
req.findByID[transactionID][collectionConfig.slug] = memoize(nonMemoizedFindByID, {
|
||||
isPromise: true,
|
||||
maxSize: 100,
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore This is straight from their docs, bad typings
|
||||
transformKey: JSON.stringify,
|
||||
})
|
||||
}
|
||||
|
||||
let result = (await req.findByID[transactionID][collectionConfig.slug](findOneArgs)) as T
|
||||
let result: T = await req.payload.db.findOne(findOneArgs)
|
||||
|
||||
if (!result) {
|
||||
if (!disableErrors) {
|
||||
@@ -118,9 +97,6 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
return null
|
||||
}
|
||||
|
||||
// Clone the result - it may have come back memoized
|
||||
result = JSON.parse(JSON.stringify(result))
|
||||
|
||||
// /////////////////////////////////////
|
||||
// Replace document with draft if available
|
||||
// /////////////////////////////////////
|
||||
@@ -163,7 +139,9 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
|
||||
currentDepth,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -34,7 +34,7 @@ async function findVersionByID<T extends TypeWithID = any>(
|
||||
depth,
|
||||
disableErrors,
|
||||
overrideAccess,
|
||||
req: { locale, payload, t },
|
||||
req: { fallbackLocale, locale, payload, t },
|
||||
req,
|
||||
showHiddenFields,
|
||||
} = args
|
||||
@@ -112,7 +112,9 @@ async function findVersionByID<T extends TypeWithID = any>(
|
||||
currentDepth,
|
||||
depth,
|
||||
doc: result.version,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -37,7 +37,7 @@ async function findVersions<T extends TypeWithVersion<T>>(
|
||||
overrideAccess,
|
||||
page,
|
||||
pagination = true,
|
||||
req: { locale, payload },
|
||||
req: { fallbackLocale, locale, payload },
|
||||
req,
|
||||
showHiddenFields,
|
||||
sort,
|
||||
@@ -125,8 +125,10 @@ async function findVersions<T extends TypeWithVersion<T>>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: data.version,
|
||||
fallbackLocale,
|
||||
findMany: true,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -34,7 +34,7 @@ async function restoreVersion<T extends TypeWithID = any>(args: Arguments): Prom
|
||||
depth,
|
||||
overrideAccess = false,
|
||||
req,
|
||||
req: { locale, payload, t },
|
||||
req: { fallbackLocale, locale, payload, t },
|
||||
showHiddenFields,
|
||||
} = args
|
||||
|
||||
@@ -140,7 +140,9 @@ async function restoreVersion<T extends TypeWithID = any>(args: Arguments): Prom
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -75,6 +75,7 @@ async function update<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
overrideAccess,
|
||||
overwriteExistingFiles = false,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload: { config },
|
||||
payload,
|
||||
@@ -176,7 +177,9 @@ async function update<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth: 0,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
@@ -309,7 +312,9 @@ async function update<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale: null,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -76,6 +76,7 @@ async function updateByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
overrideAccess,
|
||||
overwriteExistingFiles = false,
|
||||
req: {
|
||||
fallbackLocale,
|
||||
locale,
|
||||
payload: { config },
|
||||
payload,
|
||||
@@ -130,7 +131,9 @@ async function updateByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth: 0,
|
||||
doc: docWithLocales,
|
||||
fallbackLocale: null,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess: true,
|
||||
req,
|
||||
showHiddenFields: true,
|
||||
@@ -297,7 +300,9 @@ async function updateByID<TSlug extends keyof GeneratedTypes['collections']>(
|
||||
context: req.context,
|
||||
depth,
|
||||
doc: result,
|
||||
fallbackLocale,
|
||||
global: null,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export { useCollapsible } from '../../admin/components/elements/Collapsible/provider'
|
||||
export { default as buildStateFromSchema } from '../../admin/components/forms/Form/buildStateFromSchema'
|
||||
export { useAuth } from '../../admin/components/utilities/Auth'
|
||||
export { useConfig } from '../../admin/components/utilities/Config'
|
||||
|
||||
@@ -56,7 +56,7 @@ const errorHandler =
|
||||
err,
|
||||
response,
|
||||
req.context,
|
||||
req.collection.config,
|
||||
null,
|
||||
)) || {
|
||||
response,
|
||||
status,
|
||||
|
||||
@@ -11,9 +11,11 @@ type Args = {
|
||||
currentDepth?: number
|
||||
depth: number
|
||||
doc: Record<string, unknown>
|
||||
fallbackLocale: null | string
|
||||
findMany?: boolean
|
||||
flattenLocales?: boolean
|
||||
global: SanitizedGlobalConfig | null
|
||||
locale: string
|
||||
overrideAccess: boolean
|
||||
req: PayloadRequest
|
||||
showHiddenFields: boolean
|
||||
@@ -26,9 +28,11 @@ export async function afterRead<T = any>(args: Args): Promise<T> {
|
||||
currentDepth: incomingCurrentDepth,
|
||||
depth: incomingDepth,
|
||||
doc: incomingDoc,
|
||||
fallbackLocale,
|
||||
findMany,
|
||||
flattenLocales = true,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -52,11 +56,13 @@ export async function afterRead<T = any>(args: Args): Promise<T> {
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: collection?.fields || global?.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
|
||||
@@ -16,11 +16,13 @@ type Args = {
|
||||
currentDepth: number
|
||||
depth: number
|
||||
doc: Record<string, unknown>
|
||||
fallbackLocale: null | string
|
||||
field: Field | TabAsField
|
||||
fieldPromises: Promise<void>[]
|
||||
findMany: boolean
|
||||
flattenLocales: boolean
|
||||
global: SanitizedGlobalConfig | null
|
||||
locale: null | string
|
||||
overrideAccess: boolean
|
||||
populationPromises: Promise<void>[]
|
||||
req: PayloadRequest
|
||||
@@ -44,11 +46,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
field,
|
||||
fieldPromises,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -72,18 +76,13 @@ export const promise = async ({
|
||||
typeof siblingDoc[field.name] === 'object' &&
|
||||
siblingDoc[field.name] !== null &&
|
||||
field.localized &&
|
||||
req.locale !== 'all' &&
|
||||
locale !== 'all' &&
|
||||
req.payload.config.localization
|
||||
|
||||
if (shouldHoistLocalizedValue) {
|
||||
// replace actual value with localized value before sanitizing
|
||||
// { [locale]: fields } -> fields
|
||||
const { locale } = req
|
||||
const value = siblingDoc[field.name][locale]
|
||||
const fallbackLocale =
|
||||
req.payload.config.localization &&
|
||||
req.payload.config.localization?.fallback &&
|
||||
req.fallbackLocale
|
||||
|
||||
let hoistedValue = value
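The hunk above hoists a localized value out of its { [locale]: value } wrapper, preferring the active locale and falling back when the config allows it. A standalone sketch of that rule, with illustrative names:

// Minimal sketch of locale hoisting with a fallback locale.
const hoistLocalizedValue = (
  localizedValue: Record<string, unknown>,
  locale: string,
  fallbackLocale: null | string,
): unknown => {
  const primary = localizedValue[locale]
  if (typeof primary !== 'undefined') return primary
  return fallbackLocale ? localizedValue[fallbackLocale] : undefined
}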
|
||||
|
||||
@@ -201,7 +200,7 @@ export const promise = async ({
|
||||
|
||||
const shouldRunHookOnAllLocales =
|
||||
field.localized &&
|
||||
(req.locale === 'all' || !flattenLocales) &&
|
||||
(locale === 'all' || !flattenLocales) &&
|
||||
typeof siblingDoc[field.name] === 'object'
|
||||
|
||||
if (shouldRunHookOnAllLocales) {
|
||||
@@ -277,7 +276,7 @@ export const promise = async ({
|
||||
) {
|
||||
siblingDoc[field.name] = await getValueWithDefault({
|
||||
defaultValue: field.defaultValue,
|
||||
locale: req.locale,
|
||||
locale,
|
||||
user: req.user,
|
||||
value: siblingDoc[field.name],
|
||||
})
|
||||
@@ -288,7 +287,9 @@ export const promise = async ({
|
||||
relationshipPopulationPromise({
|
||||
currentDepth,
|
||||
depth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -309,11 +310,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -337,11 +340,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -361,11 +366,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -397,11 +404,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: block.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -425,11 +434,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: block.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -457,11 +468,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -487,11 +500,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -511,11 +526,13 @@ export const promise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields: field.tabs.map((tab) => ({ ...tab, type: 'tab' })),
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
|
||||
@@ -8,9 +8,11 @@ type PopulateArgs = {
|
||||
data: Record<string, unknown>
|
||||
dataReference: Record<string, any>
|
||||
depth: number
|
||||
fallbackLocale: null | string
|
||||
field: RelationshipField | UploadField
|
||||
index?: number
|
||||
key?: string
|
||||
locale: null | string
|
||||
overrideAccess: boolean
|
||||
req: PayloadRequest
|
||||
showHiddenFields: boolean
|
||||
@@ -21,9 +23,11 @@ const populate = async ({
|
||||
data,
|
||||
dataReference,
|
||||
depth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
index,
|
||||
key,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -54,8 +58,8 @@ const populate = async ({
|
||||
id,
|
||||
depth,
|
||||
currentDepth + 1,
|
||||
req.locale,
|
||||
req.fallbackLocale,
|
||||
locale,
|
||||
fallbackLocale,
|
||||
overrideAccess,
|
||||
showHiddenFields,
|
||||
]),
|
||||
@@ -90,7 +94,9 @@ const populate = async ({
|
||||
type PromiseArgs = {
|
||||
currentDepth: number
|
||||
depth: number
|
||||
fallbackLocale: null | string
|
||||
field: RelationshipField | UploadField
|
||||
locale: null | string
|
||||
overrideAccess: boolean
|
||||
req: PayloadRequest
|
||||
showHiddenFields: boolean
|
||||
@@ -100,7 +106,9 @@ type PromiseArgs = {
|
||||
const relationshipPopulationPromise = async ({
|
||||
currentDepth,
|
||||
depth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -112,7 +120,7 @@ const relationshipPopulationPromise = async ({
|
||||
|
||||
if (fieldSupportsMany(field) && field.hasMany) {
|
||||
if (
|
||||
req.locale === 'all' &&
|
||||
locale === 'all' &&
|
||||
typeof siblingDoc[field.name] === 'object' &&
|
||||
siblingDoc[field.name] !== null
|
||||
) {
|
||||
@@ -125,9 +133,11 @@ const relationshipPopulationPromise = async ({
|
||||
data: siblingDoc[field.name][key][index],
|
||||
dataReference: resultingDoc,
|
||||
depth: populateDepth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
index,
|
||||
key,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -146,8 +156,10 @@ const relationshipPopulationPromise = async ({
|
||||
data: relatedDoc,
|
||||
dataReference: resultingDoc,
|
||||
depth: populateDepth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
index,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -161,7 +173,7 @@ const relationshipPopulationPromise = async ({
|
||||
} else if (
|
||||
typeof siblingDoc[field.name] === 'object' &&
|
||||
siblingDoc[field.name] !== null &&
|
||||
req.locale === 'all'
|
||||
locale === 'all'
|
||||
) {
|
||||
Object.keys(siblingDoc[field.name]).forEach((key) => {
|
||||
const rowPromise = async () => {
|
||||
@@ -170,8 +182,10 @@ const relationshipPopulationPromise = async ({
|
||||
data: siblingDoc[field.name][key],
|
||||
dataReference: resultingDoc,
|
||||
depth: populateDepth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
key,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
@@ -187,7 +201,9 @@ const relationshipPopulationPromise = async ({
|
||||
data: siblingDoc[field.name],
|
||||
dataReference: resultingDoc,
|
||||
depth: populateDepth,
|
||||
fallbackLocale,
|
||||
field,
|
||||
locale,
|
||||
overrideAccess,
|
||||
req,
|
||||
showHiddenFields,
|
||||
|
||||
@@ -11,11 +11,13 @@ type Args = {
|
||||
currentDepth: number
|
||||
depth: number
|
||||
doc: Record<string, unknown>
|
||||
fallbackLocale: null | string
|
||||
fieldPromises: Promise<void>[]
|
||||
fields: (Field | TabAsField)[]
|
||||
findMany: boolean
|
||||
flattenLocales: boolean
|
||||
global: SanitizedGlobalConfig | null
|
||||
locale: null | string
|
||||
overrideAccess: boolean
|
||||
populationPromises: Promise<void>[]
|
||||
req: PayloadRequest
|
||||
@@ -31,11 +33,13 @@ export const traverseFields = ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
fieldPromises,
|
||||
fields,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
@@ -52,11 +56,13 @@ export const traverseFields = ({
|
||||
currentDepth,
|
||||
depth,
|
||||
doc,
|
||||
fallbackLocale,
|
||||
field,
|
||||
fieldPromises,
|
||||
findMany,
|
||||
flattenLocales,
|
||||
global,
|
||||
locale,
|
||||
overrideAccess,
|
||||
populationPromises,
|
||||
req,
|
||||
|
||||
@@ -275,55 +275,63 @@ const validateFilterOptions: Validate = async (

await Promise.all(
collections.map(async (collection) => {
let optionFilter =
typeof filterOptions === 'function'
? await filterOptions({
id,
data,
relationTo: collection,
siblingData,
user,
})
: filterOptions
try {
let optionFilter =
typeof filterOptions === 'function'
? await filterOptions({
id,
data,
relationTo: collection,
siblingData,
user,
})
: filterOptions

if (optionFilter === true) {
optionFilter = null
}

const valueIDs: (number | string)[] = []

values.forEach((val) => {
if (typeof val === 'object' && val?.value) {
valueIDs.push(val.value)
if (optionFilter === true) {
optionFilter = null
}

if (typeof val === 'string' || typeof val === 'number') {
valueIDs.push(val)
}
})
const valueIDs: (number | string)[] = []

if (valueIDs.length > 0) {
const findWhere = {
and: [{ id: { in: valueIDs } }],
}
values.forEach((val) => {
if (typeof val === 'object' && val?.value) {
valueIDs.push(val.value)
}

if (optionFilter) findWhere.and.push(optionFilter)

if (optionFilter === false) {
falseCollections.push(optionFilter)
}

const result = await payload.find({
collection,
depth: 0,
limit: 0,
pagination: false,
req,
where: findWhere,
if (typeof val === 'string' || typeof val === 'number') {
valueIDs.push(val)
}
})

options[collection] = result.docs.map((doc) => doc.id)
} else {
if (valueIDs.length > 0) {
const findWhere = {
and: [{ id: { in: valueIDs } }],
}

if (optionFilter) findWhere.and.push(optionFilter)

if (optionFilter === false) {
falseCollections.push(optionFilter)
}

// `req` omitted to prevent transaction errors from aborting the entire transaction
const result = await payload.find({
collection,
depth: 0,
limit: 0,
pagination: false,
where: findWhere,
})

options[collection] = result.docs.map((doc) => doc.id)
} else {
options[collection] = []
}
} catch (err) {
req.payload.logger.error({
err,
msg: `Error validating filter options for collection ${collection}`,
})
options[collection] = []
}
}),

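The hunk above wraps each collection's filter-option lookup in its own try/catch and deliberately calls `payload.find` without `req`, so a failed lookup cannot abort the transaction shared by the rest of the request. A minimal sketch of that isolation pattern, with a hypothetical wrapper function (the `payload`, `collection`, and `findWhere` names mirror the diff; everything else is illustrative):

```ts
// Sketch: isolate a read from the request's transaction (assumed Payload local API shape).
// Passing `req` would run the query inside the request's transaction; omitting it keeps a
// failure here from aborting unrelated writes made during the same request.
const loadOptionIDs = async (
  payload: { find: (args: Record<string, unknown>) => Promise<{ docs: { id: number | string }[] }> },
  collection: string,
  findWhere: Record<string, unknown>,
): Promise<(number | string)[]> => {
  try {
    const result = await payload.find({ collection, depth: 0, limit: 0, pagination: false, where: findWhere })
    return result.docs.map((doc) => doc.id)
  } catch (err) {
    // Fall back to an empty option list instead of failing the whole validation.
    return []
  }
}
```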
@@ -23,14 +23,14 @@ type Args = {

async function findOne<T extends Record<string, unknown>>(args: Args): Promise<T> {
const {
slug,
depth,
draft: draftEnabled = false,
globalConfig,
overrideAccess = false,
req: { locale, payload },
req: { fallbackLocale, locale, payload },
req,
showHiddenFields,
slug,
} = args

try {
@@ -51,9 +51,9 @@ async function findOne<T extends Record<string, unknown>>(args: Args): Promise<T
// /////////////////////////////////////

let doc = await req.payload.db.findGlobal({
slug,
locale,
req,
slug,
where: overrideAccess ? undefined : (accessResult as Where),
})
if (!doc) {
@@ -100,7 +100,9 @@ async function findOne<T extends Record<string, unknown>>(args: Args): Promise<T
context: req.context,
depth,
doc,
fallbackLocale,
global: globalConfig,
locale,
overrideAccess,
req,
showHiddenFields,

@@ -31,7 +31,7 @@ async function findVersionByID<T extends TypeWithVersion<T> = any>(args: Argumen
disableErrors,
globalConfig,
overrideAccess,
req: { locale, payload, t },
req: { fallbackLocale, locale, payload, t },
req,
showHiddenFields,
} = args
@@ -108,7 +108,9 @@ async function findVersionByID<T extends TypeWithVersion<T> = any>(args: Argumen
currentDepth,
depth,
doc: result.version,
fallbackLocale,
global: globalConfig,
locale,
overrideAccess,
req,
showHiddenFields,

@@ -35,7 +35,7 @@ async function findVersions<T extends TypeWithVersion<T>>(
limit,
overrideAccess,
page,
req: { locale, payload },
req: { fallbackLocale, locale, payload },
req,
showHiddenFields,
sort,
@@ -97,8 +97,10 @@ async function findVersions<T extends TypeWithVersion<T>>(
// Patch globalType onto version doc
globalType: globalConfig.slug,
},
fallbackLocale,
findMany: true,
global: globalConfig,
locale,
overrideAccess,
req,
showHiddenFields,

@@ -25,7 +25,7 @@ async function restoreVersion<T extends TypeWithVersion<T> = any>(args: Argument
depth,
globalConfig,
overrideAccess,
req: { payload, t },
req: { fallbackLocale, locale, payload, t },
req,
showHiddenFields,
} = args
@@ -66,9 +66,9 @@ async function restoreVersion<T extends TypeWithVersion<T> = any>(args: Argument
// /////////////////////////////////////

const previousDoc = await payload.findGlobal({
slug: globalConfig.slug,
depth,
req,
slug: globalConfig.slug,
})

// /////////////////////////////////////
@@ -76,23 +76,23 @@ async function restoreVersion<T extends TypeWithVersion<T> = any>(args: Argument
// /////////////////////////////////////

const global = await payload.db.findGlobal({
req,
slug: globalConfig.slug,
req,
})

let result = rawVersion.version

if (global) {
result = await payload.db.updateGlobal({
slug: globalConfig.slug,
data: result,
req,
slug: globalConfig.slug,
})
} else {
result = await payload.db.createGlobal({
slug: globalConfig.slug,
data: result,
req,
slug: globalConfig.slug,
})
}

@@ -105,7 +105,9 @@ async function restoreVersion<T extends TypeWithVersion<T> = any>(args: Argument
context: req.context,
depth,
doc: result,
fallbackLocale,
global: globalConfig,
locale,
overrideAccess,
req,
showHiddenFields,

@@ -32,15 +32,15 @@ async function update<TSlug extends keyof GeneratedTypes['globals']>(
args: Args<GeneratedTypes['globals'][TSlug]>,
): Promise<GeneratedTypes['globals'][TSlug]> {
const {
slug,
autosave,
depth,
draft: draftArg,
globalConfig,
overrideAccess,
req: { locale, payload },
req: { fallbackLocale, locale, payload },
req,
showHiddenFields,
slug,
} = args

try {
@@ -74,11 +74,11 @@ async function update<TSlug extends keyof GeneratedTypes['globals']>(
// 2. Retrieve document
// /////////////////////////////////////
const { global, globalExists } = await getLatestGlobalVersion({
slug,
config: globalConfig,
locale,
payload,
req,
slug,
where: query,
})

@@ -97,7 +97,9 @@ async function update<TSlug extends keyof GeneratedTypes['globals']>(
context: req.context,
depth: 0,
doc: globalJSON,
fallbackLocale,
global: globalConfig,
locale,
overrideAccess: true,
req,
showHiddenFields,
@@ -175,15 +177,15 @@ async function update<TSlug extends keyof GeneratedTypes['globals']>(
if (!shouldSaveDraft) {
if (globalExists) {
result = await payload.db.updateGlobal({
slug,
data: result,
req,
slug,
})
} else {
result = await payload.db.createGlobal({
slug,
data: result,
req,
slug,
})
}
}
@@ -218,7 +220,9 @@ async function update<TSlug extends keyof GeneratedTypes['globals']>(
context: req.context,
depth,
doc: result,
fallbackLocale: null,
global: globalConfig,
locale,
overrideAccess,
req,
showHiddenFields,

@@ -274,7 +274,7 @@
"near": "nær"
},
"upload": {
"crop": "Avling",
"crop": "Beskjær",
"cropToolDescription": "Dra hjørnene av det valgte området, tegn et nytt område eller juster verdiene nedenfor.",
"dragAndDrop": "Dra og slipp en fil",
"dragAndDropHere": "eller dra og slipp en fil her",

@@ -1,25 +1,31 @@
import type { Request } from 'express'

import type { File, FileData } from './types'
import { Request } from 'express'

import { APIError } from '../errors'

type Args = {
req: Request
data: FileData
req: Request
}
export const getExternalFile = async ({ req, data }: Args): Promise<File> => {
const baseUrl = req.get('origin') || `${req.protocol}://${req.get('host')}`
const { url, filename } = data
export const getExternalFile = async ({ data, req }: Args): Promise<File> => {
const { filename, url } = data

if (typeof url === 'string') {
const fileURL = `${baseUrl}${url}`
let fileURL = url
if (!url.startsWith('http')) {
const baseUrl = req.get('origin') || `${req.protocol}://${req.get('host')}`
fileURL = `${baseUrl}${url}`
}

const { default: fetch } = (await import('node-fetch')) as any

const res = await fetch(fileURL, {
credentials: 'include',
method: 'GET',
headers: {
...req.headers,
},
method: 'GET',
})

if (!res.ok) throw new APIError(`Failed to fetch file from ${fileURL}`, res.status)

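The change above keeps already-absolute URLs untouched and only prefixes the request origin when the stored value is a relative path. A tiny sketch of that resolution step, with illustrative names and values:

```ts
// Sketch: resolve a stored file URL before fetching it.
// Absolute URLs ("http://..." / "https://...") pass through unchanged; relative paths
// are prefixed with the incoming request's origin, mirroring the diff above.
const resolveFileURL = (url: string, origin: string): string =>
  url.startsWith('http') ? url : `${origin}${url}`

// e.g. resolveFileURL('/media/logo.png', 'https://example.com') -> 'https://example.com/media/logo.png'
// e.g. resolveFileURL('https://cdn.example.com/a.png', 'https://example.com') is returned as-is
```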
@@ -57,8 +57,10 @@ export async function getEntityPolicies<T extends Args>(args: T): Promise<Return
if (typeof where === 'object') {
const paginatedRes = await req.payload.find({
collection: entity.slug,
depth: 0,
limit: 1,
overrideAccess: true,
pagination: false,
req,
where: {
...where,
@@ -79,6 +81,7 @@ export async function getEntityPolicies<T extends Args>(args: T): Promise<Return
return req.payload.findByID({
id,
collection: entity.slug,
depth: 0,
overrideAccess: true,
req,
})
@@ -98,8 +101,13 @@ export async function getEntityPolicies<T extends Args>(args: T): Promise<Return
const mutablePolicies = policiesObj

if (accessLevel === 'field' && docBeingAccessed === undefined) {
docBeingAccessed = await getEntityDoc()
// assign docBeingAccessed first as the promise to avoid multiple calls to getEntityDoc
docBeingAccessed = getEntityDoc().then((doc) => {
docBeingAccessed = doc
})
}
// awaiting the promise to ensure docBeingAccessed is assigned before it is used
await docBeingAccessed

const data = req?.body

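The `docBeingAccessed` change above caches the in-flight promise rather than waiting for the resolved value, so concurrent field-level access checks trigger only one `getEntityDoc()` call. A generic sketch of that memoization pattern, under the assumption that several callers may race on the same lookup (the names below are illustrative, not Payload's):

```ts
// Sketch: cache the promise itself so concurrent callers share a single fetch.
let cachedDoc: Promise<Record<string, unknown>> | Record<string, unknown> | undefined

const getDocOnce = (fetchDoc: () => Promise<Record<string, unknown>>) => {
  if (cachedDoc === undefined) {
    // Store the pending promise immediately; a second caller arriving before it
    // resolves sees the promise instead of kicking off another fetch.
    cachedDoc = fetchDoc().then((doc) => {
      cachedDoc = doc
      return doc
    })
  }
  return Promise.resolve(cachedDoc)
}
```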
51
packages/plugin-cloud-storage/docker-compose.yml
Normal file
@@ -0,0 +1,51 @@
version: '3.2'
services:
  localstack:
    image: localstack/localstack:latest
    container_name: localstack_demo
    ports:
      - '4563-4599:4563-4599'
      - '8055:8080'
    environment:
      - SERVICES=s3
      - DEBUG=1
      - DATA_DIR=/tmp/localstack/data
    volumes:
      - './.localstack:/var/lib/localstack'
      - '/var/run/docker.sock:/var/run/docker.sock'

  azure-storage:
    image: mcr.microsoft.com/azure-storage/azurite:3.18.0
    restart: always
    command: 'azurite --loose --blobHost 0.0.0.0 --tableHost 0.0.0.0 --queueHost 0.0.0.0'
    ports:
      - '10000:10000'
      - '10001:10001'
      - '10002:10002'
    volumes:
      - ./azurestoragedata:/data"

  google-cloud-storage:
    image: fsouza/fake-gcs-server
    restart: always
    command:
      [
        '-scheme',
        'http',
        '-port',
        '4443',
        '-public-host',
        'http://localhost:4443',
        '-external-url',
        'http://localhost:4443',
        '-backend',
        'memory',
      ]
    ports:
      - '4443:4443'
    volumes:
      - ./google-cloud-storage/payload-bucket:/data/payload-bucket

volumes:
  google-cloud-storage:
  azurestoragedata:

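The new compose file spins up LocalStack (S3), Azurite, and a fake GCS server for local cloud-storage adapter testing. As a usage sketch only, a test suite could point an S3 client at the LocalStack edge endpoint roughly like this; the endpoint port, region, credentials, and bucket name are assumptions for illustration, not values taken from the plugin:

```ts
import { CreateBucketCommand, S3Client } from '@aws-sdk/client-s3'

// Sketch: talk to the LocalStack container started by the compose file above.
// 4566 is LocalStack's usual edge port and falls inside the mapped 4563-4599 range;
// the credentials are dummies accepted by LocalStack.
const s3 = new S3Client({
  endpoint: 'http://127.0.0.1:4566',
  region: 'us-east-1',
  forcePathStyle: true, // path-style addressing so bucket names resolve against the local endpoint
  credentials: { accessKeyId: 'localstack', secretAccessKey: 'localstack' },
})

export const createTestBucket = async (): Promise<void> => {
  await s3.send(new CreateBucketCommand({ Bucket: 'payload-bucket' }))
}
```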
@@ -52,7 +52,7 @@
"@types/find-node-modules": "^2.1.2",
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"nodemon": "^2.0.6",
"nodemon": "3.0.3",
"payload": "workspace:*",
"rimraf": "^4.1.2",
"ts-node": "^9.1.1",

@@ -1,7 +1,7 @@
{
"name": "@payloadcms/plugin-form-builder",
"description": "Form builder plugin for Payload CMS",
"version": "1.2.0",
"version": "1.2.1",
"homepage:": "https://payloadcms.com",
"repository": "git@github.com:payloadcms/plugin-form-builder.git",
"main": "dist/index.js",
@@ -31,7 +31,7 @@
"@types/react": "18.2.15",
"copyfiles": "^2.4.1",
"cross-env": "^7.0.3",
"nodemon": "^3.0.2",
"nodemon": "3.0.3",
"payload": "workspace:*",
"react": "^18.0.0",
"ts-node": "10.9.1"

@@ -5,7 +5,7 @@ import { replaceDoubleCurlys } from '../../../utilities/replaceDoubleCurlys'
import { serializeSlate } from '../../../utilities/slate/serializeSlate'

const sendEmail = async (beforeChangeData: any, formConfig: PluginConfig): Promise<any> => {
const { data, operation } = beforeChangeData
const { data, operation, req } = beforeChangeData

if (operation === 'create') {
const {
@@ -22,6 +22,7 @@ const sendEmail = async (beforeChangeData: any, formConfig: PluginConfig): Promi
id: formID,
collection: formOverrides?.slug || 'forms',
locale,
req,
})

const { emails } = form

@@ -11,6 +11,7 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti

const newConfig: CollectionConfig = {
...(formConfig?.formSubmissionOverrides || {}),
slug: formConfig?.formSubmissionOverrides?.slug || 'form-submissions',
access: {
create: () => true,
read: ({ req: { user } }) => !!user, // logged-in users,
@@ -24,13 +25,13 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
fields: [
{
name: 'form',
type: 'relationship',
admin: {
readOnly: true,
},
relationTo: formSlug,
required: true,
type: 'relationship',
validate: async (value, { payload }) => {
validate: async (value, { payload, req }) => {
/* Don't run in the client side */
if (!payload) return true

@@ -41,6 +42,7 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
existingForm = await payload.findByID({
id: value,
collection: formSlug,
req,
})

return true
@@ -52,19 +54,20 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
{
name: 'submissionData',
type: 'array',
admin: {
readOnly: true,
},
fields: [
{
name: 'field',
required: true,
type: 'text',
required: true,
},
{
name: 'value',
required: true,
type: 'text',
required: true,
validate: (value: unknown) => {
// TODO:
// create a validation function that dynamically
@@ -84,7 +87,6 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
},
],
type: 'array',
},
...(formConfig?.formSubmissionOverrides?.fields || []),
],
@@ -96,7 +98,6 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
],
...(formConfig?.formSubmissionOverrides?.hooks || {}),
},
slug: formConfig?.formSubmissionOverrides?.slug || 'form-submissions',
}

const paymentFieldConfig = formConfig?.fields?.payment
@@ -104,26 +105,27 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
if (paymentFieldConfig) {
newConfig.fields.push({
name: 'payment',
type: 'group',
admin: {
readOnly: true,
},
fields: [
{
name: 'field',
label: 'Field',
type: 'text',
label: 'Field',
},
{
name: 'status',
label: 'Status',
type: 'text',
label: 'Status',
},
{
name: 'amount',
type: 'number',
admin: {
description: 'Amount in cents',
},
type: 'number',
},
{
name: 'paymentProcessor',
@@ -131,28 +133,27 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
{
name: 'creditCard',
type: 'group',
fields: [
{
name: 'token',
label: 'token',
type: 'text',
label: 'token',
},
{
name: 'brand',
label: 'Brand',
type: 'text',
label: 'Brand',
},
{
name: 'number',
label: 'Number',
type: 'text',
label: 'Number',
},
],
label: 'Credit Card',
type: 'group',
},
],
type: 'group',
})
}

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-nested-docs",
"version": "1.0.11",
"version": "1.0.12",
"description": "The official Nested Docs plugin for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -20,6 +20,9 @@
"@payloadcms/eslint-config": "workspace:*",
"payload": "workspace:*"
},
"peerDependencies": {
"payload": "^0.18.5 || ^1.0.0 || ^2.0.0"
},
"exports": {
".": {
"default": "./src/index.ts",

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-search",
"version": "1.0.1",
"version": "1.1.0",
"homepage:": "https://payloadcms.com",
"repository": "git@github.com:payloadcms/plugin-search.git",
"description": "Search plugin for Payload",

@@ -1,27 +1,25 @@
import type { CollectionAfterDeleteHook } from 'payload/types'

const deleteFromSearch: CollectionAfterDeleteHook = ({ doc, req: { payload } }) => {
const deleteFromSearch: CollectionAfterDeleteHook = async ({ doc, req: { payload }, req }) => {
try {
const deleteSearchDoc = async (): Promise<any> => {
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: doc.id,
},
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
req,
where: {
'doc.value': {
equals: doc.id,
},
},
})

if (searchDocQuery?.docs?.[0]) {
await payload.delete({
id: searchDocQuery?.docs?.[0]?.id,
collection: 'search',
req,
})

if (searchDocQuery?.docs?.[0]) {
payload.delete({
id: searchDocQuery?.docs?.[0]?.id,
collection: 'search',
})
}
}

deleteSearchDoc()
} catch (err: unknown) {
payload.logger.error({
err: `Error deleting search doc: ${err}`,

@@ -6,6 +6,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
doc,
operation,
req: { payload },
req,
// @ts-expect-error
searchConfig,
} = args
@@ -26,6 +27,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
dataToSave = await beforeSync({
originalDoc: doc,
payload,
req,
searchDoc: dataToSave,
})
}
@@ -53,13 +55,13 @@ const syncWithSearch: SyncWithSearch = async (args) => {
try {
if (operation === 'create') {
if (doSync) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.create({
await payload.create({
collection: 'search',
data: {
...dataToSave,
priority: defaultPriority,
},
req,
})
}
}
@@ -70,6 +72,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
req,
where: {
'doc.value': {
equals: id,
@@ -88,15 +91,12 @@ const syncWithSearch: SyncWithSearch = async (args) => {
// to ensure the same, out-of-date result does not appear twice (where only syncing the first found doc)
if (duplicativeDocs.length > 0) {
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
Promise.all(
duplicativeDocs.map(({ id: duplicativeDocID }) =>
payload.delete({
id: duplicativeDocID,
collection: 'search',
}),
), // eslint-disable-line function-paren-newline
)
const duplicativeDocIDs = duplicativeDocs.map(({ id }) => id)
await payload.delete({
collection: 'search',
req,
where: { id: { in: duplicativeDocIDs } },
})
} catch (err: unknown) {
payload.logger.error(`Error deleting duplicative search documents.`)
}
@@ -108,14 +108,14 @@ const syncWithSearch: SyncWithSearch = async (args) => {
if (doSync) {
// update the doc normally
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.update({
await payload.update({
id: searchDocID,
collection: 'search',
data: {
...dataToSave,
priority: foundDoc.priority || defaultPriority,
},
req,
})
} catch (err: unknown) {
payload.logger.error(`Error updating search document.`)
@@ -124,10 +124,10 @@ const syncWithSearch: SyncWithSearch = async (args) => {
if (deleteDrafts && status === 'draft') {
// do not include draft docs in search results, so delete the record
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.delete({
await payload.delete({
id: searchDocID,
collection: 'search',
req,
})
} catch (err: unknown) {
payload.logger.error(`Error deleting search document: ${err}`)
@@ -135,13 +135,13 @@ const syncWithSearch: SyncWithSearch = async (args) => {
}
} else if (doSync) {
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.create({
await payload.create({
collection: 'search',
data: {
...dataToSave,
priority: defaultPriority,
},
req,
})
} catch (err: unknown) {
payload.logger.error(`Error creating search document: ${err}`)

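A recurring theme in this hunk, and in several files above, is replacing fire-and-forget `payload.create`/`payload.update`/`payload.delete` calls with awaited calls that forward `req`, so the nested operation joins the request's transaction and any failure is caught by the surrounding try/catch instead of floating. A minimal sketch of that hook shape, with illustrative field data and assuming Payload 2.x hook and local API signatures:

```ts
import type { CollectionAfterChangeHook } from 'payload/types'

// Sketch: an afterChange hook that awaits the nested operation and passes `req`
// through so it runs inside the same transaction as the triggering request.
const syncSketch: CollectionAfterChangeHook = async ({ doc, req }) => {
  try {
    await req.payload.create({
      collection: 'search', // the collection slug used throughout this plugin's diff
      data: { title: (doc as { title?: string })?.title }, // illustrative payload only
      req,
    })
  } catch (err: unknown) {
    req.payload.logger.error(`Error creating search document: ${err}`)
  }
  return doc
}
```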
@@ -34,7 +34,7 @@ const Search =
afterChange: [
...(existingHooks?.afterChange || []),
async (args: any) => {
syncWithSearch({
await syncWithSearch({
...args,
collection: collection.slug,
searchConfig,

@@ -1,5 +1,5 @@
import type { Payload } from 'payload'
import type { CollectionAfterChangeHook, CollectionConfig } from 'payload/types'
import type { CollectionAfterChangeHook, CollectionConfig, PayloadRequest } from 'payload/types'

export interface DocToSync {
[key: string]: any
@@ -15,6 +15,7 @@ export type BeforeSync = (args: {
[key: string]: any
}
payload: Payload
req: PayloadRequest
searchDoc: DocToSync
}) => DocToSync | Promise<DocToSync>

@@ -45,7 +45,7 @@
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"jest": "^29.5.0",
"nodemon": "^2.0.6",
"nodemon": "3.0.3",
"payload": "workspace:*",
"ts-jest": "^29.1.0",
"webpack": "^5.78.0"

@@ -2,10 +2,12 @@ import en from './en.json'
import es from './es.json'
import fa from './fa.json'
import fr from './fr.json'
import pl from './pl.json'

export default {
en,
es,
fa,
fr,
pl,
}

22
packages/plugin-seo/src/translations/pl.json
Normal file
@@ -0,0 +1,22 @@
{
  "$schema": "./translation-schema.json",
  "plugin-seo": {
    "autoGenerate": "Wygeneruj automatycznie",
    "imageAutoGenerationTip": "Automatyczne generowanie pobierze wybrany główny obraz.",
    "bestPractices": "najlepsze praktyki",
    "lengthTipTitle": "Długość powinna wynosić od {{minLength}} do {{maxLength}} znaków. Po porady dotyczące pisania wysokiej jakości meta tytułów zobacz ",
    "lengthTipDescription": "Długość powinna wynosić od {{minLength}} do {{maxLength}} znaków. Po porady dotyczące pisania wysokiej jakości meta opisów zobacz ",
    "good": "Dobrze",
    "tooLong": "Zbyt długie",
    "tooShort": "Zbyt krótkie",
    "almostThere": "Prawie gotowe",
    "characterCount": "{{current}}/{{minLength}}-{{maxLength}} znaków, ",
    "charactersToGo": "pozostało {{characters}} znaków",
    "charactersLeftOver": "zostało {{characters}} znaków",
    "charactersTooMany": "{{characters}} znaków za dużo",
    "noImage": "Brak obrazu",
    "checksPassing": "{{current}}/{{max}} testów zakończonych pomyślnie",
    "preview": "Podgląd",
    "previewDescription": "Dokładne wyniki listowania mogą się różnić w zależności od treści i zgodności z kryteriami wyszukiwania."
  }
}

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/richtext-lexical",
"version": "0.6.1",
"version": "0.7.0",
"description": "The officially supported Lexical richtext adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -19,31 +19,31 @@
},
"dependencies": {
"@faceless-ui/modal": "2.0.1",
"@lexical/headless": "0.12.6",
"@lexical/link": "0.12.6",
"@lexical/list": "0.12.6",
"@lexical/mark": "0.12.6",
"@lexical/markdown": "0.12.6",
"@lexical/react": "0.12.6",
"@lexical/rich-text": "0.12.6",
"@lexical/selection": "0.12.6",
"@lexical/utils": "0.12.6",
"@lexical/headless": "0.13.1",
"@lexical/link": "0.13.1",
"@lexical/list": "0.13.1",
"@lexical/mark": "0.13.1",
"@lexical/markdown": "0.13.1",
"@lexical/react": "0.13.1",
"@lexical/rich-text": "0.13.1",
"@lexical/selection": "0.13.1",
"@lexical/utils": "0.13.1",
"bson-objectid": "2.0.4",
"classnames": "^2.3.2",
"deep-equal": "2.2.3",
"i18next": "22.5.1",
"json-schema": "^0.4.0",
"lexical": "0.12.6",
"lexical": "0.13.1",
"lodash": "4.17.21",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-error-boundary": "^4.0.11",
"react-error-boundary": "4.0.12",
"react-i18next": "11.18.6",
"ts-essentials": "7.0.3"
},
"devDependencies": {
"@payloadcms/eslint-config": "workspace:*",
"@types/json-schema": "7.0.12",
"@types/json-schema": "7.0.15",
"@types/node": "20.6.2",
"@types/react": "18.2.15",
"payload": "workspace:*"

@@ -2,7 +2,7 @@ import type { SerializedQuoteNode } from '@lexical/rich-text'

import { $createQuoteNode, QuoteNode } from '@lexical/rich-text'
import { $setBlocksType } from '@lexical/selection'
import { $INTERNAL_isPointSelection, $getSelection } from 'lexical'
import { $getSelection } from 'lexical'

import type { HTMLConverter } from '../converters/html/converter/types'
import type { FeatureProvider } from '../types'
@@ -31,9 +31,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
onClick: ({ editor }) => {
editor.update(() => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createQuoteNode())
}
$setBlocksType(selection, () => $createQuoteNode())
})
},
order: 20,
@@ -44,6 +42,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
markdownTransformers: [MarkdownTransformer],
nodes: [
{
type: QuoteNode.getType(),
converters: {
html: {
converter: async ({ converters, node, parent }) => {
@@ -62,7 +61,6 @@ export const BlockQuoteFeature = (): FeatureProvider => {
} as HTMLConverter<SerializedQuoteNode>,
},
node: QuoteNode,
type: QuoteNode.getType(),
},
],
props: null,
@@ -82,9 +80,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
keywords: ['quote', 'blockquote'],
onSelect: () => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createQuoteNode())
}
$setBlocksType(selection, () => $createQuoteNode())
},
}),
],

@@ -39,8 +39,16 @@ export function BlocksPlugin(): JSX.Element | null {
const { focus } = selection
const focusNode = focus.getNode()

// First, delete currently selected node if it's an empty paragraph
if ($isParagraphNode(focusNode) && focusNode.getTextContentSize() === 0) {
// First, delete currently selected node if it's an empty paragraph and if there are sufficient
// paragraph nodes (more than 1) left in the parent node, so that we don't "trap" the user
if (
$isParagraphNode(focusNode) &&
focusNode.getTextContentSize() === 0 &&
focusNode
.getParent()
.getChildren()
.filter((node) => $isParagraphNode(node)).length > 1
) {
focusNode.remove()
}

@@ -2,7 +2,7 @@ import type { HeadingTagType, SerializedHeadingNode } from '@lexical/rich-text'

import { $createHeadingNode, HeadingNode } from '@lexical/rich-text'
import { $setBlocksType } from '@lexical/selection'
import { $INTERNAL_isPointSelection, $getSelection } from 'lexical'
import { $getSelection } from 'lexical'

import type { HTMLConverter } from '../converters/html/converter/types'
import type { FeatureProvider } from '../types'
@@ -14,9 +14,7 @@ import { MarkdownTransformer } from './markdownTransformer'

const setHeading = (headingSize: HeadingTagType) => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createHeadingNode(headingSize))
}
$setBlocksType(selection, () => $createHeadingNode(headingSize))
}

type Props = {
@@ -67,6 +65,7 @@ export const HeadingFeature = (props: Props): FeatureProvider => {
markdownTransformers: [MarkdownTransformer(enabledHeadingSizes)],
nodes: [
{
type: HeadingNode.getType(),
converters: {
html: {
converter: async ({ converters, node, parent }) => {
@@ -85,7 +84,6 @@ export const HeadingFeature = (props: Props): FeatureProvider => {
} as HTMLConverter<SerializedHeadingNode>,
},
node: HeadingNode,
type: HeadingNode.getType(),
},
],
props,

Some files were not shown because too many files have changed in this diff.