Compare commits

..

43 Commits

Author SHA1 Message Date
Elliot DeNolf
548de80bee chore(release): db-postgres/0.6.0 [skip ci] 2024-02-16 14:59:51 -05:00
Elliot DeNolf
2c05fbbb5e chore(release): plugin-form-builder/1.2.1 [skip ci] 2024-02-16 14:59:15 -05:00
Elliot DeNolf
9b54659818 chore(release): db-mongodb/1.4.2 [skip ci] 2024-02-16 14:58:57 -05:00
Elliot DeNolf
e9f550406e chore(release): payload/2.11.1 [skip ci] 2024-02-16 14:57:28 -05:00
Dan Ribbens
98b87e2278 feat(plugin-search): add req to beforeSync args for transactions (#5068)
* feat(plugin-search): pass `req` to beforeSync to support using transactions

* fix(plugin-search): hooks do not respect transactions

* chore(plugin-search): await hooks

* chore: remove eslint disable comments
2024-02-16 14:20:40 -05:00
Dan Ribbens
5f3d0169be fix: filterOptions errors cause transaction to abort (#5079)
* fix: filterOptions errors cause transaction to abort

* fix(db-mongodb): uncaught abortTransaction race condition

* chore: remove test that is not adding value

* chore: limit options on errors in filterOptions

* chore: limit options when an error occurs in filterOptions
2024-02-16 13:33:40 -05:00
Dan Ribbens
35c2a085ef fix(db-postgres): query using blockType (#5044)
* fix(db-postgres): query using blockType

* chore: cleanup commented lines
2024-02-16 13:30:26 -05:00
Dan Ribbens
1ac943ed5e fix: remove collection findByID caching (#5034) 2024-02-16 13:23:57 -05:00
Elliot DeNolf
25cee8bb10 fix(uploads): account for serverURL when retrieving external file (#5102) 2024-02-16 13:11:18 -05:00
Elliot DeNolf
419aef452d chore: add .localstack to gitignore 2024-02-16 12:53:28 -05:00
Elliot DeNolf
ea52489126 Merge pull request #4295 from payloadcms/test/plugin-cloud-storage-emulators
test(plugin-cloud-storage): use localstack for tests
2024-02-16 12:47:13 -05:00
Elliot DeNolf
e80c70acae test: cleanup 2024-02-16 12:33:07 -05:00
Elliot DeNolf
70b0064d0b test: adjust adapter log message 2024-02-16 11:32:00 -05:00
Elliot DeNolf
9636bf6efd test: rename .env -> .env.emulated, safely assert bucket contents 2024-02-16 11:31:14 -05:00
Elliot DeNolf
8f4d0da4e0 test: conditionally run plugin-cloud-storage 2024-02-16 11:31:14 -05:00
Elliot DeNolf
f0f1dbdcb0 ci: pnpm docker:start 2024-02-16 11:31:14 -05:00
Elliot DeNolf
a895aee8b1 ci: add localstack aws envs 2024-02-16 11:31:14 -05:00
Elliot DeNolf
aa1dac08c1 ci: add localstack setup 2024-02-16 11:31:14 -05:00
Elliot DeNolf
b8cd1c6ba4 test(plugin-cloud-storage): add test against localstack 2024-02-16 11:31:12 -05:00
Elliot DeNolf
6344464bc6 test(plugin-cloud-storage): add single docker compose for all emulators 2024-02-16 11:30:55 -05:00
Dan Ribbens
5d4022f144 fix(db-mongodb): find versions pagination (#5091) 2024-02-15 16:14:01 -05:00
Dan Ribbens
bf942fdfa6 feat(db-postgres): reconnect after disconnection from database (#5086) 2024-02-15 16:09:17 -05:00
Dan Ribbens
d6c25783cf feat(db-postgres): adds idType to use uuid or serial id columns (#3864)
* feat(db-postgres): WIP adds idType to use uuid or serial id columns

* chore: add postgres-uuid test ci

* chore: add postgres-uuid env vars

* chore: sanitizeQueryValue prevent invalid types

* fix(db-postgres): invalid parentID of nested arrays
2024-02-15 16:06:37 -05:00
Dan Ribbens
82e9d31127 fix(plugin-form-builder): hooks do not respect transactions (#5069)
* fix(plugin-form-builder): hooks do not respect transactions

* chore(plugin-form-builder): linting and cleanup
2024-02-15 15:55:42 -05:00
Elliot DeNolf
399e606b34 chore: use ref for pnpm overrides (#5081) 2024-02-13 12:37:43 -05:00
Alessio Gravili
0d18822062 feat(richtext-lexical)!: Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
* feat(richtext-lexical): Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground

* chore: upgrade lexical version used in monorepo
2024-02-12 17:54:50 +01:00
Alessio Gravili
00fc0343da feat(richtext-lexical): AddBlock handle for all nodes, even if they aren't empty paragraphs (#5063) 2024-02-12 16:11:41 +01:00
Alessio Gravili
6323965c65 fix(richtext-lexical): do not remove adjacent paragraph node when inserting certain nodes in empty editor (#5061) 2024-02-12 14:27:58 +01:00
Máté Tallósi
6d6823c3e5 feat(richtext-lexical): add justify aligment to AlignFeature (#4035) (#4868) 2024-02-12 14:27:12 +01:00
Alessio Gravili
ca70298436 chore: upgrade nodemon versions (#5059) 2024-02-12 14:11:57 +01:00
Elliot DeNolf
4f565759f6 chore(release): payload/2.11.0 [skip ci] 2024-02-09 16:12:03 -05:00
Jarrod Flesch
df39602758 feat: exposes collapsible provider with more functionality (#5043) 2024-02-09 10:38:30 -05:00
Elliot DeNolf
6ea6172afa chore(release): db-postgres/0.5.2 [skip ci] 2024-02-09 09:06:15 -05:00
Elliot DeNolf
486774796d chore(release): db-mongodb/1.4.1 [skip ci] 2024-02-09 09:06:06 -05:00
Elliot DeNolf
1cd1c38764 chore(release): payload/2.10.1 [skip ci] 2024-02-09 09:04:42 -05:00
Elliot DeNolf
f6d7da7510 fix: clearable cells handle null values (#5038) 2024-02-09 08:59:38 -05:00
Elliot DeNolf
cdc4cb971b fix(db-mongodb): handle null values with exists (#5037) 2024-02-09 08:58:10 -05:00
Elliot DeNolf
e0191b54e1 chore(release): richtext-lexical/0.6.1 [skip ci] 2024-02-08 11:49:02 -05:00
Alessio Gravili
2315781f18 fix(richtext-lexical): make editor reactive to initialValue changes (#5010) 2024-02-08 15:30:21 +01:00
Elliot DeNolf
a0a58e7fd2 fix: query relationships by explicit id field (#5022) 2024-02-07 14:18:13 -05:00
Jessica Chowdhury
e1813fb884 fix: ensures docs with the same id are shown in relationship field select (#4859) 2024-02-07 14:04:03 -05:00
Elliot DeNolf
da184d40ec fix(db-postgres): handle nested docs with drafts (#5012) 2024-02-06 21:27:33 -05:00
Elliot DeNolf
ca8675f89d chore(release): plugin-seo/2.2.1 [skip ci] 2024-02-06 15:41:58 -05:00
77 changed files with 1567 additions and 1072 deletions

View File

@@ -2,9 +2,9 @@ name: build
on:
pull_request:
types: [opened, reopened, synchronize]
types: [ opened, reopened, synchronize ]
push:
branches: ['main']
branches: [ 'main' ]
jobs:
changes:
@@ -15,25 +15,25 @@ jobs:
needs_build: ${{ steps.filter.outputs.needs_build }}
templates: ${{ steps.filter.outputs.templates }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 25
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
needs_build:
- '.github/workflows/**'
- 'packages/**'
- 'test/**'
- 'pnpm-lock.yaml'
- 'package.json'
templates:
- 'templates/**'
- name: Log all filter results
run: |
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
echo "templates: ${{ steps.filter.outputs.templates }}"
- uses: actions/checkout@v4
with:
fetch-depth: 25
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
needs_build:
- '.github/workflows/**'
- 'packages/**'
- 'test/**'
- 'pnpm-lock.yaml'
- 'package.json'
templates:
- 'templates/**'
- name: Log all filter results
run: |
echo "needs_build: ${{ steps.filter.outputs.needs_build }}"
echo "templates: ${{ steps.filter.outputs.templates }}"
core-build:
needs: changes
@@ -85,11 +85,15 @@ jobs:
strategy:
fail-fast: false
matrix:
database: [mongoose, postgres, supabase]
database: [ mongoose, postgres, postgres-uuid, supabase ]
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: payloadtests
AWS_ENDPOINT_URL: http://127.0.0.1:4566
AWS_ACCESS_KEY_ID: localstack
AWS_SECRET_ACCESS_KEY: localstack
AWS_REGION: us-east-1
steps:
- name: Use Node.js 18
@@ -109,6 +113,9 @@ jobs:
path: ./*
key: ${{ github.sha }}-${{ github.run_number }}
- name: Start LocalStack
run: pnpm docker:start
- name: Start PostgreSQL
uses: CasperWA/postgresql-action@v1.2
with:
@@ -116,7 +123,7 @@ jobs:
postgresql db: ${{ env.POSTGRES_DB }}
postgresql user: ${{ env.POSTGRES_USER }}
postgresql password: ${{ env.POSTGRES_PASSWORD }}
if: matrix.database == 'postgres'
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
- name: Install Supabase CLI
uses: supabase/setup-cli@v1
@@ -132,14 +139,14 @@ jobs:
- name: Wait for PostgreSQL
run: sleep 30
if: matrix.database == 'postgres'
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
- name: Configure PostgreSQL
run: |
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "CREATE ROLE runner SUPERUSER LOGIN;"
psql "postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" -c "SELECT version();"
echo "POSTGRES_URL=postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@localhost:5432/$POSTGRES_DB" >> $GITHUB_ENV
if: matrix.database == 'postgres'
if: matrix.database == 'postgres' || matrix.database == 'postgres-uuid'
- name: Configure Supabase
run: |
@@ -162,7 +169,7 @@ jobs:
strategy:
fail-fast: false
matrix:
part: [1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8]
part: [ 1/8, 2/8, 3/8, 4/8, 5/8, 6/8, 7/8, 8/8 ]
steps:
- name: Use Node.js 18
@@ -310,7 +317,7 @@ jobs:
strategy:
fail-fast: false
matrix:
template: [blank, website, ecommerce]
template: [ blank, website, ecommerce ]
steps:
- uses: actions/checkout@v4

2
.gitignore vendored
View File

@@ -6,7 +6,9 @@ dist
test-results
.devcontainer
.localstack
/migrations
.localstack
# Created by https://www.toptal.com/developers/gitignore/api/node,macos,windows,webstorm,sublimetext,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=node,macos,windows,webstorm,sublimetext,visualstudiocode

View File

@@ -1,5 +1,5 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<configuration default="false" name="Run Dev Fields" type="NodeJSConfigurationType" application-parameters="fields" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<method v="2" />
</configuration>
</component>

View File

@@ -1,5 +1,5 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.1/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<configuration default="false" name="Run Dev _community" type="NodeJSConfigurationType" application-parameters="_community" path-to-js-file="node_modules/.pnpm/nodemon@3.0.3/node_modules/nodemon/bin/nodemon.js" working-dir="$PROJECT_DIR$">
<method v="2" />
</configuration>
</component>

View File

@@ -1,3 +1,49 @@
## [2.11.1](https://github.com/payloadcms/payload/compare/v2.11.0...v2.11.1) (2024-02-16)
### ⚠ BREAKING CHANGES
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground (#5066)
### Features
* **db-postgres:** adds idType to use uuid or serial id columns ([#3864](https://github.com/payloadcms/payload/issues/3864)) ([d6c2578](https://github.com/payloadcms/payload/commit/d6c25783cfa97983bf9db27ceb5ccd39a62c62f1))
* **db-postgres:** reconnect after disconnection from database ([#5086](https://github.com/payloadcms/payload/issues/5086)) ([bf942fd](https://github.com/payloadcms/payload/commit/bf942fdfa6ea9c26cf05295cc9db646bf31fa622))
* **plugin-search:** add req to beforeSync args for transactions ([#5068](https://github.com/payloadcms/payload/issues/5068)) ([98b87e2](https://github.com/payloadcms/payload/commit/98b87e22782c0a788f79326f22be05a6b176ad74))
* **richtext-lexical:** add justify aligment to AlignFeature ([#4035](https://github.com/payloadcms/payload/issues/4035)) ([#4868](https://github.com/payloadcms/payload/issues/4868)) ([6d6823c](https://github.com/payloadcms/payload/commit/6d6823c3e5609a58eeeeb8d043945a762f9463df))
* **richtext-lexical:** AddBlock handle for all nodes, even if they aren't empty paragraphs ([#5063](https://github.com/payloadcms/payload/issues/5063)) ([00fc034](https://github.com/payloadcms/payload/commit/00fc0343dabf184d5bab418d47c403b3ad11698f))
* **richtext-lexical:** Update lexical from 0.12.6 to 0.13.1, port over all useful changes from playground ([#5066](https://github.com/payloadcms/payload/issues/5066)) ([0d18822](https://github.com/payloadcms/payload/commit/0d18822062275c1826c8e2c3da2571a2b3483310))
### Bug Fixes
* **db-mongodb:** find versions pagination ([#5091](https://github.com/payloadcms/payload/issues/5091)) ([5d4022f](https://github.com/payloadcms/payload/commit/5d4022f1445e2809c01cb1dd599280f0a56cdc6e))
* **db-postgres:** query using blockType ([#5044](https://github.com/payloadcms/payload/issues/5044)) ([35c2a08](https://github.com/payloadcms/payload/commit/35c2a085efa6d5ad59779960874bc9728a17e3a0))
* filterOptions errors cause transaction to abort ([#5079](https://github.com/payloadcms/payload/issues/5079)) ([5f3d016](https://github.com/payloadcms/payload/commit/5f3d0169bee21e1c0963dbd7ede9fe5f1c46a5a5))
* **plugin-form-builder:** hooks do not respect transactions ([#5069](https://github.com/payloadcms/payload/issues/5069)) ([82e9d31](https://github.com/payloadcms/payload/commit/82e9d31127c8df83c5bed92a5ffdab76d331900f))
* remove collection findByID caching ([#5034](https://github.com/payloadcms/payload/issues/5034)) ([1ac943e](https://github.com/payloadcms/payload/commit/1ac943ed5e8416883b863147fdf3c23380955559))
* **richtext-lexical:** do not remove adjacent paragraph node when inserting certain nodes in empty editor ([#5061](https://github.com/payloadcms/payload/issues/5061)) ([6323965](https://github.com/payloadcms/payload/commit/6323965c652ea68dffeb716957b124d165b9ce96))
* **uploads:** account for serverURL when retrieving external file ([#5102](https://github.com/payloadcms/payload/issues/5102)) ([25cee8b](https://github.com/payloadcms/payload/commit/25cee8bb102bf80b3a4bfb4b4e46712722cc7f0d))
## [2.11.0](https://github.com/payloadcms/payload/compare/v2.10.1...v2.11.0) (2024-02-09)
### Features
* exposes collapsible provider with more functionality ([#5043](https://github.com/payloadcms/payload/issues/5043)) ([df39602](https://github.com/payloadcms/payload/commit/df39602758ae8dc3765bb48e51f7a657babfa559))
## [2.10.1](https://github.com/payloadcms/payload/compare/v2.10.0...v2.10.1) (2024-02-09)
### Bug Fixes
* clearable cells handle null values ([#5038](https://github.com/payloadcms/payload/issues/5038)) ([f6d7da7](https://github.com/payloadcms/payload/commit/f6d7da751039df25066b51bb91d6453e1a4efd82))
* **db-mongodb:** handle null values with exists ([#5037](https://github.com/payloadcms/payload/issues/5037)) ([cdc4cb9](https://github.com/payloadcms/payload/commit/cdc4cb971b9180ba2ed09741f5af1a3c18292828))
* **db-postgres:** handle nested docs with drafts ([#5012](https://github.com/payloadcms/payload/issues/5012)) ([da184d4](https://github.com/payloadcms/payload/commit/da184d40ece74bffb224002eb5df8f6987d65043))
* ensures docs with the same id are shown in relationship field select ([#4859](https://github.com/payloadcms/payload/issues/4859)) ([e1813fb](https://github.com/payloadcms/payload/commit/e1813fb884e0dc84203fcbab87527a99a4d3a5d7))
* query relationships by explicit id field ([#5022](https://github.com/payloadcms/payload/issues/5022)) ([a0a58e7](https://github.com/payloadcms/payload/commit/a0a58e7fd20dff54d210c968f4d5defd67441bdd))
* **richtext-lexical:** make editor reactive to initialValue changes ([#5010](https://github.com/payloadcms/payload/issues/5010)) ([2315781](https://github.com/payloadcms/payload/commit/2315781f1891ddde4b4c5f2f0cfa1c17af85b7a9))
## [2.10.0](https://github.com/payloadcms/payload/compare/v2.9.0...v2.10.0) (2024-02-06)

View File

@@ -635,6 +635,37 @@ export const CustomArrayManager = () => {
]}
/>
### useCollapsible
The `useCollapsible` hook allows you to control parent collapsibles:
| Property | Description |
|---------------------------|--------------------------------------------------------------------------------------------------------------------|
| **`collapsed`** | State of the collapsible. `true` if open, `false` if collapsed |
| **`isVisible`** | If nested, determine if the nearest collapsible is visible. `true` if no parent is closed, `false` otherwise |
| **`toggle`** | Toggles the state of the nearest collapsible |
| **`withinCollapsible`** | Determine when you are within another collapsible |
**Example:**
```tsx
import React from 'react'
import { useCollapsible } from 'payload/components/utilities'
const CustomComponent: React.FC = () => {
const { collapsed, toggle } = useCollapsible()
return (
<div>
<p className="field-type">I am {collapsed ? 'closed' : 'open'}</p>
<button onClick={toggle} type="button">
Toggle
</button>
</div>
)
}
```
### useDocumentInfo
The `useDocumentInfo` hook provides lots of information about the document currently being edited, including the following:
@@ -774,8 +805,8 @@ const MyComponent: React.FC = () => {
return (
<>
<span>The current theme is {theme} and autoMode is {autoMode}</span>
<button
type="button"
<button
type="button"
onClick={() => setTheme(prev => prev === "light" ? "dark" : "light")}
>
Toggle theme

View File

@@ -15,9 +15,13 @@
"dev:generate-graphql-schema": "ts-node -T ./test/generateGraphQLSchema.ts",
"dev:generate-types": "ts-node -T ./test/generateTypes.ts",
"dev:postgres": "pnpm --filter payload run dev:postgres",
"docker:restart": "pnpm docker:stop --remove-orphans && pnpm docker:start",
"docker:start": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml up -d",
"docker:stop": "docker-compose -f packages/plugin-cloud-storage/docker-compose.yml down",
"fix": "eslint \"packages/**/*.ts\" --fix",
"lint": "eslint \"packages/**/*.ts\"",
"lint-staged": "lint-staged",
"prepare": "husky install",
"pretest": "pnpm build",
"reinstall": "pnpm clean:unix && pnpm install",
"script:list-packages": "tsx ./scripts/list-packages.ts",
@@ -29,10 +33,10 @@
"test:e2e:headed": "cross-env DISABLE_LOGGING=true playwright test --headed",
"test:int:postgres": "cross-env PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"test:int": "cross-env DISABLE_LOGGING=true jest --forceExit --detectOpenHandles",
"translateNewKeys": "pnpm --filter payload run translateNewKeys",
"prepare": "husky install"
"translateNewKeys": "pnpm --filter payload run translateNewKeys"
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.142.0",
"@payloadcms/eslint-config": "workspace:*",
"@playwright/test": "1.40.1",
"@swc/cli": "^0.1.62",
@@ -77,12 +81,12 @@
"jest": "29.7.0",
"jest-environment-jsdom": "29.7.0",
"jwt-decode": "3.1.2",
"lexical": "0.12.5",
"lexical": "0.13.1",
"lint-staged": "^14.0.1",
"minimist": "1.2.8",
"mongodb-memory-server": "^9",
"node-fetch": "2.6.12",
"nodemon": "3.0.2",
"nodemon": "3.0.3",
"prettier": "^3.0.3",
"prompts": "2.4.2",
"qs": "6.11.2",
@@ -106,12 +110,12 @@
},
"pnpm": {
"overrides": {
"copyfiles": "2.4.1",
"cross-env": "7.0.3",
"dotenv": "8.6.0",
"drizzle-orm": "0.29.3",
"ts-node": "10.9.2",
"typescript": "5.2.2"
"copyfiles": "$copyfiles",
"cross-env": "$cross-env",
"dotenv": "$dotenv",
"drizzle-orm": "$drizzle-orm",
"ts-node": "$ts-node",
"typescript": "$typescript"
}
},
"engines": {

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-mongodb",
"version": "1.4.0",
"version": "1.4.2",
"description": "The officially supported MongoDB database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",

View File

@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
lean: true,
leanWithId: true,
limit,
offset: skip || 0,
options,
page,
pagination,

View File

@@ -157,6 +157,23 @@ export const sanitizeQueryValue = ({
if (operator === 'exists') {
formattedValue = formattedValue === 'true' || formattedValue === true
// Clearable fields
if (['relationship', 'select', 'upload'].includes(field.type)) {
if (formattedValue) {
return {
rawQuery: {
$and: [{ [path]: { $exists: true } }, { [path]: { $ne: null } }],
},
}
} else {
return {
rawQuery: {
$or: [{ [path]: { $exists: false } }, { [path]: { $eq: null } }],
},
}
}
}
}
return { operator: formattedOperator, val: formattedValue }

View File

@@ -17,7 +17,11 @@ export const rollbackTransaction: RollbackTransaction = async function rollbackT
}
// the first call for rollback should be aborted and deleted causing any other operations with the same transaction to fail
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
try {
await this.sessions[id].abortTransaction()
await this.sessions[id].endSession()
} catch (error) {
// ignore the error as it is likely a race condition from multiple errors
}
delete this.sessions[id]
}

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/db-postgres",
"version": "0.5.1",
"version": "0.6.0",
"description": "The officially supported Postgres database adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",

View File

@@ -1,3 +1,4 @@
import type { Payload } from 'payload'
import type { Connect } from 'payload/database'
import { eq, sql } from 'drizzle-orm'
@@ -8,6 +9,43 @@ import prompts from 'prompts'
import type { PostgresAdapter } from './types'
const connectWithReconnect = async function ({
adapter,
payload,
reconnect = false,
}: {
adapter: PostgresAdapter
payload: Payload
reconnect?: boolean
}) {
let result
if (!reconnect) {
result = await adapter.pool.connect()
} else {
try {
result = await adapter.pool.connect()
} catch (err) {
setTimeout(() => {
payload.logger.info('Reconnecting to postgres')
void connectWithReconnect({ adapter, payload, reconnect: true })
}, 1000)
}
}
if (!result) {
return
}
result.prependListener('error', (err) => {
try {
if (err.code === 'ECONNRESET') {
void connectWithReconnect({ adapter, payload, reconnect: true })
}
} catch (err) {
// swallow error
}
})
}
export const connect: Connect = async function connect(this: PostgresAdapter, payload) {
this.schema = {
...this.tables,
@@ -17,10 +55,11 @@ export const connect: Connect = async function connect(this: PostgresAdapter, pa
try {
this.pool = new Pool(this.poolOptions)
await this.pool.connect()
await connectWithReconnect({ adapter: this, payload })
const logger = this.logger || false
this.drizzle = drizzle(this.pool, { schema: this.schema, logger })
this.drizzle = drizzle(this.pool, { logger, schema: this.schema })
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING TABLES ----')
await this.drizzle.execute(sql`drop schema public cascade;

View File

@@ -42,7 +42,7 @@ export type { MigrateDownArgs, MigrateUpArgs } from './types'
export function postgresAdapter(args: Args): PostgresAdapterResult {
function adapter({ payload }: { payload: Payload }) {
const migrationDir = findMigrationDir(args.migrationDir)
const idType = args.idType || 'serial'
return createDatabaseAdapter<PostgresAdapter>({
name: 'postgres',
@@ -50,6 +50,7 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
drizzle: undefined,
enums: {},
fieldConstraints: {},
idType,
logger: args.logger,
pool: undefined,
poolOptions: args.pool,
@@ -68,7 +69,10 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
createGlobalVersion,
createMigration,
createVersion,
defaultIDType: 'number',
/**
* This represents how a default ID is treated in Payload as were a field type
*/
defaultIDType: idType === 'serial' ? 'number' : 'text',
deleteMany,
deleteOne,
deleteVersions,

View File

@@ -9,7 +9,6 @@ import toSnakeCase from 'to-snake-case'
import type { PostgresAdapter } from './types'
import { buildTable } from './schema/build'
import { getConfigIDType } from './schema/getConfigIDType'
export const init: Init = async function init(this: PostgresAdapter) {
if (this.payload.config.localization) {
@@ -24,9 +23,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection?.versions?.drafts,
disableUnique: false,
fields: collection.fields,
@@ -38,13 +37,11 @@ export const init: Init = async function init(this: PostgresAdapter) {
const versionsTableName = `_${tableName}_v`
const versionFields = buildVersionCollectionFields(collection)
const versionsParentIDColType = getConfigIDType(collection.fields)
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!collection.versions?.drafts,
disableUnique: true,
fields: versionFields,
@@ -59,9 +56,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global?.versions?.drafts,
disableUnique: false,
fields: global.fields,
@@ -75,9 +72,9 @@ export const init: Init = async function init(this: PostgresAdapter) {
buildTable({
adapter: this,
buildTexts: true,
buildNumbers: true,
buildRelationships: true,
buildTexts: true,
disableNotNull: !!global.versions?.drafts,
disableUnique: true,
fields: versionFields,

View File

@@ -75,6 +75,7 @@ const buildQuery = async function buildQuery({
pathSegments: sortPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: sortPath,
})
orderBy.column = sortTable?.[sortTableColumnName]
} catch (err) {

View File

@@ -1,6 +1,6 @@
/* eslint-disable no-param-reassign */
import type { SQL } from 'drizzle-orm'
import type { Field, FieldAffectingData, TabAsField } from 'payload/types'
import type { Field, FieldAffectingData, NumberField, TabAsField, TextField } from 'payload/types'
import { and, eq, like, sql } from 'drizzle-orm'
import { alias } from 'drizzle-orm/pg-core'
@@ -44,6 +44,14 @@ type Args = {
rootTableName?: string
selectFields: Record<string, GenericColumn>
tableName: string
/**
* If creating a new table name for arrays and blocks, this suffix should be appended to the table name
*/
tableNameSuffix?: string
/**
* The raw value of the query before sanitization
*/
value: unknown
}
/**
* Transforms path to table and column name
@@ -65,6 +73,8 @@ export const getTableColumnFromPath = ({
rootTableName: incomingRootTableName,
selectFields,
tableName,
tableNameSuffix = '',
value,
}: Args): TableColumn => {
const fieldPath = incomingSegments[0]
let locale = incomingLocale
@@ -83,8 +93,8 @@ export const getTableColumnFromPath = ({
constraints,
field: {
name: 'id',
type: 'number',
},
type: adapter.idType === 'uuid' ? 'text' : 'number',
} as TextField | NumberField,
table: adapter.tables[newTableName],
}
}
@@ -125,6 +135,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}
case 'tab': {
@@ -144,6 +156,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}
return getTableColumnFromPath({
@@ -161,6 +175,8 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix,
value,
})
}
@@ -195,11 +211,13 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
tableNameSuffix: `${tableNameSuffix}${toSnakeCase(field.name)}_`,
value,
})
}
case 'array': {
newTableName = `${tableName}_${toSnakeCase(field.name)}`
newTableName = `${tableName}_${tableNameSuffix}${toSnakeCase(field.name)}`
constraintPath = `${constraintPath}${field.name}.%.`
if (locale && field.localized && adapter.payload.config.localization) {
joins[newTableName] = and(
@@ -232,12 +250,39 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields,
tableName: newTableName,
value,
})
}
case 'blocks': {
let blockTableColumn: TableColumn
let newTableName: string
// handle blockType queries
if (pathSegments[1] === 'blockType') {
// find the block config using the value
const blockTypes = Array.isArray(value) ? value : [value]
blockTypes.forEach((blockType) => {
const block = field.blocks.find((block) => block.slug === blockType)
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
joins[newTableName] = eq(
adapter.tables[tableName].id,
adapter.tables[newTableName]._parentID,
)
constraints.push({
columnName: '_path',
table: adapter.tables[newTableName],
value: pathSegments[0],
})
})
return {
constraints,
field,
getNotNullColumnByValue: () => 'id',
table: adapter.tables[tableName],
}
}
const hasBlockField = field.blocks.some((block) => {
newTableName = `${tableName}_blocks_${toSnakeCase(block.slug)}`
constraintPath = `${constraintPath}${field.name}.%.`
@@ -258,6 +303,7 @@ export const getTableColumnFromPath = ({
rootTableName,
selectFields: blockSelectFields,
tableName: newTableName,
value,
})
} catch (error) {
// this is fine, not every block will have the field
@@ -298,9 +344,6 @@ export const getTableColumnFromPath = ({
table: blockTableColumn.table,
}
}
if (pathSegments[1] === 'blockType') {
throw new APIError('Querying on blockType is not supported')
}
break
}
@@ -340,7 +383,7 @@ export const getTableColumnFromPath = ({
table: newAliasTable,
})
if (newCollectionPath === '') {
if (newCollectionPath === '' || newCollectionPath === 'id') {
return {
columnName: `${field.relationTo}ID`,
constraints,
@@ -388,6 +431,7 @@ export const getTableColumnFromPath = ({
rootTableName: newTableName,
selectFields,
tableName: newTableName,
value,
})
}

View File

@@ -63,11 +63,7 @@ export async function parseParams({
where: condition,
})
if (builtConditions.length > 0) {
if (result) {
result = operatorMap[conditionOperator](result, ...builtConditions)
} else {
result = operatorMap[conditionOperator](...builtConditions)
}
result = operatorMap[conditionOperator](...builtConditions)
}
} else {
// It's a path - and there can be multiple comparisons on a single path.
@@ -77,6 +73,7 @@ export async function parseParams({
if (typeof pathOperators === 'object') {
for (const operator of Object.keys(pathOperators)) {
if (validOperators.includes(operator as Operator)) {
const val = where[relationOrPath][operator]
const {
columnName,
constraints: queryConstraints,
@@ -95,10 +92,9 @@ export async function parseParams({
pathSegments: relationOrPath.replace(/__/g, '.').split('.'),
selectFields,
tableName,
value: val,
})
const val = where[relationOrPath][operator]
queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
if (typeof value === 'string' && value.indexOf('%') > -1) {
constraints.push(operatorMap.like(constraintTable[col], value))
@@ -169,6 +165,7 @@ export async function parseParams({
}
const sanitizedQueryValue = sanitizeQueryValue({
adapter,
field,
operator,
relationOrPath,

View File

@@ -2,7 +2,10 @@ import { APIError } from 'payload/errors'
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
import { createArrayFromCommaDelineated } from 'payload/utilities'
import type { PostgresAdapter } from '../types'
type SanitizeQueryValueArgs = {
adapter: PostgresAdapter
field: Field | TabAsField
operator: string
relationOrPath: string
@@ -10,6 +13,7 @@ type SanitizeQueryValueArgs = {
}
export const sanitizeQueryValue = ({
adapter,
field,
operator: operatorArg,
relationOrPath,
@@ -27,8 +31,10 @@ export const sanitizeQueryValue = ({
) {
const allPossibleIDTypes: (number | string)[] = []
formattedValue.forEach((val) => {
if (typeof val === 'string') {
if (adapter.idType !== 'uuid' && typeof val === 'string') {
allPossibleIDTypes.push(val, parseInt(val))
} else if (typeof val === 'string') {
allPossibleIDTypes.push(val)
} else {
allPossibleIDTypes.push(val, String(val))
}

View File

@@ -17,10 +17,10 @@ import {
import { fieldAffectsData } from 'payload/types'
import toSnakeCase from 'to-snake-case'
import type { GenericColumns, GenericTable, PostgresAdapter } from '../types'
import type { GenericColumns, GenericTable, IDType, PostgresAdapter } from '../types'
import { getConfigIDType } from './getConfigIDType'
import { parentIDColumnMap } from './parentIDColumnMap'
import { setColumnID } from './setColumnID'
import { traverseFields } from './traverseFields'
type Args = {
@@ -89,15 +89,8 @@ export const buildTable = ({
// Drizzle relations
const relationsToBuild: Map<string, string> = new Map()
const idColType = getConfigIDType(fields)
const idColType: IDType = setColumnID({ adapter, columns, fields })
const idColTypeMap = {
integer: serial,
numeric,
varchar,
}
columns.id = idColTypeMap[idColType]('id').primaryKey()
;({
hasLocalizedField,
hasLocalizedManyNumberField,
@@ -300,7 +293,7 @@ export const buildTable = ({
relationships.forEach((relationTo) => {
const formattedRelationTo = toSnakeCase(relationTo)
let colType = 'integer'
let colType = adapter.idType === 'uuid' ? 'uuid' : 'integer'
const relatedCollectionCustomID = adapter.payload.collections[
relationTo
].config.fields.find((field) => fieldAffectsData(field) && field.name === 'id')

View File

@@ -1,17 +0,0 @@
import { type Field, fieldAffectsData } from 'payload/types'
/**
 * Maps a collection's custom top-level `id` field (if any) to the Postgres
 * column type used for its primary key.
 *
 * - `number` id field  → 'numeric'
 * - `text` id field    → 'varchar'
 * - no custom id field → 'integer' (default serial key)
 */
export const getConfigIDType = (fields: Field[]): string => {
  const customID = fields.find((f) => fieldAffectsData(f) && f.name === 'id')
  switch (customID?.type) {
    case 'number':
      return 'numeric'
    case 'text':
      return 'varchar'
    default:
      return 'integer'
  }
}

View File

@@ -1,7 +1,13 @@
import { integer, numeric, varchar } from 'drizzle-orm/pg-core'
import { integer, numeric, uuid, varchar } from 'drizzle-orm/pg-core'
export const parentIDColumnMap = {
import type { IDType } from '../types'
export const parentIDColumnMap: Record<
IDType,
typeof integer<string> | typeof numeric<string> | typeof uuid<string> | typeof varchar
> = {
integer,
numeric,
uuid,
varchar,
}

View File

@@ -0,0 +1,33 @@
import type { PgColumnBuilder } from 'drizzle-orm/pg-core'
import { numeric, serial, uuid, varchar } from 'drizzle-orm/pg-core'
import { type Field, fieldAffectsData } from 'payload/types'
import { flattenTopLevelFields } from 'payload/utilities'
import type { IDType, PostgresAdapter } from '../types'
type Args = { adapter: PostgresAdapter; columns: Record<string, PgColumnBuilder>; fields: Field[] }
/**
 * Assigns the `id` primary-key column for a table being built and reports
 * which Postgres column type was chosen.
 *
 * Precedence: a custom top-level `id` field on the collection (number →
 * `numeric`, text → `varchar`) wins over the adapter-level `idType`
 * setting (`uuid` → uuid column with a random default); otherwise a
 * serial integer key is used.
 *
 * NOTE: mutates `columns` by setting `columns.id`.
 */
export const setColumnID = ({ adapter, columns, fields }: Args): IDType => {
  const customID = flattenTopLevelFields(fields).find(
    (f) => fieldAffectsData(f) && f.name === 'id',
  )

  if (customID?.type === 'number') {
    columns.id = numeric('id').primaryKey()
    return 'numeric'
  }

  if (customID?.type === 'text') {
    columns.id = varchar('id').primaryKey()
    return 'varchar'
  }

  if (adapter.idType === 'uuid') {
    columns.id = uuid('id').defaultRandom().primaryKey()
    return 'uuid'
  }

  columns.id = serial('id').primaryKey()
  return 'integer'
}

View File

@@ -6,6 +6,7 @@ import type { Field, TabAsField } from 'payload/types'
import { relations } from 'drizzle-orm'
import {
PgNumericBuilder,
PgUUIDBuilder,
PgVarcharBuilder,
boolean,
index,
@@ -21,7 +22,7 @@ import { InvalidConfiguration } from 'payload/errors'
import { fieldAffectsData, optionIsObject } from 'payload/types'
import toSnakeCase from 'to-snake-case'
import type { GenericColumns, PostgresAdapter } from '../types'
import type { GenericColumns, IDType, PostgresAdapter } from '../types'
import { hasLocalesTable } from '../utilities/hasLocalesTable'
import { buildTable } from './build'
@@ -93,7 +94,8 @@ export const traverseFields = ({
let hasManyNumberField: 'index' | boolean = false
let hasLocalizedManyNumberField = false
let parentIDColType = 'integer'
let parentIDColType: IDType = 'integer'
if (columns.id instanceof PgUUIDBuilder) parentIDColType = 'uuid'
if (columns.id instanceof PgNumericBuilder) parentIDColType = 'numeric'
if (columns.id instanceof PgVarcharBuilder) parentIDColType = 'varchar'

View File

@@ -16,6 +16,7 @@ import type { Pool, PoolConfig } from 'pg'
export type DrizzleDB = NodePgDatabase<Record<string, unknown>>
export type Args = {
idType?: 'serial' | 'uuid'
logger?: DrizzleConfig['logger']
migrationDir?: string
pool: PoolConfig
@@ -56,6 +57,7 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
* Used for returning properly formed errors from unique fields
*/
fieldConstraints: Record<string, Record<string, string>>
idType: Args['idType']
logger: DrizzleConfig['logger']
pool: Pool
poolOptions: Args['pool']
@@ -72,6 +74,8 @@ export type PostgresAdapter = BaseDatabaseAdapter & {
tables: Record<string, GenericTable>
}
export type IDType = 'integer' | 'numeric' | 'uuid' | 'varchar'
export type PostgresAdapterResult = (args: { payload: Payload }) => PostgresAdapter
export type MigrateUpArgs = { payload: Payload; req?: Partial<PayloadRequest> }

View File

@@ -36,7 +36,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
}
}
const parentID = parentRows[parentRowIndex].id || parentRows[parentRowIndex]._parentID
const parentID = parentRows[parentRowIndex].id
// Add any sub arrays that need to be created
// We will call this recursively below
@@ -61,8 +61,10 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
// Insert all corresponding arrays
// (one insert per array table)
for (const [tableName, row] of Object.entries(rowsByTable)) {
// the nested arrays need the ID for the parentID foreign key
let insertedRows: Args['parentRows']
if (row.rows.length > 0) {
await db.insert(adapter.tables[tableName]).values(row.rows).returning()
insertedRows = await db.insert(adapter.tables[tableName]).values(row.rows).returning()
}
// Insert locale rows
@@ -76,7 +78,7 @@ export const insertArrays = async ({ adapter, arrays, db, parentRows }: Args): P
adapter,
arrays: row.arrays,
db,
parentRows: row.rows,
parentRows: insertedRows,
})
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "payload",
"version": "2.10.0",
"version": "2.11.1",
"description": "Node, React and MongoDB Headless CMS and Application Framework",
"license": "MIT",
"main": "./dist/index.js",
@@ -101,7 +101,6 @@
"jwt-decode": "3.1.2",
"md5": "2.3.0",
"method-override": "3.0.0",
"micro-memoize": "4.1.2",
"minimist": "1.2.8",
"mkdirp": "1.0.4",
"monaco-editor": "0.38.0",
@@ -193,7 +192,7 @@
"get-port": "5.1.1",
"mini-css-extract-plugin": "1.6.2",
"node-fetch": "2.6.12",
"nodemon": "3.0.1",
"nodemon": "3.0.3",
"object.assign": "4.1.4",
"object.entries": "1.1.6",
"passport-strategy": "1.0.0",

View File

@@ -24,11 +24,16 @@ export const Collapsible: React.FC<Props> = ({
}) => {
const [collapsedLocal, setCollapsedLocal] = useState(Boolean(initCollapsed))
const [hoveringToggle, setHoveringToggle] = useState(false)
const isNested = useCollapsible()
const { withinCollapsible } = useCollapsible()
const { t } = useTranslation('fields')
const collapsed = typeof collapsedFromProps === 'boolean' ? collapsedFromProps : collapsedLocal
const toggleCollapsible = React.useCallback(() => {
if (typeof onToggle === 'function') onToggle(!collapsed)
setCollapsedLocal(!collapsed)
}, [onToggle, collapsed])
return (
<div
className={[
@@ -36,14 +41,14 @@ export const Collapsible: React.FC<Props> = ({
className,
dragHandleProps && `${baseClass}--has-drag-handle`,
collapsed && `${baseClass}--collapsed`,
isNested && `${baseClass}--nested`,
withinCollapsible && `${baseClass}--nested`,
hoveringToggle && `${baseClass}--hovered`,
`${baseClass}--style-${collapsibleStyle}`,
]
.filter(Boolean)
.join(' ')}
>
<CollapsibleProvider>
<CollapsibleProvider collapsed={collapsed} toggle={toggleCollapsible}>
<div
className={`${baseClass}__toggle-wrap`}
onMouseEnter={() => setHoveringToggle(true)}
@@ -65,10 +70,7 @@ export const Collapsible: React.FC<Props> = ({
]
.filter(Boolean)
.join(' ')}
onClick={() => {
if (typeof onToggle === 'function') onToggle(!collapsed)
setCollapsedLocal(!collapsed)
}}
onClick={toggleCollapsible}
type="button"
>
<span>{t('toggleBlock')}</span>

View File

@@ -1,14 +1,35 @@
import React, { createContext, useContext } from 'react'
const Context = createContext(false)
type ContextType = {
collapsed: boolean
isVisible: boolean
toggle: () => void
withinCollapsible: boolean
}
const Context = createContext({
collapsed: false,
isVisible: true,
toggle: () => {},
withinCollapsible: true,
})
export const CollapsibleProvider: React.FC<{
children?: React.ReactNode
collapsed?: boolean
toggle: () => void
withinCollapsible?: boolean
}> = ({ children, withinCollapsible = true }) => {
return <Context.Provider value={withinCollapsible}>{children}</Context.Provider>
}> = ({ children, collapsed, toggle, withinCollapsible = true }) => {
const { collapsed: parentIsCollapsed, isVisible } = useCollapsible()
const contextValue = React.useMemo((): ContextType => {
return {
collapsed: Boolean(collapsed),
isVisible: isVisible && !parentIsCollapsed,
toggle,
withinCollapsible,
}
}, [collapsed, withinCollapsible, toggle, parentIsCollapsed, isVisible])
return <Context.Provider value={contextValue}>{children}</Context.Provider>
}
export const useCollapsible = (): boolean => useContext(Context)
export default Context
export const useCollapsible = (): ContextType => useContext(Context)

View File

@@ -33,7 +33,7 @@ const Group: React.FC<Props> = (props) => {
permissions,
} = props
const isWithinCollapsible = useCollapsible()
const { withinCollapsible } = useCollapsible()
const isWithinGroup = useGroup()
const isWithinRow = useRow()
const isWithinTab = useTabs()
@@ -43,7 +43,7 @@ const Group: React.FC<Props> = (props) => {
const groupHasErrors = submitted && errorCount > 0
const path = pathFromProps || name
const isTopLevel = !(isWithinCollapsible || isWithinGroup || isWithinRow)
const isTopLevel = !(withinCollapsible || isWithinGroup || isWithinRow)
return (
<div
@@ -51,7 +51,7 @@ const Group: React.FC<Props> = (props) => {
fieldBaseClass,
baseClass,
isTopLevel && `${baseClass}--top-level`,
isWithinCollapsible && `${baseClass}--within-collapsible`,
withinCollapsible && `${baseClass}--within-collapsible`,
isWithinGroup && `${baseClass}--within-group`,
isWithinRow && `${baseClass}--within-row`,
isWithinTab && `${baseClass}--within-tab`,

View File

@@ -9,7 +9,7 @@ const reduceToIDs = (options) =>
return [...ids, ...reduceToIDs(option.options)]
}
return [...ids, option.value]
return [...ids, { id: option.value, relationTo: option.relationTo }]
}, [])
const sortOptions = (options: Option[]): Option[] =>
@@ -63,10 +63,12 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
const optionsToAddTo = newOptions.find(
(optionGroup) => optionGroup.label === collection.labels.plural,
)
const newSubOptions = docs.reduce((docSubOptions, doc) => {
if (loadedIDs.indexOf(doc.id) === -1) {
loadedIDs.push(doc.id)
if (
loadedIDs.filter((item) => item.id === doc.id && item.relationTo === relation).length ===
0
) {
loadedIDs.push({ id: doc.id, relationTo: relation })
const docTitle = formatUseAsTitle({
collection,
@@ -89,7 +91,10 @@ const optionsReducer = (state: OptionGroup[], action: Action): OptionGroup[] =>
}, [])
ids.forEach((id) => {
if (!loadedIDs.includes(id)) {
if (
loadedIDs.filter((item) => item.id === id && item.relationTo === relation).length === 0
) {
loadedIDs.push({ id, relationTo: relation })
newSubOptions.push({
label: `${i18n.t('general:untitled')} - ID: ${id}`,
relationTo: relation,

View File

@@ -83,7 +83,7 @@ const TabsField: React.FC<Props> = (props) => {
const { preferencesKey } = useDocumentInfo()
const { i18n } = useTranslation()
const isWithinCollapsible = useCollapsible()
const { withinCollapsible } = useCollapsible()
const [activeTabIndex, setActiveTabIndex] = useState<number>(0)
const tabsPrefKey = `tabs-${indexPath}`
@@ -138,7 +138,7 @@ const TabsField: React.FC<Props> = (props) => {
fieldBaseClass,
className,
baseClass,
isWithinCollapsible && `${baseClass}--within-collapsible`,
withinCollapsible && `${baseClass}--within-collapsible`,
]
.filter(Boolean)
.join(' ')}

View File

@@ -74,21 +74,22 @@ const DefaultCell: React.FC<Props> = (props) => {
if (collection.upload && fieldAffectsData(field) && field.name === 'filename') {
CellComponent = cellComponents.File
} else {
return (
<WrapElement {...wrapElementProps}>
{(cellData === '' || typeof cellData === 'undefined') &&
'label' in field &&
t('noLabel', {
if (!cellData && 'label' in field) {
return (
<WrapElement {...wrapElementProps}>
{t('noLabel', {
label: getTranslation(
typeof field.label === 'function' ? 'data' : field.label || 'data',
i18n,
),
})}
{typeof cellData === 'string' && cellData}
{typeof cellData === 'number' && cellData}
{typeof cellData === 'object' && JSON.stringify(cellData)}
</WrapElement>
)
</WrapElement>
)
} else if (typeof cellData === 'string' || typeof cellData === 'number') {
return <WrapElement {...wrapElementProps}>{cellData}</WrapElement>
} else if (typeof cellData === 'object') {
return <WrapElement {...wrapElementProps}>{JSON.stringify(cellData)}</WrapElement>
}
}
}

View File

@@ -1,6 +1,4 @@
/* eslint-disable no-underscore-dangle */
import memoize from 'micro-memoize'
import type { FindOneArgs } from '../../database/types'
import type { PayloadRequest } from '../../express/types'
import type { Collection, TypeWithID } from '../config/types'
@@ -32,7 +30,6 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
try {
const shouldCommit = await initTransaction(args.req)
const { transactionID } = args.req
// /////////////////////////////////////
// beforeOperation - Collection
@@ -90,25 +87,7 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
if (!findOneArgs.where.and[0].id) throw new NotFound(t)
if (!req.findByID) {
req.findByID = { [transactionID]: {} }
} else if (!req.findByID[transactionID]) {
req.findByID[transactionID] = {}
}
if (!req.findByID[transactionID][collectionConfig.slug]) {
const nonMemoizedFindByID = async (query: FindOneArgs) => req.payload.db.findOne(query)
req.findByID[transactionID][collectionConfig.slug] = memoize(nonMemoizedFindByID, {
isPromise: true,
maxSize: 100,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This is straight from their docs, bad typings
transformKey: JSON.stringify,
})
}
let result = (await req.findByID[transactionID][collectionConfig.slug](findOneArgs)) as T
let result: T = await req.payload.db.findOne(findOneArgs)
if (!result) {
if (!disableErrors) {
@@ -118,9 +97,6 @@ async function findByID<T extends TypeWithID>(incomingArgs: Arguments): Promise<
return null
}
// Clone the result - it may have come back memoized
result = JSON.parse(JSON.stringify(result))
// /////////////////////////////////////
// Replace document with draft if available
// /////////////////////////////////////

View File

@@ -62,6 +62,17 @@ export async function getLocalizedPaths({
return paths
}
if (!matchedField && currentPath === 'id' && i === pathSegments.length - 1) {
lastIncompletePath.path = currentPath
const idField: Field = {
name: 'id',
type: payload.db.defaultIDType as 'text',
}
lastIncompletePath.field = idField
lastIncompletePath.complete = true
return paths
}
if (matchedField) {
if ('hidden' in matchedField && matchedField.hidden && !overrideAccess) {
lastIncompletePath.invalid = true

View File

@@ -1,3 +1,4 @@
export { useCollapsible } from '../../admin/components/elements/Collapsible/provider'
export { default as buildStateFromSchema } from '../../admin/components/forms/Form/buildStateFromSchema'
export { useAuth } from '../../admin/components/utilities/Auth'
export { useConfig } from '../../admin/components/utilities/Config'

View File

@@ -275,55 +275,63 @@ const validateFilterOptions: Validate = async (
await Promise.all(
collections.map(async (collection) => {
let optionFilter =
typeof filterOptions === 'function'
? await filterOptions({
id,
data,
relationTo: collection,
siblingData,
user,
})
: filterOptions
try {
let optionFilter =
typeof filterOptions === 'function'
? await filterOptions({
id,
data,
relationTo: collection,
siblingData,
user,
})
: filterOptions
if (optionFilter === true) {
optionFilter = null
}
const valueIDs: (number | string)[] = []
values.forEach((val) => {
if (typeof val === 'object' && val?.value) {
valueIDs.push(val.value)
if (optionFilter === true) {
optionFilter = null
}
if (typeof val === 'string' || typeof val === 'number') {
valueIDs.push(val)
}
})
const valueIDs: (number | string)[] = []
if (valueIDs.length > 0) {
const findWhere = {
and: [{ id: { in: valueIDs } }],
}
values.forEach((val) => {
if (typeof val === 'object' && val?.value) {
valueIDs.push(val.value)
}
if (optionFilter) findWhere.and.push(optionFilter)
if (optionFilter === false) {
falseCollections.push(optionFilter)
}
const result = await payload.find({
collection,
depth: 0,
limit: 0,
pagination: false,
req,
where: findWhere,
if (typeof val === 'string' || typeof val === 'number') {
valueIDs.push(val)
}
})
options[collection] = result.docs.map((doc) => doc.id)
} else {
if (valueIDs.length > 0) {
const findWhere = {
and: [{ id: { in: valueIDs } }],
}
if (optionFilter) findWhere.and.push(optionFilter)
if (optionFilter === false) {
falseCollections.push(optionFilter)
}
// `req` omitted to prevent transaction errors from aborting the entire transaction
const result = await payload.find({
collection,
depth: 0,
limit: 0,
pagination: false,
where: findWhere,
})
options[collection] = result.docs.map((doc) => doc.id)
} else {
options[collection] = []
}
} catch (err) {
req.payload.logger.error({
err,
msg: `Error validating filter options for collection ${collection}`,
})
options[collection] = []
}
}),

View File

@@ -1,25 +1,31 @@
import type { Request } from 'express'
import type { File, FileData } from './types'
import { Request } from 'express'
import { APIError } from '../errors'
type Args = {
req: Request
data: FileData
req: Request
}
export const getExternalFile = async ({ req, data }: Args): Promise<File> => {
const baseUrl = req.get('origin') || `${req.protocol}://${req.get('host')}`
const { url, filename } = data
export const getExternalFile = async ({ data, req }: Args): Promise<File> => {
const { filename, url } = data
if (typeof url === 'string') {
const fileURL = `${baseUrl}${url}`
let fileURL = url
if (!url.startsWith('http')) {
const baseUrl = req.get('origin') || `${req.protocol}://${req.get('host')}`
fileURL = `${baseUrl}${url}`
}
const { default: fetch } = (await import('node-fetch')) as any
const res = await fetch(fileURL, {
credentials: 'include',
method: 'GET',
headers: {
...req.headers,
},
method: 'GET',
})
if (!res.ok) throw new APIError(`Failed to fetch file from ${fileURL}`, res.status)

View File

@@ -0,0 +1,51 @@
# Local emulators for plugin-cloud-storage tests: AWS S3 (localstack),
# Azure Blob Storage (azurite), and Google Cloud Storage (fake-gcs-server).
version: '3.2'
services:
  # AWS S3 emulator — only the s3 service is enabled (SERVICES=s3).
  localstack:
    image: localstack/localstack:latest
    container_name: localstack_demo
    ports:
      - '4563-4599:4563-4599' # AWS service endpoints
      - '8055:8080' # localstack web UI
    environment:
      - SERVICES=s3
      - DEBUG=1
      - DATA_DIR=/tmp/localstack/data
    volumes:
      - './.localstack:/var/lib/localstack'
      - '/var/run/docker.sock:/var/run/docker.sock'
  # Azure Storage emulator (blob/queue/table); --loose relaxes strict API
  # validation so SDK clients behave as against real Azure.
  azure-storage:
    image: mcr.microsoft.com/azure-storage/azurite:3.18.0
    restart: always
    command: 'azurite --loose --blobHost 0.0.0.0 --tableHost 0.0.0.0 --queueHost 0.0.0.0'
    ports:
      - '10000:10000' # blob
      - '10001:10001' # queue
      - '10002:10002' # table
    volumes:
      # fixed: removed stray trailing '"' which made the container path '/data"'
      - ./azurestoragedata:/data
  # GCS emulator; in-memory backend, addressed via http://localhost:4443.
  google-cloud-storage:
    image: fsouza/fake-gcs-server
    restart: always
    command:
      [
        '-scheme',
        'http',
        '-port',
        '4443',
        '-public-host',
        'http://localhost:4443',
        '-external-url',
        'http://localhost:4443',
        '-backend',
        'memory',
      ]
    ports:
      - '4443:4443'
    volumes:
      - ./google-cloud-storage/payload-bucket:/data/payload-bucket
volumes:
  google-cloud-storage:
  azurestoragedata:

View File

@@ -52,7 +52,7 @@
"@types/find-node-modules": "^2.1.2",
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"nodemon": "^2.0.6",
"nodemon": "3.0.3",
"payload": "workspace:*",
"rimraf": "^4.1.2",
"ts-node": "^9.1.1",

View File

@@ -1,7 +1,7 @@
{
"name": "@payloadcms/plugin-form-builder",
"description": "Form builder plugin for Payload CMS",
"version": "1.2.0",
"version": "1.2.1",
"homepage:": "https://payloadcms.com",
"repository": "git@github.com:payloadcms/plugin-form-builder.git",
"main": "dist/index.js",
@@ -31,7 +31,7 @@
"@types/react": "18.2.15",
"copyfiles": "^2.4.1",
"cross-env": "^7.0.3",
"nodemon": "^3.0.2",
"nodemon": "3.0.3",
"payload": "workspace:*",
"react": "^18.0.0",
"ts-node": "10.9.1"

View File

@@ -5,7 +5,7 @@ import { replaceDoubleCurlys } from '../../../utilities/replaceDoubleCurlys'
import { serializeSlate } from '../../../utilities/slate/serializeSlate'
const sendEmail = async (beforeChangeData: any, formConfig: PluginConfig): Promise<any> => {
const { data, operation } = beforeChangeData
const { data, operation, req } = beforeChangeData
if (operation === 'create') {
const {
@@ -22,6 +22,7 @@ const sendEmail = async (beforeChangeData: any, formConfig: PluginConfig): Promi
id: formID,
collection: formOverrides?.slug || 'forms',
locale,
req,
})
const { emails } = form

View File

@@ -11,6 +11,7 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
const newConfig: CollectionConfig = {
...(formConfig?.formSubmissionOverrides || {}),
slug: formConfig?.formSubmissionOverrides?.slug || 'form-submissions',
access: {
create: () => true,
read: ({ req: { user } }) => !!user, // logged-in users,
@@ -24,13 +25,13 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
fields: [
{
name: 'form',
type: 'relationship',
admin: {
readOnly: true,
},
relationTo: formSlug,
required: true,
type: 'relationship',
validate: async (value, { payload }) => {
validate: async (value, { payload, req }) => {
/* Don't run in the client side */
if (!payload) return true
@@ -41,6 +42,7 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
existingForm = await payload.findByID({
id: value,
collection: formSlug,
req,
})
return true
@@ -52,19 +54,20 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
{
name: 'submissionData',
type: 'array',
admin: {
readOnly: true,
},
fields: [
{
name: 'field',
required: true,
type: 'text',
required: true,
},
{
name: 'value',
required: true,
type: 'text',
required: true,
validate: (value: unknown) => {
// TODO:
// create a validation function that dynamically
@@ -84,7 +87,6 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
},
],
type: 'array',
},
...(formConfig?.formSubmissionOverrides?.fields || []),
],
@@ -96,7 +98,6 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
],
...(formConfig?.formSubmissionOverrides?.hooks || {}),
},
slug: formConfig?.formSubmissionOverrides?.slug || 'form-submissions',
}
const paymentFieldConfig = formConfig?.fields?.payment
@@ -104,26 +105,27 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
if (paymentFieldConfig) {
newConfig.fields.push({
name: 'payment',
type: 'group',
admin: {
readOnly: true,
},
fields: [
{
name: 'field',
label: 'Field',
type: 'text',
label: 'Field',
},
{
name: 'status',
label: 'Status',
type: 'text',
label: 'Status',
},
{
name: 'amount',
type: 'number',
admin: {
description: 'Amount in cents',
},
type: 'number',
},
{
name: 'paymentProcessor',
@@ -131,28 +133,27 @@ export const generateSubmissionCollection = (formConfig: PluginConfig): Collecti
},
{
name: 'creditCard',
type: 'group',
fields: [
{
name: 'token',
label: 'token',
type: 'text',
label: 'token',
},
{
name: 'brand',
label: 'Brand',
type: 'text',
label: 'Brand',
},
{
name: 'number',
label: 'Number',
type: 'text',
label: 'Number',
},
],
label: 'Credit Card',
type: 'group',
},
],
type: 'group',
})
}

View File

@@ -1,27 +1,25 @@
import type { CollectionAfterDeleteHook } from 'payload/types'
const deleteFromSearch: CollectionAfterDeleteHook = ({ doc, req: { payload } }) => {
const deleteFromSearch: CollectionAfterDeleteHook = async ({ doc, req: { payload }, req }) => {
try {
const deleteSearchDoc = async (): Promise<any> => {
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: doc.id,
},
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
req,
where: {
'doc.value': {
equals: doc.id,
},
},
})
if (searchDocQuery?.docs?.[0]) {
await payload.delete({
id: searchDocQuery?.docs?.[0]?.id,
collection: 'search',
req,
})
if (searchDocQuery?.docs?.[0]) {
payload.delete({
id: searchDocQuery?.docs?.[0]?.id,
collection: 'search',
})
}
}
deleteSearchDoc()
} catch (err: unknown) {
payload.logger.error({
err: `Error deleting search doc: ${err}`,

View File

@@ -6,6 +6,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
doc,
operation,
req: { payload },
req,
// @ts-expect-error
searchConfig,
} = args
@@ -26,6 +27,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
dataToSave = await beforeSync({
originalDoc: doc,
payload,
req,
searchDoc: dataToSave,
})
}
@@ -53,13 +55,13 @@ const syncWithSearch: SyncWithSearch = async (args) => {
try {
if (operation === 'create') {
if (doSync) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.create({
await payload.create({
collection: 'search',
data: {
...dataToSave,
priority: defaultPriority,
},
req,
})
}
}
@@ -70,6 +72,7 @@ const syncWithSearch: SyncWithSearch = async (args) => {
const searchDocQuery = await payload.find({
collection: 'search',
depth: 0,
req,
where: {
'doc.value': {
equals: id,
@@ -88,15 +91,12 @@ const syncWithSearch: SyncWithSearch = async (args) => {
// to ensure the same, out-of-date result does not appear twice (where only syncing the first found doc)
if (duplicativeDocs.length > 0) {
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
Promise.all(
duplicativeDocs.map(({ id: duplicativeDocID }) =>
payload.delete({
id: duplicativeDocID,
collection: 'search',
}),
), // eslint-disable-line function-paren-newline
)
const duplicativeDocIDs = duplicativeDocs.map(({ id }) => id)
await payload.delete({
collection: 'search',
req,
where: { id: { in: duplicativeDocIDs } },
})
} catch (err: unknown) {
payload.logger.error(`Error deleting duplicative search documents.`)
}
@@ -108,14 +108,14 @@ const syncWithSearch: SyncWithSearch = async (args) => {
if (doSync) {
// update the doc normally
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.update({
await payload.update({
id: searchDocID,
collection: 'search',
data: {
...dataToSave,
priority: foundDoc.priority || defaultPriority,
},
req,
})
} catch (err: unknown) {
payload.logger.error(`Error updating search document.`)
@@ -124,10 +124,10 @@ const syncWithSearch: SyncWithSearch = async (args) => {
if (deleteDrafts && status === 'draft') {
// do not include draft docs in search results, so delete the record
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.delete({
await payload.delete({
id: searchDocID,
collection: 'search',
req,
})
} catch (err: unknown) {
payload.logger.error(`Error deleting search document: ${err}`)
@@ -135,13 +135,13 @@ const syncWithSearch: SyncWithSearch = async (args) => {
}
} else if (doSync) {
try {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
payload.create({
await payload.create({
collection: 'search',
data: {
...dataToSave,
priority: defaultPriority,
},
req,
})
} catch (err: unknown) {
payload.logger.error(`Error creating search document: ${err}`)

View File

@@ -34,7 +34,7 @@ const Search =
afterChange: [
...(existingHooks?.afterChange || []),
async (args: any) => {
syncWithSearch({
await syncWithSearch({
...args,
collection: collection.slug,
searchConfig,

View File

@@ -1,5 +1,5 @@
import type { Payload } from 'payload'
import type { CollectionAfterChangeHook, CollectionConfig } from 'payload/types'
import type { CollectionAfterChangeHook, CollectionConfig, PayloadRequest } from 'payload/types'
export interface DocToSync {
[key: string]: any
@@ -15,6 +15,7 @@ export type BeforeSync = (args: {
[key: string]: any
}
payload: Payload
req: PayloadRequest
searchDoc: DocToSync
}) => DocToSync | Promise<DocToSync>

View File

@@ -45,7 +45,7 @@
"cross-env": "^7.0.3",
"dotenv": "^8.2.0",
"jest": "^29.5.0",
"nodemon": "^2.0.6",
"nodemon": "3.0.3",
"payload": "workspace:*",
"ts-jest": "^29.1.0",
"webpack": "^5.78.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/plugin-seo",
"version": "2.2.0",
"version": "2.2.1",
"homepage:": "https://payloadcms.com",
"repository": "git@github.com:payloadcms/plugin-seo.git",
"description": "SEO plugin for Payload",

View File

@@ -1,6 +1,6 @@
{
"name": "@payloadcms/richtext-lexical",
"version": "0.6.0",
"version": "0.6.1",
"description": "The officially supported Lexical richtext adapter for Payload",
"repository": "https://github.com/payloadcms/payload",
"license": "MIT",
@@ -19,31 +19,31 @@
},
"dependencies": {
"@faceless-ui/modal": "2.0.1",
"@lexical/headless": "0.12.6",
"@lexical/link": "0.12.6",
"@lexical/list": "0.12.6",
"@lexical/mark": "0.12.6",
"@lexical/markdown": "0.12.6",
"@lexical/react": "0.12.6",
"@lexical/rich-text": "0.12.6",
"@lexical/selection": "0.12.6",
"@lexical/utils": "0.12.6",
"@lexical/headless": "0.13.1",
"@lexical/link": "0.13.1",
"@lexical/list": "0.13.1",
"@lexical/mark": "0.13.1",
"@lexical/markdown": "0.13.1",
"@lexical/react": "0.13.1",
"@lexical/rich-text": "0.13.1",
"@lexical/selection": "0.13.1",
"@lexical/utils": "0.13.1",
"bson-objectid": "2.0.4",
"classnames": "^2.3.2",
"deep-equal": "2.2.3",
"i18next": "22.5.1",
"json-schema": "^0.4.0",
"lexical": "0.12.6",
"lexical": "0.13.1",
"lodash": "4.17.21",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-error-boundary": "^4.0.11",
"react-error-boundary": "4.0.12",
"react-i18next": "11.18.6",
"ts-essentials": "7.0.3"
},
"devDependencies": {
"@payloadcms/eslint-config": "workspace:*",
"@types/json-schema": "7.0.12",
"@types/json-schema": "7.0.15",
"@types/node": "20.6.2",
"@types/react": "18.2.15",
"payload": "workspace:*"

View File

@@ -49,7 +49,7 @@ const RichText: React.FC<FieldProps> = (props) => {
validate: memoizedValidate,
})
const { errorMessage, setValue, showError, value } = fieldType
const { errorMessage, initialValue, setValue, showError, value } = fieldType
const classes = [
baseClass,
@@ -77,6 +77,7 @@ const RichText: React.FC<FieldProps> = (props) => {
<LexicalProvider
editorConfig={editorConfig}
fieldProps={props}
key={JSON.stringify({ initialValue, path })} // makes sure lexical is completely re-rendered when initialValue changes, bypassing the lexical-internal value memoization. That way, external changes to the form will update the editor. More infos in PR description (https://github.com/payloadcms/payload/pull/5010)
onChange={(editorState) => {
let serializedEditorState = editorState.toJSON()

View File

@@ -2,7 +2,7 @@ import type { SerializedQuoteNode } from '@lexical/rich-text'
import { $createQuoteNode, QuoteNode } from '@lexical/rich-text'
import { $setBlocksType } from '@lexical/selection'
import { $INTERNAL_isPointSelection, $getSelection } from 'lexical'
import { $getSelection } from 'lexical'
import type { HTMLConverter } from '../converters/html/converter/types'
import type { FeatureProvider } from '../types'
@@ -31,9 +31,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
onClick: ({ editor }) => {
editor.update(() => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createQuoteNode())
}
$setBlocksType(selection, () => $createQuoteNode())
})
},
order: 20,
@@ -44,6 +42,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
markdownTransformers: [MarkdownTransformer],
nodes: [
{
type: QuoteNode.getType(),
converters: {
html: {
converter: async ({ converters, node, parent }) => {
@@ -62,7 +61,6 @@ export const BlockQuoteFeature = (): FeatureProvider => {
} as HTMLConverter<SerializedQuoteNode>,
},
node: QuoteNode,
type: QuoteNode.getType(),
},
],
props: null,
@@ -82,9 +80,7 @@ export const BlockQuoteFeature = (): FeatureProvider => {
keywords: ['quote', 'blockquote'],
onSelect: () => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createQuoteNode())
}
$setBlocksType(selection, () => $createQuoteNode())
},
}),
],

View File

@@ -39,8 +39,16 @@ export function BlocksPlugin(): JSX.Element | null {
const { focus } = selection
const focusNode = focus.getNode()
// First, delete currently selected node if it's an empty paragraph
if ($isParagraphNode(focusNode) && focusNode.getTextContentSize() === 0) {
// First, delete currently selected node if it's an empty paragraph and if there are sufficient
// paragraph nodes (more than 1) left in the parent node, so that we don't "trap" the user
if (
$isParagraphNode(focusNode) &&
focusNode.getTextContentSize() === 0 &&
focusNode
.getParent()
.getChildren()
.filter((node) => $isParagraphNode(node)).length > 1
) {
focusNode.remove()
}

View File

@@ -2,7 +2,7 @@ import type { HeadingTagType, SerializedHeadingNode } from '@lexical/rich-text'
import { $createHeadingNode, HeadingNode } from '@lexical/rich-text'
import { $setBlocksType } from '@lexical/selection'
import { $INTERNAL_isPointSelection, $getSelection } from 'lexical'
import { $getSelection } from 'lexical'
import type { HTMLConverter } from '../converters/html/converter/types'
import type { FeatureProvider } from '../types'
@@ -14,9 +14,7 @@ import { MarkdownTransformer } from './markdownTransformer'
const setHeading = (headingSize: HeadingTagType) => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createHeadingNode(headingSize))
}
$setBlocksType(selection, () => $createHeadingNode(headingSize))
}
type Props = {
@@ -67,6 +65,7 @@ export const HeadingFeature = (props: Props): FeatureProvider => {
markdownTransformers: [MarkdownTransformer(enabledHeadingSizes)],
nodes: [
{
type: HeadingNode.getType(),
converters: {
html: {
converter: async ({ converters, node, parent }) => {
@@ -85,7 +84,6 @@ export const HeadingFeature = (props: Props): FeatureProvider => {
} as HTMLConverter<SerializedHeadingNode>,
},
node: HeadingNode,
type: HeadingNode.getType(),
},
],
props,

View File

@@ -22,6 +22,8 @@ import {
import type { LinkPayload } from '../plugins/floatingLinkEditor/types'
import { $isAutoLinkNode } from './AutoLinkNode'
export type LinkFields = {
// unknown, custom fields:
[key: string]: unknown
@@ -140,8 +142,8 @@ export class LinkNode extends ElementNode {
exportJSON(): SerializedLinkNode {
return {
...super.exportJSON(),
fields: this.getFields(),
type: this.getType(),
fields: this.getFields(),
version: 2,
}
}

View File

@@ -235,7 +235,8 @@ function handleLinkCreation(
onChange: ChangeHandler,
): void {
let currentNodes = [...nodes]
let text = currentNodes.map((node) => node.getTextContent()).join('')
const initialText = currentNodes.map((node) => node.getTextContent()).join('')
let text = initialText
let match
let invalidMatchEnd = 0
@@ -247,7 +248,7 @@ function handleLinkCreation(
const isValid = isContentAroundIsValid(
invalidMatchEnd + matchStart,
invalidMatchEnd + matchEnd,
text,
initialText,
currentNodes,
)

View File

@@ -34,6 +34,8 @@ import { useEditorConfigContext } from '../../../../../lexical/config/EditorConf
import { getSelectedNode } from '../../../../../lexical/utils/getSelectedNode'
import { setFloatingElemPositionForLinkEditor } from '../../../../../lexical/utils/setFloatingElemPositionForLinkEditor'
import { LinkDrawer } from '../../../drawer'
import { $isAutoLinkNode } from '../../../nodes/AutoLinkNode'
import { $createLinkNode } from '../../../nodes/LinkNode'
import { $isLinkNode, TOGGLE_LINK_COMMAND } from '../../../nodes/LinkNode'
import { transformExtraFields } from '../utilities'
import { TOGGLE_LINK_WITH_MODAL_COMMAND } from './commands'
@@ -73,7 +75,7 @@ export function LinkEditor({
// Sanitize custom fields here
const validRelationships = config.collections.map((c) => c.slug) || []
const fields = sanitizeFields({
config: config,
config,
fields: fieldsUnsanitized,
validRelationships,
})
@@ -84,10 +86,11 @@ export function LinkEditor({
const { closeModal, toggleModal } = useModal()
const editDepth = useEditDepth()
const [isLink, setIsLink] = useState(false)
const [isAutoLink, setIsAutoLink] = useState(false)
const drawerSlug = formatDrawerSlug({
depth: editDepth,
slug: `lexical-rich-text-link-` + uuid,
depth: editDepth,
})
const updateLinkEditor = useCallback(async () => {
@@ -98,9 +101,10 @@ export function LinkEditor({
if ($isRangeSelection(selection)) {
const node = getSelectedNode(selection)
selectedNodeDomRect = editor.getElementByKey(node.getKey())?.getBoundingClientRect()
const linkParent: LinkNode = $findMatchingParent(node, $isLinkNode) as LinkNode
const linkParent: LinkNode = $findMatchingParent(node, $isLinkNode)
if (linkParent == null) {
setIsLink(false)
setIsAutoLink(false)
setLinkUrl('')
setLinkLabel('')
return
@@ -152,6 +156,11 @@ export function LinkEditor({
})
setInitialState(state)
setIsLink(true)
if ($isAutoLinkNode(linkParent)) {
setIsAutoLink(true)
} else {
setIsAutoLink(false)
}
}
const editorElem = editorRef.current
@@ -265,6 +274,7 @@ export function LinkEditor({
() => {
if (isLink) {
setIsLink(false)
setIsAutoLink(false)
return true
}
return false
@@ -301,18 +311,20 @@ export function LinkEditor({
tabIndex={0}
type="button"
/>
<button
aria-label="Remove link"
className="link-trash"
onClick={() => {
editor.dispatchCommand(TOGGLE_LINK_COMMAND, null)
}}
onMouseDown={(event) => {
event.preventDefault()
}}
tabIndex={0}
type="button"
/>
{!isAutoLink && (
<button
aria-label="Remove link"
className="link-trash"
onClick={() => {
editor.dispatchCommand(TOGGLE_LINK_COMMAND, null)
}}
onMouseDown={(event) => {
event.preventDefault()
}}
tabIndex={0}
type="button"
/>
)}
</React.Fragment>
)}
</div>
@@ -325,6 +337,22 @@ export function LinkEditor({
const newLinkPayload: LinkPayload = data as LinkPayload
// See: https://github.com/facebook/lexical/pull/5536. This updates autolink nodes to link nodes whenever a change was made (which is good!).
editor.update(() => {
const selection = $getSelection()
if ($isRangeSelection(selection)) {
const parent = getSelectedNode(selection).getParent()
if ($isAutoLinkNode(parent)) {
const linkNode = $createLinkNode({
fields: newLinkPayload.fields,
})
parent.replace(linkNode, true)
}
}
})
// Needs to happen AFTER a potential auto link => link node conversion, as otherwise, the updated text to display may be lost due to
// it being applied to the auto link node instead of the link node.
editor.dispatchCommand(TOGGLE_LINK_COMMAND, newLinkPayload)
}}
initialState={initialState}

View File

@@ -1,5 +1,5 @@
import { $setBlocksType } from '@lexical/selection'
import { $INTERNAL_isPointSelection, $createParagraphNode, $getSelection } from 'lexical'
import { $createParagraphNode, $getSelection } from 'lexical'
import type { FeatureProvider } from '../types'
@@ -23,9 +23,7 @@ export const ParagraphFeature = (): FeatureProvider => {
onClick: ({ editor }) => {
editor.update(() => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createParagraphNode())
}
$setBlocksType(selection, () => $createParagraphNode())
})
},
order: 1,
@@ -49,9 +47,7 @@ export const ParagraphFeature = (): FeatureProvider => {
onSelect: ({ editor }) => {
editor.update(() => {
const selection = $getSelection()
if ($INTERNAL_isPointSelection(selection)) {
$setBlocksType(selection, () => $createParagraphNode())
}
$setBlocksType(selection, () => $createParagraphNode())
})
},
}),

View File

@@ -54,8 +54,16 @@ export function RelationshipPlugin(props?: RelationshipFeatureProps): JSX.Elemen
const { focus } = selection
const focusNode = focus.getNode()
// First, delete currently selected node if it's an empty paragraph
if ($isParagraphNode(focusNode) && focusNode.getTextContentSize() === 0) {
// First, delete currently selected node if it's an empty paragraph and if there are sufficient
// paragraph nodes (more than 1) left in the parent node, so that we don't "trap" the user
if (
$isParagraphNode(focusNode) &&
focusNode.getTextContentSize() === 0 &&
focusNode
.getParent()
.getChildren()
.filter((node) => $isParagraphNode(node)).length > 1
) {
focusNode.remove()
}

View File

@@ -53,8 +53,16 @@ export function UploadPlugin(): JSX.Element | null {
const { focus } = selection
const focusNode = focus.getNode()
// First, delete currently selected node if it's an empty paragraph
if ($isParagraphNode(focusNode) && focusNode.getTextContentSize() === 0) {
// First, delete currently selected node if it's an empty paragraph and if there are sufficient
// paragraph nodes (more than 1) left in the parent node, so that we don't "trap" the user
if (
$isParagraphNode(focusNode) &&
focusNode.getTextContentSize() === 0 &&
focusNode
.getParent()
.getChildren()
.filter((node) => $isParagraphNode(node)).length > 1
) {
focusNode.remove()
}

View File

@@ -56,6 +56,22 @@ export const AlignFeature = (): FeatureProvider => {
order: 3,
},
]),
AlignDropdownSectionWithEntries([
{
ChildComponent: () =>
// @ts-expect-error
import('../../lexical/ui/icons/AlignJustify').then(
(module) => module.AlignJustifyIcon,
),
isActive: () => false,
key: 'align-justify',
label: `Align Justify`,
onClick: ({ editor }) => {
editor.dispatchCommand(FORMAT_ELEMENT_COMMAND, 'justify')
},
order: 4,
},
]),
],
},
props: null,

View File

@@ -2,12 +2,8 @@
import type { ParagraphNode } from 'lexical'
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
import {
$getNearestNodeFromDOMNode,
$getNodeByKey,
type LexicalEditor,
type LexicalNode,
} from 'lexical'
import { $createParagraphNode } from 'lexical'
import { $getNodeByKey, type LexicalEditor, type LexicalNode } from 'lexical'
import * as React from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { createPortal } from 'react-dom'
@@ -50,14 +46,13 @@ function getBlockElement(
horizontalOffset = 0,
): {
blockElem: HTMLElement | null
shouldRemove: boolean
blockNode: LexicalNode | null
} {
const anchorElementRect = anchorElem.getBoundingClientRect()
const topLevelNodeKeys = getTopLevelNodeKeys(editor)
let blockElem: HTMLElement | null = null
let blockNode: LexicalNode | null = null
let shouldRemove = false
// Return null if matching block element is the first or last node
editor.getEditorState().read(() => {
@@ -82,7 +77,6 @@ function getBlockElement(
if (blockElem) {
return {
blockElem: null,
shouldRemove,
}
}
}
@@ -118,16 +112,6 @@ function getBlockElement(
blockElem = elem
blockNode = $getNodeByKey(key)
prevIndex = index
// Check if blockNode is an empty text node
if (
!blockNode ||
blockNode.getType() !== 'paragraph' ||
blockNode.getTextContent() !== ''
) {
blockElem = null
shouldRemove = true
}
break
}
@@ -147,8 +131,8 @@ function getBlockElement(
})
return {
blockElem: blockElem,
shouldRemove,
blockElem,
blockNode,
}
}
@@ -160,7 +144,10 @@ function useAddBlockHandle(
const scrollerElem = anchorElem.parentElement
const menuRef = useRef<HTMLButtonElement>(null)
const [emptyBlockElem, setEmptyBlockElem] = useState<HTMLElement | null>(null)
const [hoveredElement, setHoveredElement] = useState<{
elem: HTMLElement
node: LexicalNode
} | null>(null)
useEffect(() => {
function onDocumentMouseMove(event: MouseEvent) {
@@ -185,7 +172,7 @@ function useAddBlockHandle(
pageX < left - horizontalBuffer ||
pageX > right + horizontalBuffer
) {
setEmptyBlockElem(null)
setHoveredElement(null)
return
}
@@ -199,21 +186,24 @@ function useAddBlockHandle(
if (isOnHandleElement(target, ADD_BLOCK_MENU_CLASSNAME)) {
return
}
const { blockElem: _emptyBlockElem, shouldRemove } = getBlockElement(
const { blockElem: _emptyBlockElem, blockNode } = getBlockElement(
anchorElem,
editor,
event,
false,
-distanceFromScrollerElem,
)
if (!_emptyBlockElem && !shouldRemove) {
if (!_emptyBlockElem) {
return
}
setEmptyBlockElem(_emptyBlockElem)
setHoveredElement({
elem: _emptyBlockElem,
node: blockNode,
})
}
// Since the draggableBlockElem is outside the actual editor, we need to listen to the document
// to be able to detect when the mouse is outside the editor and respect a buffer around the
// to be able to detect when the mouse is outside the editor and respect a buffer around
// the scrollerElem to avoid the draggableBlockElem disappearing too early.
document?.addEventListener('mousemove', onDocumentMouseMove)
@@ -223,42 +213,86 @@ function useAddBlockHandle(
}, [scrollerElem, anchorElem, editor])
useEffect(() => {
if (menuRef.current) {
setHandlePosition(emptyBlockElem, menuRef.current, anchorElem, SPACE)
if (menuRef.current && hoveredElement?.node) {
editor.getEditorState().read(() => {
// Check if blockNode is an empty text node
let isEmptyParagraph = true
if (
hoveredElement.node.getType() !== 'paragraph' ||
hoveredElement.node.getTextContent() !== ''
) {
isEmptyParagraph = false
}
setHandlePosition(
hoveredElement?.elem,
menuRef.current,
anchorElem,
isEmptyParagraph ? SPACE : SPACE - 20,
)
})
}
}, [anchorElem, emptyBlockElem])
}, [anchorElem, hoveredElement, editor])
const handleAddClick = useCallback(
(event) => {
if (!emptyBlockElem) {
let hoveredElementToUse = hoveredElement
if (!hoveredElementToUse?.node) {
return
}
let node: ParagraphNode
editor.update(() => {
node = $getNearestNodeFromDOMNode(emptyBlockElem) as ParagraphNode
if (!node || node.getType() !== 'paragraph') {
return
}
editor.focus()
node.select()
/*const ns = $createNodeSelection();
ns.add(node.getKey())
$setSelection(ns)*/
// 1. Update hoveredElement.node to a new paragraph node if the hoveredElement.node is not a paragraph node
editor.update(() => {
// Check if blockNode is an empty text node
let isEmptyParagraph = true
if (
hoveredElementToUse.node.getType() !== 'paragraph' ||
hoveredElementToUse.node.getTextContent() !== ''
) {
isEmptyParagraph = false
}
if (!isEmptyParagraph) {
const newParagraph = $createParagraphNode()
hoveredElementToUse.node.insertAfter(newParagraph)
setTimeout(() => {
hoveredElementToUse = {
elem: editor.getElementByKey(newParagraph.getKey()),
node: newParagraph,
}
setHoveredElement(hoveredElementToUse)
}, 0)
}
})
// Make sure this is called AFTER the editorfocus() event has been processed by the browser
// 2. Focus on the new paragraph node
setTimeout(() => {
editor.update(() => {
editor.focus()
if (
hoveredElementToUse.node &&
'select' in hoveredElementToUse.node &&
typeof hoveredElementToUse.node.select === 'function'
) {
hoveredElementToUse.node.select()
}
})
}, 1)
// Make sure this is called AFTER the focusing has been processed by the browser
// Otherwise, this won't work
setTimeout(() => {
editor.dispatchCommand(ENABLE_SLASH_MENU_COMMAND, {
node: node,
node: hoveredElementToUse.node as ParagraphNode,
})
}, 0)
}, 2)
event.stopPropagation()
event.preventDefault()
},
[editor, emptyBlockElem],
[editor, hoveredElement],
)
return createPortal(

View File

@@ -56,6 +56,7 @@ export const LexicalEditorTheme: EditorThemeClasses = {
inlineImage: 'LexicalEditor__inline-image',
link: 'LexicalEditorTheme__link',
list: {
checklist: 'LexicalEditorTheme__checklist',
listitem: 'LexicalEditorTheme__listItem',
listitemChecked: 'LexicalEditorTheme__listItemChecked',
listitemUnchecked: 'LexicalEditorTheme__listItemUnchecked',

View File

@@ -0,0 +1,18 @@
import React from 'react'
export const AlignJustifyIcon: React.FC = () => (
<svg
aria-hidden="true"
className="icon"
fill="none"
focusable="false"
height="20"
viewBox="0 0 20 20"
width="20"
xmlns="http://www.w3.org/2000/svg"
>
<path d="M2.5 5H17.5" stroke="currentColor" strokeWidth="1.5" />
<path d="M2.5 10H17.5" stroke="currentColor" strokeWidth="1.5" />
<path d="M2.5 15H17.5" stroke="currentColor" strokeWidth="1.5" />
</svg>
)

View File

@@ -326,7 +326,7 @@ const RichText: React.FC<FieldProps> = (props) => {
<Label htmlFor={`field-${path.replace(/\./g, '__')}`} label={label} required={required} />
<Slate
editor={editor}
key={JSON.stringify({ initialValue, path })}
key={JSON.stringify({ initialValue, path })} // makes sure slate is completely re-rendered when initialValue changes, bypassing the slate-internal value memoization. That way, external changes to the form will update the editor
onChange={handleChange}
value={valueToRender as any[]}
>

781
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -35,6 +35,13 @@ const databaseAdapters = {
connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests',
},
}),
'postgres-uuid': postgresAdapter({
idType: 'uuid',
migrationDir,
pool: {
connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests',
},
}),
supabase: postgresAdapter({
migrationDir,
pool: {

View File

@@ -115,6 +115,10 @@ const BlockFields: CollectionConfig = {
slug: blockFieldsSlug,
fields: [
getBlocksField(),
{
...getBlocksField(),
name: 'duplicate',
},
{
...getBlocksField('localized'),
name: 'collapsedByDefaultBlocks',

View File

@@ -100,8 +100,8 @@ describe('Fields', () => {
const { id } = await payload.create({
collection: 'text-fields',
data: {
text,
localizedHasMany,
text,
},
locale: 'en',
})
@@ -977,6 +977,70 @@ describe('Fields', () => {
expect(result.docs).toHaveLength(1)
expect(result.docs[0]).toMatchObject(blockDoc)
})
it('should query by blockType', async () => {
const text = 'blockType query test'
const hit = await payload.create({
collection: blockFieldsSlug,
data: {
blocks: [
{
blockType: 'content',
text,
},
],
},
})
const miss = await payload.create({
collection: blockFieldsSlug,
data: {
blocks: [
{
blockType: 'number',
number: 5,
},
],
duplicate: [
{
blockType: 'content',
text,
},
],
},
})
const { docs: equalsDocs } = await payload.find({
collection: blockFieldsSlug,
where: {
and: [
{
'blocks.blockType': { equals: 'content' },
},
{
'blocks.text': { equals: text },
},
],
},
})
const { docs: inDocs } = await payload.find({
collection: blockFieldsSlug,
where: {
'blocks.blockType': { in: ['content'] },
},
})
const equalsHitResult = equalsDocs.find(({ id }) => id === hit.id)
const inHitResult = inDocs.find(({ id }) => id === hit.id)
const equalsMissResult = equalsDocs.find(({ id }) => id === miss.id)
const inMissResult = inDocs.find(({ id }) => id === miss.id)
expect(equalsHitResult.id).toStrictEqual(hit.id)
expect(inHitResult.id).toStrictEqual(hit.id)
expect(equalsMissResult).toBeUndefined()
expect(inMissResult).toBeUndefined()
})
})
describe('json', () => {
@@ -1145,6 +1209,63 @@ describe('Fields', () => {
expect(existTrueIDs).toContain(hasJSON.id)
expect(existFalseIDs).not.toContain(hasJSON.id)
})
it('exists should not return null values', async () => {
const { id } = await payload.create({
collection: 'select-fields',
data: {
select: 'one',
},
})
const existsResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: true },
},
})
expect(existsResult.docs).toHaveLength(1)
const existsFalseResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: false },
},
})
expect(existsFalseResult.docs).toHaveLength(0)
await payload.update({
id,
collection: 'select-fields',
data: {
select: null,
},
})
const existsTrueResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: true },
},
})
expect(existsTrueResult.docs).toHaveLength(0)
const result = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: false },
},
})
expect(result.docs).toHaveLength(1)
})
})
})
@@ -1200,8 +1321,8 @@ describe('Fields', () => {
expect(nodes).toBeDefined()
const child = nodes.flatMap((n) => n.children).find((c) => c.doc)
expect(child).toMatchObject({
linkType: 'internal',
type: 'link',
linkType: 'internal',
})
expect(child.doc.relationTo).toEqual('array-fields')
@@ -1249,4 +1370,63 @@ describe('Fields', () => {
expect(query.docs).toBeDefined()
})
})
describe('clearable fields - exists', () => {
it('exists should not return null values', async () => {
const { id } = await payload.create({
collection: 'select-fields',
data: {
select: 'one',
},
})
const existsResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: true },
},
})
expect(existsResult.docs).toHaveLength(1)
const existsFalseResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: false },
},
})
expect(existsFalseResult.docs).toHaveLength(0)
await payload.update({
id,
collection: 'select-fields',
data: {
select: null,
},
})
const existsTrueResult = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: true },
},
})
expect(existsTrueResult.docs).toHaveLength(0)
const result = await payload.find({
collection: 'select-fields',
where: {
id: { equals: id },
select: { exists: false },
},
})
expect(result.docs).toHaveLength(1)
})
})
})

View File

@@ -1,6 +1,7 @@
import type { Locator, Page } from '@playwright/test'
import { expect } from '@playwright/test'
import shelljs from 'shelljs'
import wait from '../packages/payload/src/utilities/wait'
import { devUser } from './credentials'
@@ -133,3 +134,21 @@ export function initPageConsoleErrorCatch(page: Page) {
}
})
}
export function describeIfInCIOrHasLocalstack(): jest.Describe {
if (process.env.CI) {
return describe
}
// Check that localstack is running
const { code } = shelljs.exec(`docker ps | grep localstack`)
if (code !== 0) {
console.warn('Localstack is not running. Skipping test suite.')
return describe.skip
}
console.log('Localstack is running. Running test suite.')
return describe
}

View File

@@ -9,4 +9,6 @@ module.exports = () => {
} else {
console.log('\n\nNo database specified, using default')
}
process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER = 's3'
}

View File

@@ -16,7 +16,7 @@ let adapter: Adapter
let uploadOptions
dotenv.config({
path: path.resolve(__dirname, '.env'),
path: path.resolve(__dirname, '.env.emulated'),
})
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'azure') {
@@ -31,7 +31,20 @@ if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'azure') {
// }
}
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 's3') {
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'gcs') {
adapter = gcsAdapter({
options: {
apiEndpoint: process.env.GCS_ENDPOINT,
projectId: process.env.GCS_PROJECT_ID,
},
bucket: process.env.GCS_BUCKET,
})
}
if (
process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 's3' ||
!process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER
) {
// The s3 adapter supports using temp files for uploads
uploadOptions = {
useTempFiles: true,
@@ -66,15 +79,9 @@ if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'r2') {
})
}
if (process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER === 'gcs') {
adapter = gcsAdapter({
options: {
apiEndpoint: process.env.GCS_ENDPOINT,
projectId: process.env.GCS_PROJECT_ID,
},
bucket: process.env.GCS_BUCKET,
})
}
console.log(
`Using plugin-cloud-storage adapter: ${process.env.PAYLOAD_PUBLIC_CLOUD_STORAGE_ADAPTER}`,
)
export default buildConfigWithDefaults({
collections: [Media, Users],
@@ -120,6 +127,5 @@ export default buildConfigWithDefaults({
password: devUser.password,
},
})
console.log(process.env.S3_ENDPOINT)
},
})

View File

@@ -1,11 +1,110 @@
/* eslint-disable jest/require-top-level-describe */
import * as AWS from '@aws-sdk/client-s3'
import path from 'path'
import payload from '../../packages/payload/src'
import { describeIfInCIOrHasLocalstack } from '../helpers'
import { initPayloadTest } from '../helpers/configHelpers'
describe('plugin-cloud-storage', () => {
beforeAll(async () => {
await initPayloadTest({ __dirname, init: { local: true } })
})
const TEST_BUCKET = 'payload-bucket'
describe('tests', () => {
it.todo('plugin-cloud-storage tests')
let client: AWS.S3Client
describeIfInCIOrHasLocalstack()('plugin-cloud-storage', () => {
describe('S3', () => {
beforeAll(async () => {
client = new AWS.S3({
endpoint: 'http://localhost:4566',
region: 'us-east-1',
forcePathStyle: true, // required for localstack
})
await createTestBucket()
await initPayloadTest({ __dirname, init: { local: true } })
})
afterEach(async () => {
await clearTestBucket()
})
it('can upload', async () => {
const upload = await payload.create({
collection: 'media',
data: {},
filePath: path.resolve(__dirname, '../uploads/image.png'),
})
expect(upload.id).toBeTruthy()
await verifyUploads(upload.id)
})
})
})
describe('Azure', () => {
it.todo('can upload')
})
describe('GCS', () => {
it.todo('can upload')
})
describe('R2', () => {
it.todo('can upload')
})
async function createTestBucket() {
const makeBucketRes = await client.send(new AWS.CreateBucketCommand({ Bucket: TEST_BUCKET }))
if (makeBucketRes.$metadata.httpStatusCode !== 200) {
throw new Error(`Failed to create bucket. ${makeBucketRes.$metadata.httpStatusCode}`)
}
}
async function clearTestBucket() {
const listedObjects = await client.send(
new AWS.ListObjectsV2Command({
Bucket: TEST_BUCKET,
}),
)
if (!listedObjects?.Contents?.length) return
const deleteParams = {
Bucket: TEST_BUCKET,
Delete: { Objects: [] },
}
listedObjects.Contents.forEach(({ Key }) => {
deleteParams.Delete.Objects.push({ Key })
})
const deleteResult = await client.send(new AWS.DeleteObjectsCommand(deleteParams))
if (deleteResult.Errors?.length) {
throw new Error(JSON.stringify(deleteResult.Errors))
}
}
async function verifyUploads(uploadId: number | string) {
try {
const uploadData = (await payload.findByID({
collection: 'media',
id: uploadId,
})) as unknown as { filename: string; sizes: Record<string, { filename: string }> }
const fileKeys = Object.keys(uploadData.sizes).map((key) => uploadData.sizes[key].filename)
fileKeys.push(uploadData.filename)
for (const key of fileKeys) {
const { $metadata } = await client.send(
new AWS.HeadObjectCommand({ Bucket: TEST_BUCKET, Key: key }),
)
// Verify each size was properly uploaded
expect($metadata.httpStatusCode).toBe(200)
}
} catch (error: unknown) {
console.error('Error verifying uploads:', error)
throw error
}
}

View File

@@ -22,23 +22,19 @@ describe('Search Plugin', () => {
collection: 'pages',
data: {
_status: 'published',
title: 'Hello, world!',
excerpt: 'This is a test page',
title: 'Hello, world!',
},
})
// wait for the search document to be created
// we do not await this within the `syncToSearch` hook
await wait(200)
const { docs: results } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: pageToSync.id,
},
},
depth: 0,
})
expect(results).toHaveLength(1)
@@ -52,8 +48,8 @@ describe('Search Plugin', () => {
collection: 'pages',
data: {
_status: 'draft',
title: 'Hello, world!',
excerpt: 'This is a test page',
title: 'Hello, world!',
},
})
@@ -63,12 +59,12 @@ describe('Search Plugin', () => {
const { docs: results } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: draftPage.id,
},
},
depth: 0,
})
expect(results).toHaveLength(0)
@@ -79,23 +75,19 @@ describe('Search Plugin', () => {
collection: 'pages',
data: {
_status: 'published',
title: 'Hello, world!',
excerpt: 'This is a test page',
title: 'Hello, world!',
},
})
// wait for the search document to be created
// we do not await this within the `syncToSearch` hook
await wait(200)
const { docs: results } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: pageToReceiveUpdates.id,
},
},
depth: 0,
})
expect(results).toHaveLength(1)
@@ -104,11 +96,11 @@ describe('Search Plugin', () => {
expect(results[0].excerpt).toBe('This is a test page')
await payload.update({
collection: 'pages',
id: pageToReceiveUpdates.id,
collection: 'pages',
data: {
title: 'Hello, world! (updated)',
excerpt: 'This is a test page (updated)',
title: 'Hello, world! (updated)',
},
})
@@ -119,12 +111,12 @@ describe('Search Plugin', () => {
// Do not add `limit` to this query, this way we can test if multiple documents were created
const { docs: updatedResults } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: pageToReceiveUpdates.id,
},
},
depth: 0,
})
expect(updatedResults).toHaveLength(1)
@@ -138,8 +130,8 @@ describe('Search Plugin', () => {
collection: 'pages',
data: {
_status: 'published',
title: 'Hello, world!',
excerpt: 'This is a test page',
title: 'Hello, world!',
},
})
@@ -149,20 +141,20 @@ describe('Search Plugin', () => {
const { docs: results } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: page.id,
},
},
depth: 0,
})
expect(results).toHaveLength(1)
expect(results[0].doc.value).toBe(page.id)
await payload.delete({
collection: 'pages',
id: page.id,
collection: 'pages',
})
// wait for the search document to be potentially deleted
@@ -171,12 +163,12 @@ describe('Search Plugin', () => {
const { docs: deletedResults } = await payload.find({
collection: 'search',
depth: 0,
where: {
'doc.value': {
equals: page.id,
},
},
depth: 0,
})
expect(deletedResults).toHaveLength(0)

View File

@@ -1,6 +1,5 @@
import { randomBytes } from 'crypto'
import type { PayloadRequest } from '../../packages/payload/src/express/types'
import type {
ChainedRelation,
CustomIdNumberRelation,
@@ -273,6 +272,74 @@ describe('Relationships', () => {
expect(query.totalDocs).toEqual(2)
})
// https://github.com/payloadcms/payload/issues/4240
it('should allow querying by relationship id field', async () => {
/**
* This test shows something which breaks on postgres but not on mongodb.
*/
const someDirector = await payload.create({
collection: 'directors',
data: {
name: 'Quentin Tarantino',
},
})
await payload.create({
collection: 'movies',
data: {
name: 'Pulp Fiction',
},
})
await payload.create({
collection: 'movies',
data: {
name: 'Pulp Fiction',
},
})
await payload.create({
collection: 'movies',
data: {
name: 'Harry Potter',
},
})
await payload.create({
collection: 'movies',
data: {
name: 'Lord of the Rings is boring',
director: someDirector.id,
},
})
// This causes the following error:
// "Your "id" field references a column "directors"."id", but the table "directors" is not part of the query! Did you forget to join it?"
// This only happens on postgres, not on mongodb
const query = await payload.find({
collection: 'movies',
depth: 5,
limit: 1,
where: {
or: [
{
name: {
equals: 'Pulp Fiction',
},
},
{
'director.id': {
equals: someDirector.id,
},
},
],
},
})
expect(query.totalDocs).toEqual(3)
expect(query.docs).toHaveLength(1) // Due to limit: 1
})
describe('Custom ID', () => {
it('should query a custom id relation', async () => {
const { doc } = await client.findByID<Post>({ id: post.id })
@@ -288,7 +355,7 @@ describe('Relationships', () => {
await expect(async () =>
createPost({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore Sending bad data to test error handling
// @ts-expect-error Sending bad data to test error handling
customIdRelation: 1234,
}),
).rejects.toThrow('The following field is invalid: customIdRelation')
@@ -298,7 +365,7 @@ describe('Relationships', () => {
await expect(async () =>
createPost({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore Sending bad data to test error handling
// @ts-expect-error Sending bad data to test error handling
customIdNumberRelation: 'bad-input',
}),
).rejects.toThrow('The following field is invalid: customIdNumberRelation')
@@ -614,35 +681,6 @@ describe('Relationships', () => {
})
})
describe('Creating', () => {
describe('With transactions', () => {
it('should be able to create filtered relations within a transaction', async () => {
const req = {} as PayloadRequest
req.transactionID = await payload.db.beginTransaction?.()
const related = await payload.create({
collection: relationSlug,
data: {
name: 'parent',
},
req,
})
const withRelation = await payload.create({
collection: slug,
data: {
filteredRelation: related.id,
},
req,
})
if (req.transactionID) {
await payload.db.commitTransaction?.(req.transactionID)
}
expect(withRelation.filteredRelation.id).toEqual(related.id)
})
})
})
describe('Polymorphic Relationships', () => {
it('should allow REST querying on polymorphic relationships', async () => {
const movie = await payload.create({

View File

@@ -110,14 +110,23 @@ describe('Versions', () => {
expect(collectionLocalVersionID).toBeDefined()
})
it('should properly paginate versions', async () => {
it('should paginate versions', async () => {
const versions = await payload.findVersions({
collection,
limit: 1,
collection: draftCollectionSlug,
limit: 5,
})
const versionsPage2 = await payload.findVersions({
collection: draftCollectionSlug,
limit: 5,
page: 2,
})
expect(versions.docs).toHaveLength(1)
expect(versions.docs).toHaveLength(5)
expect(versions.page).toBe(1)
expect(versionsPage2.docs).toHaveLength(5)
expect(versionsPage2.page).toBe(2)
expect(versions.docs[0].id).not.toBe(versionsPage2.docs[0].id)
})
it('should allow saving multiple versions of models with unique fields', async () => {
@@ -231,17 +240,17 @@ describe('Versions', () => {
const draftPost = await payload.create({
collection: draftCollectionSlug,
data: {
title: 'Some Title',
description: 'Description',
title: 'Some Title',
},
})
await payload.create({
collection: draftCollectionSlug,
data: {
title: 'With Relation',
description: 'Description',
relation: draftPost.id,
title: 'With Relation',
},
})
@@ -264,15 +273,15 @@ describe('Versions', () => {
const draftsAscending = await payload.findVersions({
collection: draftCollectionSlug,
draft: true,
sort: 'createdAt',
limit: 100,
sort: 'createdAt',
})
const draftsDescending = await payload.findVersions({
collection: draftCollectionSlug,
draft: true,
sort: '-createdAt',
limit: 100,
sort: '-createdAt',
})
expect(draftsAscending).toBeDefined()
@@ -712,8 +721,8 @@ describe('Versions', () => {
const { id } = await payload.create({
collection: 'draft-posts',
data: {
title: 'Title',
description: 'Description',
title: 'Title',
},
})
@@ -857,25 +866,25 @@ describe('Versions', () => {
// modify the post to create a new version
// language=graphQL
const update = `mutation {
updateAutosavePost(id: ${formatGraphQLID(
collectionGraphQLPostID,
)}, data: {title: "${updatedTitle2}"}) {
title
updatedAt
createdAt
}
updateAutosavePost(id: ${formatGraphQLID(
collectionGraphQLPostID,
)}, data: {title: "${updatedTitle2}"}) {
title
updatedAt
createdAt
}
}`
await graphQLClient.request(update)
// language=graphQL
const query = `query {
versionsAutosavePosts(where: { parent: { equals: ${formatGraphQLID(
collectionGraphQLPostID,
)} } }) {
docs {
id
}
versionsAutosavePosts(where: { parent: { equals: ${formatGraphQLID(
collectionGraphQLPostID,
)} } }) {
docs {
id
}
}
}`
const response = await graphQLClient.request(query)
@@ -908,17 +917,17 @@ describe('Versions', () => {
it('should allow read of versions by querying version content', async () => {
// language=graphQL
const query = `query {
versionsAutosavePosts(where: { version__title: {equals: "${collectionGraphQLOriginalTitle}" } }) {
docs {
id
parent {
id
}
version {
title
}
}
versionsAutosavePosts(where: { version__title: {equals: "${collectionGraphQLOriginalTitle}" } }) {
docs {
id
parent {
id
}
version {
title
}
}
}
}`
const response = await graphQLClient.request(query)
@@ -937,25 +946,25 @@ describe('Versions', () => {
// modify the post to create a new version
// language=graphQL
const update = `mutation {
updateAutosavePost(id: ${formatGraphQLID(
collectionGraphQLPostID,
)}, data: {title: "${collectionGraphQLOriginalTitle}"}) {
title
updatedAt
createdAt
}
updateAutosavePost(id: ${formatGraphQLID(
collectionGraphQLPostID,
)}, data: {title: "${collectionGraphQLOriginalTitle}"}) {
title
updatedAt
createdAt
}
}`
await graphQLClient.request(update)
// language=graphQL
const query = `query {
versionsAutosavePosts(where: { parent: { equals: ${formatGraphQLID(
collectionGraphQLPostID,
)} } }) {
docs {
id
}
versionsAutosavePosts(where: { parent: { equals: ${formatGraphQLID(
collectionGraphQLPostID,
)} } }) {
docs {
id
}
}
}`
const response = await graphQLClient.request(query)
@@ -1001,17 +1010,17 @@ describe('Versions', () => {
beforeEach(async () => {
const title2 = 'Here is an updated global title in EN'
await payload.updateGlobal({
slug: globalSlug,
data: {
title: 'Test Global',
},
slug: globalSlug,
})
const updatedGlobal = await payload.updateGlobal({
slug: globalSlug,
data: {
title: title2,
},
slug: globalSlug,
})
const versions = await payload.findGlobalVersions({
@@ -1049,18 +1058,18 @@ describe('Versions', () => {
const spanishTitle = 'Title in ES'
await payload.updateGlobal({
slug: globalSlug,
data: {
title: englishTitle,
},
slug: globalSlug,
})
const updatedGlobalES = await payload.updateGlobal({
slug: globalSlug,
data: {
title: spanishTitle,
},
locale: 'es',
slug: globalSlug,
})
expect(updatedGlobalES.title).toBe(spanishTitle)
@@ -1068,15 +1077,15 @@ describe('Versions', () => {
const newEnglishTitle = 'New title in EN'
await payload.updateGlobal({
slug: globalSlug,
data: {
title: newEnglishTitle,
},
slug: globalSlug,
})
const versions = await payload.findGlobalVersions({
locale: 'all',
slug: globalSlug,
locale: 'all',
})
expect(versions.docs[0].version.title.en).toStrictEqual(newEnglishTitle)
@@ -1089,18 +1098,18 @@ describe('Versions', () => {
const title2 = 'Another updated title in EN'
const updatedGlobal = await payload.updateGlobal({
slug: globalSlug,
data: {
title: title2,
},
slug: globalSlug,
})
expect(updatedGlobal.title).toBe(title2)
// Make sure it was updated correctly
const foundUpdatedGlobal = await payload.findGlobal({
draft: true,
slug: globalSlug,
draft: true,
})
expect(foundUpdatedGlobal.title).toBe(title2)
@@ -1118,8 +1127,8 @@ describe('Versions', () => {
expect(restore.title).toBeDefined()
const restoredGlobal = await payload.findGlobal({
draft: true,
slug: globalSlug,
draft: true,
})
expect(restoredGlobal.title).toBe(restore.title)
@@ -1131,43 +1140,43 @@ describe('Versions', () => {
const originalTitle = 'Here is a published global'
await payload.updateGlobal({
slug: globalSlug,
data: {
_status: 'published',
description: 'kjnjyhbbdsfseankuhsjsfghb',
title: originalTitle,
},
slug: globalSlug,
})
const publishedGlobal = await payload.findGlobal({
draft: true,
slug: globalSlug,
draft: true,
})
const updatedTitle2 = 'Here is a draft global with a patched title'
await payload.updateGlobal({
slug: globalSlug,
data: {
title: updatedTitle2,
},
draft: true,
locale: 'en',
slug: globalSlug,
})
await payload.updateGlobal({
slug: globalSlug,
data: {
title: updatedTitle2,
},
draft: true,
locale: 'es',
slug: globalSlug,
})
const updatedGlobal = await payload.findGlobal({
slug: globalSlug,
draft: true,
locale: 'all',
slug: globalSlug,
})
expect(publishedGlobal.title).toBe(originalTitle)
@@ -1179,22 +1188,22 @@ describe('Versions', () => {
const originalTitle = 'Here is a draft'
await payload.updateGlobal({
slug: globalSlug,
data: {
_status: 'draft',
title: originalTitle,
},
draft: true,
slug: globalSlug,
})
const updatedTitle2 = 'Now try to publish'
const result = await payload.updateGlobal({
slug: globalSlug,
data: {
_status: 'published',
title: updatedTitle2,
},
slug: globalSlug,
})
expect(result.title).toBe(updatedTitle2)
@@ -1206,25 +1215,25 @@ describe('Versions', () => {
beforeEach(async () => {
// language=graphql
const update = `mutation {
updateAutosaveGlobal(draft: true, data: {
title: "${globalGraphQLOriginalTitle}"
}) {
_status
title
}
updateAutosaveGlobal(draft: true, data: {
title: "${globalGraphQLOriginalTitle}"
}) {
_status
title
}
}`
await graphQLClient.request(update)
// language=graphQL
const query = `query {
versionsAutosaveGlobal(where: { version__title: { equals: "${globalGraphQLOriginalTitle}" } }) {
docs {
id
version {
title
}
}
versionsAutosaveGlobal(where: { version__title: { equals: "${globalGraphQLOriginalTitle}" } }) {
docs {
id
version {
title
}
}
}
}`
const response = await graphQLClient.request(query)
@@ -1235,12 +1244,12 @@ describe('Versions', () => {
it('should allow read of versions by version id', async () => {
// language=graphql
const query = `query {
versionAutosaveGlobal(id: ${formatGraphQLID(globalGraphQLVersionID)}) {
id
version {
title
}
versionAutosaveGlobal(id: ${formatGraphQLID(globalGraphQLVersionID)}) {
id
version {
title
}
}
}`
const response = await graphQLClient.request(query)
@@ -1254,14 +1263,14 @@ describe('Versions', () => {
it('should allow read of versions by querying version content', async () => {
// language=graphQL
const query = `query {
versionsAutosaveGlobal(where: { version__title: {equals: "${globalGraphQLOriginalTitle}" } }) {
docs {
id
version {
title
}
}
versionsAutosaveGlobal(where: { version__title: {equals: "${globalGraphQLOriginalTitle}" } }) {
docs {
id
version {
title
}
}
}
}`
const response = await graphQLClient.request(query)
@@ -1278,9 +1287,9 @@ describe('Versions', () => {
it('should allow a version to be restored', async () => {
// language=graphql
const restore = `mutation {
restoreVersionAutosaveGlobal(id: ${formatGraphQLID(globalGraphQLVersionID)}) {
title
}
restoreVersionAutosaveGlobal(id: ${formatGraphQLID(globalGraphQLVersionID)}) {
title
}
}`
await graphQLClient.request(restore)