Compare commits
127 Commits
revert-121
...
docs/migra
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9fdbbc27cf | ||
|
|
071c61fe49 | ||
|
|
cceb793257 | ||
|
|
1b1e36e2df | ||
|
|
6b6948f92c | ||
|
|
9ef51a7cf3 | ||
|
|
0f7dc38012 | ||
|
|
c720ce3c08 | ||
|
|
3a73a67ef4 | ||
|
|
4c6fde0e89 | ||
|
|
c1c0db3b01 | ||
|
|
00667faf8d | ||
|
|
898e97ed17 | ||
|
|
8142a00da6 | ||
|
|
08a3dfbbcb | ||
|
|
fc83823e5d | ||
|
|
2a41d3fbb1 | ||
|
|
c772a3207c | ||
|
|
c701dd41a9 | ||
|
|
4dfb2d24bb | ||
|
|
230128b92e | ||
|
|
23f42040ab | ||
|
|
8596ac5694 | ||
|
|
324daff553 | ||
|
|
22b1858ee8 | ||
|
|
2ab8e2e194 | ||
|
|
1235a183ff | ||
|
|
81d333f4b0 | ||
|
|
4fe3423e54 | ||
|
|
e8c2b15e2b | ||
|
|
3127d6ad6d | ||
|
|
72ab319d37 | ||
|
|
2a929cf385 | ||
|
|
38029cdd6e | ||
|
|
14252696ce | ||
|
|
5855f3a475 | ||
|
|
529bfe149e | ||
|
|
18f2f899c5 | ||
|
|
d4899b84cc | ||
|
|
6fb2beb983 | ||
|
|
4166621966 | ||
|
|
e395a0aa66 | ||
|
|
cead312d4b | ||
|
|
219fd01717 | ||
|
|
1f6efe9a46 | ||
|
|
88769c8244 | ||
|
|
bd6ee317c1 | ||
|
|
561708720d | ||
|
|
58fc2f9a74 | ||
|
|
5fce501589 | ||
|
|
3e7db302ee | ||
|
|
7498d09f1c | ||
|
|
3edfd7cc6d | ||
|
|
77bb7e3638 | ||
|
|
8ebadd4190 | ||
|
|
e258cd73ef | ||
|
|
d63c8baea5 | ||
|
|
93d79b9c62 | ||
|
|
9779cf7f7d | ||
|
|
b7b2b390fc | ||
|
|
7130834152 | ||
|
|
1d5d96d2c3 | ||
|
|
faa7794cc7 | ||
|
|
98283ca18c | ||
|
|
e93d0baf89 | ||
|
|
cd455741e5 | ||
|
|
735d699804 | ||
|
|
d9c0c43154 | ||
|
|
a9cc747038 | ||
|
|
fd67d461ac | ||
|
|
8219c046de | ||
|
|
021932cc8b | ||
|
|
edeb381fb4 | ||
|
|
c43891b2ba | ||
|
|
3701de5056 | ||
|
|
09f15ff874 | ||
|
|
72662257a8 | ||
|
|
18693775e4 | ||
|
|
b3cac753d6 | ||
|
|
05ae957cd5 | ||
|
|
800c424777 | ||
|
|
9a6bb44e50 | ||
|
|
38186346f7 | ||
|
|
a6d76d6058 | ||
|
|
0d10f436cc | ||
|
|
dcd4e37ccc | ||
|
|
446938b9cb | ||
|
|
292b462f34 | ||
|
|
2628b43639 | ||
|
|
3fb81ef43b | ||
|
|
3c9ee5d3b4 | ||
|
|
11018ebfe0 | ||
|
|
b480f81387 | ||
|
|
d7d37447aa | ||
|
|
ddf40d59ac | ||
|
|
1ef1c5564d | ||
|
|
055a263af3 | ||
|
|
a62cdc89d8 | ||
|
|
b6b02ac97c | ||
|
|
5365d4f1c2 | ||
|
|
e5683913b4 | ||
|
|
78d3af7dc9 | ||
|
|
c08c7071ee | ||
|
|
b9868c4a3b | ||
|
|
e5b28c98dc | ||
|
|
35c0404817 | ||
|
|
cfe8c97ab7 | ||
|
|
6133a1d183 | ||
|
|
710fe0949b | ||
|
|
4a56597b92 | ||
|
|
27d644f2f9 | ||
|
|
564fdb0e17 | ||
|
|
47a1eee765 | ||
|
|
8fee0163b5 | ||
|
|
1b17df9e0b | ||
|
|
3df1329e19 | ||
|
|
5492542c1a | ||
|
|
9948040ad2 | ||
|
|
b7ae4ee60a | ||
|
|
34ead72c85 | ||
|
|
caae5986f5 | ||
|
|
2f21d46de6 | ||
|
|
6b83086c6c | ||
|
|
5bd852c9b5 | ||
|
|
c85fb808b9 | ||
|
|
ab03f4f305 | ||
|
|
2157450805 |
2
.github/actions/setup/action.yml
vendored
2
.github/actions/setup/action.yml
vendored
@@ -6,7 +6,7 @@ inputs:
|
|||||||
node-version:
|
node-version:
|
||||||
description: Node.js version
|
description: Node.js version
|
||||||
required: true
|
required: true
|
||||||
default: 22.6.0
|
default: 23.11.0
|
||||||
pnpm-version:
|
pnpm-version:
|
||||||
description: Pnpm version
|
description: Pnpm version
|
||||||
required: true
|
required: true
|
||||||
|
|||||||
14
.github/workflows/main.yml
vendored
14
.github/workflows/main.yml
vendored
@@ -16,7 +16,7 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODE_VERSION: 22.6.0
|
NODE_VERSION: 23.11.0
|
||||||
PNPM_VERSION: 9.7.1
|
PNPM_VERSION: 9.7.1
|
||||||
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
||||||
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
||||||
@@ -62,12 +62,6 @@ jobs:
|
|||||||
echo "templates: ${{ steps.filter.outputs.templates }}"
|
echo "templates: ${{ steps.filter.outputs.templates }}"
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
# Follows same github's ci skip: [skip lint], [lint skip], [no lint]
|
|
||||||
if: >
|
|
||||||
github.event_name == 'pull_request' &&
|
|
||||||
!contains(github.event.pull_request.title, '[skip lint]') &&
|
|
||||||
!contains(github.event.pull_request.title, '[lint skip]') &&
|
|
||||||
!contains(github.event.pull_request.title, '[no lint]')
|
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -81,10 +75,8 @@ jobs:
|
|||||||
pnpm-version: ${{ env.PNPM_VERSION }}
|
pnpm-version: ${{ env.PNPM_VERSION }}
|
||||||
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||||
|
|
||||||
- name: Lint staged
|
- name: Lint
|
||||||
run: |
|
run: pnpm lint -- --quiet
|
||||||
git diff --name-only --diff-filter=d origin/${GITHUB_BASE_REF}...${GITHUB_SHA}
|
|
||||||
npx lint-staged --diff="origin/${GITHUB_BASE_REF}...${GITHUB_SHA}"
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
needs: changes
|
needs: changes
|
||||||
|
|||||||
2
.github/workflows/post-release-templates.yml
vendored
2
.github/workflows/post-release-templates.yml
vendored
@@ -7,7 +7,7 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODE_VERSION: 22.6.0
|
NODE_VERSION: 23.11.0
|
||||||
PNPM_VERSION: 9.7.1
|
PNPM_VERSION: 9.7.1
|
||||||
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
||||||
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
||||||
|
|||||||
2
.github/workflows/post-release.yml
vendored
2
.github/workflows/post-release.yml
vendored
@@ -12,7 +12,7 @@ on:
|
|||||||
default: ''
|
default: ''
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODE_VERSION: 22.6.0
|
NODE_VERSION: 23.11.0
|
||||||
PNPM_VERSION: 9.7.1
|
PNPM_VERSION: 9.7.1
|
||||||
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
||||||
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
||||||
|
|||||||
2
.github/workflows/publish-prerelease.yml
vendored
2
.github/workflows/publish-prerelease.yml
vendored
@@ -7,7 +7,7 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODE_VERSION: 22.6.0
|
NODE_VERSION: 23.11.0
|
||||||
PNPM_VERSION: 9.7.1
|
PNPM_VERSION: 9.7.1
|
||||||
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
DO_NOT_TRACK: 1 # Disable Turbopack telemetry
|
||||||
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,6 +3,7 @@ package-lock.json
|
|||||||
dist
|
dist
|
||||||
/.idea/*
|
/.idea/*
|
||||||
!/.idea/runConfigurations
|
!/.idea/runConfigurations
|
||||||
|
/.idea/runConfigurations/_template*
|
||||||
!/.idea/payload.iml
|
!/.idea/payload.iml
|
||||||
|
|
||||||
# Custom actions
|
# Custom actions
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
<component name="ProjectRunConfigurationManager">
|
|
||||||
<configuration default="true" type="JavaScriptTestRunnerJest">
|
|
||||||
<node-interpreter value="project" />
|
|
||||||
<node-options value="--no-deprecation" />
|
|
||||||
<envs />
|
|
||||||
<scope-kind value="ALL" />
|
|
||||||
<method v="2" />
|
|
||||||
</configuration>
|
|
||||||
</component>
|
|
||||||
@@ -1 +1 @@
|
|||||||
v22.6.0
|
v23.11.0
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
pnpm 9.7.1
|
pnpm 9.7.1
|
||||||
nodejs 22.6.0
|
nodejs 23.11.0
|
||||||
|
|||||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -63,6 +63,13 @@
|
|||||||
"request": "launch",
|
"request": "launch",
|
||||||
"type": "node-terminal"
|
"type": "node-terminal"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"command": "pnpm tsx --no-deprecation test/dev.ts query-presets",
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"name": "Run Dev Query Presets",
|
||||||
|
"request": "launch",
|
||||||
|
"type": "node-terminal"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"command": "pnpm tsx --no-deprecation test/dev.ts login-with-username",
|
"command": "pnpm tsx --no-deprecation test/dev.ts login-with-username",
|
||||||
"cwd": "${workspaceFolder}",
|
"cwd": "${workspaceFolder}",
|
||||||
@@ -111,6 +118,13 @@
|
|||||||
"request": "launch",
|
"request": "launch",
|
||||||
"type": "node-terminal"
|
"type": "node-terminal"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"command": "pnpm tsx --no-deprecation test/dev.ts folder-view",
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"name": "Run Dev Folder View",
|
||||||
|
"request": "launch",
|
||||||
|
"type": "node-terminal"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"command": "pnpm tsx --no-deprecation test/dev.ts localization",
|
"command": "pnpm tsx --no-deprecation test/dev.ts localization",
|
||||||
"cwd": "${workspaceFolder}",
|
"cwd": "${workspaceFolder}",
|
||||||
|
|||||||
8
.vscode/settings.json
vendored
8
.vscode/settings.json
vendored
@@ -7,9 +7,6 @@
|
|||||||
},
|
},
|
||||||
"editor.formatOnSaveMode": "file",
|
"editor.formatOnSaveMode": "file",
|
||||||
"eslint.rules.customizations": [
|
"eslint.rules.customizations": [
|
||||||
// Defaultt all ESLint errors to 'warn' to differentate from TypeScript's 'error' level
|
|
||||||
{ "rule": "*", "severity": "warn" },
|
|
||||||
|
|
||||||
// Silence some warnings that will get auto-fixed
|
// Silence some warnings that will get auto-fixed
|
||||||
{ "rule": "perfectionist/*", "severity": "off", "fixable": true },
|
{ "rule": "perfectionist/*", "severity": "off", "fixable": true },
|
||||||
{ "rule": "curly", "severity": "off", "fixable": true },
|
{ "rule": "curly", "severity": "off", "fixable": true },
|
||||||
@@ -24,5 +21,8 @@
|
|||||||
"runtimeArgs": ["--no-deprecation"]
|
"runtimeArgs": ["--no-deprecation"]
|
||||||
},
|
},
|
||||||
// Essentially disables bun test buttons
|
// Essentially disables bun test buttons
|
||||||
"bun.test.filePattern": "bun.test.ts"
|
"bun.test.filePattern": "bun.test.ts",
|
||||||
|
"playwright.env": {
|
||||||
|
"NODE_OPTIONS": "--no-deprecation --no-experimental-strip-types"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -132,6 +132,7 @@ The following options are available:
|
|||||||
| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
|
| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
|
||||||
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
|
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
|
||||||
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
|
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
|
||||||
|
| `folders` | A boolean to enable folders for a given collection. Defaults to `false`. [More details](../folders/overview). |
|
||||||
| `meta` | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](../admin/metadata). |
|
| `meta` | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](../admin/metadata). |
|
||||||
| `preview` | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](../admin/preview). |
|
| `preview` | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](../admin/preview). |
|
||||||
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
|
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
|
||||||
|
|||||||
@@ -84,6 +84,7 @@ The following options are available:
|
|||||||
| **`csrf`** | A whitelist array of URLs to allow Payload to accept cookies from. [More details](../authentication/cookies#csrf-attacks). |
|
| **`csrf`** | A whitelist array of URLs to allow Payload to accept cookies from. [More details](../authentication/cookies#csrf-attacks). |
|
||||||
| **`defaultDepth`** | If a user does not specify `depth` while requesting a resource, this depth will be used. [More details](../queries/depth). |
|
| **`defaultDepth`** | If a user does not specify `depth` while requesting a resource, this depth will be used. [More details](../queries/depth). |
|
||||||
| **`defaultMaxTextLength`** | The maximum allowed string length to be permitted application-wide. Helps to prevent malicious public document creation. |
|
| **`defaultMaxTextLength`** | The maximum allowed string length to be permitted application-wide. Helps to prevent malicious public document creation. |
|
||||||
|
| `folders` | An optional object to configure global folder settings. [More details](../folders/overview). |
|
||||||
| `queryPresets` | An object that to configure Collection Query Presets. [More details](../query-presets/overview). |
|
| `queryPresets` | An object that to configure Collection Query Presets. [More details](../query-presets/overview). |
|
||||||
| **`maxDepth`** | The maximum allowed depth to be permitted application-wide. This setting helps prevent against malicious queries. Defaults to `10`. [More details](../queries/depth). |
|
| **`maxDepth`** | The maximum allowed depth to be permitted application-wide. This setting helps prevent against malicious queries. Defaults to `10`. [More details](../queries/depth). |
|
||||||
| **`indexSortableFields`** | Automatically index all sortable top-level fields in the database to improve sort performance and add database compatibility for Azure Cosmos and similar. |
|
| **`indexSortableFields`** | Automatically index all sortable top-level fields in the database to improve sort performance and add database compatibility for Azure Cosmos and similar. |
|
||||||
|
|||||||
@@ -189,6 +189,8 @@ In MongoDB, you'll only ever really need to run migrations for times where you c
|
|||||||
|
|
||||||
In this case, you can create a migration by running `pnpm payload migrate:create`, and then write the logic that you need to perform to migrate your documents to their new shape. You can then either run your migrations in CI before you build / deploy, or you can run them locally, against your production database, by using your production database connection string on your local computer and running the `pnpm payload migrate` command.
|
In this case, you can create a migration by running `pnpm payload migrate:create`, and then write the logic that you need to perform to migrate your documents to their new shape. You can then either run your migrations in CI before you build / deploy, or you can run them locally, against your production database, by using your production database connection string on your local computer and running the `pnpm payload migrate` command.
|
||||||
|
|
||||||
|
You can find [here](/database/mongodb#common-migration-scripts) examples of common MongoDB migrations.
|
||||||
|
|
||||||
#### Postgres
|
#### Postgres
|
||||||
|
|
||||||
In relational databases like Postgres, migrations are a bit more important, because each time you add a new field or a new collection, you'll need to update the shape of your database to match your Payload Config (otherwise you'll see errors upon trying to read / write your data).
|
In relational databases like Postgres, migrations are a bit more important, because each time you add a new field or a new collection, you'll need to update the shape of your database to match your Payload Config (otherwise you'll see errors upon trying to read / write your data).
|
||||||
@@ -298,3 +300,15 @@ Passing your migrations as shown above will tell Payload, in production only, to
|
|||||||
may slow down serverless cold starts on platforms such as Vercel. Generally,
|
may slow down serverless cold starts on platforms such as Vercel. Generally,
|
||||||
this option should only be used for long-running servers / containers.
|
this option should only be used for long-running servers / containers.
|
||||||
</Banner>
|
</Banner>
|
||||||
|
|
||||||
|
## Environment-Specific Configurations and Migrations
|
||||||
|
|
||||||
|
Your configuration may include environment-specific settings (e.g., enabling a plugin only in production). If you generate migrations without considering the environment, it can lead to discrepancies and issues. When running migrations locally, Payload uses the development environment, which might miss production-specific configurations. Similarly, running migrations in production could miss development-specific entities.
|
||||||
|
|
||||||
|
This is an easy oversight, so be mindful of any environment-specific logic in your config when handling migrations.
|
||||||
|
|
||||||
|
**Ways to address this:**
|
||||||
|
|
||||||
|
- Manually update your migration file after it is generated to include any environment-specific configurations.
|
||||||
|
- Temporarily enable any required production environment variables in your local setup when generating the migration to capture the necessary updates.
|
||||||
|
- Use separate migration files for each environment to ensure the correct migration is executed in the corresponding environment.
|
||||||
|
|||||||
@@ -61,3 +61,118 @@ Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cos
|
|||||||
- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
|
- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`.
|
||||||
- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future.
|
- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future.
|
||||||
- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB.
|
- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB.
|
||||||
|
|
||||||
|
## Common migration scripts
|
||||||
|
|
||||||
|
### Delete field from the database
|
||||||
|
|
||||||
|
With the MongoDB adapter, even if you delete a field from your Payload config, the existing field data will still be remained in the database.
|
||||||
|
If you want to ensure that the field is fully erased from the database, you can use the following script:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// pnpx payload migrate:create --name delete-field
|
||||||
|
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
|
||||||
|
await payload.db.collections.posts.collection.updateMany(
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
$unset: {
|
||||||
|
// delete title field
|
||||||
|
title: true,
|
||||||
|
// nested to array
|
||||||
|
'array.title': true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{ session },
|
||||||
|
)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Synchronize indexes
|
||||||
|
|
||||||
|
Payload won't automatically replace existing indexes in MongoDB when you change your Payload config.
|
||||||
|
For example, changing `index: true` to `unique: true` won't automatically update the index in MongoDB.
|
||||||
|
You can use the following script to synchronize indexes:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// pnpx payload migrate:create --name sync-posts-indexes
|
||||||
|
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
|
||||||
|
await payload.db.collections.posts.syncIndexes()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<Banner type="warning">
|
||||||
|
Note that this will also drop all indexes that aren't in the payload config.
|
||||||
|
If you have custom indexes that you want to keep, they must be added to the collection schema
|
||||||
|
or you can insert them manually after `syncIndexes` with:
|
||||||
|
`payload.db.collections.posts.collection.createIndex({ title: 1 })`
|
||||||
|
</Banner>
|
||||||
|
|
||||||
|
### Making field localized and vice versa
|
||||||
|
|
||||||
|
When you change a field to be localized or vice versa, you can use the following script to update the field in the database:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// pnpx payload migrate:create --name make-title-localized
|
||||||
|
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
|
||||||
|
const posts = await payload.db.collections.posts.collection
|
||||||
|
.find({}, { session })
|
||||||
|
.toArray()
|
||||||
|
|
||||||
|
// Make "title" localized
|
||||||
|
await payload.db.collections.posts.collection.bulkWrite(
|
||||||
|
posts.map((post) => ({
|
||||||
|
updateOne: {
|
||||||
|
filter: { _id: post._id },
|
||||||
|
update: { $set: { title: { en: post.title } } },
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
|
||||||
|
// Make "title" non-localized
|
||||||
|
await payload.db.collections.posts.collection.bulkWrite(
|
||||||
|
posts.map((post) => ({
|
||||||
|
updateOne: {
|
||||||
|
filter: { _id: post._id },
|
||||||
|
update: { $set: { title: post.title.en } },
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example renaming a collection
|
||||||
|
|
||||||
|
The following example renames a collection with slug "pages" to "articles" and it includes migrating the \_versions collection also.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-mongodb'
|
||||||
|
// import type { Db } from 'mongodb' // you will need to add this package as a devDependency in the package.json if you want db to be typed as Db
|
||||||
|
|
||||||
|
export async function up({
|
||||||
|
payload,
|
||||||
|
req,
|
||||||
|
session,
|
||||||
|
}: MigrateUpArgs): Promise<void> {
|
||||||
|
const db = payload.db.connection.db as any
|
||||||
|
|
||||||
|
await db.renameCollection('pages', 'articles', { session, dropTarget: true })
|
||||||
|
await db.renameCollection('_pages_versions', '_articles_versions', {
|
||||||
|
session,
|
||||||
|
dropTarget: true,
|
||||||
|
}) // remove this line if you do not have versions enabled
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function down({
|
||||||
|
payload,
|
||||||
|
req,
|
||||||
|
session,
|
||||||
|
}: MigrateDownArgs): Promise<void> {
|
||||||
|
const db = payload.db.connection.db as any
|
||||||
|
|
||||||
|
await db.renameCollection('articles', 'pages', { session, dropTarget: true })
|
||||||
|
await db.renameCollection('_articles_versions', '_pages_versions', {
|
||||||
|
session,
|
||||||
|
dropTarget: true,
|
||||||
|
}) // remove this line if you do not have versions enabled
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|||||||
@@ -35,9 +35,9 @@ export const MyGroupField: Field = {
|
|||||||
|
|
||||||
| Option | Description |
|
| Option | Description |
|
||||||
| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
|
| **`name`** | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
|
||||||
| **`fields`** \* | Array of field types to nest within this Group. |
|
| **`fields`** \* | Array of field types to nest within this Group. |
|
||||||
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. |
|
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Required when name is undefined, defaults to name converted to words. |
|
||||||
| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) |
|
| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) |
|
||||||
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. |
|
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. |
|
||||||
| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). |
|
| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). |
|
||||||
@@ -86,7 +86,7 @@ export const ExampleCollection: CollectionConfig = {
|
|||||||
slug: 'example-collection',
|
slug: 'example-collection',
|
||||||
fields: [
|
fields: [
|
||||||
{
|
{
|
||||||
name: 'pageMeta', // required
|
name: 'pageMeta',
|
||||||
type: 'group', // required
|
type: 'group', // required
|
||||||
interfaceName: 'Meta', // optional
|
interfaceName: 'Meta', // optional
|
||||||
fields: [
|
fields: [
|
||||||
@@ -110,3 +110,38 @@ export const ExampleCollection: CollectionConfig = {
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Presentational group fields
|
||||||
|
|
||||||
|
You can also use the Group field to create a presentational group of fields. This is useful when you want to group fields together visually without affecting the data structure.
|
||||||
|
The label will be required when a `name` is not provided.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import type { CollectionConfig } from 'payload'
|
||||||
|
|
||||||
|
export const ExampleCollection: CollectionConfig = {
|
||||||
|
slug: 'example-collection',
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
label: 'Page meta',
|
||||||
|
type: 'group', // required
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: 'title',
|
||||||
|
type: 'text',
|
||||||
|
required: true,
|
||||||
|
minLength: 20,
|
||||||
|
maxLength: 100,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'description',
|
||||||
|
type: 'textarea',
|
||||||
|
required: true,
|
||||||
|
minLength: 40,
|
||||||
|
maxLength: 160,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|||||||
@@ -100,7 +100,7 @@ Here are the available Presentational Fields:
|
|||||||
|
|
||||||
### Virtual Fields
|
### Virtual Fields
|
||||||
|
|
||||||
Virtual fields are used to display data that is not stored in the database. They are useful for displaying computed values that populate within the APi response through hooks, etc.
|
Virtual fields are used to display data that is not stored in the database. They are useful for displaying computed values that populate within the API response through hooks, etc.
|
||||||
|
|
||||||
Here are the available Virtual Fields:
|
Here are the available Virtual Fields:
|
||||||
|
|
||||||
|
|||||||
@@ -94,6 +94,7 @@ The Relationship Field inherits all of the default options from the base [Field
|
|||||||
| **`allowCreate`** | Set to `false` if you'd like to disable the ability to create new documents from within the relationship field. |
|
| **`allowCreate`** | Set to `false` if you'd like to disable the ability to create new documents from within the relationship field. |
|
||||||
| **`allowEdit`** | Set to `false` if you'd like to disable the ability to edit documents from within the relationship field. |
|
| **`allowEdit`** | Set to `false` if you'd like to disable the ability to edit documents from within the relationship field. |
|
||||||
| **`sortOptions`** | Define a default sorting order for the options within a Relationship field's dropdown. [More](#sort-options) |
|
| **`sortOptions`** | Define a default sorting order for the options within a Relationship field's dropdown. [More](#sort-options) |
|
||||||
|
| **`placeholder`** | Define a custom text or function to replace the generic default placeholder |
|
||||||
| **`appearance`** | Set to `drawer` or `select` to change the behavior of the field. Defaults to `select`. |
|
| **`appearance`** | Set to `drawer` or `select` to change the behavior of the field. Defaults to `select`. |
|
||||||
|
|
||||||
### Sort Options
|
### Sort Options
|
||||||
@@ -149,7 +150,7 @@ The `filterOptions` property can either be a `Where` query, or a function return
|
|||||||
| `id` | The `id` of the current document being edited. Will be `undefined` during the `create` operation or when called on a `Filter` component within the list view. |
|
| `id` | The `id` of the current document being edited. Will be `undefined` during the `create` operation or when called on a `Filter` component within the list view. |
|
||||||
| `relationTo` | The collection `slug` to filter against, limited to this field's `relationTo` property. |
|
| `relationTo` | The collection `slug` to filter against, limited to this field's `relationTo` property. |
|
||||||
| `req` | The Payload Request, which contains references to `payload`, `user`, `locale`, and more. |
|
| `req` | The Payload Request, which contains references to `payload`, `user`, `locale`, and more. |
|
||||||
| `siblingData` | An object containing document data that is scoped to only fields within the same parent of this field. Will be an emprt object when called on a `Filter` component within the list view. |
|
| `siblingData` | An object containing document data that is scoped to only fields within the same parent of this field. Will be an empty object when called on a `Filter` component within the list view. |
|
||||||
| `user` | An object containing the currently authenticated user. |
|
| `user` | An object containing the currently authenticated user. |
|
||||||
|
|
||||||
## Example
|
## Example
|
||||||
|
|||||||
@@ -89,6 +89,7 @@ The Select Field inherits all of the default options from the base [Field Admin
|
|||||||
| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------- |
|
| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| **`isClearable`** | Set to `true` if you'd like this field to be clearable within the Admin UI. |
|
| **`isClearable`** | Set to `true` if you'd like this field to be clearable within the Admin UI. |
|
||||||
| **`isSortable`** | Set to `true` if you'd like this field to be sortable within the Admin UI using drag and drop. (Only works when `hasMany` is set to `true`) |
|
| **`isSortable`** | Set to `true` if you'd like this field to be sortable within the Admin UI using drag and drop. (Only works when `hasMany` is set to `true`) |
|
||||||
|
| **`placeholder`** | Define a custom text or function to replace the generic default placeholder |
|
||||||
|
|
||||||
## Example
|
## Example
|
||||||
|
|
||||||
|
|||||||
105
docs/folders/overview.mdx
Normal file
105
docs/folders/overview.mdx
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
---
|
||||||
|
title: Folders
|
||||||
|
label: Folders
|
||||||
|
order: 10
|
||||||
|
desc: Folders allow you to group documents across collections, and are a great way to organize your content.
|
||||||
|
keywords: folders, folder, content organization
|
||||||
|
---
|
||||||
|
|
||||||
|
Folders allow you to group documents across collections, and are a great way to organize your content. Folders are built on top of relationship fields: when you enable folders on a collection, Payload adds a hidden relationship field, `folder`, that relates to a folder — or no folder. Folders also have the `folder` field, allowing folders to be nested within other folders.
|
||||||
|
|
||||||
|
The configuration for folders is done in two places, the collection config and the Payload config. The collection config is where you enable folders, and the Payload config is where you configure the global folder settings.
|
||||||
|
|
||||||
|
<Banner type="warning">
|
||||||
|
**Note:** The Folders feature is currently in beta and may be subject to
|
||||||
|
change in minor version updates prior to being stable.
|
||||||
|
</Banner>
|
||||||
|
|
||||||
|
## Folder Configuration
|
||||||
|
|
||||||
|
On the payload config, you can configure the following settings under the `folders` property:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// Type definition
|
||||||
|
|
||||||
|
type RootFoldersConfiguration = {
|
||||||
|
/**
|
||||||
|
* An array of functions to be run when the folder collection is initialized
|
||||||
|
* This allows plugins to modify the collection configuration
|
||||||
|
*/
|
||||||
|
collectionOverrides?: (({
|
||||||
|
collection,
|
||||||
|
}: {
|
||||||
|
collection: CollectionConfig
|
||||||
|
}) => CollectionConfig | Promise<CollectionConfig>)[]
|
||||||
|
/**
|
||||||
|
* Ability to view hidden fields and collections related to folders
|
||||||
|
*
|
||||||
|
* @default false
|
||||||
|
*/
|
||||||
|
debug?: boolean
|
||||||
|
/**
|
||||||
|
* The Folder field name
|
||||||
|
*
|
||||||
|
* @default "folder"
|
||||||
|
*/
|
||||||
|
fieldName?: string
|
||||||
|
/**
|
||||||
|
* Slug for the folder collection
|
||||||
|
*
|
||||||
|
* @default "payload-folders"
|
||||||
|
*/
|
||||||
|
slug?: string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// Example usage
|
||||||
|
|
||||||
|
import { buildConfig } from 'payload'
|
||||||
|
|
||||||
|
const config = buildConfig({
|
||||||
|
// ...
|
||||||
|
folders: {
|
||||||
|
// highlight-start
|
||||||
|
debug: true, // optional
|
||||||
|
collectionOverrides: [
|
||||||
|
async ({ collection }) => {
|
||||||
|
return collection
|
||||||
|
},
|
||||||
|
], // optional
|
||||||
|
fieldName: 'folder', // optional
|
||||||
|
slug: 'payload-folders', // optional
|
||||||
|
// highlight-end
|
||||||
|
},
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Collection Configuration
|
||||||
|
|
||||||
|
To enable folders on a collection, you need to set the `admin.folders` property to `true` on the collection config. This will add a hidden relationship field to the collection that relates to a folder — or no folder.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// Type definition
|
||||||
|
|
||||||
|
type CollectionFoldersConfiguration = boolean
|
||||||
|
```
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// Example usage
|
||||||
|
|
||||||
|
import { buildConfig } from 'payload'
|
||||||
|
|
||||||
|
const config = buildConfig({
|
||||||
|
collections: [
|
||||||
|
{
|
||||||
|
slug: 'pages',
|
||||||
|
// highlight-start
|
||||||
|
admin: {
|
||||||
|
folders: true, // defaults to false
|
||||||
|
},
|
||||||
|
// highlight-end
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
```
|
||||||
@@ -63,19 +63,50 @@ const config = buildConfig({
|
|||||||
export default config
|
export default config
|
||||||
```
|
```
|
||||||
|
|
||||||
Now in your Next.js app, include the `?encodeSourceMaps=true` parameter in any of your API requests. For performance reasons, this should only be done when in draft mode or on preview deployments.
|
## Enabling Content Source Maps
|
||||||
|
|
||||||
|
Now in your Next.js app, you need to add the `encodeSourceMaps` query parameter to your API requests. This will tell Payload to include the Content Source Maps in the API response.
|
||||||
|
|
||||||
|
<Banner type="warning">
|
||||||
|
**Note:** For performance reasons, this should only be done when in draft mode
|
||||||
|
or on preview deployments.
|
||||||
|
</Banner>
|
||||||
|
|
||||||
|
#### REST API
|
||||||
|
|
||||||
|
If you're using the REST API, include the `?encodeSourceMaps=true` search parameter.
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
|
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
|
||||||
const res = await fetch(
|
const res = await fetch(
|
||||||
`${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?where[slug][equals]=${slug}&encodeSourceMaps=true`,
|
`${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?encodeSourceMaps=true&where[slug][equals]=${slug}`,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Local API
|
||||||
|
|
||||||
|
If you're using the Local API, include the `encodeSourceMaps` via the `context` property.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
|
||||||
|
const res = await payload.find({
|
||||||
|
collection: 'pages',
|
||||||
|
where: {
|
||||||
|
slug: {
|
||||||
|
equals: slug,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
context: {
|
||||||
|
encodeSourceMaps: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
And that's it! You are now ready to enter Edit Mode and begin visually editing your content.
|
And that's it! You are now ready to enter Edit Mode and begin visually editing your content.
|
||||||
|
|
||||||
#### Edit Mode
|
## Edit Mode
|
||||||
|
|
||||||
To see Content Link on your site, you first need to visit any preview deployment on Vercel and login using the Vercel Toolbar. When Content Source Maps are detected on the page, a pencil icon will appear in the toolbar. Clicking this icon will enable Edit Mode, highlighting all editable fields on the page in blue.
|
To see Content Link on your site, you first need to visit any preview deployment on Vercel and login using the Vercel Toolbar. When Content Source Maps are detected on the page, a pencil icon will appear in the toolbar. Clicking this icon will enable Edit Mode, highlighting all editable fields on the page in blue.
|
||||||
|
|
||||||
@@ -94,7 +125,9 @@ const { cleaned, encoded } = vercelStegaSplit(text)
|
|||||||
|
|
||||||
### Blocks and array fields
|
### Blocks and array fields
|
||||||
|
|
||||||
All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site. You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.
|
All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are automatically given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site.
|
||||||
|
|
||||||
|
You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
<div data-vercel-edit-target>
|
<div data-vercel-edit-target>
|
||||||
|
|||||||
@@ -85,6 +85,7 @@ formBuilderPlugin({
|
|||||||
checkbox: true,
|
checkbox: true,
|
||||||
number: true,
|
number: true,
|
||||||
message: true,
|
message: true,
|
||||||
|
date: false,
|
||||||
payment: false,
|
payment: false,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@@ -349,6 +350,18 @@ Maps to a `checkbox` input on your front-end. Used to collect a boolean value.
|
|||||||
| `width` | string | The width of the field on the front-end. |
|
| `width` | string | The width of the field on the front-end. |
|
||||||
| `required` | checkbox | Whether or not the field is required when submitted. |
|
| `required` | checkbox | Whether or not the field is required when submitted. |
|
||||||
|
|
||||||
|
### Date
|
||||||
|
|
||||||
|
Maps to a `date` input on your front-end. Used to collect a date value.
|
||||||
|
|
||||||
|
| Property | Type | Description |
|
||||||
|
| -------------- | -------- | ---------------------------------------------------- |
|
||||||
|
| `name` | string | The name of the field. |
|
||||||
|
| `label` | string | The label of the field. |
|
||||||
|
| `defaultValue` | date | The default value of the field. |
|
||||||
|
| `width` | string | The width of the field on the front-end. |
|
||||||
|
| `required` | checkbox | Whether or not the field is required when submitted. |
|
||||||
|
|
||||||
### Number
|
### Number
|
||||||
|
|
||||||
Maps to a `number` input on your front-end. Used to collect a number.
|
Maps to a `number` input on your front-end. Used to collect a number.
|
||||||
@@ -421,6 +434,42 @@ formBuilderPlugin({
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Customizing the date field default value
|
||||||
|
|
||||||
|
You can customize the default value of the date field, as well as any other aspects of the date block, in this way.
|
||||||
|
Note that the submitting client is responsible for the timezone of the date; Payload only stores the date in UTC format.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { fields as formFields } from '@payloadcms/plugin-form-builder'
|
||||||
|
|
||||||
|
// payload.config.ts
|
||||||
|
formBuilderPlugin({
|
||||||
|
fields: {
|
||||||
|
// date: true, // just enable it without any customizations
|
||||||
|
date: {
|
||||||
|
...formFields.date,
|
||||||
|
fields: [
|
||||||
|
...(formFields.date && 'fields' in formFields.date
|
||||||
|
? formFields.date.fields.map((field) => {
|
||||||
|
if ('name' in field && field.name === 'defaultValue') {
|
||||||
|
return {
|
||||||
|
...field,
|
||||||
|
timezone: true, // optionally enable timezone
|
||||||
|
admin: {
|
||||||
|
...field.admin,
|
||||||
|
description: 'This is a date field',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return field
|
||||||
|
})
|
||||||
|
: []),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
## Email
|
## Email
|
||||||
|
|
||||||
This plugin relies on the [email configuration](../email/overview) defined in your Payload configuration. It will read from your config and attempt to send your emails using the credentials provided.
|
This plugin relies on the [email configuration](../email/overview) defined in your Payload configuration. It will read from your config and attempt to send your emails using the credentials provided.
|
||||||
|
|||||||
@@ -309,7 +309,3 @@ import {
|
|||||||
...
|
...
|
||||||
} from '@payloadcms/plugin-stripe/types';
|
} from '@payloadcms/plugin-stripe/types';
|
||||||
```
|
```
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
The [Templates Directory](https://github.com/payloadcms/payload/tree/main/templates) contains an official [E-commerce Template](https://github.com/payloadcms/payload/tree/main/templates/ecommerce) which demonstrates exactly how to configure this plugin in Payload and implement it on your front-end. You can also check out [How to Build An E-Commerce Site With Next.js](https://payloadcms.com/blog/how-to-build-an-e-commerce-site-with-nextjs) post for a bit more context around this template.
|
|
||||||
|
|||||||
@@ -55,10 +55,11 @@ All collection `find` queries are paginated automatically. Responses are returne
|
|||||||
|
|
||||||
All Payload APIs support the pagination controls below. With them, you can create paginated lists of documents within your application:
|
All Payload APIs support the pagination controls below. With them, you can create paginated lists of documents within your application:
|
||||||
|
|
||||||
| Control | Description |
|
| Control | Default | Description |
|
||||||
| ------- | --------------------------------------- |
|
| ------------ | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| `limit` | Limits the number of documents returned |
|
| `limit` | `10` | Limits the number of documents returned per page - set to `0` to show all documents; pagination is automatically disabled when `limit` is `0` as an optimization |
|
||||||
| `page` | Get a specific page number |
|
| `pagination` | `true` | Set to `false` to disable pagination and return all documents |
|
||||||
|
| `page` | `1` | Get a specific page number |
|
||||||
|
|
||||||
### Disabling pagination within Local API
|
### Disabling pagination within Local API
|
||||||
|
|
||||||
|
|||||||
@@ -6,14 +6,14 @@ desc: Converting between lexical richtext and HTML
|
|||||||
keywords: lexical, richtext, html
|
keywords: lexical, richtext, html
|
||||||
---
|
---
|
||||||
|
|
||||||
## Converting Rich Text to HTML
|
## Rich Text to HTML
|
||||||
|
|
||||||
There are two main approaches to convert your Lexical-based rich text to HTML:
|
There are two main approaches to convert your Lexical-based rich text to HTML:
|
||||||
|
|
||||||
1. **Generate HTML on-demand (Recommended)**: Convert JSON to HTML wherever you need it, on-demand.
|
1. **Generate HTML on-demand (Recommended)**: Convert JSON to HTML wherever you need it, on-demand.
|
||||||
2. **Generate HTML within your Collection**: Create a new field that automatically converts your saved JSON content to HTML. This is not recommended because it adds overhead to the Payload API and may not work well with live preview.
|
2. **Generate HTML within your Collection**: Create a new field that automatically converts your saved JSON content to HTML. This is not recommended because it adds overhead to the Payload API and may not work well with live preview.
|
||||||
|
|
||||||
### Generating HTML on-demand (Recommended)
|
### On-demand
|
||||||
|
|
||||||
To convert JSON to HTML on-demand, use the `convertLexicalToHTML` function from `@payloadcms/richtext-lexical/html`. Here's an example of how to use it in a React component in your frontend:
|
To convert JSON to HTML on-demand, use the `convertLexicalToHTML` function from `@payloadcms/richtext-lexical/html`. Here's an example of how to use it in a React component in your frontend:
|
||||||
|
|
||||||
@@ -32,61 +32,81 @@ export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Converting Lexical Blocks
|
#### Dynamic Population (Advanced)
|
||||||
|
|
||||||
If your rich text includes Lexical blocks, you need to provide a way to convert them to HTML. For example:
|
By default, `convertLexicalToHTML` expects fully populated data (e.g. uploads, links, etc.). If you need to dynamically fetch and populate those nodes, use the async variant, `convertLexicalToHTMLAsync`, from `@payloadcms/richtext-lexical/html-async`. You must provide a `populate` function:
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
'use client'
|
'use client'
|
||||||
|
|
||||||
import type { MyInlineBlock, MyTextBlock } from '@/payload-types'
|
|
||||||
import type {
|
|
||||||
DefaultNodeTypes,
|
|
||||||
SerializedBlockNode,
|
|
||||||
SerializedInlineBlockNode,
|
|
||||||
} from '@payloadcms/richtext-lexical'
|
|
||||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||||
|
|
||||||
import {
|
import { getRestPopulateFn } from '@payloadcms/richtext-lexical/client'
|
||||||
convertLexicalToHTML,
|
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||||
type HTMLConvertersFunction,
|
import React, { useEffect, useState } from 'react'
|
||||||
} from '@payloadcms/richtext-lexical/html'
|
|
||||||
import React from 'react'
|
|
||||||
|
|
||||||
type NodeTypes =
|
|
||||||
| DefaultNodeTypes
|
|
||||||
| SerializedBlockNode<MyTextBlock>
|
|
||||||
| SerializedInlineBlockNode<MyInlineBlock>
|
|
||||||
|
|
||||||
const htmlConverters: HTMLConvertersFunction<NodeTypes> = ({
|
|
||||||
defaultConverters,
|
|
||||||
}) => ({
|
|
||||||
...defaultConverters,
|
|
||||||
blocks: {
|
|
||||||
// Each key should match your block's slug
|
|
||||||
myTextBlock: ({ node, providedCSSString }) =>
|
|
||||||
`<div style="background-color: red;${providedCSSString}">${node.fields.text}</div>`,
|
|
||||||
},
|
|
||||||
inlineBlocks: {
|
|
||||||
// Each key should match your inline block's slug
|
|
||||||
myInlineBlock: ({ node, providedStyleTag }) =>
|
|
||||||
      `<span${providedStyleTag}>${node.fields.text}</span>`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||||
const html = convertLexicalToHTML({
|
const [html, setHTML] = useState<null | string>(null)
|
||||||
converters: htmlConverters,
|
useEffect(() => {
|
||||||
|
async function convert() {
|
||||||
|
const html = await convertLexicalToHTMLAsync({
|
||||||
data,
|
data,
|
||||||
|
populate: getRestPopulateFn({
|
||||||
|
apiURL: `http://localhost:3000/api`,
|
||||||
|
}),
|
||||||
})
|
})
|
||||||
|
setHTML(html)
|
||||||
|
}
|
||||||
|
|
||||||
return <div dangerouslySetInnerHTML={{ __html: html }} />
|
void convert()
|
||||||
|
}, [data])
|
||||||
|
|
||||||
|
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Outputting HTML from the Collection
|
Using the REST populate function will send a separate request for each node. If you need to populate a large number of nodes, this may be slow. For improved performance on the server, you can use the `getPayloadPopulateFn` function:
|
||||||
|
|
||||||
To automatically generate HTML from the saved richText field in your Collection, use the `lexicalHTMLField()` helper. This approach converts the JSON to HTML using an `afterRead` hook. For instance:
|
```tsx
|
||||||
|
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||||
|
|
||||||
|
import { getPayloadPopulateFn } from '@payloadcms/richtext-lexical'
|
||||||
|
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||||
|
import { getPayload } from 'payload'
|
||||||
|
import React from 'react'
|
||||||
|
|
||||||
|
import config from '../../config.js'
|
||||||
|
|
||||||
|
export const MyRSCComponent = async ({
|
||||||
|
data,
|
||||||
|
}: {
|
||||||
|
data: SerializedEditorState
|
||||||
|
}) => {
|
||||||
|
const payload = await getPayload({
|
||||||
|
config,
|
||||||
|
})
|
||||||
|
|
||||||
|
const html = await convertLexicalToHTMLAsync({
|
||||||
|
data,
|
||||||
|
populate: await getPayloadPopulateFn({
|
||||||
|
currentDepth: 0,
|
||||||
|
depth: 1,
|
||||||
|
payload,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
|
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### HTML field
|
||||||
|
|
||||||
|
The `lexicalHTMLField()` helper converts JSON to HTML and saves it in a field that is updated every time you read it via an `afterRead` hook. It's generally not recommended for two reasons:
|
||||||
|
|
||||||
|
1. It creates a column with duplicate content in another format.
|
||||||
|
2. In [client-side live preview](/docs/live-preview/client), it makes it not "live".
|
||||||
|
|
||||||
|
Consider using the [on-demand HTML converter above](/docs/rich-text/converting-html#on-demand-recommended) or the [JSX converter](/docs/rich-text/converting-jsx) unless you have a good reason.
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
import type { HTMLConvertersFunction } from '@payloadcms/richtext-lexical/html'
|
import type { HTMLConvertersFunction } from '@payloadcms/richtext-lexical/html'
|
||||||
@@ -154,74 +174,59 @@ const Pages: CollectionConfig = {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Generating HTML in Your Frontend with Dynamic Population (Advanced)
|
## Blocks to HTML
|
||||||
|
|
||||||
By default, `convertLexicalToHTML` expects fully populated data (e.g. uploads, links, etc.). If you need to dynamically fetch and populate those nodes, use the async variant, `convertLexicalToHTMLAsync`, from `@payloadcms/richtext-lexical/html-async`. You must provide a `populate` function:
|
If your rich text includes Lexical blocks, you need to provide a way to convert them to HTML. For example:
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
'use client'
|
'use client'
|
||||||
|
|
||||||
|
import type { MyInlineBlock, MyTextBlock } from '@/payload-types'
|
||||||
|
import type {
|
||||||
|
DefaultNodeTypes,
|
||||||
|
SerializedBlockNode,
|
||||||
|
SerializedInlineBlockNode,
|
||||||
|
} from '@payloadcms/richtext-lexical'
|
||||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||||
|
|
||||||
import { getRestPopulateFn } from '@payloadcms/richtext-lexical/client'
|
import {
|
||||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
convertLexicalToHTML,
|
||||||
import React, { useEffect, useState } from 'react'
|
type HTMLConvertersFunction,
|
||||||
|
} from '@payloadcms/richtext-lexical/html'
|
||||||
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
|
||||||
const [html, setHTML] = useState<null | string>(null)
|
|
||||||
useEffect(() => {
|
|
||||||
async function convert() {
|
|
||||||
const html = await convertLexicalToHTMLAsync({
|
|
||||||
data,
|
|
||||||
populate: getRestPopulateFn({
|
|
||||||
apiURL: `http://localhost:3000/api`,
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
setHTML(html)
|
|
||||||
}
|
|
||||||
|
|
||||||
void convert()
|
|
||||||
}, [data])
|
|
||||||
|
|
||||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Using the REST populate function will send a separate request for each node. If you need to populate a large number of nodes, this may be slow. For improved performance on the server, you can use the `getPayloadPopulateFn` function:
|
|
||||||
|
|
||||||
```tsx
|
|
||||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
|
||||||
|
|
||||||
import { getPayloadPopulateFn } from '@payloadcms/richtext-lexical'
|
|
||||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
|
||||||
import { getPayload } from 'payload'
|
|
||||||
import React from 'react'
|
import React from 'react'
|
||||||
|
|
||||||
import config from '../../config.js'
|
type NodeTypes =
|
||||||
|
| DefaultNodeTypes
|
||||||
|
| SerializedBlockNode<MyTextBlock>
|
||||||
|
| SerializedInlineBlockNode<MyInlineBlock>
|
||||||
|
|
||||||
export const MyRSCComponent = async ({
|
const htmlConverters: HTMLConvertersFunction<NodeTypes> = ({
|
||||||
data,
|
defaultConverters,
|
||||||
}: {
|
}) => ({
|
||||||
data: SerializedEditorState
|
...defaultConverters,
|
||||||
}) => {
|
blocks: {
|
||||||
const payload = await getPayload({
|
// Each key should match your block's slug
|
||||||
config,
|
myTextBlock: ({ node, providedCSSString }) =>
|
||||||
|
`<div style="background-color: red;${providedCSSString}">${node.fields.text}</div>`,
|
||||||
|
},
|
||||||
|
inlineBlocks: {
|
||||||
|
// Each key should match your inline block's slug
|
||||||
|
myInlineBlock: ({ node, providedStyleTag }) =>
|
||||||
|
      `<span${providedStyleTag}>${node.fields.text}</span>`,
|
||||||
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const html = await convertLexicalToHTMLAsync({
|
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||||
|
const html = convertLexicalToHTML({
|
||||||
|
converters: htmlConverters,
|
||||||
data,
|
data,
|
||||||
populate: await getPayloadPopulateFn({
|
|
||||||
currentDepth: 0,
|
|
||||||
depth: 1,
|
|
||||||
payload,
|
|
||||||
}),
|
|
||||||
})
|
})
|
||||||
|
|
||||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
return <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Converting HTML to Richtext
|
## HTML to Richtext
|
||||||
|
|
||||||
If you need to convert raw HTML into a Lexical editor state, use `convertHTMLToLexical` from `@payloadcms/richtext-lexical`, along with the [editorConfigFactory to retrieve the editor config](/docs/rich-text/converters#retrieving-the-editor-config):
|
If you need to convert raw HTML into a Lexical editor state, use `convertHTMLToLexical` from `@payloadcms/richtext-lexical`, along with the [editorConfigFactory to retrieve the editor config](/docs/rich-text/converters#retrieving-the-editor-config):
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and JSX
|
|||||||
keywords: lexical, richtext, jsx
|
keywords: lexical, richtext, jsx
|
||||||
---
|
---
|
||||||
|
|
||||||
## Converting Richtext to JSX
|
## Richtext to JSX
|
||||||
|
|
||||||
To convert richtext to JSX, import the `RichText` component from `@payloadcms/richtext-lexical/react` and pass the richtext content to it:
|
To convert richtext to JSX, import the `RichText` component from `@payloadcms/richtext-lexical/react` and pass the richtext content to it:
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ The `RichText` component includes built-in converters for common Lexical nodes.
|
|||||||
populated data to work correctly.
|
populated data to work correctly.
|
||||||
</Banner>
|
</Banner>
|
||||||
|
|
||||||
### Converting Internal Links
|
### Internal Links
|
||||||
|
|
||||||
By default, Payload doesn't know how to convert **internal** links to JSX, as it doesn't know what the corresponding URL of the internal link is. You'll notice that you get a "found internal link, but internalDocToHref is not provided" error in the console when you try to render content with internal links.
|
By default, Payload doesn't know how to convert **internal** links to JSX, as it doesn't know what the corresponding URL of the internal link is. You'll notice that you get a "found internal link, but internalDocToHref is not provided" error in the console when you try to render content with internal links.
|
||||||
|
|
||||||
@@ -81,7 +81,7 @@ export const MyComponent: React.FC<{
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Converting Lexical Blocks
|
### Lexical Blocks
|
||||||
|
|
||||||
If your rich text includes custom Blocks or Inline Blocks, you must supply custom converters that match each block's slug. This converter is not included by default, as Payload doesn't know how to render your custom blocks.
|
If your rich text includes custom Blocks or Inline Blocks, you must supply custom converters that match each block's slug. This converter is not included by default, as Payload doesn't know how to render your custom blocks.
|
||||||
|
|
||||||
@@ -133,7 +133,7 @@ export const MyComponent: React.FC<{
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Overriding Default JSX Converters
|
### Overriding Converters
|
||||||
|
|
||||||
You can override any of the default JSX converters by passing your custom converter, keyed to the node type, to the `converters` prop / the converters function.
|
You can override any of the default JSX converters by passing your custom converter, keyed to the node type, to the `converters` prop / the converters function.
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and Markdown / MDX
|
|||||||
keywords: lexical, richtext, markdown, md, mdx
|
keywords: lexical, richtext, markdown, md, mdx
|
||||||
---
|
---
|
||||||
|
|
||||||
## Converting Richtext to Markdown
|
## Richtext to Markdown
|
||||||
|
|
||||||
If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert the lexical editor state to Markdown with the following:
|
If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert the lexical editor state to Markdown with the following:
|
||||||
|
|
||||||
@@ -91,7 +91,7 @@ const Pages: CollectionConfig = {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Converting Markdown to Richtext
|
## Markdown to Richtext
|
||||||
|
|
||||||
If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert Markdown to the lexical editor state with the following:
|
If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert Markdown to the lexical editor state with the following:
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and plaintext
|
|||||||
keywords: lexical, richtext, plaintext, text
|
keywords: lexical, richtext, plaintext, text
|
||||||
---
|
---
|
||||||
|
|
||||||
## Converting Richtext to Plaintext
|
## Richtext to Plaintext
|
||||||
|
|
||||||
Here's how you can convert richtext data to plaintext using `@payloadcms/richtext-lexical/plaintext`.
|
Here's how you can convert richtext data to plaintext using `@payloadcms/richtext-lexical/plaintext`.
|
||||||
|
|
||||||
|
|||||||
@@ -143,7 +143,7 @@ import { CallToAction } from '../blocks/CallToAction'
|
|||||||
Here's an overview of all the included features:
|
Here's an overview of all the included features:
|
||||||
|
|
||||||
| Feature Name | Included by default | Description |
|
| Feature Name | Included by default | Description |
|
||||||
| ------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
| ----------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| **`BoldFeature`** | Yes | Handles the bold text format |
|
| **`BoldFeature`** | Yes | Handles the bold text format |
|
||||||
| **`ItalicFeature`** | Yes | Handles the italic text format |
|
| **`ItalicFeature`** | Yes | Handles the italic text format |
|
||||||
| **`UnderlineFeature`** | Yes | Handles the underline text format |
|
| **`UnderlineFeature`** | Yes | Handles the underline text format |
|
||||||
@@ -168,6 +168,7 @@ Here's an overview of all the included features:
|
|||||||
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
|
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
|
||||||
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
|
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
|
||||||
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
|
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
|
||||||
|
| **`EXPERIMENTAL_TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. |
|
||||||
|
|
||||||
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
|
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
|
||||||
|
|
||||||
|
|||||||
@@ -84,6 +84,7 @@ pnpm add @payloadcms/storage-s3
|
|||||||
- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
|
- The `config` object can be any [`S3ClientConfig`](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3) object (from [`@aws-sdk/client-s3`](https://github.com/aws/aws-sdk-js-v3)). _This is highly dependent on your AWS setup_. Check the AWS documentation for more information.
|
||||||
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
|
- When enabled, this package will automatically set `disableLocalStorage` to `true` for each collection.
|
||||||
- When deploying to Vercel, server uploads are limited with 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow CORS PUT method for the bucket to your website.
|
- When deploying to Vercel, server uploads are limited with 4.5MB. Set `clientUploads` to `true` to do uploads directly on the client. You must allow CORS PUT method for the bucket to your website.
|
||||||
|
- Configure `signedDownloads` (either globally of per-collection in `collections`) to use [presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) for files downloading. This can improve performance for large files (like videos) while still respecting your access control.
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
import { s3Storage } from '@payloadcms/storage-s3'
|
import { s3Storage } from '@payloadcms/storage-s3'
|
||||||
|
|||||||
@@ -74,21 +74,13 @@ export const rootEslintConfig = [
|
|||||||
'no-console': 'off',
|
'no-console': 'off',
|
||||||
'perfectionist/sort-object-types': 'off',
|
'perfectionist/sort-object-types': 'off',
|
||||||
'perfectionist/sort-objects': 'off',
|
'perfectionist/sort-objects': 'off',
|
||||||
|
'payload/no-relative-monorepo-imports': 'off',
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
export default [
|
export default [
|
||||||
...rootEslintConfig,
|
...rootEslintConfig,
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
projectService: true,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
files: ['packages/eslint-config/**/*.ts'],
|
files: ['packages/eslint-config/**/*.ts'],
|
||||||
rules: {
|
rules: {
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import type { CollectionConfig } from 'payload/types'
|
import type { CollectionConfig } from 'payload/types'
|
||||||
|
|
||||||
import { admins } from './access/admins'
|
import { admins } from './access/admins'
|
||||||
import adminsAndUser from './access/adminsAndUser'
|
import { adminsAndUser } from './access/adminsAndUser'
|
||||||
import { anyone } from './access/anyone'
|
import { anyone } from './access/anyone'
|
||||||
import { checkRole } from './access/checkRole'
|
import { checkRole } from './access/checkRole'
|
||||||
import { loginAfterCreate } from './hooks/loginAfterCreate'
|
import { loginAfterCreate } from './hooks/loginAfterCreate'
|
||||||
@@ -25,6 +25,7 @@ export const Users: CollectionConfig = {
|
|||||||
create: anyone,
|
create: anyone,
|
||||||
update: adminsAndUser,
|
update: adminsAndUser,
|
||||||
delete: admins,
|
delete: admins,
|
||||||
|
unlock: admins,
|
||||||
admin: ({ req: { user } }) => checkRole(['admin'], user),
|
admin: ({ req: { user } }) => checkRole(['admin'], user),
|
||||||
},
|
},
|
||||||
hooks: {
|
hooks: {
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import type { Access } from 'payload/config'
|
import type { Access } from 'payload'
|
||||||
|
|
||||||
import { checkRole } from './checkRole'
|
import { checkRole } from './checkRole'
|
||||||
|
|
||||||
|
|||||||
@@ -1,19 +1,17 @@
|
|||||||
import type { Access } from 'payload/config'
|
import type { Access } from 'payload'
|
||||||
|
|
||||||
import { checkRole } from './checkRole'
|
import { checkRole } from './checkRole'
|
||||||
|
|
||||||
const adminsAndUser: Access = ({ req: { user } }) => {
|
export const adminsAndUser: Access = ({ req: { user } }) => {
|
||||||
if (user) {
|
if (user) {
|
||||||
if (checkRole(['admin'], user)) {
|
if (checkRole(['admin'], user)) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: user.id,
|
id: { equals: user.id },
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
export default adminsAndUser
|
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
import type { Access } from 'payload/config'
|
import type { Access } from 'payload'
|
||||||
|
|
||||||
export const anyone: Access = () => true
|
export const anyone: Access = () => true
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import type { User } from '../../payload-types'
|
import type { User } from '../../payload-types'
|
||||||
|
|
||||||
export const checkRole = (allRoles: User['roles'] = [], user: User = undefined): boolean => {
|
export const checkRole = (allRoles: User['roles'] = [], user: User | null = null): boolean => {
|
||||||
if (user) {
|
if (user) {
|
||||||
if (
|
if (
|
||||||
allRoles.some((role) => {
|
allRoles.some((role) => {
|
||||||
@@ -8,8 +8,9 @@ export const checkRole = (allRoles: User['roles'] = [], user: User = undefined):
|
|||||||
return individualRole === role
|
return individualRole === role
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
)
|
) {
|
||||||
{return true}
|
return true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return false
|
return false
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import type { FieldHook } from 'payload/types'
|
import type { FieldHook } from 'payload'
|
||||||
|
|
||||||
import type { User } from '../../payload-types'
|
import type { User } from '../../payload-types'
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
||||||
import { lexicalEditor } from '@payloadcms/richtext-lexical'
|
import { lexicalEditor } from '@payloadcms/richtext-lexical'
|
||||||
import path from 'path'
|
import path from 'path'
|
||||||
import express from 'express'
|
|
||||||
import { buildConfig } from 'payload'
|
import { buildConfig } from 'payload'
|
||||||
import { fileURLToPath } from 'url'
|
import { fileURLToPath } from 'url'
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import express from 'express'
|
import express from 'express'
|
||||||
import type { Request, Response } from 'express'
|
|
||||||
import { parse } from 'url'
|
import { parse } from 'url'
|
||||||
import next from 'next'
|
import next from 'next'
|
||||||
|
|
||||||
|
|||||||
@@ -58,7 +58,7 @@ See the [Collections](https://payloadcms.com/docs/configuration/collections) doc
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
For more details on how to extend this functionality, see the [Live Preview](https://payloadcms.com/docs/live-preview) docs.
|
For more details on how to extend this functionality, see the [Live Preview](https://payloadcms.com/docs/live-preview/overview) docs.
|
||||||
|
|
||||||
## Front-end
|
## Front-end
|
||||||
|
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ export const home: Partial<Page> = {
|
|||||||
type: 'link',
|
type: 'link',
|
||||||
children: [{ text: 'Live Preview' }],
|
children: [{ text: 'Live Preview' }],
|
||||||
newTab: true,
|
newTab: true,
|
||||||
url: 'https://payloadcms.com/docs/live-preview',
|
url: 'https://payloadcms.com/docs/live-preview/overview',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
text: ' you can edit this page in the admin panel and see the changes reflected here in real time.',
|
text: ' you can edit this page in the admin panel and see the changes reflected here in real time.',
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
import { BeforeSync, DocToSync } from '@payloadcms/plugin-search/types'
|
import { BeforeSync, DocToSync } from '@payloadcms/plugin-search/types'
|
||||||
|
|
||||||
export const beforeSyncWithSearch: BeforeSync = async ({ originalDoc, searchDoc, payload }) => {
|
export const beforeSyncWithSearch: BeforeSync = async ({ req, originalDoc, searchDoc }) => {
|
||||||
const {
|
const {
|
||||||
doc: { relationTo: collection },
|
doc: { relationTo: collection },
|
||||||
} = searchDoc
|
} = searchDoc
|
||||||
|
|
||||||
const { slug, id, categories, title, meta, excerpt } = originalDoc
|
const { slug, id, categories, title, meta } = originalDoc
|
||||||
|
|
||||||
const modifiedDoc: DocToSync = {
|
const modifiedDoc: DocToSync = {
|
||||||
...searchDoc,
|
...searchDoc,
|
||||||
@@ -20,25 +20,41 @@ export const beforeSyncWithSearch: BeforeSync = async ({ originalDoc, searchDoc,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (categories && Array.isArray(categories) && categories.length > 0) {
|
if (categories && Array.isArray(categories) && categories.length > 0) {
|
||||||
// get full categories and keep a flattened copy of their most important properties
|
const populatedCategories: { id: string | number; title: string }[] = []
|
||||||
try {
|
for (const category of categories) {
|
||||||
const mappedCategories = categories.map((category) => {
|
if (!category) {
|
||||||
const { id, title } = category
|
continue
|
||||||
|
|
||||||
return {
|
|
||||||
relationTo: 'categories',
|
|
||||||
id,
|
|
||||||
title,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (typeof category === 'object') {
|
||||||
|
populatedCategories.push(category)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const doc = await req.payload.findByID({
|
||||||
|
collection: 'categories',
|
||||||
|
id: category,
|
||||||
|
disableErrors: true,
|
||||||
|
depth: 0,
|
||||||
|
select: { title: true },
|
||||||
|
req,
|
||||||
})
|
})
|
||||||
|
|
||||||
modifiedDoc.categories = mappedCategories
|
if (doc !== null) {
|
||||||
} catch (err) {
|
populatedCategories.push(doc)
|
||||||
|
} else {
|
||||||
console.error(
|
console.error(
|
||||||
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
modifiedDoc.categories = populatedCategories.map((each) => ({
|
||||||
|
relationTo: 'categories',
|
||||||
|
categoryID: String(each.id),
|
||||||
|
title: each.title,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
return modifiedDoc
|
return modifiedDoc
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ export const searchFields: Field[] = [
|
|||||||
type: 'text',
|
type: 'text',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'id',
|
name: 'categoryID',
|
||||||
type: 'text',
|
type: 'text',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "payload-monorepo",
|
"name": "payload-monorepo",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
@@ -74,9 +74,9 @@
|
|||||||
"docker:start": "docker compose -f test/docker-compose.yml up -d",
|
"docker:start": "docker compose -f test/docker-compose.yml up -d",
|
||||||
"docker:stop": "docker compose -f test/docker-compose.yml down",
|
"docker:stop": "docker compose -f test/docker-compose.yml down",
|
||||||
"force:build": "pnpm run build:core:force",
|
"force:build": "pnpm run build:core:force",
|
||||||
"lint": "turbo run lint --concurrency 1 --continue",
|
"lint": "turbo run lint --log-order=grouped --continue",
|
||||||
"lint-staged": "lint-staged",
|
"lint-staged": "lint-staged",
|
||||||
"lint:fix": "turbo run lint:fix --concurrency 1 --continue",
|
"lint:fix": "turbo run lint:fix --log-order=grouped --continue",
|
||||||
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
|
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
|
||||||
"prepare": "husky",
|
"prepare": "husky",
|
||||||
"prepare-run-test-against-prod": "pnpm bf && rm -rf test/packed && rm -rf test/node_modules && rm -rf app && rm -f test/pnpm-lock.yaml && pnpm run script:pack --all --no-build --dest test/packed && pnpm runts test/setupProd.ts && cd test && pnpm i --ignore-workspace && cd ..",
|
"prepare-run-test-against-prod": "pnpm bf && rm -rf test/packed && rm -rf test/node_modules && rm -rf app && rm -f test/pnpm-lock.yaml && pnpm run script:pack --all --no-build --dest test/packed && pnpm runts test/setupProd.ts && cd test && pnpm i --ignore-workspace && cd ..",
|
||||||
|
|||||||
@@ -1,18 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/admin-bar",
|
"name": "@payloadcms/admin-bar",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "An admin bar for React apps using Payload",
|
"description": "An admin bar for React apps using Payload",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -1,19 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
ignores: ['bin/cli.js'],
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "create-payload-app",
|
"name": "create-payload-app",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
import { readFileSync } from 'fs'
|
|
||||||
import { fileURLToPath } from 'node:url'
|
|
||||||
import path from 'path'
|
|
||||||
const filename = fileURLToPath(import.meta.url)
|
|
||||||
const dirname = path.dirname(filename)
|
|
||||||
|
|
||||||
const packageJson = JSON.parse(readFileSync(path.resolve(dirname, '../../package.json'), 'utf-8'))
|
|
||||||
export const PACKAGE_VERSION = packageJson.version
|
|
||||||
@@ -10,6 +10,7 @@ import type { CliArgs, DbType, ProjectExample, ProjectTemplate } from '../types.
|
|||||||
import { createProject } from './create-project.js'
|
import { createProject } from './create-project.js'
|
||||||
import { dbReplacements } from './replacements.js'
|
import { dbReplacements } from './replacements.js'
|
||||||
import { getValidTemplates } from './templates.js'
|
import { getValidTemplates } from './templates.js'
|
||||||
|
import { manageEnvFiles } from './manage-env-files.js'
|
||||||
|
|
||||||
describe('createProject', () => {
|
describe('createProject', () => {
|
||||||
let projectDir: string
|
let projectDir: string
|
||||||
@@ -154,5 +155,75 @@ describe('createProject', () => {
|
|||||||
expect(content).toContain(dbReplacement.configReplacement().join('\n'))
|
expect(content).toContain(dbReplacement.configReplacement().join('\n'))
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
describe('managing env files', () => {
|
||||||
|
it('updates .env files without overwriting existing data', async () => {
|
||||||
|
const envFilePath = path.join(projectDir, '.env')
|
||||||
|
const envExampleFilePath = path.join(projectDir, '.env.example')
|
||||||
|
|
||||||
|
fse.ensureDirSync(projectDir)
|
||||||
|
fse.ensureFileSync(envFilePath)
|
||||||
|
fse.ensureFileSync(envExampleFilePath)
|
||||||
|
|
||||||
|
const initialEnvContent = `CUSTOM_VAR=custom-value\nDATABASE_URI=old-connection\n`
|
||||||
|
const initialEnvExampleContent = `CUSTOM_VAR=custom-value\nDATABASE_URI=old-connection\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n`
|
||||||
|
|
||||||
|
fse.writeFileSync(envFilePath, initialEnvContent)
|
||||||
|
fse.writeFileSync(envExampleFilePath, initialEnvExampleContent)
|
||||||
|
|
||||||
|
await manageEnvFiles({
|
||||||
|
cliArgs: {
|
||||||
|
'--debug': true,
|
||||||
|
} as CliArgs,
|
||||||
|
databaseType: 'mongodb',
|
||||||
|
databaseUri: 'mongodb://localhost:27017/test',
|
||||||
|
payloadSecret: 'test-secret',
|
||||||
|
projectDir,
|
||||||
|
template: undefined,
|
||||||
|
})
|
||||||
|
|
||||||
|
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
|
||||||
|
|
||||||
|
expect(updatedEnvContent).toContain('CUSTOM_VAR=custom-value')
|
||||||
|
expect(updatedEnvContent).toContain('DATABASE_URI=mongodb://localhost:27017/test')
|
||||||
|
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=test-secret')
|
||||||
|
|
||||||
|
const updatedEnvExampleContent = fse.readFileSync(envExampleFilePath, 'utf-8')
|
||||||
|
|
||||||
|
expect(updatedEnvExampleContent).toContain('CUSTOM_VAR=custom-value')
|
||||||
|
expect(updatedEnvContent).toContain('DATABASE_URI=mongodb://localhost:27017/test')
|
||||||
|
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=test-secret')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('creates .env and .env.example if they do not exist', async () => {
|
||||||
|
const envFilePath = path.join(projectDir, '.env')
|
||||||
|
const envExampleFilePath = path.join(projectDir, '.env.example')
|
||||||
|
|
||||||
|
fse.ensureDirSync(projectDir)
|
||||||
|
|
||||||
|
if (fse.existsSync(envFilePath)) fse.removeSync(envFilePath)
|
||||||
|
if (fse.existsSync(envExampleFilePath)) fse.removeSync(envExampleFilePath)
|
||||||
|
|
||||||
|
await manageEnvFiles({
|
||||||
|
cliArgs: {
|
||||||
|
'--debug': true,
|
||||||
|
} as CliArgs,
|
||||||
|
databaseUri: '',
|
||||||
|
payloadSecret: '',
|
||||||
|
projectDir,
|
||||||
|
template: undefined,
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(fse.existsSync(envFilePath)).toBe(true)
|
||||||
|
expect(fse.existsSync(envExampleFilePath)).toBe(true)
|
||||||
|
|
||||||
|
const updatedEnvContent = fse.readFileSync(envFilePath, 'utf-8')
|
||||||
|
expect(updatedEnvContent).toContain('DATABASE_URI=your-connection-string-here')
|
||||||
|
expect(updatedEnvContent).toContain('PAYLOAD_SECRET=YOUR_SECRET_HERE')
|
||||||
|
|
||||||
|
const updatedEnvExampleContent = fse.readFileSync(envExampleFilePath, 'utf-8')
|
||||||
|
expect(updatedEnvExampleContent).toContain('DATABASE_URI=your-connection-string-here')
|
||||||
|
expect(updatedEnvExampleContent).toContain('PAYLOAD_SECRET=YOUR_SECRET_HERE')
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -6,66 +6,57 @@ import type { CliArgs, DbType, ProjectTemplate } from '../types.js'
|
|||||||
import { debug, error } from '../utils/log.js'
|
import { debug, error } from '../utils/log.js'
|
||||||
import { dbChoiceRecord } from './select-db.js'
|
import { dbChoiceRecord } from './select-db.js'
|
||||||
|
|
||||||
const updateEnvExampleVariables = (contents: string, databaseType: DbType | undefined): string => {
|
const updateEnvExampleVariables = (
|
||||||
return contents
|
contents: string,
|
||||||
|
databaseType: DbType | undefined,
|
||||||
|
payloadSecret?: string,
|
||||||
|
databaseUri?: string,
|
||||||
|
): string => {
|
||||||
|
const seenKeys = new Set<string>()
|
||||||
|
const updatedEnv = contents
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.map((line) => {
|
.map((line) => {
|
||||||
if (line.startsWith('#') || !line.includes('=')) {
|
if (line.startsWith('#') || !line.includes('=')) {
|
||||||
return line // Preserve comments and unrelated lines
|
return line
|
||||||
}
|
}
|
||||||
|
|
||||||
const [key] = line.split('=')
|
const [key] = line.split('=')
|
||||||
|
|
||||||
|
if (!key) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if (key === 'DATABASE_URI' || key === 'POSTGRES_URL' || key === 'MONGODB_URI') {
|
if (key === 'DATABASE_URI' || key === 'POSTGRES_URL' || key === 'MONGODB_URI') {
|
||||||
const dbChoice = databaseType ? dbChoiceRecord[databaseType] : null
|
const dbChoice = databaseType ? dbChoiceRecord[databaseType] : null
|
||||||
|
|
||||||
if (dbChoice) {
|
if (dbChoice) {
|
||||||
const placeholderUri = `${dbChoice.dbConnectionPrefix}your-database-name${
|
const placeholderUri = databaseUri
|
||||||
dbChoice.dbConnectionSuffix || ''
|
? databaseUri
|
||||||
}`
|
: `${dbChoice.dbConnectionPrefix}your-database-name${dbChoice.dbConnectionSuffix || ''}`
|
||||||
return databaseType === 'vercel-postgres'
|
line =
|
||||||
|
databaseType === 'vercel-postgres'
|
||||||
? `POSTGRES_URL=${placeholderUri}`
|
? `POSTGRES_URL=${placeholderUri}`
|
||||||
: `DATABASE_URI=${placeholderUri}`
|
: `DATABASE_URI=${placeholderUri}`
|
||||||
}
|
}
|
||||||
|
|
||||||
return `DATABASE_URI=your-database-connection-here` // Fallback
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (key === 'PAYLOAD_SECRET' || key === 'PAYLOAD_SECRET_KEY') {
|
if (key === 'PAYLOAD_SECRET' || key === 'PAYLOAD_SECRET_KEY') {
|
||||||
return `PAYLOAD_SECRET=YOUR_SECRET_HERE`
|
line = `PAYLOAD_SECRET=${payloadSecret || 'YOUR_SECRET_HERE'}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// handles dupes
|
||||||
|
if (seenKeys.has(key)) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
seenKeys.add(key)
|
||||||
|
|
||||||
return line
|
return line
|
||||||
})
|
})
|
||||||
|
.filter(Boolean)
|
||||||
|
.reverse()
|
||||||
.join('\n')
|
.join('\n')
|
||||||
}
|
|
||||||
|
|
||||||
const generateEnvContent = (
|
return updatedEnv
|
||||||
existingEnv: string,
|
|
||||||
databaseType: DbType | undefined,
|
|
||||||
databaseUri: string,
|
|
||||||
payloadSecret: string,
|
|
||||||
): string => {
|
|
||||||
const dbKey = databaseType === 'vercel-postgres' ? 'POSTGRES_URL' : 'DATABASE_URI'
|
|
||||||
|
|
||||||
const envVars: Record<string, string> = {}
|
|
||||||
existingEnv
|
|
||||||
.split('\n')
|
|
||||||
.filter((line) => line.includes('=') && !line.startsWith('#'))
|
|
||||||
.forEach((line) => {
|
|
||||||
const [key, value] = line.split('=')
|
|
||||||
// @ts-expect-error - vestiges of when tsconfig was not strict. Feel free to improve
|
|
||||||
envVars[key] = value
|
|
||||||
})
|
|
||||||
|
|
||||||
// Override specific keys
|
|
||||||
envVars[dbKey] = databaseUri
|
|
||||||
envVars['PAYLOAD_SECRET'] = payloadSecret
|
|
||||||
|
|
||||||
// Rebuild content
|
|
||||||
return Object.entries(envVars)
|
|
||||||
.map(([key, value]) => `${key}=${value}`)
|
|
||||||
.join('\n')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Parse and swap .env.example values and write .env */
|
/** Parse and swap .env.example values and write .env */
|
||||||
@@ -88,42 +79,71 @@ export async function manageEnvFiles(args: {
|
|||||||
|
|
||||||
const envExamplePath = path.join(projectDir, '.env.example')
|
const envExamplePath = path.join(projectDir, '.env.example')
|
||||||
const envPath = path.join(projectDir, '.env')
|
const envPath = path.join(projectDir, '.env')
|
||||||
|
const emptyEnvContent = `# Added by Payload\nDATABASE_URI=your-connection-string-here\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n`
|
||||||
try {
|
try {
|
||||||
let updatedExampleContents: string
|
let updatedExampleContents: string
|
||||||
|
|
||||||
// Update .env.example
|
if (template?.type === 'plugin') {
|
||||||
if (template?.type === 'starter') {
|
if (debugFlag) {
|
||||||
if (!fs.existsSync(envExamplePath)) {
|
debug(`plugin template detected - no .env added .env.example added`)
|
||||||
error(`.env.example file not found at ${envExamplePath}`)
|
}
|
||||||
process.exit(1)
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!fs.existsSync(envExamplePath)) {
|
||||||
|
updatedExampleContents = updateEnvExampleVariables(
|
||||||
|
emptyEnvContent,
|
||||||
|
databaseType,
|
||||||
|
payloadSecret,
|
||||||
|
databaseUri,
|
||||||
|
)
|
||||||
|
|
||||||
|
await fs.writeFile(envExamplePath, updatedExampleContents)
|
||||||
|
if (debugFlag) {
|
||||||
|
debug(`.env.example file successfully created`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
const envExampleContents = await fs.readFile(envExamplePath, 'utf8')
|
const envExampleContents = await fs.readFile(envExamplePath, 'utf8')
|
||||||
updatedExampleContents = updateEnvExampleVariables(envExampleContents, databaseType)
|
const mergedEnvs = envExampleContents + '\n' + emptyEnvContent
|
||||||
|
updatedExampleContents = updateEnvExampleVariables(
|
||||||
await fs.writeFile(envExamplePath, updatedExampleContents.trimEnd() + '\n')
|
mergedEnvs,
|
||||||
|
databaseType,
|
||||||
|
payloadSecret,
|
||||||
|
databaseUri,
|
||||||
|
)
|
||||||
|
|
||||||
|
await fs.writeFile(envExamplePath, updatedExampleContents)
|
||||||
if (debugFlag) {
|
if (debugFlag) {
|
||||||
debug(`.env.example file successfully updated`)
|
debug(`.env.example file successfully updated`)
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
updatedExampleContents = `# Added by Payload\nDATABASE_URI=your-connection-string-here\nPAYLOAD_SECRET=YOUR_SECRET_HERE\n`
|
|
||||||
await fs.writeFile(envExamplePath, updatedExampleContents.trimEnd() + '\n')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge existing variables and create or update .env
|
if (!fs.existsSync(envPath)) {
|
||||||
const envExampleContents = await fs.readFile(envExamplePath, 'utf8')
|
const envContent = updateEnvExampleVariables(
|
||||||
const envContent = generateEnvContent(
|
emptyEnvContent,
|
||||||
envExampleContents,
|
|
||||||
databaseType,
|
databaseType,
|
||||||
databaseUri,
|
|
||||||
payloadSecret,
|
payloadSecret,
|
||||||
|
databaseUri,
|
||||||
)
|
)
|
||||||
await fs.writeFile(envPath, `# Added by Payload\n${envContent.trimEnd()}\n`)
|
await fs.writeFile(envPath, envContent)
|
||||||
|
|
||||||
if (debugFlag) {
|
if (debugFlag) {
|
||||||
debug(`.env file successfully created or updated`)
|
debug(`.env file successfully created`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const envContents = await fs.readFile(envPath, 'utf8')
|
||||||
|
const mergedEnvs = envContents + '\n' + emptyEnvContent
|
||||||
|
const updatedEnvContents = updateEnvExampleVariables(
|
||||||
|
mergedEnvs,
|
||||||
|
databaseType,
|
||||||
|
payloadSecret,
|
||||||
|
databaseUri,
|
||||||
|
)
|
||||||
|
|
||||||
|
await fs.writeFile(envPath, updatedEnvContents)
|
||||||
|
if (debugFlag) {
|
||||||
|
debug(`.env file successfully updated`)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} catch (err: unknown) {
|
} catch (err: unknown) {
|
||||||
error('Unable to manage environment files')
|
error('Unable to manage environment files')
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
import type { ProjectTemplate } from '../types.js'
|
import type { ProjectTemplate } from '../types.js'
|
||||||
|
|
||||||
import { error, info } from '../utils/log.js'
|
import { error, info } from '../utils/log.js'
|
||||||
import { PACKAGE_VERSION } from './constants.js'
|
|
||||||
|
|
||||||
export function validateTemplate(templateName: string): boolean {
|
export function validateTemplate({ templateName }: { templateName: string }): boolean {
|
||||||
const validTemplates = getValidTemplates()
|
const validTemplates = getValidTemplates()
|
||||||
if (!validTemplates.map((t) => t.name).includes(templateName)) {
|
if (!validTemplates.map((t) => t.name).includes(templateName)) {
|
||||||
error(`'${templateName}' is not a valid template.`)
|
error(`'${templateName}' is not a valid template.`)
|
||||||
@@ -20,13 +19,13 @@ export function getValidTemplates(): ProjectTemplate[] {
|
|||||||
name: 'blank',
|
name: 'blank',
|
||||||
type: 'starter',
|
type: 'starter',
|
||||||
description: 'Blank 3.0 Template',
|
description: 'Blank 3.0 Template',
|
||||||
url: `https://github.com/payloadcms/payload/templates/blank#v${PACKAGE_VERSION}`,
|
url: `https://github.com/payloadcms/payload/templates/blank#main`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'website',
|
name: 'website',
|
||||||
type: 'starter',
|
type: 'starter',
|
||||||
description: 'Website Template',
|
description: 'Website Template',
|
||||||
url: `https://github.com/payloadcms/payload/templates/website#v${PACKAGE_VERSION}`,
|
url: `https://github.com/payloadcms/payload/templates/website#main`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'plugin',
|
name: 'plugin',
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import execa from 'execa'
|
|
||||||
import fse from 'fs-extra'
|
import fse from 'fs-extra'
|
||||||
import { fileURLToPath } from 'node:url'
|
import { fileURLToPath } from 'node:url'
|
||||||
import path from 'path'
|
import path from 'path'
|
||||||
@@ -9,6 +8,7 @@ const dirname = path.dirname(filename)
|
|||||||
import type { NextAppDetails } from '../types.js'
|
import type { NextAppDetails } from '../types.js'
|
||||||
|
|
||||||
import { copyRecursiveSync } from '../utils/copy-recursive-sync.js'
|
import { copyRecursiveSync } from '../utils/copy-recursive-sync.js'
|
||||||
|
import { getLatestPackageVersion } from '../utils/getLatestPackageVersion.js'
|
||||||
import { info } from '../utils/log.js'
|
import { info } from '../utils/log.js'
|
||||||
import { getPackageManager } from './get-package-manager.js'
|
import { getPackageManager } from './get-package-manager.js'
|
||||||
import { installPackages } from './install-packages.js'
|
import { installPackages } from './install-packages.js'
|
||||||
@@ -36,15 +36,8 @@ export async function updatePayloadInProject(
|
|||||||
|
|
||||||
const packageManager = await getPackageManager({ projectDir })
|
const packageManager = await getPackageManager({ projectDir })
|
||||||
|
|
||||||
// Fetch latest Payload version from npm
|
// Fetch latest Payload version
|
||||||
const { exitCode: getLatestVersionExitCode, stdout: latestPayloadVersion } = await execa('npm', [
|
const latestPayloadVersion = await getLatestPackageVersion({ packageName: 'payload' })
|
||||||
'show',
|
|
||||||
'payload',
|
|
||||||
'version',
|
|
||||||
])
|
|
||||||
if (getLatestVersionExitCode !== 0) {
|
|
||||||
throw new Error('Failed to fetch latest Payload version')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (payloadVersion === latestPayloadVersion) {
|
if (payloadVersion === latestPayloadVersion) {
|
||||||
return { message: `Payload v${payloadVersion} is already up to date.`, success: true }
|
return { message: `Payload v${payloadVersion} is already up to date.`, success: true }
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ import path from 'path'
|
|||||||
import type { CliArgs } from './types.js'
|
import type { CliArgs } from './types.js'
|
||||||
|
|
||||||
import { configurePayloadConfig } from './lib/configure-payload-config.js'
|
import { configurePayloadConfig } from './lib/configure-payload-config.js'
|
||||||
import { PACKAGE_VERSION } from './lib/constants.js'
|
|
||||||
import { createProject } from './lib/create-project.js'
|
import { createProject } from './lib/create-project.js'
|
||||||
import { parseExample } from './lib/examples.js'
|
import { parseExample } from './lib/examples.js'
|
||||||
import { generateSecret } from './lib/generate-secret.js'
|
import { generateSecret } from './lib/generate-secret.js'
|
||||||
@@ -20,6 +19,7 @@ import { parseTemplate } from './lib/parse-template.js'
|
|||||||
import { selectDb } from './lib/select-db.js'
|
import { selectDb } from './lib/select-db.js'
|
||||||
import { getValidTemplates, validateTemplate } from './lib/templates.js'
|
import { getValidTemplates, validateTemplate } from './lib/templates.js'
|
||||||
import { updatePayloadInProject } from './lib/update-payload-in-project.js'
|
import { updatePayloadInProject } from './lib/update-payload-in-project.js'
|
||||||
|
import { getLatestPackageVersion } from './utils/getLatestPackageVersion.js'
|
||||||
import { debug, error, info } from './utils/log.js'
|
import { debug, error, info } from './utils/log.js'
|
||||||
import {
|
import {
|
||||||
feedbackOutro,
|
feedbackOutro,
|
||||||
@@ -78,13 +78,18 @@ export class Main {
|
|||||||
|
|
||||||
async init(): Promise<void> {
|
async init(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
|
const debugFlag = this.args['--debug']
|
||||||
|
|
||||||
|
const LATEST_VERSION = await getLatestPackageVersion({
|
||||||
|
debug: debugFlag,
|
||||||
|
packageName: 'payload',
|
||||||
|
})
|
||||||
|
|
||||||
if (this.args['--help']) {
|
if (this.args['--help']) {
|
||||||
helpMessage()
|
helpMessage()
|
||||||
process.exit(0)
|
process.exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
const debugFlag = this.args['--debug']
|
|
||||||
|
|
||||||
// eslint-disable-next-line no-console
|
// eslint-disable-next-line no-console
|
||||||
console.log('\n')
|
console.log('\n')
|
||||||
p.intro(chalk.bgCyan(chalk.black(' create-payload-app ')))
|
p.intro(chalk.bgCyan(chalk.black(' create-payload-app ')))
|
||||||
@@ -200,7 +205,7 @@ export class Main {
|
|||||||
|
|
||||||
const templateArg = this.args['--template']
|
const templateArg = this.args['--template']
|
||||||
if (templateArg) {
|
if (templateArg) {
|
||||||
const valid = validateTemplate(templateArg)
|
const valid = validateTemplate({ templateName: templateArg })
|
||||||
if (!valid) {
|
if (!valid) {
|
||||||
helpMessage()
|
helpMessage()
|
||||||
process.exit(1)
|
process.exit(1)
|
||||||
@@ -230,7 +235,7 @@ export class Main {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (debugFlag) {
|
if (debugFlag) {
|
||||||
debug(`Using ${exampleArg ? 'examples' : 'templates'} from git tag: v${PACKAGE_VERSION}`)
|
debug(`Using ${exampleArg ? 'examples' : 'templates'} from git tag: v${LATEST_VERSION}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!exampleArg) {
|
if (!exampleArg) {
|
||||||
|
|||||||
@@ -0,0 +1,34 @@
|
|||||||
|
/**
|
||||||
|
* Fetches the latest version of a package from the NPM registry.
|
||||||
|
*
|
||||||
|
* Used in determining the latest version of Payload to use in the generated templates.
|
||||||
|
*/
|
||||||
|
export async function getLatestPackageVersion({
|
||||||
|
debug = false,
|
||||||
|
packageName = 'payload',
|
||||||
|
}: {
|
||||||
|
debug?: boolean
|
||||||
|
/**
|
||||||
|
* Package name to fetch the latest version for based on the NPM registry URL
|
||||||
|
*
|
||||||
|
* Eg. for `'payload'`, it will fetch the version from `https://registry.npmjs.org/payload`
|
||||||
|
*
|
||||||
|
* @default 'payload'
|
||||||
|
*/
|
||||||
|
packageName?: string
|
||||||
|
}) {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`https://registry.npmjs.org/${packageName}`)
|
||||||
|
const data = await response.json()
|
||||||
|
const latestVersion = data['dist-tags'].latest
|
||||||
|
|
||||||
|
if (debug) {
|
||||||
|
console.log(`Found latest version of ${packageName}: ${latestVersion}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return latestVersion
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching Payload version:', error)
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/db-mongodb",
|
"name": "@payloadcms/db-mongodb",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "The officially supported MongoDB database adapter for Payload",
|
"description": "The officially supported MongoDB database adapter for Payload",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -372,6 +372,7 @@ const group: FieldSchemaGenerator<GroupField> = (
|
|||||||
buildSchemaOptions,
|
buildSchemaOptions,
|
||||||
parentIsLocalized,
|
parentIsLocalized,
|
||||||
): void => {
|
): void => {
|
||||||
|
if (fieldAffectsData(field)) {
|
||||||
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
||||||
|
|
||||||
// carry indexSortableFields through to versions if drafts enabled
|
// carry indexSortableFields through to versions if drafts enabled
|
||||||
@@ -400,8 +401,32 @@ const group: FieldSchemaGenerator<GroupField> = (
|
|||||||
}
|
}
|
||||||
|
|
||||||
schema.add({
|
schema.add({
|
||||||
[field.name]: localizeSchema(field, baseSchema, payload.config.localization, parentIsLocalized),
|
[field.name]: localizeSchema(
|
||||||
|
field,
|
||||||
|
baseSchema,
|
||||||
|
payload.config.localization,
|
||||||
|
parentIsLocalized,
|
||||||
|
),
|
||||||
})
|
})
|
||||||
|
} else {
|
||||||
|
field.fields.forEach((subField) => {
|
||||||
|
if (fieldIsVirtual(subField)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const addFieldSchema = getSchemaGenerator(subField.type)
|
||||||
|
|
||||||
|
if (addFieldSchema) {
|
||||||
|
addFieldSchema(
|
||||||
|
subField,
|
||||||
|
schema,
|
||||||
|
payload,
|
||||||
|
buildSchemaOptions,
|
||||||
|
(parentIsLocalized || field.localized) ?? false,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const json: FieldSchemaGenerator<JSONField> = (
|
const json: FieldSchemaGenerator<JSONField> = (
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ type SearchParam = {
|
|||||||
|
|
||||||
const subQueryOptions = {
|
const subQueryOptions = {
|
||||||
lean: true,
|
lean: true,
|
||||||
limit: 50,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -184,7 +183,7 @@ export async function buildSearchParam({
|
|||||||
select[joinPath] = true
|
select[joinPath] = true
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await SubModel.find(subQuery).lean().limit(50).select(select)
|
const result = await SubModel.find(subQuery).lean().select(select)
|
||||||
|
|
||||||
const $in: unknown[] = []
|
const $in: unknown[] = []
|
||||||
|
|
||||||
|
|||||||
@@ -57,12 +57,8 @@ const relationshipSort = ({
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const [i, segment] of segments.entries()) {
|
for (let i = 0; i < segments.length; i++) {
|
||||||
if (versions && i === 0 && segment === 'version') {
|
const segment = segments[i]
|
||||||
segments.shift()
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const field = currentFields.find((each) => each.name === segment)
|
const field = currentFields.find((each) => each.name === segment)
|
||||||
|
|
||||||
if (!field) {
|
if (!field) {
|
||||||
@@ -71,6 +67,10 @@ const relationshipSort = ({
|
|||||||
|
|
||||||
if ('fields' in field) {
|
if ('fields' in field) {
|
||||||
currentFields = field.flattenedFields
|
currentFields = field.flattenedFields
|
||||||
|
if (field.name === 'version' && versions && i === 0) {
|
||||||
|
segments.shift()
|
||||||
|
i--
|
||||||
|
}
|
||||||
} else if (
|
} else if (
|
||||||
(field.type === 'relationship' || field.type === 'upload') &&
|
(field.type === 'relationship' || field.type === 'upload') &&
|
||||||
i !== segments.length - 1
|
i !== segments.length - 1
|
||||||
@@ -106,7 +106,7 @@ const relationshipSort = ({
|
|||||||
as: `__${path}`,
|
as: `__${path}`,
|
||||||
foreignField: '_id',
|
foreignField: '_id',
|
||||||
from: foreignCollection.Model.collection.name,
|
from: foreignCollection.Model.collection.name,
|
||||||
localField: relationshipPath,
|
localField: versions ? `version.${relationshipPath}` : relationshipPath,
|
||||||
pipeline: [
|
pipeline: [
|
||||||
{
|
{
|
||||||
$project: {
|
$project: {
|
||||||
@@ -150,6 +150,18 @@ export const buildSortParam = ({
|
|||||||
sort = [sort]
|
sort = [sort]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// In the case of Mongo, when sorting by a field that is not unique, the results are not guaranteed to be in the same order each time.
|
||||||
|
// So we add a fallback sort to ensure that the results are always in the same order.
|
||||||
|
let fallbackSort = '-id'
|
||||||
|
|
||||||
|
if (timestamps) {
|
||||||
|
fallbackSort = '-createdAt'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
|
||||||
|
sort.push(fallbackSort)
|
||||||
|
}
|
||||||
|
|
||||||
const sorting = sort.reduce<Record<string, string>>((acc, item) => {
|
const sorting = sort.reduce<Record<string, string>>((acc, item) => {
|
||||||
let sortProperty: string
|
let sortProperty: string
|
||||||
let sortDirection: SortDirection
|
let sortDirection: SortDirection
|
||||||
|
|||||||
@@ -105,6 +105,7 @@ export const sanitizeQueryValue = ({
|
|||||||
| undefined => {
|
| undefined => {
|
||||||
let formattedValue = val
|
let formattedValue = val
|
||||||
let formattedOperator = operator
|
let formattedOperator = operator
|
||||||
|
|
||||||
if (['array', 'blocks', 'group', 'tab'].includes(field.type) && path.includes('.')) {
|
if (['array', 'blocks', 'group', 'tab'].includes(field.type) && path.includes('.')) {
|
||||||
const segments = path.split('.')
|
const segments = path.split('.')
|
||||||
segments.shift()
|
segments.shift()
|
||||||
|
|||||||
@@ -151,6 +151,7 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
|
|||||||
query: versionQuery,
|
query: versionQuery,
|
||||||
session: paginationOptions.options?.session ?? undefined,
|
session: paginationOptions.options?.session ?? undefined,
|
||||||
sort: paginationOptions.sort as object,
|
sort: paginationOptions.sort as object,
|
||||||
|
sortAggregation,
|
||||||
useEstimatedCount: paginationOptions.useEstimatedCount,
|
useEstimatedCount: paginationOptions.useEstimatedCount,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -128,7 +128,6 @@ const traverseFields = ({
|
|||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
case 'blocks': {
|
case 'blocks': {
|
||||||
const blocksSelect = select[field.name] as SelectType
|
const blocksSelect = select[field.name] as SelectType
|
||||||
|
|
||||||
|
|||||||
@@ -425,6 +425,7 @@ export const transform = ({
|
|||||||
for (const locale of config.localization.localeCodes) {
|
for (const locale of config.localization.localeCodes) {
|
||||||
sanitizeDate({
|
sanitizeDate({
|
||||||
field,
|
field,
|
||||||
|
locale,
|
||||||
ref: fieldRef,
|
ref: fieldRef,
|
||||||
value: fieldRef[locale],
|
value: fieldRef[locale],
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,18 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/db-postgres",
|
"name": "@payloadcms/db-postgres",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "The officially supported Postgres database adapter for Payload",
|
"description": "The officially supported Postgres database adapter for Payload",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -1,18 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/db-sqlite",
|
"name": "@payloadcms/db-sqlite",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "The officially supported SQLite database adapter for Payload",
|
"description": "The officially supported SQLite database adapter for Payload",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
|||||||
})
|
})
|
||||||
.from(this.tables[tableName])
|
.from(this.tables[tableName])
|
||||||
.where(where)
|
.where(where)
|
||||||
return Number(countResult[0]?.count)
|
return Number(countResult?.[0]?.count ?? 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
let query: SQLiteSelect = db
|
let query: SQLiteSelect = db
|
||||||
@@ -39,5 +39,5 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
|||||||
// Instead, COUNT (GROUP BY id) can be used which is still slower than COUNT(*) but acceptable.
|
// Instead, COUNT (GROUP BY id) can be used which is still slower than COUNT(*) but acceptable.
|
||||||
const countResult = await query
|
const countResult = await query
|
||||||
|
|
||||||
return Number(countResult[0]?.count)
|
return Number(countResult?.[0]?.count ?? 0)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,18 +0,0 @@
|
|||||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
|
||||||
|
|
||||||
/** @typedef {import('eslint').Linter.Config} Config */
|
|
||||||
|
|
||||||
/** @type {Config[]} */
|
|
||||||
export const index = [
|
|
||||||
...rootEslintConfig,
|
|
||||||
{
|
|
||||||
languageOptions: {
|
|
||||||
parserOptions: {
|
|
||||||
...rootParserOptions,
|
|
||||||
tsconfigRootDir: import.meta.dirname,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export default index
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/db-vercel-postgres",
|
"name": "@payloadcms/db-vercel-postgres",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "Vercel Postgres adapter for Payload",
|
"description": "Vercel Postgres adapter for Payload",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@payloadcms/drizzle",
|
"name": "@payloadcms/drizzle",
|
||||||
"version": "3.35.1",
|
"version": "3.39.1",
|
||||||
"description": "A library of shared functions used by different payload database adapters",
|
"description": "A library of shared functions used by different payload database adapters",
|
||||||
"homepage": "https://payloadcms.com",
|
"homepage": "https://payloadcms.com",
|
||||||
"repository": {
|
"repository": {
|
||||||
@@ -53,6 +53,7 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"console-table-printer": "2.12.1",
|
"console-table-printer": "2.12.1",
|
||||||
|
"dequal": "2.0.3",
|
||||||
"drizzle-orm": "0.36.1",
|
"drizzle-orm": "0.36.1",
|
||||||
"prompts": "2.4.2",
|
"prompts": "2.4.2",
|
||||||
"to-snake-case": "1.0.0",
|
"to-snake-case": "1.0.0",
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import toSnakeCase from 'to-snake-case'
|
|||||||
|
|
||||||
import type { DrizzleAdapter } from './types.js'
|
import type { DrizzleAdapter } from './types.js'
|
||||||
|
|
||||||
import buildQuery from './queries/buildQuery.js'
|
import { buildQuery } from './queries/buildQuery.js'
|
||||||
import { getTransaction } from './utilities/getTransaction.js'
|
import { getTransaction } from './utilities/getTransaction.js'
|
||||||
|
|
||||||
export const count: Count = async function count(
|
export const count: Count = async function count(
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
|||||||
|
|
||||||
import type { DrizzleAdapter } from './types.js'
|
import type { DrizzleAdapter } from './types.js'
|
||||||
|
|
||||||
import buildQuery from './queries/buildQuery.js'
|
import { buildQuery } from './queries/buildQuery.js'
|
||||||
import { getTransaction } from './utilities/getTransaction.js'
|
import { getTransaction } from './utilities/getTransaction.js'
|
||||||
|
|
||||||
export const countGlobalVersions: CountGlobalVersions = async function countGlobalVersions(
|
export const countGlobalVersions: CountGlobalVersions = async function countGlobalVersions(
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
|||||||
|
|
||||||
import type { DrizzleAdapter } from './types.js'
|
import type { DrizzleAdapter } from './types.js'
|
||||||
|
|
||||||
import buildQuery from './queries/buildQuery.js'
|
import { buildQuery } from './queries/buildQuery.js'
|
||||||
import { getTransaction } from './utilities/getTransaction.js'
|
import { getTransaction } from './utilities/getTransaction.js'
|
||||||
|
|
||||||
export const countVersions: CountVersions = async function countVersions(
|
export const countVersions: CountVersions = async function countVersions(
|
||||||
|
|||||||
@@ -23,10 +23,10 @@ export async function createGlobal<T extends Record<string, unknown>>(
|
|||||||
data,
|
data,
|
||||||
db,
|
db,
|
||||||
fields: globalConfig.flattenedFields,
|
fields: globalConfig.flattenedFields,
|
||||||
|
ignoreResult: returning === false,
|
||||||
operation: 'create',
|
operation: 'create',
|
||||||
req,
|
req,
|
||||||
tableName,
|
tableName,
|
||||||
ignoreResult: returning === false,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if (returning === false) {
|
if (returning === false) {
|
||||||
|
|||||||
@@ -17,11 +17,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
|||||||
globalSlug,
|
globalSlug,
|
||||||
publishedLocale,
|
publishedLocale,
|
||||||
req,
|
req,
|
||||||
|
returning,
|
||||||
select,
|
select,
|
||||||
snapshot,
|
snapshot,
|
||||||
updatedAt,
|
updatedAt,
|
||||||
versionData,
|
versionData,
|
||||||
returning,
|
|
||||||
}: CreateGlobalVersionArgs,
|
}: CreateGlobalVersionArgs,
|
||||||
) {
|
) {
|
||||||
const db = await getTransaction(this, req)
|
const db = await getTransaction(this, req)
|
||||||
@@ -42,11 +42,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
|||||||
},
|
},
|
||||||
db,
|
db,
|
||||||
fields: buildVersionGlobalFields(this.payload.config, global, true),
|
fields: buildVersionGlobalFields(this.payload.config, global, true),
|
||||||
|
ignoreResult: returning === false ? 'idOnly' : false,
|
||||||
operation: 'create',
|
operation: 'create',
|
||||||
req,
|
req,
|
||||||
select,
|
select,
|
||||||
tableName,
|
tableName,
|
||||||
ignoreResult: returning === false ? 'idOnly' : false,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
const table = this.tables[tableName]
|
const table = this.tables[tableName]
|
||||||
|
|||||||
@@ -18,11 +18,11 @@ export async function createVersion<T extends TypeWithID>(
|
|||||||
parent,
|
parent,
|
||||||
publishedLocale,
|
publishedLocale,
|
||||||
req,
|
req,
|
||||||
|
returning,
|
||||||
select,
|
select,
|
||||||
snapshot,
|
snapshot,
|
||||||
updatedAt,
|
updatedAt,
|
||||||
versionData,
|
versionData,
|
||||||
returning,
|
|
||||||
}: CreateVersionArgs<T>,
|
}: CreateVersionArgs<T>,
|
||||||
) {
|
) {
|
||||||
const db = await getTransaction(this, req)
|
const db = await getTransaction(this, req)
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import toSnakeCase from 'to-snake-case'
|
|||||||
import type { DrizzleAdapter } from './types.js'
|
import type { DrizzleAdapter } from './types.js'
|
||||||
|
|
||||||
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
|
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
|
||||||
import buildQuery from './queries/buildQuery.js'
|
import { buildQuery } from './queries/buildQuery.js'
|
||||||
import { selectDistinct } from './queries/selectDistinct.js'
|
import { selectDistinct } from './queries/selectDistinct.js'
|
||||||
import { transform } from './transform/read/index.js'
|
import { transform } from './transform/read/index.js'
|
||||||
import { getTransaction } from './utilities/getTransaction.js'
|
import { getTransaction } from './utilities/getTransaction.js'
|
||||||
|
|||||||
@@ -4,9 +4,10 @@ import { inArray } from 'drizzle-orm'
|
|||||||
|
|
||||||
import type { DrizzleAdapter } from '../types.js'
|
import type { DrizzleAdapter } from '../types.js'
|
||||||
|
|
||||||
import buildQuery from '../queries/buildQuery.js'
|
import { buildQuery } from '../queries/buildQuery.js'
|
||||||
import { selectDistinct } from '../queries/selectDistinct.js'
|
import { selectDistinct } from '../queries/selectDistinct.js'
|
||||||
import { transform } from '../transform/read/index.js'
|
import { transform } from '../transform/read/index.js'
|
||||||
|
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||||
import { getTransaction } from '../utilities/getTransaction.js'
|
import { getTransaction } from '../utilities/getTransaction.js'
|
||||||
import { buildFindManyArgs } from './buildFindManyArgs.js'
|
import { buildFindManyArgs } from './buildFindManyArgs.js'
|
||||||
|
|
||||||
@@ -46,6 +47,7 @@ export const findMany = async function find({
|
|||||||
const offset = skip || (page - 1) * limit
|
const offset = skip || (page - 1) * limit
|
||||||
|
|
||||||
if (limit === 0) {
|
if (limit === 0) {
|
||||||
|
pagination = false
|
||||||
limit = undefined
|
limit = undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -74,6 +76,26 @@ export const findMany = async function find({
|
|||||||
tableName,
|
tableName,
|
||||||
versions,
|
versions,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (orderBy) {
|
||||||
|
for (const key in selectFields) {
|
||||||
|
const column = selectFields[key]
|
||||||
|
if (column.primary) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
!orderBy.some(
|
||||||
|
(col) =>
|
||||||
|
col.column.name === column.name &&
|
||||||
|
getNameFromDrizzleTable(col.column.table) === getNameFromDrizzleTable(column.table),
|
||||||
|
)
|
||||||
|
) {
|
||||||
|
delete selectFields[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const selectDistinctResult = await selectDistinct({
|
const selectDistinctResult = await selectDistinct({
|
||||||
adapter,
|
adapter,
|
||||||
db,
|
db,
|
||||||
|
|||||||
@@ -19,12 +19,17 @@ import toSnakeCase from 'to-snake-case'
|
|||||||
import type { BuildQueryJoinAliases, DrizzleAdapter } from '../types.js'
|
import type { BuildQueryJoinAliases, DrizzleAdapter } from '../types.js'
|
||||||
import type { Result } from './buildFindManyArgs.js'
|
import type { Result } from './buildFindManyArgs.js'
|
||||||
|
|
||||||
import buildQuery from '../queries/buildQuery.js'
|
import { buildQuery } from '../queries/buildQuery.js'
|
||||||
import { getTableAlias } from '../queries/getTableAlias.js'
|
import { getTableAlias } from '../queries/getTableAlias.js'
|
||||||
import { operatorMap } from '../queries/operatorMap.js'
|
import { operatorMap } from '../queries/operatorMap.js'
|
||||||
|
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
|
||||||
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||||
import { jsonAggBuildObject } from '../utilities/json.js'
|
import { jsonAggBuildObject } from '../utilities/json.js'
|
||||||
import { rawConstraint } from '../utilities/rawConstraint.js'
|
import { rawConstraint } from '../utilities/rawConstraint.js'
|
||||||
|
import {
|
||||||
|
InternalBlockTableNameIndex,
|
||||||
|
resolveBlockTableName,
|
||||||
|
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||||
|
|
||||||
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||||
for (const k in where) {
|
for (const k in where) {
|
||||||
@@ -196,7 +201,12 @@ export const traverseFields = ({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const relationName = field.dbName ? `_${arrayTableName}` : `${path}${field.name}`
|
const relationName = getArrayRelationName({
|
||||||
|
field,
|
||||||
|
path: `${path}${field.name}`,
|
||||||
|
tableName: arrayTableName,
|
||||||
|
})
|
||||||
|
|
||||||
currentArgs.with[relationName] = withArray
|
currentArgs.with[relationName] = withArray
|
||||||
|
|
||||||
traverseFields({
|
traverseFields({
|
||||||
@@ -244,7 +254,7 @@ export const traverseFields = ({
|
|||||||
|
|
||||||
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
||||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||||
const blockKey = `_blocks_${block.slug}`
|
const blockKey = `_blocks_${block.slug}${!block[InternalBlockTableNameIndex] ? '' : `_${block[InternalBlockTableNameIndex]}`}`
|
||||||
|
|
||||||
let blockSelect: boolean | SelectType | undefined
|
let blockSelect: boolean | SelectType | undefined
|
||||||
|
|
||||||
@@ -284,8 +294,9 @@ export const traverseFields = ({
|
|||||||
with: {},
|
with: {},
|
||||||
}
|
}
|
||||||
|
|
||||||
const tableName = adapter.tableNameMap.get(
|
const tableName = resolveBlockTableName(
|
||||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
block,
|
||||||
|
adapter.tableNameMap.get(`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||||
)
|
)
|
||||||
|
|
||||||
if (typeof blockSelect === 'object') {
|
if (typeof blockSelect === 'object') {
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ export { migrateFresh } from './migrateFresh.js'
|
|||||||
export { migrateRefresh } from './migrateRefresh.js'
|
export { migrateRefresh } from './migrateRefresh.js'
|
||||||
export { migrateReset } from './migrateReset.js'
|
export { migrateReset } from './migrateReset.js'
|
||||||
export { migrateStatus } from './migrateStatus.js'
|
export { migrateStatus } from './migrateStatus.js'
|
||||||
export { default as buildQuery } from './queries/buildQuery.js'
|
export { buildQuery } from './queries/buildQuery.js'
|
||||||
export { operatorMap } from './queries/operatorMap.js'
|
export { operatorMap } from './queries/operatorMap.js'
|
||||||
export type { Operators } from './queries/operatorMap.js'
|
export type { Operators } from './queries/operatorMap.js'
|
||||||
export { parseParams } from './queries/parseParams.js'
|
export { parseParams } from './queries/parseParams.js'
|
||||||
|
|||||||
@@ -42,10 +42,6 @@ export const migrate: DrizzleAdapter['migrate'] = async function migrate(
|
|||||||
limit: 0,
|
limit: 0,
|
||||||
sort: '-name',
|
sort: '-name',
|
||||||
}))
|
}))
|
||||||
if (Number(migrationsInDB?.[0]?.batch) > 0) {
|
|
||||||
latestBatch = Number(migrationsInDB[0]?.batch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (migrationsInDB.find((m) => m.batch === -1)) {
|
if (migrationsInDB.find((m) => m.batch === -1)) {
|
||||||
const { confirm: runMigrations } = await prompts(
|
const { confirm: runMigrations } = await prompts(
|
||||||
@@ -67,6 +63,13 @@ export const migrate: DrizzleAdapter['migrate'] = async function migrate(
|
|||||||
if (!runMigrations) {
|
if (!runMigrations) {
|
||||||
process.exit(0)
|
process.exit(0)
|
||||||
}
|
}
|
||||||
|
// ignore the dev migration so that the latest batch number increments correctly
|
||||||
|
migrationsInDB = migrationsInDB.filter((m) => m.batch !== -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Number(migrationsInDB?.[0]?.batch) > 0) {
|
||||||
|
latestBatch = Number(migrationsInDB[0]?.batch)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const newBatch = latestBatch + 1
|
const newBatch = latestBatch + 1
|
||||||
|
|||||||
@@ -28,6 +28,8 @@ export async function migrateReset(this: DrizzleAdapter): Promise<void> {
|
|||||||
|
|
||||||
const req = await createLocalReq({}, payload)
|
const req = await createLocalReq({}, payload)
|
||||||
|
|
||||||
|
existingMigrations.reverse()
|
||||||
|
|
||||||
// Rollback all migrations in order
|
// Rollback all migrations in order
|
||||||
for (const migration of existingMigrations) {
|
for (const migration of existingMigrations) {
|
||||||
const migrationFile = migrationFiles.find((m) => m.name === migration.name)
|
const migrationFile = migrationFiles.find((m) => m.name === migration.name)
|
||||||
|
|||||||
@@ -16,7 +16,8 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
|||||||
})
|
})
|
||||||
.from(this.tables[tableName])
|
.from(this.tables[tableName])
|
||||||
.where(where)
|
.where(where)
|
||||||
return Number(countResult[0].count)
|
|
||||||
|
return Number(countResult?.[0]?.count ?? 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
let query = db
|
let query = db
|
||||||
@@ -39,5 +40,5 @@ export const countDistinct: CountDistinct = async function countDistinct(
|
|||||||
// Instead, COUNT (GROUP BY id) can be used which is still slower than COUNT(*) but acceptable.
|
// Instead, COUNT (GROUP BY id) can be used which is still slower than COUNT(*) but acceptable.
|
||||||
const countResult = await query
|
const countResult = await query
|
||||||
|
|
||||||
return Number(countResult[0].count)
|
return Number(countResult?.[0]?.count ?? 0)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import type { FlattenedBlock, FlattenedField } from 'payload'
|
import type { FlattenedField } from 'payload'
|
||||||
|
|
||||||
type Args = {
|
type Args = {
|
||||||
doc: Record<string, unknown>
|
doc: Record<string, unknown>
|
||||||
@@ -54,7 +54,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
|||||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||||
const matchedBlock = field.blocks.find(
|
const matchedBlock = field.blocks.find(
|
||||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||||
) as FlattenedBlock | undefined
|
)
|
||||||
|
|
||||||
if (matchedBlock) {
|
if (matchedBlock) {
|
||||||
return traverseFields({
|
return traverseFields({
|
||||||
@@ -75,7 +75,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
|||||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||||
const matchedBlock = field.blocks.find(
|
const matchedBlock = field.blocks.find(
|
||||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||||
) as FlattenedBlock | undefined
|
)
|
||||||
|
|
||||||
if (matchedBlock) {
|
if (matchedBlock) {
|
||||||
return traverseFields({
|
return traverseFields({
|
||||||
|
|||||||
@@ -1,49 +1,126 @@
|
|||||||
export type Groups =
|
export type Groups =
|
||||||
| 'addColumn'
|
| 'addColumn'
|
||||||
| 'addConstraint'
|
| 'addConstraint'
|
||||||
|
| 'alterType'
|
||||||
|
| 'createIndex'
|
||||||
|
| 'createTable'
|
||||||
|
| 'createType'
|
||||||
|
| 'disableRowSecurity'
|
||||||
| 'dropColumn'
|
| 'dropColumn'
|
||||||
| 'dropConstraint'
|
| 'dropConstraint'
|
||||||
|
| 'dropIndex'
|
||||||
| 'dropTable'
|
| 'dropTable'
|
||||||
|
| 'dropType'
|
||||||
| 'notNull'
|
| 'notNull'
|
||||||
|
| 'renameColumn'
|
||||||
|
| 'setDefault'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
|
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement.
|
||||||
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
* Works with or without a schema name.
|
||||||
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
*
|
||||||
* @param sql
|
* Examples:
|
||||||
|
* 'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||||
|
* => 'ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||||
|
*
|
||||||
|
* 'ALTER TABLE "public"."pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||||
|
* => 'ALTER TABLE "public"."pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||||
*/
|
*/
|
||||||
function convertAddColumnToAlterColumn(sql) {
|
function convertAddColumnToAlterColumn(sql) {
|
||||||
// Regular expression to match the ADD COLUMN statement with its constraints
|
// Regular expression to match the ADD COLUMN statement with its constraints
|
||||||
const regex = /ALTER TABLE ("[^"]+")\.(".*?") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
|
const regex = /ALTER TABLE ((?:"[^"]+"\.)?"[^"]+") ADD COLUMN ("[^"]+") [^;]*?NOT NULL;/i
|
||||||
|
|
||||||
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
||||||
return sql.replace(regex, 'ALTER TABLE $1.$2 ALTER COLUMN $3 SET NOT NULL;')
|
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
|
||||||
}
|
}
|
||||||
|
|
||||||
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
||||||
const groups = {
|
const groups = {
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||||
|
*/
|
||||||
addColumn: 'ADD COLUMN',
|
addColumn: 'ADD COLUMN',
|
||||||
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example:
|
||||||
|
* DO $$ BEGIN
|
||||||
|
* ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||||
|
* EXCEPTION
|
||||||
|
* WHEN duplicate_object THEN null;
|
||||||
|
* END $$;
|
||||||
|
*/
|
||||||
addConstraint: 'ADD CONSTRAINT',
|
addConstraint: 'ADD CONSTRAINT',
|
||||||
//example:
|
|
||||||
// DO $$ BEGIN
|
|
||||||
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
|
||||||
// EXCEPTION
|
|
||||||
// WHEN duplicate_object THEN null;
|
|
||||||
// END $$;
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: CREATE TABLE IF NOT EXISTS "payload_locked_documents" (
|
||||||
|
* "id" serial PRIMARY KEY NOT NULL,
|
||||||
|
* "global_slug" varchar,
|
||||||
|
* "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||||
|
* "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||||
|
* );
|
||||||
|
*/
|
||||||
|
createTable: 'CREATE TABLE',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||||
|
*/
|
||||||
dropColumn: 'DROP COLUMN',
|
dropColumn: 'DROP COLUMN',
|
||||||
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||||
|
*/
|
||||||
dropConstraint: 'DROP CONSTRAINT',
|
dropConstraint: 'DROP CONSTRAINT',
|
||||||
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: DROP TABLE "pages_rels";
|
||||||
|
*/
|
||||||
dropTable: 'DROP TABLE',
|
dropTable: 'DROP TABLE',
|
||||||
// example: DROP TABLE "pages_rels";
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||||
|
*/
|
||||||
notNull: 'NOT NULL',
|
notNull: 'NOT NULL',
|
||||||
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
|
||||||
|
/**
|
||||||
|
* example: CREATE TYPE "public"."enum__pages_v_published_locale" AS ENUM('en', 'es');
|
||||||
|
*/
|
||||||
|
createType: 'CREATE TYPE',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TYPE "public"."enum_pages_blocks_cta" ADD VALUE 'copy';
|
||||||
|
*/
|
||||||
|
alterType: 'ALTER TYPE',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "categories_rels" DISABLE ROW LEVEL SECURITY;
|
||||||
|
*/
|
||||||
|
disableRowSecurity: 'DISABLE ROW LEVEL SECURITY;',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: DROP INDEX IF EXISTS "pages_title_idx";
|
||||||
|
*/
|
||||||
|
dropIndex: 'DROP INDEX IF EXISTS',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: ALTER TABLE "pages" ALTER COLUMN "_status" SET DEFAULT 'draft';
|
||||||
|
*/
|
||||||
|
setDefault: 'SET DEFAULT',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: CREATE INDEX IF NOT EXISTS "payload_locked_documents_global_slug_idx" ON "payload_locked_documents" USING btree ("global_slug");
|
||||||
|
*/
|
||||||
|
createIndex: 'INDEX IF NOT EXISTS',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* example: DROP TYPE "public"."enum__pages_v_published_locale";
|
||||||
|
*/
|
||||||
|
dropType: 'DROP TYPE',
|
||||||
|
|
||||||
|
/**
|
||||||
|
* columns were renamed from camelCase to snake_case
|
||||||
|
* example: ALTER TABLE "forms" RENAME COLUMN "confirmationType" TO "confirmation_type";
|
||||||
|
*/
|
||||||
|
renameColumn: 'RENAME COLUMN',
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
||||||
@@ -51,7 +128,17 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
|||||||
return result
|
return result
|
||||||
}, {}) as Record<Groups, string[]>
|
}, {}) as Record<Groups, string[]>
|
||||||
|
|
||||||
|
// push multi-line changes to a single grouping
|
||||||
|
let isCreateTable = false
|
||||||
|
|
||||||
for (const line of list) {
|
for (const line of list) {
|
||||||
|
if (isCreateTable) {
|
||||||
|
result.createTable.push(line)
|
||||||
|
if (line.includes(');')) {
|
||||||
|
isCreateTable = false
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
Object.entries(groups).some(([key, value]) => {
|
Object.entries(groups).some(([key, value]) => {
|
||||||
if (line.endsWith('NOT NULL;')) {
|
if (line.endsWith('NOT NULL;')) {
|
||||||
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
||||||
@@ -64,7 +151,11 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if (line.includes(value)) {
|
if (line.includes(value)) {
|
||||||
result[key].push(line)
|
let statement = line
|
||||||
|
if (key === 'dropConstraint') {
|
||||||
|
statement = line.replace('" DROP CONSTRAINT "', '" DROP CONSTRAINT IF EXISTS "')
|
||||||
|
}
|
||||||
|
result[key].push(statement)
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -20,6 +20,17 @@ type Args = {
|
|||||||
req?: Partial<PayloadRequest>
|
req?: Partial<PayloadRequest>
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const runStatementGroup = async ({ adapter, db, debug, statements }) => {
|
||||||
|
const addColumnsStatement = statements.join('\n')
|
||||||
|
|
||||||
|
if (debug) {
|
||||||
|
adapter.payload.logger.info(debug)
|
||||||
|
adapter.payload.logger.info(addColumnsStatement)
|
||||||
|
}
|
||||||
|
|
||||||
|
await db.execute(sql.raw(addColumnsStatement))
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Moves upload and relationship columns from the join table and into the tables while moving data
|
* Moves upload and relationship columns from the join table and into the tables while moving data
|
||||||
* This is done in the following order:
|
* This is done in the following order:
|
||||||
@@ -36,21 +47,11 @@ type Args = {
|
|||||||
*/
|
*/
|
||||||
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||||
const adapter = payload.db as unknown as BasePostgresAdapter
|
const adapter = payload.db as unknown as BasePostgresAdapter
|
||||||
const db = await getTransaction(adapter, req)
|
|
||||||
const dir = payload.db.migrationDir
|
const dir = payload.db.migrationDir
|
||||||
|
|
||||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||||
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
|
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
|
||||||
|
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
|
||||||
const toSnapshot: Record<string, unknown> = {}
|
|
||||||
|
|
||||||
for (const key of Object.keys(adapter.schema).filter(
|
|
||||||
(key) => !key.startsWith('payload_locked_documents'),
|
|
||||||
)) {
|
|
||||||
toSnapshot[key] = adapter.schema[key]
|
|
||||||
}
|
|
||||||
|
|
||||||
const drizzleJsonAfter = generateDrizzleJson(toSnapshot) as DrizzleSnapshotJSON
|
|
||||||
|
|
||||||
// Get the previous migration snapshot
|
// Get the previous migration snapshot
|
||||||
const previousSnapshot = fs
|
const previousSnapshot = fs
|
||||||
@@ -82,16 +83,62 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
|||||||
|
|
||||||
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
||||||
|
|
||||||
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
|
const db = await getTransaction(adapter, req)
|
||||||
|
|
||||||
if (debug) {
|
await runStatementGroup({
|
||||||
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
|
adapter,
|
||||||
payload.logger.info(addColumnsStatement)
|
db,
|
||||||
}
|
debug: debug ? 'CREATING TYPES' : null,
|
||||||
|
statements: sqlUpStatements.createType,
|
||||||
|
})
|
||||||
|
|
||||||
await db.execute(sql.raw(addColumnsStatement))
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'ALTERING TYPES' : null,
|
||||||
|
statements: sqlUpStatements.alterType,
|
||||||
|
})
|
||||||
|
|
||||||
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'CREATING TABLES' : null,
|
||||||
|
statements: sqlUpStatements.createTable,
|
||||||
|
})
|
||||||
|
|
||||||
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'RENAMING COLUMNS' : null,
|
||||||
|
statements: sqlUpStatements.renameColumn,
|
||||||
|
})
|
||||||
|
|
||||||
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'CREATING NEW RELATIONSHIP COLUMNS' : null,
|
||||||
|
statements: sqlUpStatements.addColumn,
|
||||||
|
})
|
||||||
|
|
||||||
|
// SET DEFAULTS
|
||||||
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'SETTING DEFAULTS' : null,
|
||||||
|
statements: sqlUpStatements.setDefault,
|
||||||
|
})
|
||||||
|
|
||||||
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'CREATING INDEXES' : null,
|
||||||
|
statements: sqlUpStatements.createIndex,
|
||||||
|
})
|
||||||
|
|
||||||
for (const collection of payload.config.collections) {
|
for (const collection of payload.config.collections) {
|
||||||
|
if (collection.slug === 'payload-locked-documents') {
|
||||||
|
continue
|
||||||
|
}
|
||||||
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
||||||
const pathsToQuery: PathsToQuery = new Set()
|
const pathsToQuery: PathsToQuery = new Set()
|
||||||
|
|
||||||
@@ -237,52 +284,58 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ADD CONSTRAINT
|
// ADD CONSTRAINT
|
||||||
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
if (debug) {
|
db,
|
||||||
payload.logger.info('ADDING CONSTRAINTS')
|
debug: debug ? 'ADDING CONSTRAINTS' : null,
|
||||||
payload.logger.info(addConstraintsStatement)
|
statements: sqlUpStatements.addConstraint,
|
||||||
}
|
})
|
||||||
|
|
||||||
await db.execute(sql.raw(addConstraintsStatement))
|
|
||||||
|
|
||||||
// NOT NULL
|
// NOT NULL
|
||||||
const notNullStatements = sqlUpStatements.notNull.join('\n')
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
if (debug) {
|
db,
|
||||||
payload.logger.info('NOT NULL CONSTRAINTS')
|
debug: debug ? 'NOT NULL CONSTRAINTS' : null,
|
||||||
payload.logger.info(notNullStatements)
|
statements: sqlUpStatements.notNull,
|
||||||
}
|
})
|
||||||
|
|
||||||
await db.execute(sql.raw(notNullStatements))
|
|
||||||
|
|
||||||
// DROP TABLE
|
// DROP TABLE
|
||||||
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'DROPPING TABLES' : null,
|
||||||
|
statements: sqlUpStatements.dropTable,
|
||||||
|
})
|
||||||
|
|
||||||
if (debug) {
|
// DROP INDEX
|
||||||
payload.logger.info('DROPPING TABLES')
|
await runStatementGroup({
|
||||||
payload.logger.info(dropTablesStatement)
|
adapter,
|
||||||
}
|
db,
|
||||||
|
debug: debug ? 'DROPPING INDEXES' : null,
|
||||||
await db.execute(sql.raw(dropTablesStatement))
|
statements: sqlUpStatements.dropIndex,
|
||||||
|
})
|
||||||
|
|
||||||
// DROP CONSTRAINT
|
// DROP CONSTRAINT
|
||||||
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
if (debug) {
|
db,
|
||||||
payload.logger.info('DROPPING CONSTRAINTS')
|
debug: debug ? 'DROPPING CONSTRAINTS' : null,
|
||||||
payload.logger.info(dropConstraintsStatement)
|
statements: sqlUpStatements.dropConstraint,
|
||||||
}
|
})
|
||||||
|
|
||||||
await db.execute(sql.raw(dropConstraintsStatement))
|
|
||||||
|
|
||||||
// DROP COLUMN
|
// DROP COLUMN
|
||||||
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
|
await runStatementGroup({
|
||||||
|
adapter,
|
||||||
|
db,
|
||||||
|
debug: debug ? 'DROPPING COLUMNS' : null,
|
||||||
|
statements: sqlUpStatements.dropColumn,
|
||||||
|
})
|
||||||
|
|
||||||
if (debug) {
|
// DROP TYPES
|
||||||
payload.logger.info('DROPPING COLUMNS')
|
await runStatementGroup({
|
||||||
payload.logger.info(dropColumnsStatement)
|
adapter,
|
||||||
}
|
db,
|
||||||
|
debug: debug ? 'DROPPING TYPES' : null,
|
||||||
await db.execute(sql.raw(dropColumnsStatement))
|
statements: sqlUpStatements.dropType,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ export const migrateRelationships = async ({
|
|||||||
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
|
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
|
||||||
`
|
`
|
||||||
|
|
||||||
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
|
paginationResult = await db.execute(sql.raw(`${paginationStatement}`))
|
||||||
|
|
||||||
if (paginationResult.rows.length === 0) {
|
if (paginationResult.rows.length === 0) {
|
||||||
return
|
return
|
||||||
@@ -72,7 +72,7 @@ export const migrateRelationships = async ({
|
|||||||
payload.logger.info(statement)
|
payload.logger.info(statement)
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
|
const result = await db.execute(sql.raw(`${statement}`))
|
||||||
|
|
||||||
const docsToResave: DocsToResave = {}
|
const docsToResave: DocsToResave = {}
|
||||||
|
|
||||||
|
|||||||
@@ -3,12 +3,14 @@ import type { FlattenedField, Where } from 'payload'
|
|||||||
|
|
||||||
import type { DrizzleAdapter, GenericColumn } from '../types.js'
|
import type { DrizzleAdapter, GenericColumn } from '../types.js'
|
||||||
import type { BuildQueryJoinAliases } from './buildQuery.js'
|
import type { BuildQueryJoinAliases } from './buildQuery.js'
|
||||||
|
import type { QueryContext } from './parseParams.js'
|
||||||
|
|
||||||
import { parseParams } from './parseParams.js'
|
import { parseParams } from './parseParams.js'
|
||||||
|
|
||||||
export function buildAndOrConditions({
|
export function buildAndOrConditions({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
|
context,
|
||||||
fields,
|
fields,
|
||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
@@ -21,6 +23,7 @@ export function buildAndOrConditions({
|
|||||||
adapter: DrizzleAdapter
|
adapter: DrizzleAdapter
|
||||||
aliasTable?: Table
|
aliasTable?: Table
|
||||||
collectionSlug?: string
|
collectionSlug?: string
|
||||||
|
context: QueryContext
|
||||||
fields: FlattenedField[]
|
fields: FlattenedField[]
|
||||||
globalSlug?: string
|
globalSlug?: string
|
||||||
joins: BuildQueryJoinAliases
|
joins: BuildQueryJoinAliases
|
||||||
@@ -41,6 +44,7 @@ export function buildAndOrConditions({
|
|||||||
const result = parseParams({
|
const result = parseParams({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
|
context,
|
||||||
fields,
|
fields,
|
||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import type { Table } from 'drizzle-orm'
|
import type { SQL, Table } from 'drizzle-orm'
|
||||||
import type { FlattenedField, Sort } from 'payload'
|
import type { FlattenedField, Sort } from 'payload'
|
||||||
|
|
||||||
import { asc, desc } from 'drizzle-orm'
|
import { asc, desc } from 'drizzle-orm'
|
||||||
@@ -16,6 +16,7 @@ type Args = {
|
|||||||
joins: BuildQueryJoinAliases
|
joins: BuildQueryJoinAliases
|
||||||
locale?: string
|
locale?: string
|
||||||
parentIsLocalized: boolean
|
parentIsLocalized: boolean
|
||||||
|
rawSort?: SQL
|
||||||
selectFields: Record<string, GenericColumn>
|
selectFields: Record<string, GenericColumn>
|
||||||
sort?: Sort
|
sort?: Sort
|
||||||
tableName: string
|
tableName: string
|
||||||
@@ -31,14 +32,16 @@ export const buildOrderBy = ({
|
|||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
parentIsLocalized,
|
parentIsLocalized,
|
||||||
|
rawSort,
|
||||||
selectFields,
|
selectFields,
|
||||||
sort,
|
sort,
|
||||||
tableName,
|
tableName,
|
||||||
}: Args): BuildQueryResult['orderBy'] => {
|
}: Args): BuildQueryResult['orderBy'] => {
|
||||||
const orderBy: BuildQueryResult['orderBy'] = []
|
const orderBy: BuildQueryResult['orderBy'] = []
|
||||||
|
|
||||||
if (!sort) {
|
|
||||||
const createdAt = adapter.tables[tableName]?.createdAt
|
const createdAt = adapter.tables[tableName]?.createdAt
|
||||||
|
|
||||||
|
if (!sort) {
|
||||||
if (createdAt) {
|
if (createdAt) {
|
||||||
sort = '-createdAt'
|
sort = '-createdAt'
|
||||||
} else {
|
} else {
|
||||||
@@ -50,6 +53,18 @@ export const buildOrderBy = ({
|
|||||||
sort = [sort]
|
sort = [sort]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// In the case of Mongo, when sorting by a field that is not unique, the results are not guaranteed to be in the same order each time.
|
||||||
|
// So we add a fallback sort to ensure that the results are always in the same order.
|
||||||
|
let fallbackSort = '-id'
|
||||||
|
|
||||||
|
if (createdAt) {
|
||||||
|
fallbackSort = '-createdAt'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
|
||||||
|
sort.push(fallbackSort)
|
||||||
|
}
|
||||||
|
|
||||||
for (const sortItem of sort) {
|
for (const sortItem of sort) {
|
||||||
let sortProperty: string
|
let sortProperty: string
|
||||||
let sortDirection: 'asc' | 'desc'
|
let sortDirection: 'asc' | 'desc'
|
||||||
@@ -74,17 +89,23 @@ export const buildOrderBy = ({
|
|||||||
value: sortProperty,
|
value: sortProperty,
|
||||||
})
|
})
|
||||||
if (sortTable?.[sortTableColumnName]) {
|
if (sortTable?.[sortTableColumnName]) {
|
||||||
|
let order = sortDirection === 'asc' ? asc : desc
|
||||||
|
|
||||||
|
if (rawSort) {
|
||||||
|
order = () => rawSort
|
||||||
|
}
|
||||||
|
|
||||||
orderBy.push({
|
orderBy.push({
|
||||||
column:
|
column:
|
||||||
aliasTable && tableName === getNameFromDrizzleTable(sortTable)
|
aliasTable && tableName === getNameFromDrizzleTable(sortTable)
|
||||||
? aliasTable[sortTableColumnName]
|
? aliasTable[sortTableColumnName]
|
||||||
: sortTable[sortTableColumnName],
|
: sortTable[sortTableColumnName],
|
||||||
order: sortDirection === 'asc' ? asc : desc,
|
order,
|
||||||
})
|
})
|
||||||
|
|
||||||
selectFields[sortTableColumnName] = sortTable[sortTableColumnName]
|
selectFields[sortTableColumnName] = sortTable[sortTableColumnName]
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (_) {
|
||||||
// continue
|
// continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import type { PgTableWithColumns } from 'drizzle-orm/pg-core'
|
|||||||
import type { FlattenedField, Sort, Where } from 'payload'
|
import type { FlattenedField, Sort, Where } from 'payload'
|
||||||
|
|
||||||
import type { DrizzleAdapter, GenericColumn, GenericTable } from '../types.js'
|
import type { DrizzleAdapter, GenericColumn, GenericTable } from '../types.js'
|
||||||
|
import type { QueryContext } from './parseParams.js'
|
||||||
|
|
||||||
import { buildOrderBy } from './buildOrderBy.js'
|
import { buildOrderBy } from './buildOrderBy.js'
|
||||||
import { parseParams } from './parseParams.js'
|
import { parseParams } from './parseParams.js'
|
||||||
@@ -36,7 +37,8 @@ export type BuildQueryResult = {
|
|||||||
selectFields: Record<string, GenericColumn>
|
selectFields: Record<string, GenericColumn>
|
||||||
where: SQL
|
where: SQL
|
||||||
}
|
}
|
||||||
const buildQuery = function buildQuery({
|
|
||||||
|
export const buildQuery = function buildQuery({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
fields,
|
fields,
|
||||||
@@ -52,24 +54,14 @@ const buildQuery = function buildQuery({
|
|||||||
id: adapter.tables[tableName].id,
|
id: adapter.tables[tableName].id,
|
||||||
}
|
}
|
||||||
|
|
||||||
const orderBy = buildOrderBy({
|
|
||||||
adapter,
|
|
||||||
aliasTable,
|
|
||||||
fields,
|
|
||||||
joins,
|
|
||||||
locale,
|
|
||||||
parentIsLocalized,
|
|
||||||
selectFields,
|
|
||||||
sort,
|
|
||||||
tableName,
|
|
||||||
})
|
|
||||||
|
|
||||||
let where: SQL
|
let where: SQL
|
||||||
|
|
||||||
|
const context: QueryContext = { sort }
|
||||||
if (incomingWhere && Object.keys(incomingWhere).length > 0) {
|
if (incomingWhere && Object.keys(incomingWhere).length > 0) {
|
||||||
where = parseParams({
|
where = parseParams({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
|
context,
|
||||||
fields,
|
fields,
|
||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
@@ -81,6 +73,19 @@ const buildQuery = function buildQuery({
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const orderBy = buildOrderBy({
|
||||||
|
adapter,
|
||||||
|
aliasTable,
|
||||||
|
fields,
|
||||||
|
joins,
|
||||||
|
locale,
|
||||||
|
parentIsLocalized,
|
||||||
|
rawSort: context.rawSort,
|
||||||
|
selectFields,
|
||||||
|
sort: context.sort,
|
||||||
|
tableName,
|
||||||
|
})
|
||||||
|
|
||||||
return {
|
return {
|
||||||
joins,
|
joins,
|
||||||
orderBy,
|
orderBy,
|
||||||
@@ -88,5 +93,3 @@ const buildQuery = function buildQuery({
|
|||||||
where,
|
where,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export default buildQuery
|
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
|
|||||||
import type { BuildQueryJoinAliases } from './buildQuery.js'
|
import type { BuildQueryJoinAliases } from './buildQuery.js'
|
||||||
|
|
||||||
import { isPolymorphicRelationship } from '../utilities/isPolymorphicRelationship.js'
|
import { isPolymorphicRelationship } from '../utilities/isPolymorphicRelationship.js'
|
||||||
|
import { resolveBlockTableName } from '../utilities/validateExistingBlockIsIdentical.js'
|
||||||
import { addJoinTable } from './addJoinTable.js'
|
import { addJoinTable } from './addJoinTable.js'
|
||||||
import { getTableAlias } from './getTableAlias.js'
|
import { getTableAlias } from './getTableAlias.js'
|
||||||
|
|
||||||
@@ -193,8 +194,9 @@ export const getTableColumnFromPath = ({
|
|||||||
(block) => typeof block !== 'string' && block.slug === blockType,
|
(block) => typeof block !== 'string' && block.slug === blockType,
|
||||||
) as FlattenedBlock | undefined)
|
) as FlattenedBlock | undefined)
|
||||||
|
|
||||||
newTableName = adapter.tableNameMap.get(
|
newTableName = resolveBlockTableName(
|
||||||
`${tableName}_blocks_${toSnakeCase(block.slug)}`,
|
block,
|
||||||
|
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||||
)
|
)
|
||||||
|
|
||||||
const { newAliasTable } = getTableAlias({ adapter, tableName: newTableName })
|
const { newAliasTable } = getTableAlias({ adapter, tableName: newTableName })
|
||||||
@@ -220,7 +222,11 @@ export const getTableColumnFromPath = ({
|
|||||||
const hasBlockField = (field.blockReferences ?? field.blocks).some((_block) => {
|
const hasBlockField = (field.blockReferences ?? field.blocks).some((_block) => {
|
||||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||||
|
|
||||||
newTableName = adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`)
|
newTableName = resolveBlockTableName(
|
||||||
|
block,
|
||||||
|
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||||
|
)
|
||||||
|
|
||||||
constraintPath = `${constraintPath}${field.name}.%.`
|
constraintPath = `${constraintPath}${field.name}.%.`
|
||||||
|
|
||||||
let result: TableColumn
|
let result: TableColumn
|
||||||
@@ -274,7 +280,7 @@ export const getTableColumnFromPath = ({
|
|||||||
tableName: newTableName,
|
tableName: newTableName,
|
||||||
value,
|
value,
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (_) {
|
||||||
// this is fine, not every block will have the field
|
// this is fine, not every block will have the field
|
||||||
}
|
}
|
||||||
if (!result) {
|
if (!result) {
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ import {
|
|||||||
notInArray,
|
notInArray,
|
||||||
or,
|
or,
|
||||||
type SQL,
|
type SQL,
|
||||||
type SQLWrapper,
|
|
||||||
} from 'drizzle-orm'
|
} from 'drizzle-orm'
|
||||||
|
|
||||||
type OperatorKeys =
|
type OperatorKeys =
|
||||||
@@ -35,7 +34,7 @@ type OperatorKeys =
|
|||||||
| 'not_like'
|
| 'not_like'
|
||||||
| 'or'
|
| 'or'
|
||||||
|
|
||||||
export type Operators = Record<OperatorKeys, (column: Column, value: SQLWrapper | unknown) => SQL>
|
export type Operators = Record<OperatorKeys, (column: Column, value: unknown) => SQL>
|
||||||
|
|
||||||
export const operatorMap: Operators = {
|
export const operatorMap: Operators = {
|
||||||
and,
|
and,
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import type { SQL, Table } from 'drizzle-orm'
|
import type { SQL, Table } from 'drizzle-orm'
|
||||||
import type { FlattenedField, Operator, Where } from 'payload'
|
import type { FlattenedField, Operator, Sort, Where } from 'payload'
|
||||||
|
|
||||||
import { and, isNotNull, isNull, ne, notInArray, or, sql } from 'drizzle-orm'
|
import { and, isNotNull, isNull, ne, notInArray, or, sql } from 'drizzle-orm'
|
||||||
import { PgUUID } from 'drizzle-orm/pg-core'
|
import { PgUUID } from 'drizzle-orm/pg-core'
|
||||||
@@ -14,9 +14,12 @@ import { buildAndOrConditions } from './buildAndOrConditions.js'
|
|||||||
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
|
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
|
||||||
import { sanitizeQueryValue } from './sanitizeQueryValue.js'
|
import { sanitizeQueryValue } from './sanitizeQueryValue.js'
|
||||||
|
|
||||||
|
export type QueryContext = { rawSort?: SQL; sort: Sort }
|
||||||
|
|
||||||
type Args = {
|
type Args = {
|
||||||
adapter: DrizzleAdapter
|
adapter: DrizzleAdapter
|
||||||
aliasTable?: Table
|
aliasTable?: Table
|
||||||
|
context: QueryContext
|
||||||
fields: FlattenedField[]
|
fields: FlattenedField[]
|
||||||
joins: BuildQueryJoinAliases
|
joins: BuildQueryJoinAliases
|
||||||
locale?: string
|
locale?: string
|
||||||
@@ -30,6 +33,7 @@ type Args = {
|
|||||||
export function parseParams({
|
export function parseParams({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
|
context,
|
||||||
fields,
|
fields,
|
||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
@@ -57,6 +61,7 @@ export function parseParams({
|
|||||||
const builtConditions = buildAndOrConditions({
|
const builtConditions = buildAndOrConditions({
|
||||||
adapter,
|
adapter,
|
||||||
aliasTable,
|
aliasTable,
|
||||||
|
context,
|
||||||
fields,
|
fields,
|
||||||
joins,
|
joins,
|
||||||
locale,
|
locale,
|
||||||
@@ -342,6 +347,8 @@ export function parseParams({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (geoConstraints.length) {
|
if (geoConstraints.length) {
|
||||||
|
context.sort = relationOrPath
|
||||||
|
context.rawSort = sql`${table[columnName]} <-> ST_SetSRID(ST_MakePoint(${lng}, ${lat}), 4326)`
|
||||||
constraints.push(and(...geoConstraints))
|
constraints.push(and(...geoConstraints))
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import type { QueryPromise, SQL } from 'drizzle-orm'
|
import type { QueryPromise, SQL } from 'drizzle-orm'
|
||||||
import type { PgSelect } from 'drizzle-orm/pg-core'
|
|
||||||
import type { SQLiteColumn, SQLiteSelect } from 'drizzle-orm/sqlite-core'
|
import type { SQLiteColumn, SQLiteSelect } from 'drizzle-orm/sqlite-core'
|
||||||
|
|
||||||
import type {
|
import type {
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ type Args = {
|
|||||||
* ie. indexes, multiple columns, etc
|
* ie. indexes, multiple columns, etc
|
||||||
*/
|
*/
|
||||||
baseIndexes?: Record<string, RawIndex>
|
baseIndexes?: Record<string, RawIndex>
|
||||||
|
blocksTableNameMap: Record<string, number>
|
||||||
buildNumbers?: boolean
|
buildNumbers?: boolean
|
||||||
buildRelationships?: boolean
|
buildRelationships?: boolean
|
||||||
compoundIndexes?: SanitizedCompoundIndex[]
|
compoundIndexes?: SanitizedCompoundIndex[]
|
||||||
@@ -70,6 +71,7 @@ export const buildTable = ({
|
|||||||
baseColumns = {},
|
baseColumns = {},
|
||||||
baseForeignKeys = {},
|
baseForeignKeys = {},
|
||||||
baseIndexes = {},
|
baseIndexes = {},
|
||||||
|
blocksTableNameMap,
|
||||||
compoundIndexes,
|
compoundIndexes,
|
||||||
disableNotNull,
|
disableNotNull,
|
||||||
disableRelsTableUnique = false,
|
disableRelsTableUnique = false,
|
||||||
@@ -120,6 +122,7 @@ export const buildTable = ({
|
|||||||
hasManyTextField,
|
hasManyTextField,
|
||||||
} = traverseFields({
|
} = traverseFields({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap,
|
||||||
columns,
|
columns,
|
||||||
disableNotNull,
|
disableNotNull,
|
||||||
disableRelsTableUnique,
|
disableRelsTableUnique,
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ export const buildRawSchema = ({
|
|||||||
|
|
||||||
buildTable({
|
buildTable({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap: {},
|
||||||
compoundIndexes: collection.sanitizedIndexes,
|
compoundIndexes: collection.sanitizedIndexes,
|
||||||
disableNotNull: !!collection?.versions?.drafts,
|
disableNotNull: !!collection?.versions?.drafts,
|
||||||
disableUnique: false,
|
disableUnique: false,
|
||||||
@@ -75,6 +76,7 @@ export const buildRawSchema = ({
|
|||||||
|
|
||||||
buildTable({
|
buildTable({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap: {},
|
||||||
compoundIndexes: buildVersionCompoundIndexes({ indexes: collection.sanitizedIndexes }),
|
compoundIndexes: buildVersionCompoundIndexes({ indexes: collection.sanitizedIndexes }),
|
||||||
disableNotNull: !!collection.versions?.drafts,
|
disableNotNull: !!collection.versions?.drafts,
|
||||||
disableUnique: true,
|
disableUnique: true,
|
||||||
@@ -96,6 +98,7 @@ export const buildRawSchema = ({
|
|||||||
|
|
||||||
buildTable({
|
buildTable({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap: {},
|
||||||
disableNotNull: !!global?.versions?.drafts,
|
disableNotNull: !!global?.versions?.drafts,
|
||||||
disableUnique: false,
|
disableUnique: false,
|
||||||
fields: global.flattenedFields,
|
fields: global.flattenedFields,
|
||||||
@@ -118,6 +121,7 @@ export const buildRawSchema = ({
|
|||||||
|
|
||||||
buildTable({
|
buildTable({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap: {},
|
||||||
disableNotNull: !!global.versions?.drafts,
|
disableNotNull: !!global.versions?.drafts,
|
||||||
disableUnique: true,
|
disableUnique: true,
|
||||||
fields: versionFields,
|
fields: versionFields,
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import type { CompoundIndex, FlattenedField } from 'payload'
|
import type { FlattenedField } from 'payload'
|
||||||
|
|
||||||
import { InvalidConfiguration } from 'payload'
|
import { InvalidConfiguration } from 'payload'
|
||||||
import {
|
import {
|
||||||
array,
|
|
||||||
fieldAffectsData,
|
fieldAffectsData,
|
||||||
fieldIsVirtual,
|
fieldIsVirtual,
|
||||||
fieldShouldBeLocalized,
|
fieldShouldBeLocalized,
|
||||||
@@ -23,14 +22,20 @@ import type {
|
|||||||
|
|
||||||
import { createTableName } from '../createTableName.js'
|
import { createTableName } from '../createTableName.js'
|
||||||
import { buildIndexName } from '../utilities/buildIndexName.js'
|
import { buildIndexName } from '../utilities/buildIndexName.js'
|
||||||
|
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
|
||||||
import { hasLocalesTable } from '../utilities/hasLocalesTable.js'
|
import { hasLocalesTable } from '../utilities/hasLocalesTable.js'
|
||||||
import { validateExistingBlockIsIdentical } from '../utilities/validateExistingBlockIsIdentical.js'
|
import {
|
||||||
|
InternalBlockTableNameIndex,
|
||||||
|
setInternalBlockIndex,
|
||||||
|
validateExistingBlockIsIdentical,
|
||||||
|
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||||
import { buildTable } from './build.js'
|
import { buildTable } from './build.js'
|
||||||
import { idToUUID } from './idToUUID.js'
|
import { idToUUID } from './idToUUID.js'
|
||||||
import { withDefault } from './withDefault.js'
|
import { withDefault } from './withDefault.js'
|
||||||
|
|
||||||
type Args = {
|
type Args = {
|
||||||
adapter: DrizzleAdapter
|
adapter: DrizzleAdapter
|
||||||
|
blocksTableNameMap: Record<string, number>
|
||||||
columnPrefix?: string
|
columnPrefix?: string
|
||||||
columns: Record<string, RawColumn>
|
columns: Record<string, RawColumn>
|
||||||
disableNotNull: boolean
|
disableNotNull: boolean
|
||||||
@@ -71,6 +76,7 @@ type Result = {
|
|||||||
|
|
||||||
export const traverseFields = ({
|
export const traverseFields = ({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap,
|
||||||
columnPrefix,
|
columnPrefix,
|
||||||
columns,
|
columns,
|
||||||
disableNotNull,
|
disableNotNull,
|
||||||
@@ -249,6 +255,7 @@ export const traverseFields = ({
|
|||||||
baseColumns,
|
baseColumns,
|
||||||
baseForeignKeys,
|
baseForeignKeys,
|
||||||
baseIndexes,
|
baseIndexes,
|
||||||
|
blocksTableNameMap,
|
||||||
disableNotNull: disableNotNullFromHere,
|
disableNotNull: disableNotNullFromHere,
|
||||||
disableRelsTableUnique: true,
|
disableRelsTableUnique: true,
|
||||||
disableUnique,
|
disableUnique,
|
||||||
@@ -288,7 +295,11 @@ export const traverseFields = ({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const relationName = field.dbName ? `_${arrayTableName}` : fieldName
|
const relationName = getArrayRelationName({
|
||||||
|
field,
|
||||||
|
path: fieldName,
|
||||||
|
tableName: arrayTableName,
|
||||||
|
})
|
||||||
|
|
||||||
relationsToBuild.set(relationName, {
|
relationsToBuild.set(relationName, {
|
||||||
type: 'many',
|
type: 'many',
|
||||||
@@ -364,7 +375,7 @@ export const traverseFields = ({
|
|||||||
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
||||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||||
|
|
||||||
const blockTableName = createTableName({
|
let blockTableName = createTableName({
|
||||||
adapter,
|
adapter,
|
||||||
config: block,
|
config: block,
|
||||||
parentTableName: rootTableName,
|
parentTableName: rootTableName,
|
||||||
@@ -372,6 +383,27 @@ export const traverseFields = ({
|
|||||||
throwValidationError,
|
throwValidationError,
|
||||||
versionsCustomName: versions,
|
versionsCustomName: versions,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (typeof blocksTableNameMap[blockTableName] === 'undefined') {
|
||||||
|
blocksTableNameMap[blockTableName] = 1
|
||||||
|
} else if (
|
||||||
|
!validateExistingBlockIsIdentical({
|
||||||
|
block,
|
||||||
|
localized: field.localized,
|
||||||
|
rootTableName,
|
||||||
|
table: adapter.rawTables[blockTableName],
|
||||||
|
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
|
||||||
|
})
|
||||||
|
) {
|
||||||
|
blocksTableNameMap[blockTableName]++
|
||||||
|
setInternalBlockIndex(block, blocksTableNameMap[blockTableName])
|
||||||
|
blockTableName = `${blockTableName}_${blocksTableNameMap[blockTableName]}`
|
||||||
|
}
|
||||||
|
let relationName = `_blocks_${block.slug}`
|
||||||
|
if (typeof block[InternalBlockTableNameIndex] !== 'undefined') {
|
||||||
|
relationName = `_blocks_${block.slug}_${block[InternalBlockTableNameIndex]}`
|
||||||
|
}
|
||||||
|
|
||||||
if (!adapter.rawTables[blockTableName]) {
|
if (!adapter.rawTables[blockTableName]) {
|
||||||
const baseColumns: Record<string, RawColumn> = {
|
const baseColumns: Record<string, RawColumn> = {
|
||||||
_order: {
|
_order: {
|
||||||
@@ -451,6 +483,7 @@ export const traverseFields = ({
|
|||||||
baseColumns,
|
baseColumns,
|
||||||
baseForeignKeys,
|
baseForeignKeys,
|
||||||
baseIndexes,
|
baseIndexes,
|
||||||
|
blocksTableNameMap,
|
||||||
disableNotNull: disableNotNullFromHere,
|
disableNotNull: disableNotNullFromHere,
|
||||||
disableRelsTableUnique: true,
|
disableRelsTableUnique: true,
|
||||||
disableUnique,
|
disableUnique,
|
||||||
@@ -501,7 +534,7 @@ export const traverseFields = ({
|
|||||||
},
|
},
|
||||||
],
|
],
|
||||||
references: ['id'],
|
references: ['id'],
|
||||||
relationName: `_blocks_${block.slug}`,
|
relationName,
|
||||||
to: rootTableName,
|
to: rootTableName,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -549,18 +582,10 @@ export const traverseFields = ({
|
|||||||
})
|
})
|
||||||
|
|
||||||
adapter.rawRelations[blockTableName] = blockRelations
|
adapter.rawRelations[blockTableName] = blockRelations
|
||||||
} else if (process.env.NODE_ENV !== 'production' && !versions) {
|
|
||||||
validateExistingBlockIsIdentical({
|
|
||||||
block,
|
|
||||||
localized: field.localized,
|
|
||||||
parentIsLocalized: parentIsLocalized || field.localized,
|
|
||||||
rootTableName,
|
|
||||||
table: adapter.rawTables[blockTableName],
|
|
||||||
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// blocks relationships are defined from the collection or globals table down to the block, bypassing any subBlocks
|
// blocks relationships are defined from the collection or globals table down to the block, bypassing any subBlocks
|
||||||
rootRelationsToBuild.set(`_blocks_${block.slug}`, {
|
rootRelationsToBuild.set(relationName, {
|
||||||
type: 'many',
|
type: 'many',
|
||||||
// blocks are not localized on the parent table
|
// blocks are not localized on the parent table
|
||||||
localized: false,
|
localized: false,
|
||||||
@@ -624,6 +649,7 @@ export const traverseFields = ({
|
|||||||
hasManyTextField: groupHasManyTextField,
|
hasManyTextField: groupHasManyTextField,
|
||||||
} = traverseFields({
|
} = traverseFields({
|
||||||
adapter,
|
adapter,
|
||||||
|
blocksTableNameMap,
|
||||||
columnPrefix: `${columnName}_`,
|
columnPrefix: `${columnName}_`,
|
||||||
columns,
|
columns,
|
||||||
disableNotNull: disableNotNullFromHere,
|
disableNotNull: disableNotNullFromHere,
|
||||||
@@ -840,6 +866,7 @@ export const traverseFields = ({
|
|||||||
baseColumns,
|
baseColumns,
|
||||||
baseForeignKeys,
|
baseForeignKeys,
|
||||||
baseIndexes,
|
baseIndexes,
|
||||||
|
blocksTableNameMap,
|
||||||
disableNotNull,
|
disableNotNull,
|
||||||
disableUnique,
|
disableUnique,
|
||||||
fields: [],
|
fields: [],
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user