Compare commits
1 commit
docs/migra...fix/form-i

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6ef6f2e55c |  |
.github/actions/setup/action.yml (2 changes, vendored)
@@ -6,7 +6,7 @@ inputs:
   node-version:
     description: Node.js version
     required: true
-    default: 23.11.0
+    default: 22.6.0
   pnpm-version:
     description: Pnpm version
     required: true
.github/workflows/main.yml (14 changes, vendored)
@@ -16,7 +16,7 @@ concurrency:
   cancel-in-progress: true

 env:
-  NODE_VERSION: 23.11.0
+  NODE_VERSION: 22.6.0
   PNPM_VERSION: 9.7.1
   DO_NOT_TRACK: 1 # Disable Turbopack telemetry
   NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry

@@ -62,6 +62,12 @@ jobs:
       echo "templates: ${{ steps.filter.outputs.templates }}"

   lint:
+    # Follows same github's ci skip: [skip lint], [lint skip], [no lint]
+    if: >
+      github.event_name == 'pull_request' &&
+      !contains(github.event.pull_request.title, '[skip lint]') &&
+      !contains(github.event.pull_request.title, '[lint skip]') &&
+      !contains(github.event.pull_request.title, '[no lint]')
     runs-on: ubuntu-24.04
     steps:
       - uses: actions/checkout@v4

@@ -75,8 +81,10 @@ jobs:
           pnpm-version: ${{ env.PNPM_VERSION }}
           pnpm-install-cache-key: pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}

-      - name: Lint
-        run: pnpm lint -- --quiet
+      - name: Lint staged
+        run: |
+          git diff --name-only --diff-filter=d origin/${GITHUB_BASE_REF}...${GITHUB_SHA}
+          npx lint-staged --diff="origin/${GITHUB_BASE_REF}...${GITHUB_SHA}"

   build:
     needs: changes
.github/workflows/post-release-templates.yml (2 changes, vendored)
@@ -7,7 +7,7 @@ on:
   workflow_dispatch:

 env:
-  NODE_VERSION: 23.11.0
+  NODE_VERSION: 22.6.0
   PNPM_VERSION: 9.7.1
   DO_NOT_TRACK: 1 # Disable Turbopack telemetry
   NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
.github/workflows/post-release.yml (2 changes, vendored)
@@ -12,7 +12,7 @@ on:
       default: ''

 env:
-  NODE_VERSION: 23.11.0
+  NODE_VERSION: 22.6.0
   PNPM_VERSION: 9.7.1
   DO_NOT_TRACK: 1 # Disable Turbopack telemetry
   NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
.github/workflows/publish-prerelease.yml (2 changes, vendored)
@@ -7,7 +7,7 @@ on:
   workflow_dispatch:

 env:
-  NODE_VERSION: 23.11.0
+  NODE_VERSION: 22.6.0
   PNPM_VERSION: 9.7.1
   DO_NOT_TRACK: 1 # Disable Turbopack telemetry
   NEXT_TELEMETRY_DISABLED: 1 # Disable Next telemetry
.gitignore (1 change, vendored)
@@ -3,7 +3,6 @@ package-lock.json
 dist
 /.idea/*
 !/.idea/runConfigurations
-/.idea/runConfigurations/_template*
 !/.idea/payload.iml

 # Custom actions
.idea/runConfigurations/_template__of_JavaScriptTestRunnerJest.xml (9 changes, generated, normal file)
@@ -0,0 +1,9 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="true" type="JavaScriptTestRunnerJest">
+    <node-interpreter value="project" />
+    <node-options value="--no-deprecation" />
+    <envs />
+    <scope-kind value="ALL" />
+    <method v="2" />
+  </configuration>
+</component>
@@ -1 +1 @@
-v23.11.0
+v22.6.0

@@ -1,2 +1,2 @@
 pnpm 9.7.1
-nodejs 23.11.0
+nodejs 22.6.0
.vscode/launch.json (14 changes, vendored)
@@ -63,13 +63,6 @@
       "request": "launch",
       "type": "node-terminal"
     },
-    {
-      "command": "pnpm tsx --no-deprecation test/dev.ts query-presets",
-      "cwd": "${workspaceFolder}",
-      "name": "Run Dev Query Presets",
-      "request": "launch",
-      "type": "node-terminal"
-    },
     {
       "command": "pnpm tsx --no-deprecation test/dev.ts login-with-username",
       "cwd": "${workspaceFolder}",

@@ -118,13 +111,6 @@
       "request": "launch",
       "type": "node-terminal"
     },
-    {
-      "command": "pnpm tsx --no-deprecation test/dev.ts folder-view",
-      "cwd": "${workspaceFolder}",
-      "name": "Run Dev Folder View",
-      "request": "launch",
-      "type": "node-terminal"
-    },
     {
       "command": "pnpm tsx --no-deprecation test/dev.ts localization",
       "cwd": "${workspaceFolder}",
.vscode/settings.json (8 changes, vendored)
@@ -7,6 +7,9 @@
   },
   "editor.formatOnSaveMode": "file",
   "eslint.rules.customizations": [
+    // Default all ESLint errors to 'warn' to differentiate from TypeScript's 'error' level
+    { "rule": "*", "severity": "warn" },
+
     // Silence some warnings that will get auto-fixed
     { "rule": "perfectionist/*", "severity": "off", "fixable": true },
     { "rule": "curly", "severity": "off", "fixable": true },

@@ -21,8 +24,5 @@
     "runtimeArgs": ["--no-deprecation"]
   },
   // Essentially disables bun test buttons
-  "bun.test.filePattern": "bun.test.ts",
-  "playwright.env": {
-    "NODE_OPTIONS": "--no-deprecation --no-experimental-strip-types"
-  }
+  "bun.test.filePattern": "bun.test.ts"
 }
@@ -132,7 +132,6 @@ The following options are available:

| `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. |
| `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. |
| `folders` | A boolean to enable folders for a given collection. Defaults to `false`. [More details](../folders/overview). |
| `meta` | Page metadata overrides to apply to this Collection within the Admin Panel. [More details](../admin/metadata). |
| `preview` | Function to generate preview URLs within the Admin Panel that can point to your app. [More details](../admin/preview). |
| `livePreview` | Enable real-time editing for instant visual feedback of your front-end application. [More details](../live-preview/overview). |
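As an editor's illustration (not part of the diff): a minimal sketch of a collection using a few of the `admin` options listed above; the slug, field, and URL are placeholders.

```ts
import type { CollectionConfig } from 'payload'

export const Pages: CollectionConfig = {
  slug: 'pages',
  admin: {
    hideAPIURL: true, // hide the "API URL" meta field in the edit view
    enableRichTextLink: false, // disable the rich-text Link element
    // point preview URLs at your front-end app
    preview: (doc) => `https://example.com/${doc?.slug}`,
  },
  fields: [{ name: 'slug', type: 'text' }],
}
```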
@@ -84,7 +84,6 @@ The following options are available:

| **`csrf`** | A whitelist array of URLs to allow Payload to accept cookies from. [More details](../authentication/cookies#csrf-attacks). |
| **`defaultDepth`** | If a user does not specify `depth` while requesting a resource, this depth will be used. [More details](../queries/depth). |
| **`defaultMaxTextLength`** | The maximum allowed string length to be permitted application-wide. Helps to prevent malicious public document creation. |
| `folders` | An optional object to configure global folder settings. [More details](../folders/overview). |
| `queryPresets` | An object to configure Collection Query Presets. [More details](../query-presets/overview). |
| **`maxDepth`** | The maximum allowed depth to be permitted application-wide. This setting helps prevent malicious queries. Defaults to `10`. [More details](../queries/depth). |
| **`indexSortableFields`** | Automatically index all sortable top-level fields in the database to improve sort performance and add database compatibility for Azure Cosmos and similar. |
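As an editor's illustration (not part of the diff): a minimal sketch showing a few of the root options above in a Payload config; the adapter, connection string, and values are placeholders.

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload'

export default buildConfig({
  defaultDepth: 2, // used when a request does not specify ?depth=
  defaultMaxTextLength: 40000, // cap string length application-wide
  maxDepth: 10, // hard ceiling for population depth
  db: mongooseAdapter({ url: process.env.DATABASE_URI || '' }),
  collections: [],
  secret: process.env.PAYLOAD_SECRET || '',
})
```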
@@ -189,8 +189,6 @@ In MongoDB, you'll only ever really need to run migrations for times where you c

In this case, create a migration by running `pnpm payload migrate:create`, then write the logic needed to migrate your documents to their new shape. You can either run your migrations in CI before you build and deploy, or run them locally against your production database by using the production connection string on your local machine and running `pnpm payload migrate`.
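As an editor's illustration (not part of the diff): a minimal sketch of what such a migration file can look like with the MongoDB adapter; the `posts` collection and the renamed fields are assumptions made for the example.

```ts
import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-mongodb'

export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
  // Reshape existing documents to match the new config
  await payload.db.collections.posts.collection.updateMany(
    {},
    { $rename: { subtitle: 'tagline' } },
    { session },
  )
}

export async function down({ payload, session }: MigrateDownArgs): Promise<void> {
  // Revert the rename if the migration is rolled back
  await payload.db.collections.posts.collection.updateMany(
    {},
    { $rename: { tagline: 'subtitle' } },
    { session },
  )
}
```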
You can find examples of common MongoDB migrations [here](/database/mongodb#common-migration-scripts).

#### Postgres

In relational databases like Postgres, migrations are more important: each time you add a new field or a new collection, you'll need to update the shape of your database to match your Payload Config (otherwise you'll see errors when trying to read or write your data).
@@ -61,118 +61,3 @@ Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cos

- For Azure Cosmos DB, the root config property `indexSortableFields` must be set to `true`.
- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, and aggregations are limited there. This may change in the future.
- For DocumentDB, pass `disableIndexHints: true` to disable hinting the DB to use `id` as the index, which can cause problems with DocumentDB.
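As an editor's illustration (not part of the diff): a minimal sketch wiring the two options mentioned above into a Payload config; the connection string and the rest of the config are placeholders.

```ts
import { mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload'

export default buildConfig({
  // Required for Azure Cosmos DB so sortable top-level fields are indexed
  indexSortableFields: true,
  db: mongooseAdapter({
    url: process.env.DATABASE_URI || '',
    // Recommended for DocumentDB, which can mishandle the `id` index hint
    disableIndexHints: true,
  }),
  collections: [],
  secret: process.env.PAYLOAD_SECRET || '',
})
```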
## Common migration scripts

### Delete a field from the database

With the MongoDB adapter, even if you delete a field from your Payload Config, the existing field data will still remain in the database.
If you want to ensure that the field is fully erased from the database, you can use the following script:

```ts
// pnpx payload migrate:create --name delete-field
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
  await payload.db.collections.posts.collection.updateMany(
    {},
    {
      $unset: {
        // delete title field
        title: true,
        // nested to array
        'array.title': true,
      },
    },
    { session },
  )
}
```

### Synchronize indexes

Payload won't automatically replace existing indexes in MongoDB when you change your Payload Config.
For example, changing `index: true` to `unique: true` won't automatically update the index in MongoDB.
You can use the following script to synchronize indexes:

```ts
// pnpx payload migrate:create --name sync-posts-indexes
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
  await payload.db.collections.posts.syncIndexes()
}
```

<Banner type="warning">
  Note that this will also drop any indexes that aren't in the Payload Config.
  If you have custom indexes that you want to keep, add them to the collection
  schema, or re-create them manually after `syncIndexes` with:
  `payload.db.collections.posts.collection.createIndex({ title: 1 })`
</Banner>

### Making a field localized (and vice versa)

When you change a field to be localized, or back to non-localized, you can use the following script to update the field in the database:

```ts
// pnpx payload migrate:create --name make-title-localized
export async function up({ payload, session }: MigrateUpArgs): Promise<void> {
  const posts = await payload.db.collections.posts.collection
    .find({}, { session })
    .toArray()

  // Make "title" localized
  await payload.db.collections.posts.collection.bulkWrite(
    posts.map((post) => ({
      updateOne: {
        filter: { _id: post._id },
        update: { $set: { title: { en: post.title } } },
      },
    })),
  )

  // Make "title" non-localized
  await payload.db.collections.posts.collection.bulkWrite(
    posts.map((post) => ({
      updateOne: {
        filter: { _id: post._id },
        update: { $set: { title: post.title.en } },
      },
    })),
  )
}
```

### Example: renaming a collection

The following example renames a collection with slug "pages" to "articles", including the corresponding `\_versions` collection.

```ts
import { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-mongodb'
// import type { Db } from 'mongodb' // add this package as a devDependency in package.json if you want db to be typed as Db

export async function up({
  payload,
  req,
  session,
}: MigrateUpArgs): Promise<void> {
  const db = payload.db.connection.db as any

  await db.renameCollection('pages', 'articles', { session, dropTarget: true })
  await db.renameCollection('_pages_versions', '_articles_versions', {
    session,
    dropTarget: true,
  }) // remove this line if you do not have versions enabled
}

export async function down({
  payload,
  req,
  session,
}: MigrateDownArgs): Promise<void> {
  const db = payload.db.connection.db as any

  await db.renameCollection('articles', 'pages', { session, dropTarget: true })
  await db.renameCollection('_articles_versions', '_pages_versions', {
    session,
    dropTarget: true,
  }) // remove this line if you do not have versions enabled
}
```
@@ -35,9 +35,9 @@ export const MyGroupField: Field = {
|
||||
|
||||
| Option | Description |
|
||||
| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`name`** | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
|
||||
| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) |
|
||||
| **`fields`** \* | Array of field types to nest within this Group. |
|
||||
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Required when name is undefined, defaults to name converted to words. |
|
||||
| **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. |
|
||||
| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) |
|
||||
| **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. |
|
||||
| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). |
|
||||
@@ -86,7 +86,7 @@ export const ExampleCollection: CollectionConfig = {
|
||||
slug: 'example-collection',
|
||||
fields: [
|
||||
{
|
||||
name: 'pageMeta',
|
||||
name: 'pageMeta', // required
|
||||
type: 'group', // required
|
||||
interfaceName: 'Meta', // optional
|
||||
fields: [
|
||||
@@ -110,38 +110,3 @@ export const ExampleCollection: CollectionConfig = {
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Presentational group fields
|
||||
|
||||
You can also use the Group field to create a presentational group of fields. This is useful when you want to group fields together visually without affecting the data structure.
|
||||
The label will be required when a `name` is not provided.
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const ExampleCollection: CollectionConfig = {
|
||||
slug: 'example-collection',
|
||||
fields: [
|
||||
{
|
||||
label: 'Page meta',
|
||||
type: 'group', // required
|
||||
fields: [
|
||||
{
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
required: true,
|
||||
minLength: 20,
|
||||
maxLength: 100,
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
required: true,
|
||||
minLength: 40,
|
||||
maxLength: 160,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
@@ -100,7 +100,7 @@ Here are the available Presentational Fields:

### Virtual Fields

-Virtual fields are used to display data that is not stored in the database. They are useful for displaying computed values that populate within the API response through hooks, etc.
+Virtual fields are used to display data that is not stored in the database. They are useful for displaying computed values that populate within the APi response through hooks, etc.

Here are the available Virtual Fields:
@@ -1,105 +0,0 @@
|
||||
---
|
||||
title: Folders
|
||||
label: Folders
|
||||
order: 10
|
||||
desc: Folders allow you to group documents across collections, and are a great way to organize your content.
|
||||
keywords: folders, folder, content organization
|
||||
---
|
||||
|
||||
Folders allow you to group documents across collections, and are a great way to organize your content. Folders are built on top of relationship fields: when you enable folders on a collection, Payload adds a hidden relationship field, `folders`, that relates to a folder (or to no folder). Folders also have the `folder` field, allowing folders to be nested within other folders.
|
||||
|
||||
The configuration for folders is done in two places, the collection config and the Payload config. The collection config is where you enable folders, and the Payload config is where you configure the global folder settings.
|
||||
|
||||
<Banner type="warning">
|
||||
**Note:** The Folders feature is currently in beta and may be subject to
change in minor version updates prior to being stable.
|
||||
</Banner>
|
||||
|
||||
## Folder Configuration
|
||||
|
||||
On the payload config, you can configure the following settings under the `folders` property:
|
||||
|
||||
```ts
|
||||
// Type definition
|
||||
|
||||
type RootFoldersConfiguration = {
|
||||
/**
|
||||
* An array of functions to be run when the folder collection is initialized
|
||||
* This allows plugins to modify the collection configuration
|
||||
*/
|
||||
collectionOverrides?: (({
|
||||
collection,
|
||||
}: {
|
||||
collection: CollectionConfig
|
||||
}) => CollectionConfig | Promise<CollectionConfig>)[]
|
||||
/**
|
||||
* Ability to view hidden fields and collections related to folders
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
debug?: boolean
|
||||
/**
|
||||
* The Folder field name
|
||||
*
|
||||
* @default "folder"
|
||||
*/
|
||||
fieldName?: string
|
||||
/**
|
||||
* Slug for the folder collection
|
||||
*
|
||||
* @default "payload-folders"
|
||||
*/
|
||||
slug?: string
|
||||
}
|
||||
```
|
||||
|
||||
```ts
|
||||
// Example usage
|
||||
|
||||
import { buildConfig } from 'payload'
|
||||
|
||||
const config = buildConfig({
|
||||
// ...
|
||||
folders: {
|
||||
// highlight-start
|
||||
debug: true, // optional
|
||||
collectionOverrides: [
|
||||
async ({ collection }) => {
|
||||
return collection
|
||||
},
|
||||
], // optional
|
||||
fieldName: 'folder', // optional
|
||||
slug: 'payload-folders', // optional
|
||||
// highlight-end
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Collection Configuration
|
||||
|
||||
To enable folders on a collection, you need to set the `admin.folders` property to `true` on the collection config. This will add a hidden relationship field to the collection that relates to a folder — or no folder.
|
||||
|
||||
```ts
|
||||
// Type definition
|
||||
|
||||
type CollectionFoldersConfiguration = boolean
|
||||
```
|
||||
|
||||
```ts
|
||||
// Example usage
|
||||
|
||||
import { buildConfig } from 'payload'
|
||||
|
||||
const config = buildConfig({
|
||||
collections: [
|
||||
{
|
||||
slug: 'pages',
|
||||
// highlight-start
|
||||
admin: {
|
||||
folders: true, // defaults to false
|
||||
},
|
||||
// highlight-end
|
||||
},
|
||||
],
|
||||
})
|
||||
```
|
||||
@@ -81,7 +81,7 @@ To install a Database Adapter, you can run **one** of the following commands:
|
||||
|
||||
#### 2. Copy Payload files into your Next.js app folder
|
||||
|
||||
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](<https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)>) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
|
||||
Payload installs directly in your Next.js `/app` folder, and you'll need to place some files into that folder for Payload to run. You can copy these files from the [Blank Template](https://github.com/payloadcms/payload/tree/main/templates/blank/src/app/(payload)) on GitHub. Once you have the required Payload files in place in your `/app` folder, you should have something like this:
|
||||
|
||||
```plaintext
|
||||
app/
|
||||
|
||||
@@ -63,50 +63,19 @@ const config = buildConfig({
|
||||
export default config
|
||||
```
|
||||
|
||||
## Enabling Content Source Maps
|
||||
|
||||
Now in your Next.js app, you need to add the `encodeSourceMaps` query parameter to your API requests. This will tell Payload to include the Content Source Maps in the API response.
|
||||
|
||||
<Banner type="warning">
|
||||
**Note:** For performance reasons, this should only be done when in draft mode
|
||||
or on preview deployments.
|
||||
</Banner>
|
||||
|
||||
#### REST API
|
||||
|
||||
If you're using the REST API, include the `?encodeSourceMaps=true` search parameter.
|
||||
Now in your Next.js app, include the `?encodeSourceMaps=true` parameter in any of your API requests. For performance reasons, this should only be done when in draft mode or on preview deployments.
|
||||
|
||||
```ts
|
||||
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
|
||||
const res = await fetch(
|
||||
`${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?encodeSourceMaps=true&where[slug][equals]=${slug}`,
|
||||
`${process.env.NEXT_PUBLIC_PAYLOAD_CMS_URL}/api/pages?where[slug][equals]=${slug}&encodeSourceMaps=true`,
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
#### Local API
|
||||
|
||||
If you're using the Local API, include the `encodeSourceMaps` via the `context` property.
|
||||
|
||||
```ts
|
||||
if (isDraftMode || process.env.VERCEL_ENV === 'preview') {
|
||||
const res = await payload.find({
|
||||
collection: 'pages',
|
||||
where: {
|
||||
slug: {
|
||||
equals: slug,
|
||||
},
|
||||
},
|
||||
context: {
|
||||
encodeSourceMaps: true,
|
||||
},
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
And that's it! You are now ready to enter Edit Mode and begin visually editing your content.
|
||||
|
||||
## Edit Mode
|
||||
#### Edit Mode
|
||||
|
||||
To see Content Link on your site, you first need to visit any preview deployment on Vercel and login using the Vercel Toolbar. When Content Source Maps are detected on the page, a pencil icon will appear in the toolbar. Clicking this icon will enable Edit Mode, highlighting all editable fields on the page in blue.
|
||||
|
||||
@@ -125,9 +94,7 @@ const { cleaned, encoded } = vercelStegaSplit(text)
|
||||
|
||||
### Blocks and array fields
|
||||
|
||||
All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are automatically given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site.
|
||||
|
||||
You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.
|
||||
All `blocks` and `array` fields by definition do not have plain text strings to encode. For this reason, they are given an additional `_encodedSourceMap` property, which you can use to enable Content Link on entire _sections_ of your site. You can then specify the editing container by adding the `data-vercel-edit-target` HTML attribute to any top-level element of your block.
|
||||
|
||||
```ts
|
||||
<div data-vercel-edit-target>
|
||||
|
||||
@@ -85,7 +85,6 @@ formBuilderPlugin({
|
||||
checkbox: true,
|
||||
number: true,
|
||||
message: true,
|
||||
date: false,
|
||||
payment: false,
|
||||
},
|
||||
})
|
||||
@@ -350,18 +349,6 @@ Maps to a `checkbox` input on your front-end. Used to collect a boolean value.
|
||||
| `width` | string | The width of the field on the front-end. |
|
||||
| `required` | checkbox | Whether or not the field is required when submitted. |
|
||||
|
||||
### Date
|
||||
|
||||
Maps to a `date` input on your front-end. Used to collect a date value.
|
||||
|
||||
| Property | Type | Description |
|
||||
| -------------- | -------- | ---------------------------------------------------- |
|
||||
| `name` | string | The name of the field. |
|
||||
| `label` | string | The label of the field. |
|
||||
| `defaultValue` | date | The default value of the field. |
|
||||
| `width` | string | The width of the field on the front-end. |
|
||||
| `required` | checkbox | Whether or not the field is required when submitted. |
|
||||
|
||||
### Number
|
||||
|
||||
Maps to a `number` input on your front-end. Used to collect a number.
|
||||
@@ -434,42 +421,6 @@ formBuilderPlugin({
|
||||
})
|
||||
```
|
||||
|
||||
### Customizing the date field default value
|
||||
|
||||
You can customise the default value of the date field and any other aspects of the date block in this way.
|
||||
Note that the end submission source will be responsible for the timezone of the date. Payload only stores the date in UTC format.
|
||||
|
||||
```ts
|
||||
import { fields as formFields } from '@payloadcms/plugin-form-builder'
|
||||
|
||||
// payload.config.ts
|
||||
formBuilderPlugin({
|
||||
fields: {
|
||||
// date: true, // just enable it without any customizations
|
||||
date: {
|
||||
...formFields.date,
|
||||
fields: [
|
||||
...(formFields.date && 'fields' in formFields.date
|
||||
? formFields.date.fields.map((field) => {
|
||||
if ('name' in field && field.name === 'defaultValue') {
|
||||
return {
|
||||
...field,
|
||||
timezone: true, // optionally enable timezone
|
||||
admin: {
|
||||
...field.admin,
|
||||
description: 'This is a date field',
|
||||
},
|
||||
}
|
||||
}
|
||||
return field
|
||||
})
|
||||
: []),
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Email
|
||||
|
||||
This plugin relies on the [email configuration](../email/overview) defined in your Payload configuration. It will read from your config and attempt to send your emails using the credentials provided.
|
||||
|
||||
@@ -309,3 +309,7 @@ import {
|
||||
...
|
||||
} from '@payloadcms/plugin-stripe/types';
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
The [Templates Directory](https://github.com/payloadcms/payload/tree/main/templates) contains an official [E-commerce Template](https://github.com/payloadcms/payload/tree/main/templates/ecommerce) which demonstrates exactly how to configure this plugin in Payload and implement it on your front-end. You can also check out [How to Build An E-Commerce Site With Next.js](https://payloadcms.com/blog/how-to-build-an-e-commerce-site-with-nextjs) post for a bit more context around this template.
|
||||
|
||||
@@ -6,14 +6,14 @@ desc: Converting between lexical richtext and HTML
keywords: lexical, richtext, html
---

-## Rich Text to HTML
+## Converting Rich Text to HTML

There are two main approaches to convert your Lexical-based rich text to HTML:

1. **Generate HTML on-demand (Recommended)**: Convert JSON to HTML wherever you need it, on-demand.
2. **Generate HTML within your Collection**: Create a new field that automatically converts your saved JSON content to HTML. This is not recommended because it adds overhead to the Payload API and may not work well with live preview.

-### On-demand
+### Generating HTML on-demand (Recommended)

To convert JSON to HTML on-demand, use the `convertLexicalToHTML` function from `@payloadcms/richtext-lexical/html`. Here's an example of how to use it in a React component in your frontend:
@@ -32,81 +32,61 @@ export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||
}
|
||||
```
|
||||
|
||||
#### Dynamic Population (Advanced)
|
||||
### Converting Lexical Blocks
|
||||
|
||||
By default, `convertLexicalToHTML` expects fully populated data (e.g. uploads, links, etc.). If you need to dynamically fetch and populate those nodes, use the async variant, `convertLexicalToHTMLAsync`, from `@payloadcms/richtext-lexical/html-async`. You must provide a `populate` function:
|
||||
If your rich text includes Lexical blocks, you need to provide a way to convert them to HTML. For example:
|
||||
|
||||
```tsx
|
||||
'use client'
|
||||
|
||||
import type { MyInlineBlock, MyTextBlock } from '@/payload-types'
|
||||
import type {
|
||||
DefaultNodeTypes,
|
||||
SerializedBlockNode,
|
||||
SerializedInlineBlockNode,
|
||||
} from '@payloadcms/richtext-lexical'
|
||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||
|
||||
import { getRestPopulateFn } from '@payloadcms/richtext-lexical/client'
|
||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||
import React, { useEffect, useState } from 'react'
|
||||
|
||||
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||
const [html, setHTML] = useState<null | string>(null)
|
||||
useEffect(() => {
|
||||
async function convert() {
|
||||
const html = await convertLexicalToHTMLAsync({
|
||||
data,
|
||||
populate: getRestPopulateFn({
|
||||
apiURL: `http://localhost:3000/api`,
|
||||
}),
|
||||
})
|
||||
setHTML(html)
|
||||
}
|
||||
|
||||
void convert()
|
||||
}, [data])
|
||||
|
||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
}
|
||||
```
|
||||
|
||||
Using the REST populate function will send a separate request for each node. If you need to populate a large number of nodes, this may be slow. For improved performance on the server, you can use the `getPayloadPopulateFn` function:
|
||||
|
||||
```tsx
|
||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||
|
||||
import { getPayloadPopulateFn } from '@payloadcms/richtext-lexical'
|
||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||
import { getPayload } from 'payload'
|
||||
import {
|
||||
convertLexicalToHTML,
|
||||
type HTMLConvertersFunction,
|
||||
} from '@payloadcms/richtext-lexical/html'
|
||||
import React from 'react'
|
||||
|
||||
import config from '../../config.js'
|
||||
type NodeTypes =
|
||||
| DefaultNodeTypes
|
||||
| SerializedBlockNode<MyTextBlock>
|
||||
| SerializedInlineBlockNode<MyInlineBlock>
|
||||
|
||||
export const MyRSCComponent = async ({
|
||||
data,
|
||||
}: {
|
||||
data: SerializedEditorState
|
||||
}) => {
|
||||
const payload = await getPayload({
|
||||
config,
|
||||
})
|
||||
const htmlConverters: HTMLConvertersFunction<NodeTypes> = ({
|
||||
defaultConverters,
|
||||
}) => ({
|
||||
...defaultConverters,
|
||||
blocks: {
|
||||
// Each key should match your block's slug
|
||||
myTextBlock: ({ node, providedCSSString }) =>
|
||||
`<div style="background-color: red;${providedCSSString}">${node.fields.text}</div>`,
|
||||
},
|
||||
inlineBlocks: {
|
||||
// Each key should match your inline block's slug
|
||||
myInlineBlock: ({ node, providedStyleTag }) =>
|
||||
`<span${providedStyleTag}>${node.fields.text}</span$>`,
|
||||
},
|
||||
})
|
||||
|
||||
const html = await convertLexicalToHTMLAsync({
|
||||
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||
const html = convertLexicalToHTML({
|
||||
converters: htmlConverters,
|
||||
data,
|
||||
populate: await getPayloadPopulateFn({
|
||||
currentDepth: 0,
|
||||
depth: 1,
|
||||
payload,
|
||||
}),
|
||||
})
|
||||
|
||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
return <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
}
|
||||
```
|
||||
|
||||
### HTML field
|
||||
### Outputting HTML from the Collection
|
||||
|
||||
The `lexicalHTMLField()` helper converts JSON to HTML and saves it in a field that is updated every time you read it via an `afterRead` hook. It's generally not recommended for two reasons:
|
||||
|
||||
1. It creates a column with duplicate content in another format.
|
||||
2. In [client-side live preview](/docs/live-preview/client), it makes it not "live".
|
||||
|
||||
Consider using the [on-demand HTML converter above](/docs/rich-text/converting-html#on-demand-recommended) or the [JSX converter](/docs/rich-text/converting-jsx) unless you have a good reason.
|
||||
To automatically generate HTML from the saved richText field in your Collection, use the `lexicalHTMLField()` helper. This approach converts the JSON to HTML using an `afterRead` hook. For instance:
|
||||
|
||||
```ts
|
||||
import type { HTMLConvertersFunction } from '@payloadcms/richtext-lexical/html'
|
||||
@@ -174,59 +154,74 @@ const Pages: CollectionConfig = {
|
||||
}
|
||||
```
|
||||
|
||||
## Blocks to HTML
|
||||
### Generating HTML in Your Frontend with Dynamic Population (Advanced)
|
||||
|
||||
If your rich text includes Lexical blocks, you need to provide a way to convert them to HTML. For example:
|
||||
By default, `convertLexicalToHTML` expects fully populated data (e.g. uploads, links, etc.). If you need to dynamically fetch and populate those nodes, use the async variant, `convertLexicalToHTMLAsync`, from `@payloadcms/richtext-lexical/html-async`. You must provide a `populate` function:
|
||||
|
||||
```tsx
|
||||
'use client'
|
||||
|
||||
import type { MyInlineBlock, MyTextBlock } from '@/payload-types'
|
||||
import type {
|
||||
DefaultNodeTypes,
|
||||
SerializedBlockNode,
|
||||
SerializedInlineBlockNode,
|
||||
} from '@payloadcms/richtext-lexical'
|
||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||
|
||||
import {
|
||||
convertLexicalToHTML,
|
||||
type HTMLConvertersFunction,
|
||||
} from '@payloadcms/richtext-lexical/html'
|
||||
import React from 'react'
|
||||
|
||||
type NodeTypes =
|
||||
| DefaultNodeTypes
|
||||
| SerializedBlockNode<MyTextBlock>
|
||||
| SerializedInlineBlockNode<MyInlineBlock>
|
||||
|
||||
const htmlConverters: HTMLConvertersFunction<NodeTypes> = ({
|
||||
defaultConverters,
|
||||
}) => ({
|
||||
...defaultConverters,
|
||||
blocks: {
|
||||
// Each key should match your block's slug
|
||||
myTextBlock: ({ node, providedCSSString }) =>
|
||||
`<div style="background-color: red;${providedCSSString}">${node.fields.text}</div>`,
|
||||
},
|
||||
inlineBlocks: {
|
||||
// Each key should match your inline block's slug
|
||||
myInlineBlock: ({ node, providedStyleTag }) =>
|
||||
`<span${providedStyleTag}>${node.fields.text}</span$>`,
|
||||
},
|
||||
})
|
||||
import { getRestPopulateFn } from '@payloadcms/richtext-lexical/client'
|
||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||
import React, { useEffect, useState } from 'react'
|
||||
|
||||
export const MyComponent = ({ data }: { data: SerializedEditorState }) => {
|
||||
const html = convertLexicalToHTML({
|
||||
converters: htmlConverters,
|
||||
data,
|
||||
})
|
||||
const [html, setHTML] = useState<null | string>(null)
|
||||
useEffect(() => {
|
||||
async function convert() {
|
||||
const html = await convertLexicalToHTMLAsync({
|
||||
data,
|
||||
populate: getRestPopulateFn({
|
||||
apiURL: `http://localhost:3000/api`,
|
||||
}),
|
||||
})
|
||||
setHTML(html)
|
||||
}
|
||||
|
||||
return <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
void convert()
|
||||
}, [data])
|
||||
|
||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
}
|
||||
```
|
||||
|
||||
## HTML to Richtext
|
||||
Using the REST populate function will send a separate request for each node. If you need to populate a large number of nodes, this may be slow. For improved performance on the server, you can use the `getPayloadPopulateFn` function:
|
||||
|
||||
```tsx
|
||||
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
|
||||
|
||||
import { getPayloadPopulateFn } from '@payloadcms/richtext-lexical'
|
||||
import { convertLexicalToHTMLAsync } from '@payloadcms/richtext-lexical/html-async'
|
||||
import { getPayload } from 'payload'
|
||||
import React from 'react'
|
||||
|
||||
import config from '../../config.js'
|
||||
|
||||
export const MyRSCComponent = async ({
|
||||
data,
|
||||
}: {
|
||||
data: SerializedEditorState
|
||||
}) => {
|
||||
const payload = await getPayload({
|
||||
config,
|
||||
})
|
||||
|
||||
const html = await convertLexicalToHTMLAsync({
|
||||
data,
|
||||
populate: await getPayloadPopulateFn({
|
||||
currentDepth: 0,
|
||||
depth: 1,
|
||||
payload,
|
||||
}),
|
||||
})
|
||||
|
||||
return html && <div dangerouslySetInnerHTML={{ __html: html }} />
|
||||
}
|
||||
```
|
||||
|
||||
## Converting HTML to Richtext

If you need to convert raw HTML into a Lexical editor state, use `convertHTMLToLexical` from `@payloadcms/richtext-lexical`, along with the [editorConfigFactory to retrieve the editor config](/docs/rich-text/converters#retrieving-the-editor-config):
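As an editor's illustration (not part of the diff): a minimal sketch of such a conversion. The `JSDOM` argument and the exact shape of `editorConfigFactory.default` are assumptions here; verify them against your installed version.

```ts
import { convertHTMLToLexical, editorConfigFactory } from '@payloadcms/richtext-lexical'
import { JSDOM } from 'jsdom'

import config from './payload.config'

export async function htmlToEditorState(html: string) {
  return convertHTMLToLexical({
    editorConfig: await editorConfigFactory.default({ config }),
    html, // e.g. '<p>Hello <strong>world</strong></p>'
    JSDOM, // used to parse the HTML on the server
  })
}
```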
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and JSX
keywords: lexical, richtext, jsx
---

-## Richtext to JSX
+## Converting Richtext to JSX

To convert richtext to JSX, import the `RichText` component from `@payloadcms/richtext-lexical/react` and pass the richtext content to it:
@@ -28,7 +28,7 @@ The `RichText` component includes built-in converters for common Lexical nodes.
  populated data to work correctly.
</Banner>

-### Internal Links
+### Converting Internal Links

By default, Payload doesn't know how to convert **internal** links to JSX, as it doesn't know what the corresponding URL of the internal link is. You'll notice that you get a "found internal link, but internalDocToHref is not provided" error in the console when you try to render content with internal links.
@@ -81,7 +81,7 @@ export const MyComponent: React.FC<{
}
```

-### Lexical Blocks
+### Converting Lexical Blocks

If your rich text includes custom Blocks or Inline Blocks, you must supply custom converters that match each block's slug. This converter is not included by default, as Payload doesn't know how to render your custom blocks.
@@ -133,7 +133,7 @@ export const MyComponent: React.FC<{
}
```

-### Overriding Converters
+### Overriding Default JSX Converters

You can override any of the default JSX converters by passing your custom converter, keyed to the node type, to the `converters` prop / the converters function.
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and Markdown / MDX
keywords: lexical, richtext, markdown, md, mdx
---

-## Richtext to Markdown
+## Converting Richtext to Markdown

If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert the lexical editor state to Markdown with the following:
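The code block that follows this sentence in the full docs is not included in this hunk; as an editor's illustration, here is a minimal sketch of what that conversion can look like. The `convertLexicalToMarkdown` name and its options are assumptions; verify them against your installed version.

```ts
import {
  convertLexicalToMarkdown,
  editorConfigFactory,
} from '@payloadcms/richtext-lexical'
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'

import config from './payload.config'

export async function toMarkdown(data: SerializedEditorState): Promise<string> {
  return convertLexicalToMarkdown({
    data,
    editorConfig: await editorConfigFactory.default({ config }),
  })
}
```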
@@ -91,7 +91,7 @@ const Pages: CollectionConfig = {
}
```

-## Markdown to Richtext
+## Converting Markdown to Richtext

If you have access to the Payload Config and the [lexical editor config](/docs/rich-text/converters#retrieving-the-editor-config), you can convert Markdown to the lexical editor state with the following:
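Again as an editor's illustration (the original code block is not part of this hunk), a sketch of the reverse direction; `convertMarkdownToLexical` and its options are assumptions to check against your version.

```ts
import {
  convertMarkdownToLexical,
  editorConfigFactory,
} from '@payloadcms/richtext-lexical'

import config from './payload.config'

export async function fromMarkdown(markdown: string) {
  return convertMarkdownToLexical({
    editorConfig: await editorConfigFactory.default({ config }),
    markdown, // e.g. '# Hello\n\nSome **bold** text'
  })
}
```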
@@ -6,7 +6,7 @@ desc: Converting between lexical richtext and plaintext
keywords: lexical, richtext, plaintext, text
---

-## Richtext to Plaintext
+## Converting Richtext to Plaintext

Here's how you can convert richtext data to plaintext using `@payloadcms/richtext-lexical/plaintext`.
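The example itself is not included in this hunk; as an editor's illustration, a minimal sketch, assuming the package exposes `convertLexicalToPlaintext` as its converter:

```ts
import type { SerializedEditorState } from '@payloadcms/richtext-lexical/lexical'
import { convertLexicalToPlaintext } from '@payloadcms/richtext-lexical/plaintext'

// Strips all formatting and returns the text content of the editor state
export function toPlaintext(data: SerializedEditorState): string {
  return convertLexicalToPlaintext({ data })
}
```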
@@ -142,33 +142,32 @@ import { CallToAction } from '../blocks/CallToAction'
|
||||
|
||||
Here's an overview of all the included features:
|
||||
|
||||
| Feature Name | Included by default | Description |
|
||||
| ----------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`BoldFeature`** | Yes | Handles the bold text format |
|
||||
| **`ItalicFeature`** | Yes | Handles the italic text format |
|
||||
| **`UnderlineFeature`** | Yes | Handles the underline text format |
|
||||
| **`StrikethroughFeature`** | Yes | Handles the strikethrough text format |
|
||||
| **`SubscriptFeature`** | Yes | Handles the subscript text format |
|
||||
| **`SuperscriptFeature`** | Yes | Handles the superscript text format |
|
||||
| **`InlineCodeFeature`** | Yes | Handles the inline-code text format |
|
||||
| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs |
|
||||
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
|
||||
| **`AlignFeature`** | Yes | Allows you to align text left, centered and right |
|
||||
| **`IndentFeature`** | Yes | Allows you to indent text with the tab key |
|
||||
| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) |
|
||||
| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) |
|
||||
| **`ChecklistFeature`** | Yes | Adds checklists |
|
||||
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
|
||||
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
|
||||
| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes |
|
||||
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
|
||||
| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `<hr>` element |
|
||||
| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
|
||||
| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
|
||||
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
|
||||
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
|
||||
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
|
||||
| **`EXPERIMENTAL_TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. |
|
||||
| Feature Name | Included by default | Description |
|
||||
| ------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **`BoldFeature`** | Yes | Handles the bold text format |
|
||||
| **`ItalicFeature`** | Yes | Handles the italic text format |
|
||||
| **`UnderlineFeature`** | Yes | Handles the underline text format |
|
||||
| **`StrikethroughFeature`** | Yes | Handles the strikethrough text format |
|
||||
| **`SubscriptFeature`** | Yes | Handles the subscript text format |
|
||||
| **`SuperscriptFeature`** | Yes | Handles the superscript text format |
|
||||
| **`InlineCodeFeature`** | Yes | Handles the inline-code text format |
|
||||
| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs |
|
||||
| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) |
|
||||
| **`AlignFeature`** | Yes | Allows you to align text left, centered and right |
|
||||
| **`IndentFeature`** | Yes | Allows you to indent text with the tab key |
|
||||
| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) |
|
||||
| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) |
|
||||
| **`ChecklistFeature`** | Yes | Adds checklists |
|
||||
| **`LinkFeature`** | Yes | Allows you to create internal and external links |
|
||||
| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents |
|
||||
| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes |
|
||||
| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images |
|
||||
| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `<hr>` element |
|
||||
| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text |
|
||||
| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. |
|
||||
| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. |
|
||||
| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging |
|
||||
| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. |
|
||||
|
||||
Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to!
|
||||
|
||||
|
||||
@@ -74,13 +74,21 @@ export const rootEslintConfig = [
|
||||
'no-console': 'off',
|
||||
'perfectionist/sort-object-types': 'off',
|
||||
'perfectionist/sort-objects': 'off',
|
||||
'payload/no-relative-monorepo-imports': 'off',
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ['packages/eslint-config/**/*.ts'],
|
||||
rules: {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { CollectionConfig } from 'payload/types'
|
||||
|
||||
import { admins } from './access/admins'
|
||||
import { adminsAndUser } from './access/adminsAndUser'
|
||||
import adminsAndUser from './access/adminsAndUser'
|
||||
import { anyone } from './access/anyone'
|
||||
import { checkRole } from './access/checkRole'
|
||||
import { loginAfterCreate } from './hooks/loginAfterCreate'
|
||||
@@ -25,7 +25,6 @@ export const Users: CollectionConfig = {
|
||||
create: anyone,
|
||||
update: adminsAndUser,
|
||||
delete: admins,
|
||||
unlock: admins,
|
||||
admin: ({ req: { user } }) => checkRole(['admin'], user),
|
||||
},
|
||||
hooks: {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Access } from 'payload'
|
||||
import type { Access } from 'payload/config'
|
||||
|
||||
import { checkRole } from './checkRole'
|
||||
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
import type { Access } from 'payload'
|
||||
import type { Access } from 'payload/config'
|
||||
|
||||
import { checkRole } from './checkRole'
|
||||
|
||||
export const adminsAndUser: Access = ({ req: { user } }) => {
|
||||
const adminsAndUser: Access = ({ req: { user } }) => {
|
||||
if (user) {
|
||||
if (checkRole(['admin'], user)) {
|
||||
return true
|
||||
}
|
||||
|
||||
return {
|
||||
id: { equals: user.id },
|
||||
id: user.id,
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
export default adminsAndUser
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
import type { Access } from 'payload'
|
||||
import type { Access } from 'payload/config'
|
||||
|
||||
export const anyone: Access = () => true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { User } from '../../payload-types'
|
||||
|
||||
export const checkRole = (allRoles: User['roles'] = [], user: User | null = null): boolean => {
|
||||
export const checkRole = (allRoles: User['roles'] = [], user: User = undefined): boolean => {
|
||||
if (user) {
|
||||
if (
|
||||
allRoles.some((role) => {
|
||||
@@ -8,9 +8,8 @@ export const checkRole = (allRoles: User['roles'] = [], user: User | null = null
|
||||
return individualRole === role
|
||||
})
|
||||
})
|
||||
) {
|
||||
return true
|
||||
}
|
||||
)
|
||||
{return true}
|
||||
}
|
||||
|
||||
return false
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { FieldHook } from 'payload'
|
||||
import type { FieldHook } from 'payload/types'
|
||||
|
||||
import type { User } from '../../payload-types'
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
||||
import { lexicalEditor } from '@payloadcms/richtext-lexical'
|
||||
import path from 'path'
|
||||
import express from 'express'
|
||||
import { buildConfig } from 'payload'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import express from 'express'
|
||||
import type { Request, Response } from 'express'
|
||||
import { parse } from 'url'
|
||||
import next from 'next'
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { BeforeSync, DocToSync } from '@payloadcms/plugin-search/types'
|
||||
|
||||
export const beforeSyncWithSearch: BeforeSync = async ({ req, originalDoc, searchDoc }) => {
|
||||
export const beforeSyncWithSearch: BeforeSync = async ({ originalDoc, searchDoc, payload }) => {
|
||||
const {
|
||||
doc: { relationTo: collection },
|
||||
} = searchDoc
|
||||
|
||||
const { slug, id, categories, title, meta } = originalDoc
|
||||
const { slug, id, categories, title, meta, excerpt } = originalDoc
|
||||
|
||||
const modifiedDoc: DocToSync = {
|
||||
...searchDoc,
|
||||
@@ -20,40 +20,24 @@ export const beforeSyncWithSearch: BeforeSync = async ({ req, originalDoc, searc
|
||||
}
|
||||
|
||||
if (categories && Array.isArray(categories) && categories.length > 0) {
|
||||
const populatedCategories: { id: string | number; title: string }[] = []
|
||||
for (const category of categories) {
|
||||
if (!category) {
|
||||
continue
|
||||
}
|
||||
// get full categories and keep a flattened copy of their most important properties
|
||||
try {
|
||||
const mappedCategories = categories.map((category) => {
|
||||
const { id, title } = category
|
||||
|
||||
if (typeof category === 'object') {
|
||||
populatedCategories.push(category)
|
||||
continue
|
||||
}
|
||||
|
||||
const doc = await req.payload.findByID({
|
||||
collection: 'categories',
|
||||
id: category,
|
||||
disableErrors: true,
|
||||
depth: 0,
|
||||
select: { title: true },
|
||||
req,
|
||||
return {
|
||||
relationTo: 'categories',
|
||||
id,
|
||||
title,
|
||||
}
|
||||
})
|
||||
|
||||
if (doc !== null) {
|
||||
populatedCategories.push(doc)
|
||||
} else {
|
||||
console.error(
|
||||
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
||||
)
|
||||
}
|
||||
modifiedDoc.categories = mappedCategories
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`Failed. Category not found when syncing collection '${collection}' with id: '${id}' to search.`,
|
||||
)
|
||||
}
|
||||
|
||||
modifiedDoc.categories = populatedCategories.map((each) => ({
|
||||
relationTo: 'categories',
|
||||
categoryID: String(each.id),
|
||||
title: each.title,
|
||||
}))
|
||||
}
|
||||
|
||||
return modifiedDoc
|
||||
|
||||
@@ -52,7 +52,7 @@ export const searchFields: Field[] = [
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
name: 'categoryID',
|
||||
name: 'id',
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "payload-monorepo",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -74,9 +74,9 @@
|
||||
"docker:start": "docker compose -f test/docker-compose.yml up -d",
|
||||
"docker:stop": "docker compose -f test/docker-compose.yml down",
|
||||
"force:build": "pnpm run build:core:force",
|
||||
"lint": "turbo run lint --log-order=grouped --continue",
|
||||
"lint": "turbo run lint --concurrency 1 --continue",
|
||||
"lint-staged": "lint-staged",
|
||||
"lint:fix": "turbo run lint:fix --log-order=grouped --continue",
|
||||
"lint:fix": "turbo run lint:fix --concurrency 1 --continue",
|
||||
"obliterate-playwright-cache-macos": "rm -rf ~/Library/Caches/ms-playwright && find /System/Volumes/Data/private/var/folders -type d -name 'playwright*' -exec rm -rf {} +",
|
||||
"prepare": "husky",
|
||||
"prepare-run-test-against-prod": "pnpm bf && rm -rf test/packed && rm -rf test/node_modules && rm -rf app && rm -f test/pnpm-lock.yaml && pnpm run script:pack --all --no-build --dest test/packed && pnpm runts test/setupProd.ts && cd test && pnpm i --ignore-workspace && cd ..",
|
||||
|
||||
packages/admin-bar/eslint.config.js (18 changes, normal file)
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/admin-bar",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "An admin bar for React apps using Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
packages/create-payload-app/eslint.config.js (19 changes, normal file)
@@ -0,0 +1,19 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
ignores: ['bin/cli.js'],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "create-payload-app",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
packages/create-payload-app/src/lib/constants.ts (8 changes, normal file)
@@ -0,0 +1,8 @@
+import { readFileSync } from 'fs'
+import { fileURLToPath } from 'node:url'
+import path from 'path'
+const filename = fileURLToPath(import.meta.url)
+const dirname = path.dirname(filename)
+
+const packageJson = JSON.parse(readFileSync(path.resolve(dirname, '../../package.json'), 'utf-8'))
+export const PACKAGE_VERSION = packageJson.version
@@ -22,9 +22,7 @@ const updateEnvExampleVariables = (
|
||||
|
||||
const [key] = line.split('=')
|
||||
|
||||
if (!key) {
|
||||
return
|
||||
}
|
||||
if (!key) {return}
|
||||
|
||||
if (key === 'DATABASE_URI' || key === 'POSTGRES_URL' || key === 'MONGODB_URI') {
|
||||
const dbChoice = databaseType ? dbChoiceRecord[databaseType] : null
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import type { ProjectTemplate } from '../types.js'
|
||||
|
||||
import { error, info } from '../utils/log.js'
|
||||
import { PACKAGE_VERSION } from './constants.js'
|
||||
|
||||
export function validateTemplate({ templateName }: { templateName: string }): boolean {
|
||||
export function validateTemplate(templateName: string): boolean {
|
||||
const validTemplates = getValidTemplates()
|
||||
if (!validTemplates.map((t) => t.name).includes(templateName)) {
|
||||
error(`'${templateName}' is not a valid template.`)
|
||||
@@ -19,13 +20,13 @@ export function getValidTemplates(): ProjectTemplate[] {
|
||||
name: 'blank',
|
||||
type: 'starter',
|
||||
description: 'Blank 3.0 Template',
|
||||
url: `https://github.com/payloadcms/payload/templates/blank#main`,
|
||||
url: `https://github.com/payloadcms/payload/templates/blank#v${PACKAGE_VERSION}`,
|
||||
},
|
||||
{
|
||||
name: 'website',
|
||||
type: 'starter',
|
||||
description: 'Website Template',
|
||||
url: `https://github.com/payloadcms/payload/templates/website#main`,
|
||||
url: `https://github.com/payloadcms/payload/templates/website#v${PACKAGE_VERSION}`,
|
||||
},
|
||||
{
|
||||
name: 'plugin',
|
||||
|
||||
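A minimal sketch, assuming PACKAGE_VERSION is the version string read from the CLI's own package.json (the constants.ts file shown above): the templateURL helper below is hypothetical and only illustrates how the release-pinned git ref on one side of this diff differs from the main-branch ref on the other.

import { PACKAGE_VERSION } from './constants.js'

// Hypothetical helper: pick between a release-pinned ref and the main branch.
const templateURL = (template: string, pinned: boolean): string =>
  `https://github.com/payloadcms/payload/templates/${template}#${pinned ? `v${PACKAGE_VERSION}` : 'main'}`

// templateURL('blank', true)  -> 'https://github.com/payloadcms/payload/templates/blank#v3.37.0' (when the installed CLI is 3.37.0)
// templateURL('blank', false) -> 'https://github.com/payloadcms/payload/templates/blank#main'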
@@ -1,3 +1,4 @@
|
||||
import execa from 'execa'
|
||||
import fse from 'fs-extra'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import path from 'path'
|
||||
@@ -8,7 +9,6 @@ const dirname = path.dirname(filename)
|
||||
import type { NextAppDetails } from '../types.js'
|
||||
|
||||
import { copyRecursiveSync } from '../utils/copy-recursive-sync.js'
|
||||
import { getLatestPackageVersion } from '../utils/getLatestPackageVersion.js'
|
||||
import { info } from '../utils/log.js'
|
||||
import { getPackageManager } from './get-package-manager.js'
|
||||
import { installPackages } from './install-packages.js'
|
||||
@@ -36,8 +36,15 @@ export async function updatePayloadInProject(
|
||||
|
||||
const packageManager = await getPackageManager({ projectDir })
|
||||
|
||||
// Fetch latest Payload version
|
||||
const latestPayloadVersion = await getLatestPackageVersion({ packageName: 'payload' })
|
||||
// Fetch latest Payload version from npm
|
||||
const { exitCode: getLatestVersionExitCode, stdout: latestPayloadVersion } = await execa('npm', [
|
||||
'show',
|
||||
'payload',
|
||||
'version',
|
||||
])
|
||||
if (getLatestVersionExitCode !== 0) {
|
||||
throw new Error('Failed to fetch latest Payload version')
|
||||
}
|
||||
|
||||
if (payloadVersion === latestPayloadVersion) {
|
||||
return { message: `Payload v${payloadVersion} is already up to date.`, success: true }
|
||||
|
||||
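A minimal sketch of the npm-based version lookup used in this hunk; the latestVersionViaNpm helper name and its error message are illustrative, since the diff inlines the same execa call directly in updatePayloadInProject.

import execa from 'execa'

// Ask the npm CLI for the latest published version of a package.
async function latestVersionViaNpm(packageName: string): Promise<string> {
  const { exitCode, stdout } = await execa('npm', ['show', packageName, 'version'])

  if (exitCode !== 0) {
    throw new Error(`Failed to fetch latest ${packageName} version`)
  }

  return stdout.trim()
}

// const latestPayloadVersion = await latestVersionViaNpm('payload')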
@@ -8,6 +8,7 @@ import path from 'path'
|
||||
import type { CliArgs } from './types.js'
|
||||
|
||||
import { configurePayloadConfig } from './lib/configure-payload-config.js'
|
||||
import { PACKAGE_VERSION } from './lib/constants.js'
|
||||
import { createProject } from './lib/create-project.js'
|
||||
import { parseExample } from './lib/examples.js'
|
||||
import { generateSecret } from './lib/generate-secret.js'
|
||||
@@ -19,7 +20,6 @@ import { parseTemplate } from './lib/parse-template.js'
|
||||
import { selectDb } from './lib/select-db.js'
|
||||
import { getValidTemplates, validateTemplate } from './lib/templates.js'
|
||||
import { updatePayloadInProject } from './lib/update-payload-in-project.js'
|
||||
import { getLatestPackageVersion } from './utils/getLatestPackageVersion.js'
|
||||
import { debug, error, info } from './utils/log.js'
|
||||
import {
|
||||
feedbackOutro,
|
||||
@@ -78,18 +78,13 @@ export class Main {
|
||||
|
||||
async init(): Promise<void> {
|
||||
try {
|
||||
const debugFlag = this.args['--debug']
|
||||
|
||||
const LATEST_VERSION = await getLatestPackageVersion({
|
||||
debug: debugFlag,
|
||||
packageName: 'payload',
|
||||
})
|
||||
|
||||
if (this.args['--help']) {
|
||||
helpMessage()
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const debugFlag = this.args['--debug']
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('\n')
|
||||
p.intro(chalk.bgCyan(chalk.black(' create-payload-app ')))
|
||||
@@ -205,7 +200,7 @@ export class Main {
|
||||
|
||||
const templateArg = this.args['--template']
|
||||
if (templateArg) {
|
||||
const valid = validateTemplate({ templateName: templateArg })
|
||||
const valid = validateTemplate(templateArg)
|
||||
if (!valid) {
|
||||
helpMessage()
|
||||
process.exit(1)
|
||||
@@ -235,7 +230,7 @@ export class Main {
|
||||
}
|
||||
|
||||
if (debugFlag) {
|
||||
debug(`Using ${exampleArg ? 'examples' : 'templates'} from git tag: v${LATEST_VERSION}`)
|
||||
debug(`Using ${exampleArg ? 'examples' : 'templates'} from git tag: v${PACKAGE_VERSION}`)
|
||||
}
|
||||
|
||||
if (!exampleArg) {
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
/**
|
||||
* Fetches the latest version of a package from the NPM registry.
|
||||
*
|
||||
* Used in determining the latest version of Payload to use in the generated templates.
|
||||
*/
|
||||
export async function getLatestPackageVersion({
|
||||
debug = false,
|
||||
packageName = 'payload',
|
||||
}: {
|
||||
debug?: boolean
|
||||
/**
|
||||
* Package name to fetch the latest version for based on the NPM registry URL
|
||||
*
|
||||
* Eg. for `'payload'`, it will fetch the version from `https://registry.npmjs.org/payload`
|
||||
*
|
||||
* @default 'payload'
|
||||
*/
|
||||
packageName?: string
|
||||
}) {
|
||||
try {
|
||||
const response = await fetch(`https://registry.npmjs.org/${packageName}`)
|
||||
const data = await response.json()
|
||||
const latestVersion = data['dist-tags'].latest
|
||||
|
||||
if (debug) {
|
||||
console.log(`Found latest version of ${packageName}: ${latestVersion}`)
|
||||
}
|
||||
|
||||
return latestVersion
|
||||
} catch (error) {
|
||||
console.error('Error fetching Payload version:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
18
packages/db-mongodb/eslint.config.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-mongodb",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "The officially supported MongoDB database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -372,61 +372,36 @@ const group: FieldSchemaGenerator<GroupField> = (
|
||||
buildSchemaOptions,
|
||||
parentIsLocalized,
|
||||
): void => {
|
||||
if (fieldAffectsData(field)) {
|
||||
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
||||
const formattedBaseSchema = formatBaseSchema({ buildSchemaOptions, field, parentIsLocalized })
|
||||
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields =
|
||||
buildSchemaOptions.indexSortableFields &&
|
||||
field.name === 'version' &&
|
||||
buildSchemaOptions.draftsEnabled
|
||||
// carry indexSortableFields through to versions if drafts enabled
|
||||
const indexSortableFields =
|
||||
buildSchemaOptions.indexSortableFields &&
|
||||
field.name === 'version' &&
|
||||
buildSchemaOptions.draftsEnabled
|
||||
|
||||
const baseSchema: SchemaTypeOptions<any> = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema({
|
||||
buildSchemaOptions: {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
indexSortableFields,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
const baseSchema: SchemaTypeOptions<any> = {
|
||||
...formattedBaseSchema,
|
||||
type: buildSchema({
|
||||
buildSchemaOptions: {
|
||||
disableUnique: buildSchemaOptions.disableUnique,
|
||||
draftsEnabled: buildSchemaOptions.draftsEnabled,
|
||||
indexSortableFields,
|
||||
options: {
|
||||
_id: false,
|
||||
id: false,
|
||||
minimize: false,
|
||||
},
|
||||
configFields: field.fields,
|
||||
parentIsLocalized: parentIsLocalized || field.localized,
|
||||
payload,
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(
|
||||
field,
|
||||
baseSchema,
|
||||
payload.config.localization,
|
||||
parentIsLocalized,
|
||||
),
|
||||
})
|
||||
} else {
|
||||
field.fields.forEach((subField) => {
|
||||
if (fieldIsVirtual(subField)) {
|
||||
return
|
||||
}
|
||||
|
||||
const addFieldSchema = getSchemaGenerator(subField.type)
|
||||
|
||||
if (addFieldSchema) {
|
||||
addFieldSchema(
|
||||
subField,
|
||||
schema,
|
||||
payload,
|
||||
buildSchemaOptions,
|
||||
(parentIsLocalized || field.localized) ?? false,
|
||||
)
|
||||
}
|
||||
})
|
||||
},
|
||||
configFields: field.fields,
|
||||
parentIsLocalized: parentIsLocalized || field.localized,
|
||||
payload,
|
||||
}),
|
||||
}
|
||||
|
||||
schema.add({
|
||||
[field.name]: localizeSchema(field, baseSchema, payload.config.localization, parentIsLocalized),
|
||||
})
|
||||
}
|
||||
|
||||
const json: FieldSchemaGenerator<JSONField> = (
|
||||
|
||||
@@ -20,6 +20,7 @@ type SearchParam = {
|
||||
|
||||
const subQueryOptions = {
|
||||
lean: true,
|
||||
limit: 50,
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -183,7 +184,7 @@ export async function buildSearchParam({
|
||||
select[joinPath] = true
|
||||
}
|
||||
|
||||
const result = await SubModel.find(subQuery).lean().select(select)
|
||||
const result = await SubModel.find(subQuery).lean().limit(50).select(select)
|
||||
|
||||
const $in: unknown[] = []
|
||||
|
||||
|
||||
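A small, hedged sketch of the capped relationship sub-query in this hunk: the chained form from the diff and the subQueryOptions object declared near the top of the file express the same 50-document limit (SubModel, subQuery, and select come from the surrounding buildSearchParam logic and are assumed here).

// Chained form, as in the hunk: lean documents, capped at 50.
const viaChain = await SubModel.find(subQuery).lean().limit(50).select(select)

// Options-object form, using the subQueryOptions declared earlier in the file.
const subQueryOptions = { lean: true, limit: 50 }
const viaOptions = await SubModel.find(subQuery, select, subQueryOptions)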
@@ -57,8 +57,12 @@ const relationshipSort = ({
|
||||
return false
|
||||
}
|
||||
|
||||
for (let i = 0; i < segments.length; i++) {
|
||||
const segment = segments[i]
|
||||
for (const [i, segment] of segments.entries()) {
|
||||
if (versions && i === 0 && segment === 'version') {
|
||||
segments.shift()
|
||||
continue
|
||||
}
|
||||
|
||||
const field = currentFields.find((each) => each.name === segment)
|
||||
|
||||
if (!field) {
|
||||
@@ -67,10 +71,6 @@ const relationshipSort = ({
|
||||
|
||||
if ('fields' in field) {
|
||||
currentFields = field.flattenedFields
|
||||
if (field.name === 'version' && versions && i === 0) {
|
||||
segments.shift()
|
||||
i--
|
||||
}
|
||||
} else if (
|
||||
(field.type === 'relationship' || field.type === 'upload') &&
|
||||
i !== segments.length - 1
|
||||
@@ -106,7 +106,7 @@ const relationshipSort = ({
|
||||
as: `__${path}`,
|
||||
foreignField: '_id',
|
||||
from: foreignCollection.Model.collection.name,
|
||||
localField: versions ? `version.${relationshipPath}` : relationshipPath,
|
||||
localField: relationshipPath,
|
||||
pipeline: [
|
||||
{
|
||||
$project: {
|
||||
@@ -150,18 +150,6 @@ export const buildSortParam = ({
|
||||
sort = [sort]
|
||||
}
|
||||
|
||||
// In the case of Mongo, when sorting by a field that is not unique, the results are not guaranteed to be in the same order each time.
|
||||
// So we add a fallback sort to ensure that the results are always in the same order.
|
||||
let fallbackSort = '-id'
|
||||
|
||||
if (timestamps) {
|
||||
fallbackSort = '-createdAt'
|
||||
}
|
||||
|
||||
if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
|
||||
sort.push(fallbackSort)
|
||||
}
|
||||
|
||||
const sorting = sort.reduce<Record<string, string>>((acc, item) => {
|
||||
let sortProperty: string
|
||||
let sortDirection: SortDirection
|
||||
|
||||
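A short worked example of the fallback-sort rule described in the comment above (illustrative only; sort stands in for the normalized array of sort strings and timestamps mirrors the flag used by buildSortParam).

// Append a stable tiebreaker so results keep a deterministic order when the
// requested sort field is not unique.
const sort: string[] = ['title']
const timestamps = true

const fallbackSort = timestamps ? '-createdAt' : '-id'

if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
  sort.push(fallbackSort)
}

// sort is now ['title', '-createdAt']; a request already sorting by createdAt
// (or -createdAt) is left untouched.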
@@ -105,7 +105,6 @@ export const sanitizeQueryValue = ({
|
||||
| undefined => {
|
||||
let formattedValue = val
|
||||
let formattedOperator = operator
|
||||
|
||||
if (['array', 'blocks', 'group', 'tab'].includes(field.type) && path.includes('.')) {
|
||||
const segments = path.split('.')
|
||||
segments.shift()
|
||||
|
||||
@@ -151,7 +151,6 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
|
||||
query: versionQuery,
|
||||
session: paginationOptions.options?.session ?? undefined,
|
||||
sort: paginationOptions.sort as object,
|
||||
sortAggregation,
|
||||
useEstimatedCount: paginationOptions.useEstimatedCount,
|
||||
})
|
||||
} else {
|
||||
|
||||
@@ -128,6 +128,7 @@ const traverseFields = ({
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
case 'blocks': {
|
||||
const blocksSelect = select[field.name] as SelectType
|
||||
|
||||
|
||||
@@ -425,7 +425,6 @@ export const transform = ({
|
||||
for (const locale of config.localization.localeCodes) {
|
||||
sanitizeDate({
|
||||
field,
|
||||
locale,
|
||||
ref: fieldRef,
|
||||
value: fieldRef[locale],
|
||||
})
|
||||
|
||||
18
packages/db-postgres/eslint.config.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-postgres",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "The officially supported Postgres database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
18
packages/db-sqlite/eslint.config.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-sqlite",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "The officially supported SQLite database adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
18
packages/db-vercel-postgres/eslint.config.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { rootEslintConfig, rootParserOptions } from '../../eslint.config.js'
|
||||
|
||||
/** @typedef {import('eslint').Linter.Config} Config */
|
||||
|
||||
/** @type {Config[]} */
|
||||
export const index = [
|
||||
...rootEslintConfig,
|
||||
{
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
...rootParserOptions,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
export default index
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/db-vercel-postgres",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "Vercel Postgres adapter for Payload",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@payloadcms/drizzle",
|
||||
"version": "3.39.1",
|
||||
"version": "3.37.0",
|
||||
"description": "A library of shared functions used by different payload database adapters",
|
||||
"homepage": "https://payloadcms.com",
|
||||
"repository": {
|
||||
@@ -53,7 +53,6 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"console-table-printer": "2.12.1",
|
||||
"dequal": "2.0.3",
|
||||
"drizzle-orm": "0.36.1",
|
||||
"prompts": "2.4.2",
|
||||
"to-snake-case": "1.0.0",
|
||||
|
||||
@@ -4,7 +4,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const count: Count = async function count(
|
||||
|
||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const countGlobalVersions: CountGlobalVersions = async function countGlobalVersions(
|
||||
|
||||
@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
export const countVersions: CountVersions = async function countVersions(
|
||||
|
||||
@@ -23,10 +23,10 @@ export async function createGlobal<T extends Record<string, unknown>>(
|
||||
data,
|
||||
db,
|
||||
fields: globalConfig.flattenedFields,
|
||||
ignoreResult: returning === false,
|
||||
operation: 'create',
|
||||
req,
|
||||
tableName,
|
||||
ignoreResult: returning === false,
|
||||
})
|
||||
|
||||
if (returning === false) {
|
||||
|
||||
@@ -17,11 +17,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
globalSlug,
|
||||
publishedLocale,
|
||||
req,
|
||||
returning,
|
||||
select,
|
||||
snapshot,
|
||||
updatedAt,
|
||||
versionData,
|
||||
returning,
|
||||
}: CreateGlobalVersionArgs,
|
||||
) {
|
||||
const db = await getTransaction(this, req)
|
||||
@@ -42,11 +42,11 @@ export async function createGlobalVersion<T extends TypeWithID>(
|
||||
},
|
||||
db,
|
||||
fields: buildVersionGlobalFields(this.payload.config, global, true),
|
||||
ignoreResult: returning === false ? 'idOnly' : false,
|
||||
operation: 'create',
|
||||
req,
|
||||
select,
|
||||
tableName,
|
||||
ignoreResult: returning === false ? 'idOnly' : false,
|
||||
})
|
||||
|
||||
const table = this.tables[tableName]
|
||||
|
||||
@@ -18,11 +18,11 @@ export async function createVersion<T extends TypeWithID>(
|
||||
parent,
|
||||
publishedLocale,
|
||||
req,
|
||||
returning,
|
||||
select,
|
||||
snapshot,
|
||||
updatedAt,
|
||||
versionData,
|
||||
returning,
|
||||
}: CreateVersionArgs<T>,
|
||||
) {
|
||||
const db = await getTransaction(this, req)
|
||||
|
||||
@@ -6,7 +6,7 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { DrizzleAdapter } from './types.js'
|
||||
|
||||
import { buildFindManyArgs } from './find/buildFindManyArgs.js'
|
||||
import { buildQuery } from './queries/buildQuery.js'
|
||||
import buildQuery from './queries/buildQuery.js'
|
||||
import { selectDistinct } from './queries/selectDistinct.js'
|
||||
import { transform } from './transform/read/index.js'
|
||||
import { getTransaction } from './utilities/getTransaction.js'
|
||||
|
||||
@@ -4,10 +4,9 @@ import { inArray } from 'drizzle-orm'
|
||||
|
||||
import type { DrizzleAdapter } from '../types.js'
|
||||
|
||||
import { buildQuery } from '../queries/buildQuery.js'
|
||||
import buildQuery from '../queries/buildQuery.js'
|
||||
import { selectDistinct } from '../queries/selectDistinct.js'
|
||||
import { transform } from '../transform/read/index.js'
|
||||
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||
import { getTransaction } from '../utilities/getTransaction.js'
|
||||
import { buildFindManyArgs } from './buildFindManyArgs.js'
|
||||
|
||||
@@ -76,26 +75,6 @@ export const findMany = async function find({
|
||||
tableName,
|
||||
versions,
|
||||
})
|
||||
|
||||
if (orderBy) {
|
||||
for (const key in selectFields) {
|
||||
const column = selectFields[key]
|
||||
if (column.primary) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (
|
||||
!orderBy.some(
|
||||
(col) =>
|
||||
col.column.name === column.name &&
|
||||
getNameFromDrizzleTable(col.column.table) === getNameFromDrizzleTable(column.table),
|
||||
)
|
||||
) {
|
||||
delete selectFields[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const selectDistinctResult = await selectDistinct({
|
||||
adapter,
|
||||
db,
|
||||
|
||||
@@ -19,17 +19,12 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { BuildQueryJoinAliases, DrizzleAdapter } from '../types.js'
|
||||
import type { Result } from './buildFindManyArgs.js'
|
||||
|
||||
import { buildQuery } from '../queries/buildQuery.js'
|
||||
import buildQuery from '../queries/buildQuery.js'
|
||||
import { getTableAlias } from '../queries/getTableAlias.js'
|
||||
import { operatorMap } from '../queries/operatorMap.js'
|
||||
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
|
||||
import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js'
|
||||
import { jsonAggBuildObject } from '../utilities/json.js'
|
||||
import { rawConstraint } from '../utilities/rawConstraint.js'
|
||||
import {
|
||||
InternalBlockTableNameIndex,
|
||||
resolveBlockTableName,
|
||||
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
|
||||
const flattenAllWherePaths = (where: Where, paths: string[]) => {
|
||||
for (const k in where) {
|
||||
@@ -201,12 +196,7 @@ export const traverseFields = ({
|
||||
}
|
||||
}
|
||||
|
||||
const relationName = getArrayRelationName({
|
||||
field,
|
||||
path: `${path}${field.name}`,
|
||||
tableName: arrayTableName,
|
||||
})
|
||||
|
||||
const relationName = field.dbName ? `_${arrayTableName}` : `${path}${field.name}`
|
||||
currentArgs.with[relationName] = withArray
|
||||
|
||||
traverseFields({
|
||||
@@ -254,7 +244,7 @@ export const traverseFields = ({
|
||||
|
||||
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||
const blockKey = `_blocks_${block.slug}${!block[InternalBlockTableNameIndex] ? '' : `_${block[InternalBlockTableNameIndex]}`}`
|
||||
const blockKey = `_blocks_${block.slug}`
|
||||
|
||||
let blockSelect: boolean | SelectType | undefined
|
||||
|
||||
@@ -294,9 +284,8 @@ export const traverseFields = ({
|
||||
with: {},
|
||||
}
|
||||
|
||||
const tableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||
const tableName = adapter.tableNameMap.get(
|
||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
if (typeof blockSelect === 'object') {
|
||||
|
||||
@@ -23,7 +23,7 @@ export { migrateFresh } from './migrateFresh.js'
|
||||
export { migrateRefresh } from './migrateRefresh.js'
|
||||
export { migrateReset } from './migrateReset.js'
|
||||
export { migrateStatus } from './migrateStatus.js'
|
||||
export { buildQuery } from './queries/buildQuery.js'
|
||||
export { default as buildQuery } from './queries/buildQuery.js'
|
||||
export { operatorMap } from './queries/operatorMap.js'
|
||||
export type { Operators } from './queries/operatorMap.js'
|
||||
export { parseParams } from './queries/parseParams.js'
|
||||
|
||||
@@ -28,8 +28,6 @@ export async function migrateReset(this: DrizzleAdapter): Promise<void> {
|
||||
|
||||
const req = await createLocalReq({}, payload)
|
||||
|
||||
existingMigrations.reverse()
|
||||
|
||||
// Rollback all migrations in order
|
||||
for (const migration of existingMigrations) {
|
||||
const migrationFile = migrationFiles.find((m) => m.name === migration.name)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { FlattenedField } from 'payload'
|
||||
import type { FlattenedBlock, FlattenedField } from 'payload'
|
||||
|
||||
type Args = {
|
||||
doc: Record<string, unknown>
|
||||
@@ -54,7 +54,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||
const matchedBlock = field.blocks.find(
|
||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||
)
|
||||
) as FlattenedBlock | undefined
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
@@ -75,7 +75,7 @@ export const traverseFields = ({ doc, fields, locale, path, rows }: Args) => {
|
||||
// Can ignore string blocks, as those were added in v3 and don't need to be migrated
|
||||
const matchedBlock = field.blocks.find(
|
||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||
)
|
||||
) as FlattenedBlock | undefined
|
||||
|
||||
if (matchedBlock) {
|
||||
return traverseFields({
|
||||
|
||||
@@ -1,126 +1,49 @@
|
||||
export type Groups =
|
||||
| 'addColumn'
|
||||
| 'addConstraint'
|
||||
| 'alterType'
|
||||
| 'createIndex'
|
||||
| 'createTable'
|
||||
| 'createType'
|
||||
| 'disableRowSecurity'
|
||||
| 'dropColumn'
|
||||
| 'dropConstraint'
|
||||
| 'dropIndex'
|
||||
| 'dropTable'
|
||||
| 'dropType'
|
||||
| 'notNull'
|
||||
| 'renameColumn'
|
||||
| 'setDefault'
|
||||
|
||||
/**
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement.
|
||||
* Works with or without a schema name.
|
||||
*
|
||||
* Examples:
|
||||
* 'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||
* => 'ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||
*
|
||||
* 'ALTER TABLE "public"."pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
|
||||
* => 'ALTER TABLE "public"."pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;'
|
||||
* Convert an "ADD COLUMN" statement to an "ALTER COLUMN" statement
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;
|
||||
* to: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
* @param sql
|
||||
*/
|
||||
function convertAddColumnToAlterColumn(sql) {
|
||||
// Regular expression to match the ADD COLUMN statement with its constraints
|
||||
const regex = /ALTER TABLE ((?:"[^"]+"\.)?"[^"]+") ADD COLUMN ("[^"]+") [^;]*?NOT NULL;/i
|
||||
const regex = /ALTER TABLE ("[^"]+")\.(".*?") ADD COLUMN ("[^"]+") [\w\s]+ NOT NULL;/
|
||||
|
||||
// Replace the matched part with "ALTER COLUMN ... SET NOT NULL;"
|
||||
return sql.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;')
|
||||
return sql.replace(regex, 'ALTER TABLE $1.$2 ALTER COLUMN $3 SET NOT NULL;')
|
||||
}
|
||||
|
||||
export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> => {
|
||||
const groups = {
|
||||
/**
|
||||
* example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
*/
|
||||
addColumn: 'ADD COLUMN',
|
||||
// example: ALTER TABLE "posts" ADD COLUMN "category_id" integer
|
||||
|
||||
/**
|
||||
* example:
|
||||
* DO $$ BEGIN
|
||||
* ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
* EXCEPTION
|
||||
* WHEN duplicate_object THEN null;
|
||||
* END $$;
|
||||
*/
|
||||
addConstraint: 'ADD CONSTRAINT',
|
||||
//example:
|
||||
// DO $$ BEGIN
|
||||
// ALTER TABLE "pages_blocks_my_block" ADD CONSTRAINT "pages_blocks_my_block_person_id_users_id_fk" FOREIGN KEY ("person_id") REFERENCES "users"("id") ON DELETE cascade ON UPDATE no action;
|
||||
// EXCEPTION
|
||||
// WHEN duplicate_object THEN null;
|
||||
// END $$;
|
||||
|
||||
/**
|
||||
* example: CREATE TABLE IF NOT EXISTS "payload_locked_documents" (
|
||||
* "id" serial PRIMARY KEY NOT NULL,
|
||||
* "global_slug" varchar,
|
||||
* "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL,
|
||||
* "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL
|
||||
* );
|
||||
*/
|
||||
createTable: 'CREATE TABLE',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
*/
|
||||
dropColumn: 'DROP COLUMN',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP COLUMN IF EXISTS "posts_id";
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
*/
|
||||
dropConstraint: 'DROP CONSTRAINT',
|
||||
// example: ALTER TABLE "_posts_v_rels" DROP CONSTRAINT "_posts_v_rels_posts_fk";
|
||||
|
||||
/**
|
||||
* example: DROP TABLE "pages_rels";
|
||||
*/
|
||||
dropTable: 'DROP TABLE',
|
||||
// example: DROP TABLE "pages_rels";
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
*/
|
||||
notNull: 'NOT NULL',
|
||||
|
||||
/**
|
||||
* example: CREATE TYPE "public"."enum__pages_v_published_locale" AS ENUM('en', 'es');
|
||||
*/
|
||||
createType: 'CREATE TYPE',
|
||||
|
||||
/**
|
||||
* example: ALTER TYPE "public"."enum_pages_blocks_cta" ADD VALUE 'copy';
|
||||
*/
|
||||
alterType: 'ALTER TYPE',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "categories_rels" DISABLE ROW LEVEL SECURITY;
|
||||
*/
|
||||
disableRowSecurity: 'DISABLE ROW LEVEL SECURITY;',
|
||||
|
||||
/**
|
||||
* example: DROP INDEX IF EXISTS "pages_title_idx";
|
||||
*/
|
||||
dropIndex: 'DROP INDEX IF EXISTS',
|
||||
|
||||
/**
|
||||
* example: ALTER TABLE "pages" ALTER COLUMN "_status" SET DEFAULT 'draft';
|
||||
*/
|
||||
setDefault: 'SET DEFAULT',
|
||||
|
||||
/**
|
||||
* example: CREATE INDEX IF NOT EXISTS "payload_locked_documents_global_slug_idx" ON "payload_locked_documents" USING btree ("global_slug");
|
||||
*/
|
||||
createIndex: 'INDEX IF NOT EXISTS',
|
||||
|
||||
/**
|
||||
* example: DROP TYPE "public"."enum__pages_v_published_locale";
|
||||
*/
|
||||
dropType: 'DROP TYPE',
|
||||
|
||||
/**
|
||||
* columns were renamed from camelCase to snake_case
|
||||
* example: ALTER TABLE "forms" RENAME COLUMN "confirmationType" TO "confirmation_type";
|
||||
*/
|
||||
renameColumn: 'RENAME COLUMN',
|
||||
// example: ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
|
||||
}
|
||||
|
||||
const result = Object.keys(groups).reduce((result, group: Groups) => {
|
||||
@@ -128,17 +51,7 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
||||
return result
|
||||
}, {}) as Record<Groups, string[]>
|
||||
|
||||
// push multi-line changes to a single grouping
|
||||
let isCreateTable = false
|
||||
|
||||
for (const line of list) {
|
||||
if (isCreateTable) {
|
||||
result.createTable.push(line)
|
||||
if (line.includes(');')) {
|
||||
isCreateTable = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
Object.entries(groups).some(([key, value]) => {
|
||||
if (line.endsWith('NOT NULL;')) {
|
||||
// split up the ADD COLUMN and ALTER COLUMN NOT NULL statements
|
||||
@@ -151,11 +64,7 @@ export const groupUpSQLStatements = (list: string[]): Record<Groups, string[]> =
|
||||
return true
|
||||
}
|
||||
if (line.includes(value)) {
|
||||
let statement = line
|
||||
if (key === 'dropConstraint') {
|
||||
statement = line.replace('" DROP CONSTRAINT "', '" DROP CONSTRAINT IF EXISTS "')
|
||||
}
|
||||
result[key].push(statement)
|
||||
result[key].push(line)
|
||||
return true
|
||||
}
|
||||
})
|
||||
|
||||
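A worked example of the schema-aware convertAddColumnToAlterColumn regex from this file, run against the two statements quoted in its doc comment; nothing beyond those examples is assumed.

const regex = /ALTER TABLE ((?:"[^"]+"\.)?"[^"]+") ADD COLUMN ("[^"]+") [^;]*?NOT NULL;/i

const withoutSchema =
  'ALTER TABLE "pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'
const withSchema =
  'ALTER TABLE "public"."pages_blocks_my_block" ADD COLUMN "person_id" integer NOT NULL;'

// The optional ("[^"]+"\.)? group lets the table reference carry a schema prefix or not.
console.log(withoutSchema.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;'))
// ALTER TABLE "pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;
console.log(withSchema.replace(regex, 'ALTER TABLE $1 ALTER COLUMN $2 SET NOT NULL;'))
// ALTER TABLE "public"."pages_blocks_my_block" ALTER COLUMN "person_id" SET NOT NULL;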
@@ -20,17 +20,6 @@ type Args = {
|
||||
req?: Partial<PayloadRequest>
|
||||
}
|
||||
|
||||
const runStatementGroup = async ({ adapter, db, debug, statements }) => {
|
||||
const addColumnsStatement = statements.join('\n')
|
||||
|
||||
if (debug) {
|
||||
adapter.payload.logger.info(debug)
|
||||
adapter.payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves upload and relationship columns from the join table and into the tables while moving data
|
||||
* This is done in the following order:
|
||||
@@ -51,7 +40,16 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
|
||||
// get the drizzle migrateUpSQL from drizzle using the last schema
|
||||
const { generateDrizzleJson, generateMigration, upSnapshot } = adapter.requireDrizzleKit()
|
||||
const drizzleJsonAfter = generateDrizzleJson(adapter.schema) as DrizzleSnapshotJSON
|
||||
|
||||
const toSnapshot: Record<string, unknown> = {}
|
||||
|
||||
for (const key of Object.keys(adapter.schema).filter(
|
||||
(key) => !key.startsWith('payload_locked_documents'),
|
||||
)) {
|
||||
toSnapshot[key] = adapter.schema[key]
|
||||
}
|
||||
|
||||
const drizzleJsonAfter = generateDrizzleJson(toSnapshot) as DrizzleSnapshotJSON
|
||||
|
||||
// Get the previous migration snapshot
|
||||
const previousSnapshot = fs
|
||||
@@ -83,62 +81,18 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
|
||||
const sqlUpStatements = groupUpSQLStatements(generatedSQL)
|
||||
|
||||
const addColumnsStatement = sqlUpStatements.addColumn.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('CREATING NEW RELATIONSHIP COLUMNS')
|
||||
payload.logger.info(addColumnsStatement)
|
||||
}
|
||||
|
||||
const db = await getTransaction(adapter, req)
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING TYPES' : null,
|
||||
statements: sqlUpStatements.createType,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'ALTERING TYPES' : null,
|
||||
statements: sqlUpStatements.alterType,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING TABLES' : null,
|
||||
statements: sqlUpStatements.createTable,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'RENAMING COLUMNS' : null,
|
||||
statements: sqlUpStatements.renameColumn,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING NEW RELATIONSHIP COLUMNS' : null,
|
||||
statements: sqlUpStatements.addColumn,
|
||||
})
|
||||
|
||||
// SET DEFAULTS
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'SETTING DEFAULTS' : null,
|
||||
statements: sqlUpStatements.setDefault,
|
||||
})
|
||||
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'CREATING INDEXES' : null,
|
||||
statements: sqlUpStatements.createIndex,
|
||||
})
|
||||
await db.execute(sql.raw(addColumnsStatement))
|
||||
|
||||
for (const collection of payload.config.collections) {
|
||||
if (collection.slug === 'payload-locked-documents') {
|
||||
continue
|
||||
}
|
||||
const tableName = adapter.tableNameMap.get(toSnakeCase(collection.slug))
|
||||
const pathsToQuery: PathsToQuery = new Set()
|
||||
|
||||
@@ -284,58 +238,52 @@ export const migratePostgresV2toV3 = async ({ debug, payload, req }: Args) => {
|
||||
}
|
||||
|
||||
// ADD CONSTRAINT
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'ADDING CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.addConstraint,
|
||||
})
|
||||
const addConstraintsStatement = sqlUpStatements.addConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('ADDING CONSTRAINTS')
|
||||
payload.logger.info(addConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(addConstraintsStatement))
|
||||
|
||||
// NOT NULL
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'NOT NULL CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.notNull,
|
||||
})
|
||||
const notNullStatements = sqlUpStatements.notNull.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('NOT NULL CONSTRAINTS')
|
||||
payload.logger.info(notNullStatements)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(notNullStatements))
|
||||
|
||||
// DROP TABLE
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING TABLES' : null,
|
||||
statements: sqlUpStatements.dropTable,
|
||||
})
|
||||
const dropTablesStatement = sqlUpStatements.dropTable.join('\n')
|
||||
|
||||
// DROP INDEX
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING INDEXES' : null,
|
||||
statements: sqlUpStatements.dropIndex,
|
||||
})
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING TABLES')
|
||||
payload.logger.info(dropTablesStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropTablesStatement))
|
||||
|
||||
// DROP CONSTRAINT
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING CONSTRAINTS' : null,
|
||||
statements: sqlUpStatements.dropConstraint,
|
||||
})
|
||||
const dropConstraintsStatement = sqlUpStatements.dropConstraint.join('\n')
|
||||
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING CONSTRAINTS')
|
||||
payload.logger.info(dropConstraintsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropConstraintsStatement))
|
||||
|
||||
// DROP COLUMN
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING COLUMNS' : null,
|
||||
statements: sqlUpStatements.dropColumn,
|
||||
})
|
||||
const dropColumnsStatement = sqlUpStatements.dropColumn.join('\n')
|
||||
|
||||
// DROP TYPES
|
||||
await runStatementGroup({
|
||||
adapter,
|
||||
db,
|
||||
debug: debug ? 'DROPPING TYPES' : null,
|
||||
statements: sqlUpStatements.dropType,
|
||||
})
|
||||
if (debug) {
|
||||
payload.logger.info('DROPPING COLUMNS')
|
||||
payload.logger.info(dropColumnsStatement)
|
||||
}
|
||||
|
||||
await db.execute(sql.raw(dropColumnsStatement))
|
||||
}
|
||||
|
||||
@@ -56,7 +56,7 @@ export const migrateRelationships = async ({
|
||||
${where} ORDER BY parent_id LIMIT 500 OFFSET ${offset * 500};
|
||||
`
|
||||
|
||||
paginationResult = await db.execute(sql.raw(`${paginationStatement}`))
|
||||
paginationResult = await adapter.drizzle.execute(sql.raw(`${paginationStatement}`))
|
||||
|
||||
if (paginationResult.rows.length === 0) {
|
||||
return
|
||||
@@ -72,7 +72,7 @@ export const migrateRelationships = async ({
|
||||
payload.logger.info(statement)
|
||||
}
|
||||
|
||||
const result = await db.execute(sql.raw(`${statement}`))
|
||||
const result = await adapter.drizzle.execute(sql.raw(`${statement}`))
|
||||
|
||||
const docsToResave: DocsToResave = {}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { SQL, Table } from 'drizzle-orm'
|
||||
import type { Table } from 'drizzle-orm'
|
||||
import type { FlattenedField, Sort } from 'payload'
|
||||
|
||||
import { asc, desc } from 'drizzle-orm'
|
||||
@@ -16,7 +16,6 @@ type Args = {
|
||||
joins: BuildQueryJoinAliases
|
||||
locale?: string
|
||||
parentIsLocalized: boolean
|
||||
rawSort?: SQL
|
||||
selectFields: Record<string, GenericColumn>
|
||||
sort?: Sort
|
||||
tableName: string
|
||||
@@ -32,16 +31,14 @@ export const buildOrderBy = ({
|
||||
joins,
|
||||
locale,
|
||||
parentIsLocalized,
|
||||
rawSort,
|
||||
selectFields,
|
||||
sort,
|
||||
tableName,
|
||||
}: Args): BuildQueryResult['orderBy'] => {
|
||||
const orderBy: BuildQueryResult['orderBy'] = []
|
||||
|
||||
const createdAt = adapter.tables[tableName]?.createdAt
|
||||
|
||||
if (!sort) {
|
||||
const createdAt = adapter.tables[tableName]?.createdAt
|
||||
if (createdAt) {
|
||||
sort = '-createdAt'
|
||||
} else {
|
||||
@@ -53,18 +50,6 @@ export const buildOrderBy = ({
|
||||
sort = [sort]
|
||||
}
|
||||
|
||||
// In the case of Mongo, when sorting by a field that is not unique, the results are not guaranteed to be in the same order each time.
|
||||
// So we add a fallback sort to ensure that the results are always in the same order.
|
||||
let fallbackSort = '-id'
|
||||
|
||||
if (createdAt) {
|
||||
fallbackSort = '-createdAt'
|
||||
}
|
||||
|
||||
if (!(sort.includes(fallbackSort) || sort.includes(fallbackSort.replace('-', '')))) {
|
||||
sort.push(fallbackSort)
|
||||
}
|
||||
|
||||
for (const sortItem of sort) {
|
||||
let sortProperty: string
|
||||
let sortDirection: 'asc' | 'desc'
|
||||
@@ -89,23 +74,17 @@ export const buildOrderBy = ({
|
||||
value: sortProperty,
|
||||
})
|
||||
if (sortTable?.[sortTableColumnName]) {
|
||||
let order = sortDirection === 'asc' ? asc : desc
|
||||
|
||||
if (rawSort) {
|
||||
order = () => rawSort
|
||||
}
|
||||
|
||||
orderBy.push({
|
||||
column:
|
||||
aliasTable && tableName === getNameFromDrizzleTable(sortTable)
|
||||
? aliasTable[sortTableColumnName]
|
||||
: sortTable[sortTableColumnName],
|
||||
order,
|
||||
order: sortDirection === 'asc' ? asc : desc,
|
||||
})
|
||||
|
||||
selectFields[sortTableColumnName] = sortTable[sortTableColumnName]
|
||||
}
|
||||
} catch (_) {
|
||||
} catch (err) {
|
||||
// continue
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,8 +37,7 @@ export type BuildQueryResult = {
|
||||
selectFields: Record<string, GenericColumn>
|
||||
where: SQL
|
||||
}
|
||||
|
||||
export const buildQuery = function buildQuery({
|
||||
const buildQuery = function buildQuery({
|
||||
adapter,
|
||||
aliasTable,
|
||||
fields,
|
||||
@@ -80,7 +79,6 @@ export const buildQuery = function buildQuery({
|
||||
joins,
|
||||
locale,
|
||||
parentIsLocalized,
|
||||
rawSort: context.rawSort,
|
||||
selectFields,
|
||||
sort: context.sort,
|
||||
tableName,
|
||||
@@ -93,3 +91,5 @@ export const buildQuery = function buildQuery({
|
||||
where,
|
||||
}
|
||||
}
|
||||
|
||||
export default buildQuery
|
||||
|
||||
@@ -19,7 +19,6 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js'
|
||||
import type { BuildQueryJoinAliases } from './buildQuery.js'
|
||||
|
||||
import { isPolymorphicRelationship } from '../utilities/isPolymorphicRelationship.js'
|
||||
import { resolveBlockTableName } from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
import { addJoinTable } from './addJoinTable.js'
|
||||
import { getTableAlias } from './getTableAlias.js'
|
||||
|
||||
@@ -194,9 +193,8 @@ export const getTableColumnFromPath = ({
|
||||
(block) => typeof block !== 'string' && block.slug === blockType,
|
||||
) as FlattenedBlock | undefined)
|
||||
|
||||
newTableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||
newTableName = adapter.tableNameMap.get(
|
||||
`${tableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
const { newAliasTable } = getTableAlias({ adapter, tableName: newTableName })
|
||||
@@ -222,11 +220,7 @@ export const getTableColumnFromPath = ({
|
||||
const hasBlockField = (field.blockReferences ?? field.blocks).some((_block) => {
|
||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||
|
||||
newTableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||
)
|
||||
|
||||
newTableName = adapter.tableNameMap.get(`${tableName}_blocks_${toSnakeCase(block.slug)}`)
|
||||
constraintPath = `${constraintPath}${field.name}.%.`
|
||||
|
||||
let result: TableColumn
|
||||
@@ -280,7 +274,7 @@ export const getTableColumnFromPath = ({
|
||||
tableName: newTableName,
|
||||
value,
|
||||
})
|
||||
} catch (_) {
|
||||
} catch (error) {
|
||||
// this is fine, not every block will have the field
|
||||
}
|
||||
if (!result) {
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
notInArray,
|
||||
or,
|
||||
type SQL,
|
||||
type SQLWrapper,
|
||||
} from 'drizzle-orm'
|
||||
|
||||
type OperatorKeys =
|
||||
@@ -34,7 +35,7 @@ type OperatorKeys =
|
||||
| 'not_like'
|
||||
| 'or'
|
||||
|
||||
export type Operators = Record<OperatorKeys, (column: Column, value: unknown) => SQL>
|
||||
export type Operators = Record<OperatorKeys, (column: Column, value: SQLWrapper | unknown) => SQL>
|
||||
|
||||
export const operatorMap: Operators = {
|
||||
and,
|
||||
|
||||
@@ -14,7 +14,7 @@ import { buildAndOrConditions } from './buildAndOrConditions.js'
|
||||
import { getTableColumnFromPath } from './getTableColumnFromPath.js'
|
||||
import { sanitizeQueryValue } from './sanitizeQueryValue.js'
|
||||
|
||||
export type QueryContext = { rawSort?: SQL; sort: Sort }
|
||||
export type QueryContext = { sort: Sort }
|
||||
|
||||
type Args = {
|
||||
adapter: DrizzleAdapter
|
||||
@@ -348,7 +348,6 @@ export function parseParams({
|
||||
}
|
||||
if (geoConstraints.length) {
|
||||
context.sort = relationOrPath
|
||||
context.rawSort = sql`${table[columnName]} <-> ST_SetSRID(ST_MakePoint(${lng}, ${lat}), 4326)`
|
||||
constraints.push(and(...geoConstraints))
|
||||
}
|
||||
break
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { QueryPromise, SQL } from 'drizzle-orm'
|
||||
import type { PgSelect } from 'drizzle-orm/pg-core'
|
||||
import type { SQLiteColumn, SQLiteSelect } from 'drizzle-orm/sqlite-core'
|
||||
|
||||
import type {
|
||||
|
||||
@@ -32,7 +32,6 @@ type Args = {
|
||||
* ie. indexes, multiple columns, etc
|
||||
*/
|
||||
baseIndexes?: Record<string, RawIndex>
|
||||
blocksTableNameMap: Record<string, number>
|
||||
buildNumbers?: boolean
|
||||
buildRelationships?: boolean
|
||||
compoundIndexes?: SanitizedCompoundIndex[]
|
||||
@@ -71,7 +70,6 @@ export const buildTable = ({
|
||||
baseColumns = {},
|
||||
baseForeignKeys = {},
|
||||
baseIndexes = {},
|
||||
blocksTableNameMap,
|
||||
compoundIndexes,
|
||||
disableNotNull,
|
||||
disableRelsTableUnique = false,
|
||||
@@ -122,7 +120,6 @@ export const buildTable = ({
|
||||
hasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
blocksTableNameMap,
|
||||
columns,
|
||||
disableNotNull,
|
||||
disableRelsTableUnique,
|
||||
|
||||
@@ -56,7 +56,6 @@ export const buildRawSchema = ({
|
||||
|
||||
buildTable({
|
||||
adapter,
|
||||
blocksTableNameMap: {},
|
||||
compoundIndexes: collection.sanitizedIndexes,
|
||||
disableNotNull: !!collection?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
@@ -76,7 +75,6 @@ export const buildRawSchema = ({
|
||||
|
||||
buildTable({
|
||||
adapter,
|
||||
blocksTableNameMap: {},
|
||||
compoundIndexes: buildVersionCompoundIndexes({ indexes: collection.sanitizedIndexes }),
|
||||
disableNotNull: !!collection.versions?.drafts,
|
||||
disableUnique: true,
|
||||
@@ -98,7 +96,6 @@ export const buildRawSchema = ({
|
||||
|
||||
buildTable({
|
||||
adapter,
|
||||
blocksTableNameMap: {},
|
||||
disableNotNull: !!global?.versions?.drafts,
|
||||
disableUnique: false,
|
||||
fields: global.flattenedFields,
|
||||
@@ -121,7 +118,6 @@ export const buildRawSchema = ({
|
||||
|
||||
buildTable({
|
||||
adapter,
|
||||
blocksTableNameMap: {},
|
||||
disableNotNull: !!global.versions?.drafts,
|
||||
disableUnique: true,
|
||||
fields: versionFields,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import type { FlattenedField } from 'payload'
|
||||
import type { CompoundIndex, FlattenedField } from 'payload'
|
||||
|
||||
import { InvalidConfiguration } from 'payload'
|
||||
import {
|
||||
array,
|
||||
fieldAffectsData,
|
||||
fieldIsVirtual,
|
||||
fieldShouldBeLocalized,
|
||||
@@ -22,20 +23,14 @@ import type {
|
||||
|
||||
import { createTableName } from '../createTableName.js'
|
||||
import { buildIndexName } from '../utilities/buildIndexName.js'
|
||||
import { getArrayRelationName } from '../utilities/getArrayRelationName.js'
|
||||
import { hasLocalesTable } from '../utilities/hasLocalesTable.js'
|
||||
import {
|
||||
InternalBlockTableNameIndex,
|
||||
setInternalBlockIndex,
|
||||
validateExistingBlockIsIdentical,
|
||||
} from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
import { validateExistingBlockIsIdentical } from '../utilities/validateExistingBlockIsIdentical.js'
|
||||
import { buildTable } from './build.js'
|
||||
import { idToUUID } from './idToUUID.js'
|
||||
import { withDefault } from './withDefault.js'
|
||||
|
||||
type Args = {
|
||||
adapter: DrizzleAdapter
|
||||
blocksTableNameMap: Record<string, number>
|
||||
columnPrefix?: string
|
||||
columns: Record<string, RawColumn>
|
||||
disableNotNull: boolean
|
||||
@@ -76,7 +71,6 @@ type Result = {
|
||||
|
||||
export const traverseFields = ({
|
||||
adapter,
|
||||
blocksTableNameMap,
|
||||
columnPrefix,
|
||||
columns,
|
||||
disableNotNull,
|
||||
@@ -255,7 +249,6 @@ export const traverseFields = ({
|
||||
baseColumns,
|
||||
baseForeignKeys,
|
||||
baseIndexes,
|
||||
blocksTableNameMap,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
disableRelsTableUnique: true,
|
||||
disableUnique,
|
||||
@@ -295,11 +288,7 @@ export const traverseFields = ({
|
||||
}
|
||||
}
|
||||
|
||||
const relationName = getArrayRelationName({
|
||||
field,
|
||||
path: fieldName,
|
||||
tableName: arrayTableName,
|
||||
})
|
||||
const relationName = field.dbName ? `_${arrayTableName}` : fieldName
|
||||
|
||||
relationsToBuild.set(relationName, {
|
||||
type: 'many',
|
||||
@@ -375,7 +364,7 @@ export const traverseFields = ({
|
||||
;(field.blockReferences ?? field.blocks).forEach((_block) => {
|
||||
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
|
||||
|
||||
let blockTableName = createTableName({
|
||||
const blockTableName = createTableName({
|
||||
adapter,
|
||||
config: block,
|
||||
parentTableName: rootTableName,
|
||||
@@ -383,27 +372,6 @@ export const traverseFields = ({
|
||||
throwValidationError,
|
||||
versionsCustomName: versions,
|
||||
})
|
||||
|
||||
if (typeof blocksTableNameMap[blockTableName] === 'undefined') {
|
||||
blocksTableNameMap[blockTableName] = 1
|
||||
} else if (
|
||||
!validateExistingBlockIsIdentical({
|
||||
block,
|
||||
localized: field.localized,
|
||||
rootTableName,
|
||||
table: adapter.rawTables[blockTableName],
|
||||
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
|
||||
})
|
||||
) {
|
||||
blocksTableNameMap[blockTableName]++
|
||||
setInternalBlockIndex(block, blocksTableNameMap[blockTableName])
|
||||
blockTableName = `${blockTableName}_${blocksTableNameMap[blockTableName]}`
|
||||
}
|
||||
let relationName = `_blocks_${block.slug}`
|
||||
if (typeof block[InternalBlockTableNameIndex] !== 'undefined') {
|
||||
relationName = `_blocks_${block.slug}_${block[InternalBlockTableNameIndex]}`
|
||||
}
|
||||
|
||||
if (!adapter.rawTables[blockTableName]) {
|
||||
const baseColumns: Record<string, RawColumn> = {
|
||||
_order: {
|
||||
@@ -483,7 +451,6 @@ export const traverseFields = ({
|
||||
baseColumns,
|
||||
baseForeignKeys,
|
||||
baseIndexes,
|
||||
blocksTableNameMap,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
disableRelsTableUnique: true,
|
||||
disableUnique,
|
||||
@@ -534,7 +501,7 @@ export const traverseFields = ({
|
||||
},
|
||||
],
|
||||
references: ['id'],
|
||||
relationName,
|
||||
relationName: `_blocks_${block.slug}`,
|
||||
to: rootTableName,
|
||||
},
|
||||
}
|
||||
@@ -582,10 +549,18 @@ export const traverseFields = ({
|
||||
})
|
||||
|
||||
adapter.rawRelations[blockTableName] = blockRelations
|
||||
} else if (process.env.NODE_ENV !== 'production' && !versions) {
|
||||
validateExistingBlockIsIdentical({
|
||||
block,
|
||||
localized: field.localized,
|
||||
parentIsLocalized: parentIsLocalized || field.localized,
|
||||
rootTableName,
|
||||
table: adapter.rawTables[blockTableName],
|
||||
tableLocales: adapter.rawTables[`${blockTableName}${adapter.localesSuffix}`],
|
||||
})
|
||||
}
|
||||
|
||||
// blocks relationships are defined from the collection or globals table down to the block, bypassing any subBlocks
|
||||
rootRelationsToBuild.set(relationName, {
|
||||
rootRelationsToBuild.set(`_blocks_${block.slug}`, {
|
||||
type: 'many',
|
||||
// blocks are not localized on the parent table
|
||||
localized: false,
|
||||
@@ -649,7 +624,6 @@ export const traverseFields = ({
|
||||
hasManyTextField: groupHasManyTextField,
|
||||
} = traverseFields({
|
||||
adapter,
|
||||
blocksTableNameMap,
|
||||
columnPrefix: `${columnName}_`,
|
||||
columns,
|
||||
disableNotNull: disableNotNullFromHere,
|
||||
@@ -866,7 +840,6 @@ export const traverseFields = ({
|
||||
baseColumns,
|
||||
baseForeignKeys,
|
||||
baseIndexes,
|
||||
blocksTableNameMap,
|
||||
disableNotNull,
|
||||
disableUnique,
|
||||
fields: [],
|
||||
|
||||
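A simplified, hypothetical model of the block-table name collision handling added in this file: blocksTableNameMap counts how many distinct block shapes have claimed a table name, and a non-identical re-use gets a numeric suffix (isIdentical stands in for validateExistingBlockIsIdentical).

const blocksTableNameMap: Record<string, number> = {}

function resolveBlockName(baseName: string, isIdentical: boolean): string {
  if (typeof blocksTableNameMap[baseName] === 'undefined') {
    // First block to use this table name claims it as-is.
    blocksTableNameMap[baseName] = 1
    return baseName
  }

  if (isIdentical) {
    // Same shape as the existing table: reuse it.
    return baseName
  }

  // A different block shape wants the same name: bump the counter and suffix the table.
  blocksTableNameMap[baseName]++
  return `${baseName}_${blocksTableNameMap[baseName]}`
}

// resolveBlockName('pages_blocks_cta', false) -> 'pages_blocks_cta'
// resolveBlockName('pages_blocks_cta', false) -> 'pages_blocks_cta_2'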
@@ -12,7 +12,7 @@ export const commitTransaction: CommitTransaction = async function commitTransac
|
||||
|
||||
try {
|
||||
await this.sessions[id].resolve()
|
||||
} catch (_) {
|
||||
} catch (err: unknown) {
|
||||
await this.sessions[id].reject()
|
||||
}
|
||||
|
||||
|
||||
@@ -49,7 +49,6 @@ export const transform = <T extends Record<string, unknown> | TypeWithID>({
|
||||
}
|
||||
|
||||
const blocks = createBlocksMap(data)
|
||||
|
||||
const deletions = []
|
||||
|
||||
const result = traverseFields<T>({
|
||||
|
||||
@@ -6,8 +6,6 @@ import toSnakeCase from 'to-snake-case'
|
||||
import type { DrizzleAdapter } from '../../types.js'
|
||||
import type { BlocksMap } from '../../utilities/createBlocksMap.js'
|
||||
|
||||
import { getArrayRelationName } from '../../utilities/getArrayRelationName.js'
|
||||
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
|
||||
import { transformHasManyNumber } from './hasManyNumber.js'
|
||||
import { transformHasManyText } from './hasManyText.js'
|
||||
import { transformRelationship } from './relationship.js'
|
||||
@@ -123,7 +121,9 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
`${currentTableName}_${tablePath}${toSnakeCase(field.name)}`,
|
||||
)
|
||||
|
||||
fieldData = table[getArrayRelationName({ field, path: fieldName, tableName: arrayTableName })]
|
||||
if (field.dbName) {
|
||||
fieldData = table[`_${arrayTableName}`]
|
||||
}
|
||||
|
||||
if (Array.isArray(fieldData)) {
|
||||
if (isLocalized) {
|
||||
@@ -249,9 +249,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
(block) => typeof block !== 'string' && block.slug === row.blockType,
|
||||
) as FlattenedBlock | undefined)
|
||||
|
||||
const tableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`),
|
||||
const tableName = adapter.tableNameMap.get(
|
||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
if (block) {
|
||||
@@ -329,11 +328,8 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
delete row._index
|
||||
}
|
||||
|
||||
const tableName = resolveBlockTableName(
|
||||
block,
|
||||
adapter.tableNameMap.get(
|
||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
),
|
||||
const tableName = adapter.tableNameMap.get(
|
||||
`${topLevelTableName}_blocks_${toSnakeCase(block.slug)}`,
|
||||
)
|
||||
|
||||
acc.push(
|
||||
@@ -670,6 +666,10 @@ export const traverseFields = <T extends Record<string, unknown>>({
|
||||
withinArrayOrBlockLocale: locale || withinArrayOrBlockLocale,
|
||||
})
|
||||
|
||||
if ('_order' in ref) {
|
||||
delete ref._order
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ import toSnakeCase from 'to-snake-case'
import type { DrizzleAdapter } from '../../types.js'
import type { BlockRowToInsert, RelationshipToDelete } from './types.js'

import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
import { traverseFields } from './traverseFields.js'

type Args = {
@@ -67,6 +66,10 @@ export const transformBlocks = ({
}
const blockType = toSnakeCase(blockRow.blockType)

if (!blocks[blockType]) {
blocks[blockType] = []
}

const newRow: BlockRowToInsert = {
arrays: {},
locales: {},
@@ -83,14 +86,7 @@ export const transformBlocks = ({
newRow.row._locale = withinArrayOrBlockLocale
}

const blockTableName = resolveBlockTableName(
matchedBlock,
adapter.tableNameMap.get(`${baseTableName}_blocks_${blockType}`),
)

if (!blocks[blockTableName]) {
blocks[blockTableName] = []
}
const blockTableName = adapter.tableNameMap.get(`${baseTableName}_blocks_${blockType}`)

const hasUUID = adapter.tables[blockTableName]._uuid

@@ -128,6 +124,6 @@ export const transformBlocks = ({
withinArrayOrBlockLocale,
})

blocks[blockTableName].push(newRow)
blocks[blockType].push(newRow)
})
}

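In transformBlocks the two sides of these hunks bucket incoming block rows under different keys: the snake-cased blockType on one side, the resolved block table name on the other. A simplified sketch of the grouping pattern, with the row type reduced to a plain record for illustration:

// Simplified grouping sketch: block rows are bucketed under a key, either the
// snake-cased blockType or a resolved table name, before being inserted.
type BlockRowSketch = { blockType: string; order: number }

const groupBlockRows = (
  rows: BlockRowSketch[],
  keyFor: (row: BlockRowSketch) => string,
): Record<string, BlockRowSketch[]> => {
  const blocks: Record<string, BlockRowSketch[]> = {}
  for (const row of rows) {
    const key = keyFor(row)
    if (!blocks[key]) {
      blocks[key] = []
    }
    blocks[key].push(row)
  }
  return blocks
}

const rows: BlockRowSketch[] = [
  { blockType: 'hero', order: 1 },
  { blockType: 'hero', order: 2 },
]

// Keyed by block type (this branch) versus by a table-name-style key (the other side).
console.log(groupBlockRows(rows, (r) => r.blockType))
console.log(groupBlockRows(rows, (r) => `pages_blocks_${r.blockType}`))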
@@ -22,7 +22,7 @@ export const transformRelationship = ({ baseRow, data, field, relationships }: A
if (Array.isArray(field.relationTo) && valueIsValueWithRelation(relation)) {
relationRow[`${relation.relationTo}ID`] = relation.value
relationships.push(relationRow)
} else if (typeof field.relationTo === 'string') {
} else {
relationRow[`${field.relationTo}ID`] = relation
if (relation) {
relationships.push(relationRow)

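This hunk toggles between a bare else and an explicit typeof field.relationTo === 'string' guard, and one side only pushes the row when a value is present. A sketch of the two row shapes, using the `<collection>ID` column convention visible in the diff; the types are simplified and not the real transformRelationship signature:

// Sketch of the two relationship row shapes seen above. Polymorphic fields
// store { relationTo, value }; single-collection fields store the bare id,
// and the column name is derived from the collection slug.
type PolyValue = { relationTo: string; value: number | string }

const toRelationRow = (
  relationTo: string | string[],
  relation: PolyValue | number | string | null,
): Record<string, unknown> | null => {
  const relationRow: Record<string, unknown> = {}
  if (Array.isArray(relationTo) && relation !== null && typeof relation === 'object') {
    // e.g. { postsID: 5 } for relationTo: ['posts', 'pages']
    relationRow[`${relation.relationTo}ID`] = relation.value
    return relationRow
  }
  if (typeof relationTo === 'string' && relation !== null && typeof relation !== 'object') {
    // e.g. { postsID: 5 } for relationTo: 'posts'; empty values are skipped
    relationRow[`${relationTo}ID`] = relation
    return relationRow
  }
  return null
}

console.log(toRelationRow(['posts', 'pages'], { relationTo: 'posts', value: 5 })) // { postsID: 5 }
console.log(toRelationRow('posts', 5)) // { postsID: 5 }
console.log(toRelationRow('posts', null)) // null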
@@ -8,7 +8,6 @@ import type { DrizzleAdapter } from '../../types.js'
import type { ArrayRowToInsert, BlockRowToInsert, RelationshipToDelete } from './types.js'

import { isArrayOfRows } from '../../utilities/isArrayOfRows.js'
import { resolveBlockTableName } from '../../utilities/validateExistingBlockIsIdentical.js'
import { transformArray } from './array.js'
import { transformBlocks } from './blocks.js'
import { transformNumbers } from './numbers.js'
@@ -176,17 +175,7 @@ export const traverseFields = ({

if (field.type === 'blocks') {
;(field.blockReferences ?? field.blocks).forEach((block) => {
const matchedBlock =
typeof block === 'string'
? adapter.payload.config.blocks.find((each) => each.slug === block)
: block

blocksToDelete.add(
resolveBlockTableName(
matchedBlock,
adapter.tableNameMap.get(`${baseTableName}_blocks_${toSnakeCase(matchedBlock.slug)}`),
),
)
blocksToDelete.add(toSnakeCase(typeof block === 'string' ? block : block.slug))
})

if (isLocalized) {

@@ -28,7 +28,7 @@ export type RowToInsert = {
[tableName: string]: ArrayRowToInsert[]
}
blocks: {
[tableName: string]: BlockRowToInsert[]
[blockType: string]: BlockRowToInsert[]
}
blocksToDelete: Set<string>
locales: {

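The only change in the RowToInsert hunk is the index-signature parameter name, tableName versus blockType. In TypeScript that name is documentation only; the two declarations describe the same type, as the sketch below shows (BlockRowToInsertSketch is a stand-in for the real BlockRowToInsert):

// The index-signature parameter name is documentation only: these two types
// are structurally identical and assignable to each other in both directions.
type BlockRowToInsertSketch = { row: Record<string, unknown> }

type KeyedByTableName = { [tableName: string]: BlockRowToInsertSketch[] }
type KeyedByBlockType = { [blockType: string]: BlockRowToInsertSketch[] }

const a: KeyedByTableName = { pages_blocks_hero: [{ row: {} }] }
const b: KeyedByBlockType = a // compiles: the parameter name does not affect the type
console.log(Object.keys(b)) // ["pages_blocks_hero"]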
@@ -9,7 +9,7 @@ import { getTransaction } from './utilities/getTransaction.js'

export async function updateGlobal<T extends Record<string, unknown>>(
this: DrizzleAdapter,
{ slug, data, req, returning, select }: UpdateGlobalArgs,
{ slug, data, req, select, returning }: UpdateGlobalArgs,
): Promise<T> {
const db = await getTransaction(this, req)
const globalConfig = this.payload.globals.config.find((config) => config.slug === slug)
@@ -23,10 +23,10 @@ export async function updateGlobal<T extends Record<string, unknown>>(
data,
db,
fields: globalConfig.flattenedFields,
ignoreResult: returning === false,
req,
select,
tableName,
ignoreResult: returning === false,
})

if (returning === false) {

@@ -10,7 +10,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

@@ -21,10 +21,10 @@ export async function updateGlobalVersion<T extends TypeWithID>(
global,
locale,
req,
returning,
select,
versionData,
where: whereArg,
returning,
}: UpdateGlobalVersionArgs<T>,
) {
const db = await getTransaction(this, req)
@@ -53,12 +53,12 @@ export async function updateGlobalVersion<T extends TypeWithID>(
data: versionData,
db,
fields,
ignoreResult: returning === false,
operation: 'update',
req,
select,
tableName,
where,
ignoreResult: returning === false,
})

if (returning === false) {

@@ -5,7 +5,7 @@ import toSnakeCase from 'to-snake-case'

import type { DrizzleAdapter } from './types.js'

import { buildQuery } from './queries/buildQuery.js'
import buildQuery from './queries/buildQuery.js'
import { selectDistinct } from './queries/selectDistinct.js'
import { upsertRow } from './upsertRow/index.js'
import { getTransaction } from './utilities/getTransaction.js'

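Both buildQuery hunks above toggle between a named import and a default import of the same module. Which form compiles depends on how ./queries/buildQuery.js exports the function; the sketch below models the two module shapes as plain namespace objects (hypothetical implementations, only the export/import pairing is the point):

// Sketch: a default export arrives under the `default` key of the compiled
// module namespace, a named export under its own name, so the import form
// must match the export form of the module.
type QueryFn = (where: Record<string, unknown>) => string

// Shape of a module compiled from `export default function buildQuery(...) {}`
const defaultExportModule: { default: QueryFn } = {
  default: (where) => `default: ${JSON.stringify(where)}`,
}

// Shape of a module compiled from `export const buildQuery = (...) => {}`
const namedExportModule: { buildQuery: QueryFn } = {
  buildQuery: (where) => `named: ${JSON.stringify(where)}`,
}

// `import buildQuery from '...'` reads .default; `import { buildQuery } from '...'` reads .buildQuery
console.log(defaultExportModule.default({ slug: { equals: 'home' } }))
console.log(namedExportModule.buildQuery({ slug: { equals: 'home' } }))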
Some files were not shown because too many files have changed in this diff.