feat(db-postgres): allow storing blocks in a JSON column (#12750)

Continuation of https://github.com/payloadcms/payload/pull/6245.
This PR allows you to pass `blocksAsJSON: true` to the SQL adapters. Instead of using the relational structure that SQL normally prefers for blocks, the adapter will store them in a single JSON column, which can improve performance with a large number of blocks.
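Roughly, enabling it looks like this (a minimal sketch, not taken from the PR — the connection string and surrounding config are placeholders, and `sqliteAdapter` / `vercelPostgresAdapter` accept the same flag, as the diff below shows):

```ts
// payload.config.ts — illustrative only
import { postgresAdapter } from '@payloadcms/db-postgres'
import { buildConfig } from 'payload'

export default buildConfig({
  // ...collections, secret, etc.
  db: postgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URI,
    },
    // Store every blocks field in a single jsonb column instead of
    // generating relational block tables.
    blocksAsJSON: true,
  }),
})
```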

To try these changes, you can install `3.43.0-internal.c5bbc84`.
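For reference, a collection with a blocks field like the one exercised by the test further down (the `blocks-docs` / `testBlocks` / `cta` names mirror that test; the shape is a sketch, not part of this diff). With `blocksAsJSON: true`, the whole `testBlocks` array is persisted as one `jsonb` value rather than as rows in per-block tables:

```ts
import type { CollectionConfig } from 'payload'

export const BlocksDocs: CollectionConfig = {
  slug: 'blocks-docs',
  fields: [
    {
      name: 'testBlocks',
      type: 'blocks', // stored as a single jsonb column when blocksAsJSON is enabled
      blocks: [
        {
          slug: 'cta',
          fields: [{ name: 'text', type: 'text' }],
        },
      ],
    },
  ],
}
```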
Author: Sasha · 2025-06-16 23:03:35 +03:00 · committed by GitHub
parent 704518248c · commit 215f49efa5
19 changed files with 106 additions and 11 deletions

View File

@@ -81,6 +81,7 @@ export default buildConfig({
| `generateSchemaOutputFile` | Override generated schema from `payload generate:db-schema` file path. Defaults to `{CWD}/src/payload-generated.schema.ts` |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `readReplicas` | An array of DB read replicas connection strings, can be used to offload read-heavy traffic. |
| `blocksAsJSON` | Store blocks as a JSON column instead of using the relational structure, which can improve performance with a large number of blocks |
## Access to Drizzle

View File

@@ -50,6 +50,7 @@ export default buildConfig({
| `generateSchemaOutputFile` | Override generated schema from `payload generate:db-schema` file path. Defaults to `{CWD}/src/payload-generated.schema.ts` |
| `autoIncrement` | Pass `true` to enable SQLite [AUTOINCREMENT](https://www.sqlite.org/autoinc.html) for primary keys to ensure the same ID cannot be reused from deleted rows |
| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. |
| `blocksAsJSON` | Store blocks as a JSON column instead of using the relational structure, which can improve performance with a large number of blocks |
## Access to Drizzle

View File

@@ -99,6 +99,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
afterSchemaInit: args.afterSchemaInit ?? [],
allowIDOnCreate,
beforeSchemaInit: args.beforeSchemaInit ?? [],
blocksAsJSON: args.blocksAsJSON ?? false,
createDatabase,
createExtensions,
createMigration: buildCreateMigration({

View File

@@ -41,6 +41,10 @@ export type Args = {
* To generate Drizzle schema from the database, see [Drizzle Kit introspection](https://orm.drizzle.team/kit-docs/commands#introspect--pull)
*/
beforeSchemaInit?: PostgresSchemaHook[]
/**
* Store blocks as a JSON column instead of storing them in a relational structure.
*/
blocksAsJSON?: boolean
/**
* Pass `true` to disable auto database creation if it doesn't exist.
* @default false

View File

@@ -89,6 +89,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj<SQLiteAdapter> {
allowIDOnCreate,
autoIncrement: args.autoIncrement ?? false,
beforeSchemaInit: args.beforeSchemaInit ?? [],
blocksAsJSON: args.blocksAsJSON ?? false,
// @ts-expect-error - vestiges of when tsconfig was not strict. Feel free to improve
client: undefined,
clientConfig: args.client,

View File

@@ -50,6 +50,10 @@ export type Args = {
* To generate Drizzle schema from the database, see [Drizzle Kit introspection](https://orm.drizzle.team/kit-docs/commands#introspect--pull)
*/
beforeSchemaInit?: SQLiteSchemaHook[]
/**
* Store blocks as a JSON column instead of storing them in a relational structure.
*/
blocksAsJSON?: boolean
client: Config
/** Generated schema from payload generate:db-schema file path */
generateSchemaOutputFile?: string

View File

@@ -95,6 +95,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
afterSchemaInit: args.afterSchemaInit ?? [],
allowIDOnCreate,
beforeSchemaInit: args.beforeSchemaInit ?? [],
blocksAsJSON: args.blocksAsJSON ?? false,
createDatabase,
createExtensions,
defaultDrizzleSnapshot,

View File

@@ -33,6 +33,10 @@ export type Args = {
* To generate Drizzle schema from the database, see [Drizzle Kit introspection](https://orm.drizzle.team/kit-docs/commands#introspect--pull)
*/
beforeSchemaInit?: PostgresSchemaHook[]
/**
* Store blocks as a JSON column instead of storing them in a relational structure.
*/
blocksAsJSON?: boolean
connectionString?: string
/**
* Pass `true` to disable auto database creation if it doesn't exist.

View File

@@ -252,6 +252,20 @@ export const traverseFields = ({
}
}
if (adapter.blocksAsJSON) {
if (select || selectAllOnCurrentLevel) {
const fieldPath = `${path}${field.name}`
if ((isFieldLocalized || parentIsLocalized) && _locales) {
_locales.columns[fieldPath] = true
} else if (adapter.tables[currentTableName]?.[fieldPath]) {
currentArgs.columns[fieldPath] = true
}
}
break
}
;(field.blockReferences ?? field.blocks).forEach((_block) => {
const block = typeof _block === 'string' ? adapter.payload.blocks[_block] : _block
const blockKey = `_blocks_${block.slug}${!block[InternalBlockTableNameIndex] ? '' : `_${block[InternalBlockTableNameIndex]}`}`

View File

@@ -180,6 +180,9 @@ export const getTableColumnFromPath = ({
})
}
case 'blocks': {
if (adapter.blocksAsJSON) {
break
}
let blockTableColumn: TableColumn
let newTableName: string

View File

@@ -117,7 +117,8 @@ export function parseParams({
})
if (
['json', 'richText'].includes(field.type) &&
(['json', 'richText'].includes(field.type) ||
(field.type === 'blocks' && adapter.blocksAsJSON)) &&
Array.isArray(pathSegments) &&
pathSegments.length > 1
) {

View File

@@ -141,7 +141,7 @@ export const traverseFields = ({
adapter.payload.config.localization &&
(isFieldLocalized || forceLocalized) &&
field.type !== 'array' &&
field.type !== 'blocks' &&
(field.type !== 'blocks' || adapter.blocksAsJSON) &&
(('hasMany' in field && field.hasMany !== true) || !('hasMany' in field))
) {
hasLocalizedField = true
@@ -370,6 +370,17 @@ export const traverseFields = ({
break
}
case 'blocks': {
if (adapter.blocksAsJSON) {
targetTable[fieldName] = withDefault(
{
name: columnName,
type: 'jsonb',
},
field,
)
break
}
const disableNotNullFromHere = Boolean(field.admin?.condition) || disableNotNull
;(field.blockReferences ?? field.blocks).forEach((_block) => {

View File

@@ -221,7 +221,7 @@ export const traverseFields = <T extends Record<string, unknown>>({
return result
}
if (field.type === 'blocks') {
if (field.type === 'blocks' && !adapter.blocksAsJSON) {
const blockFieldPath = `${sanitizedPath}${field.name}`
const blocksByPath = blocks[blockFieldPath]

View File

@@ -188,7 +188,7 @@ export const traverseFields = ({
return
}
if (field.type === 'blocks') {
if (field.type === 'blocks' && !adapter.blocksAsJSON) {
;(field.blockReferences ?? field.blocks).forEach((block) => {
const matchedBlock =
typeof block === 'string'

View File

@@ -315,6 +315,7 @@ export type BuildDrizzleTable<T extends DrizzleAdapter = DrizzleAdapter> = (args
}) => void
export interface DrizzleAdapter extends BaseDatabaseAdapter {
blocksAsJSON?: boolean
convertPathToJSONTraversal?: (incomingSegments: string[]) => string
countDistinct: CountDistinct
createJSONQuery: (args: CreateJSONQueryArgs) => string
@@ -323,8 +324,8 @@ export interface DrizzleAdapter extends BaseDatabaseAdapter {
drizzle: LibSQLDatabase | PostgresDB
dropDatabase: DropDatabase
enums?: never | Record<string, unknown>
execute: Execute<unknown>
features: {
json?: boolean
}

View File

@@ -2618,6 +2618,54 @@ describe('database', () => {
expect(res.testBlocksLocalized[0]?.text).toBe('text-localized')
})
it('should CRUD with blocks as JSON in SQL adapters', async () => {
// eslint-disable-next-line jest/no-conditional-in-test
if (!('drizzle' in payload.db)) {
return
}
process.env.PAYLOAD_FORCE_DRIZZLE_PUSH = 'true'
payload.db.blocksAsJSON = true
delete payload.db.pool
await payload.db.init()
await payload.db.connect()
expect(payload.db.tables.blocks_docs.testBlocks).toBeDefined()
expect(payload.db.tables.blocks_docs_locales.testBlocksLocalized).toBeDefined()
const res = await payload.create({
collection: 'blocks-docs',
data: {
testBlocks: [{ blockType: 'cta', text: 'text' }],
testBlocksLocalized: [{ blockType: 'cta', text: 'text-localized' }],
},
})
expect(res.testBlocks[0]?.text).toBe('text')
expect(res.testBlocksLocalized[0]?.text).toBe('text-localized')
const res_es = await payload.update({
collection: 'blocks-docs',
id: res.id,
locale: 'es',
data: {
testBlocksLocalized: [{ blockType: 'cta', text: 'text-localized-es' }],
testBlocks: [{ blockType: 'cta', text: 'text_updated' }],
},
})
expect(res_es.testBlocks[0]?.text).toBe('text_updated')
expect(res_es.testBlocksLocalized[0]?.text).toBe('text-localized-es')
const res_all = await payload.findByID({
collection: 'blocks-docs',
id: res.id,
locale: 'all',
})
expect(res_all.testBlocks[0]?.text).toBe('text_updated')
expect(res_all.testBlocksLocalized.es[0]?.text).toBe('text-localized-es')
expect(res_all.testBlocksLocalized.en[0]?.text).toBe('text-localized')
payload.db.blocksAsJSON = false
process.env.PAYLOAD_FORCE_DRIZZLE_PUSH = 'false'
delete payload.db.pool
await payload.db.init()
await payload.db.connect()
})
it('should support in with null', async () => {
await payload.delete({ collection: 'posts', where: {} })
const post_1 = await payload.create({

View File

@@ -1,5 +1,5 @@
{
"id": "353cac31-1e1a-4190-8584-025abe855faa",
"id": "3c35a6b5-e20d-4a43-af15-a6b3a0844000",
"prevId": "00000000-0000-0000-0000-000000000000",
"version": "7",
"dialect": "postgresql",

View File

@@ -1,4 +1,4 @@
import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-postgres'
import type { MigrateDownArgs, MigrateUpArgs} from '@payloadcms/db-postgres';
import { sql } from '@payloadcms/db-postgres'

View File

@@ -1,9 +1,9 @@
import * as migration_20250611_163948 from './20250611_163948.js'
import * as migration_20250616_190121 from './20250616_190121.js'
export const migrations = [
{
up: migration_20250611_163948.up,
down: migration_20250611_163948.down,
name: '20250611_163948',
up: migration_20250616_190121.up,
down: migration_20250616_190121.down,
name: '20250616_190121',
},
]