feat(db-postgres): deep querying on json and rich text fields (#9102)

### What?
Allows querying JSON / Rich Text fields in Postgres the same way as in
MongoDB, at any nesting level.

Example:
Data:
```js
{
  json: {
    array: [
      {
        text: 'some-text', // nested to array + object
        object: {
          text: 'deep-text', // nested to array + 2x object
          array: [10], // number is nested to array + 2x object + array
        },
      },
    ],
  }
}
```
Query:
```ts
payload.find({
  collection: 'json-fields',
  where: {
    and: [
      {
        'json.array.text': {
          equals: 'some-text',
        },
      },
      {
        'json.array.object.text': {
          equals: 'deep-text',
        },
      },
      {
        'json.array.object.array': {
          in: [10, 20],
        },
      },
      {
        'json.array.object.array': {
          exists: true,
        },
      },
      {
        'json.array.object.notexists': {
          exists: false,
        },
      },
    ],
  },
})
```

### How?
Utilizes [the `jsonb_path_exists` Postgres
function](https://www.postgresql.org/docs/current/functions-json.html) to match values nested anywhere inside the JSONB column.
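
For a constraint like `'json.array.object.text': { equals: 'deep-text' }`, the rewritten `createJSONQuery` (see the diff below) maps every path segment after the column name to a `segment[*]` JSONPath step and wraps the comparison in a filter expression. A rough sketch, assuming `createJSONQuery` is consumed via its `@payloadcms/drizzle` re-export:

```ts
// Sketch derived from the new createJSONQuery in this PR; the exact
// output string may differ slightly.
import { createJSONQuery } from '@payloadcms/drizzle'

const constraint = createJSONQuery({
  column: 'json', // name of the JSONB column
  operator: 'equals',
  pathSegments: ['json', 'array', 'object', 'text'],
  value: 'deep-text',
})

// Each segment after the column becomes `segment[*]`, so the result is roughly:
// jsonb_path_exists(json, '$.array[*].object[*].text[*] ? (@ == "deep-text")')
```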
Authored by Sasha on 2024-11-12 09:26:04 +02:00 · committed by GitHub
parent 23907e432e · commit b878daf27a
11 changed files with 144 additions and 107 deletions

View File

@@ -35,7 +35,6 @@ import {
updateVersion,
} from '@payloadcms/drizzle'
import {
convertPathToJSONTraversal,
countDistinct,
createDatabase,
createExtensions,
@@ -125,7 +124,6 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj<PostgresAdapter>
args.transactionOptions === false ? defaultBeginTransaction() : beginTransaction,
commitTransaction,
connect,
convertPathToJSONTraversal,
count,
countDistinct,
countGlobalVersions,

View File

@@ -35,7 +35,6 @@ import {
updateVersion,
} from '@payloadcms/drizzle'
import {
convertPathToJSONTraversal,
countDistinct,
createDatabase,
createExtensions,
@@ -126,7 +125,6 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj<Verce
args.transactionOptions === false ? defaultBeginTransaction() : beginTransaction,
commitTransaction,
connect,
convertPathToJSONTraversal,
count,
countDistinct,
countGlobalVersions,

View File

@@ -1,7 +1,6 @@
export { countDistinct } from '../postgres/countDistinct.js'
export { createDatabase } from '../postgres/createDatabase.js'
export { createExtensions } from '../postgres/createExtensions.js'
export { convertPathToJSONTraversal } from '../postgres/createJSONQuery/convertPathToJSONTraversal.js'
export { createJSONQuery } from '../postgres/createJSONQuery/index.js'
export { createMigration } from '../postgres/createMigration.js'
export { defaultDrizzleSnapshot } from '../postgres/defaultSnapshot.js'

View File

@@ -1,15 +0,0 @@
import { formatJSONPathSegment } from './formatJSONPathSegment.js'
export const convertPathToJSONTraversal = (incomingSegments: string[]) => {
const segments = [...incomingSegments]
segments.shift()
return segments.reduce((res, segment, i) => {
const formattedSegment = formatJSONPathSegment(segment)
if (i + 1 === segments.length) {
return `${res}->>${formattedSegment}`
}
return `${res}->${formattedSegment}`
}, '')
}
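
For context, the deleted helper chained `->` / `->>` operators over the path segments, which works for plain nested objects but cannot reach into elements of JSON arrays. A quick sketch of its old behavior:

```ts
// Sketch: segments after the column were joined with -> and the last one with ->>.
convertPathToJSONTraversal(['json', 'array', 'text'])
// => "->'array'->>'text'"
// The old createConstraint then emitted:  json->'array'->>'text' = 'some-text'
// When `array` holds a JSON array, applying ->>'text' to it yields NULL, so
// values nested inside arrays could not be matched; hence the switch to
// jsonb_path_exists below.
```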

View File

@@ -1,3 +0,0 @@
export const formatJSONPathSegment = (segment: string) => {
return Number.isNaN(parseInt(segment)) ? `'${segment}'` : segment
}

View File

@@ -1,87 +1,43 @@
import { convertPathToJSONTraversal } from './convertPathToJSONTraversal.js'
import { formatJSONPathSegment } from './formatJSONPathSegment.js'
import type { CreateJSONQueryArgs } from '../../types.js'
const operatorMap = {
contains: '~*',
equals: '=',
like: '~*',
const operatorMap: Record<string, string> = {
contains: '~',
equals: '==',
in: 'in',
like: 'like_regex',
not_equals: '!=',
not_in: 'in',
}
type FromArrayArgs = {
isRoot?: true
operator: string
pathSegments: string[]
treatAsArray?: string[]
value: unknown
}
const fromArray = ({ isRoot, operator, pathSegments, treatAsArray, value }: FromArrayArgs) => {
const newPathSegments = pathSegments.slice(isRoot ? 1 : 2)
const alias = `${pathSegments[isRoot ? 0 : 1]}_alias_${newPathSegments.length}`
newPathSegments.unshift(alias)
const arrayElements = isRoot
? pathSegments[0]
: `${pathSegments[0]} -> ${formatJSONPathSegment(pathSegments[1])}`
return `EXISTS (
SELECT 1
FROM jsonb_array_elements(${arrayElements}) AS ${alias}
WHERE ${createJSONQuery({
operator,
pathSegments: newPathSegments,
treatAsArray,
value,
})}
)`
}
type CreateConstraintArgs = {
operator: string
pathSegments: string[]
treatAsArray?: string[]
value: unknown
}
const createConstraint = ({ operator, pathSegments, value }: CreateConstraintArgs): string => {
const jsonQuery = convertPathToJSONTraversal(pathSegments)
return `${pathSegments[0]}${jsonQuery} ${operatorMap[operator]} '${value}'`
}
type Args = {
operator: string
pathSegments: string[]
treatAsArray?: string[]
treatRootAsArray?: boolean
value: unknown
}
export const createJSONQuery = ({
operator,
pathSegments,
treatAsArray,
treatRootAsArray,
value,
}: Args): string => {
if (treatRootAsArray) {
return fromArray({
isRoot: true,
operator,
pathSegments,
treatAsArray,
value,
})
const sanitizeValue = (value: unknown, operator?: string) => {
if (typeof value === 'string') {
// ignore casing with like
return `"${operator === 'like' ? '(?i)' : ''}${value}"`
}
if (treatAsArray.includes(pathSegments[1])) {
return fromArray({
operator,
pathSegments,
treatAsArray,
value,
return value as string
}
export const createJSONQuery = ({ column, operator, pathSegments, value }: CreateJSONQueryArgs) => {
const columnName = typeof column === 'object' ? column.name : column
const jsonPaths = pathSegments
.slice(1)
.map((key) => {
return `${key}[*]`
})
.join('.')
let sql = ''
if (['in', 'not_in'].includes(operator) && Array.isArray(value)) {
value.forEach((item, i) => {
sql = `${sql}${createJSONQuery({ column, operator: operator === 'in' ? 'equals' : 'not_equals', pathSegments, value: item })}${i === value.length - 1 ? '' : ` ${operator === 'in' ? 'OR' : 'AND'} `}`
})
} else if (operator === 'exists') {
sql = `${value === false ? 'NOT ' : ''}jsonb_path_exists(${columnName}, '$.${jsonPaths}')`
} else {
sql = `jsonb_path_exists(${columnName}, '$.${jsonPaths} ? (@ ${operatorMap[operator]} ${sanitizeValue(value, operator)})')`
}
return createConstraint({ operator, pathSegments, treatAsArray, value })
return sql
}
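
For `in` / `not_in`, the new implementation recurses over the array values and joins the per-value `jsonb_path_exists` checks with `OR` / `AND`, while `exists` skips the filter expression entirely. A sketch of the SQL for the `in: [10, 20]` and `exists: false` constraints from the example above (exact whitespace may differ):

```ts
createJSONQuery({
  column: 'json',
  operator: 'in',
  pathSegments: ['json', 'array', 'object', 'array'],
  value: [10, 20],
})
// ≈ jsonb_path_exists(json, '$.array[*].object[*].array[*] ? (@ == 10)')
//   OR jsonb_path_exists(json, '$.array[*].object[*].array[*] ? (@ == 20)')

createJSONQuery({
  column: 'json',
  operator: 'exists',
  pathSegments: ['json', 'array', 'object', 'notexists'],
  value: false,
})
// ≈ NOT jsonb_path_exists(json, '$.array[*].object[*].notexists[*]')
```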

View File

@@ -103,6 +103,18 @@ export function parseParams({
Array.isArray(pathSegments) &&
pathSegments.length > 1
) {
if (adapter.name === 'postgres') {
const constraint = adapter.createJSONQuery({
column: rawColumn || table[columnName],
operator,
pathSegments,
value: val,
})
constraints.push(sql.raw(constraint))
break
}
const segments = pathSegments.slice(1)
segments.unshift(table[columnName].name)
@@ -142,11 +154,7 @@ export function parseParams({
if (adapter.name === 'sqlite' && operator === 'equals' && !isNaN(val)) {
formattedValue = val
} else if (['in', 'not_in'].includes(operator) && Array.isArray(val)) {
if (adapter.name === 'sqlite') {
formattedValue = `(${val.map((v) => `${v}`).join(',')})`
} else {
formattedValue = `(${val.map((v) => `'${v}'`).join(', ')})`
}
} else {
formattedValue = `'${operatorKeys[operator].wildcard}${val}${operatorKeys[operator].wildcard}'`
}

View File

@@ -1,4 +1,5 @@
import type {
Column,
ColumnBaseConfig,
ColumnDataType,
DrizzleConfig,
@@ -148,6 +149,7 @@ export type Migration = {
} & MigrationData
export type CreateJSONQueryArgs = {
column?: Column | string
operator: string
pathSegments: string[]
table?: string
@@ -157,7 +159,7 @@ export type CreateJSONQueryArgs = {
}
export interface DrizzleAdapter extends BaseDatabaseAdapter {
convertPathToJSONTraversal: (incomingSegments: string[]) => string
convertPathToJSONTraversal?: (incomingSegments: string[]) => string
countDistinct: CountDistinct
createJSONQuery: (args: CreateJSONQueryArgs) => string
defaultDrizzleSnapshot: Record<string, unknown>

View File

@@ -19,6 +19,33 @@ const JSON: CollectionConfig = {
schema: {
type: 'object',
properties: {
array: {
type: 'array',
items: {
type: 'object',
additionalProperties: false,
properties: {
object: {
type: 'object',
additionalProperties: false,
properties: {
array: {
type: 'array',
items: {
type: 'number',
},
},
text: {
type: 'string',
},
},
},
text: {
type: 'string',
},
},
},
},
foo: {
enum: ['bar', 'foobar'],
},

View File

@@ -2657,6 +2657,66 @@ describe('Fields', () => {
expect(docIDs).not.toContain(3)
expect(docIDs).toContain(2)
})
it('should query deeply', async () => {
// eslint-disable-next-line jest/no-conditional-in-test
if (payload.db.name === 'sqlite') {
return
}
const json_1 = await payload.create({
collection: 'json-fields',
data: {
json: {
array: [
{
text: 'some-text',
object: {
text: 'deep-text',
array: [10],
},
},
],
},
},
})
const { docs } = await payload.find({
collection: 'json-fields',
where: {
and: [
{
'json.array.text': {
equals: 'some-text',
},
},
{
'json.array.object.text': {
equals: 'deep-text',
},
},
{
'json.array.object.array': {
in: [10, 20],
},
},
{
'json.array.object.array': {
exists: true,
},
},
{
'json.array.object.notexists': {
exists: false,
},
},
],
},
})
expect(docs).toHaveLength(1)
expect(docs[0].id).toBe(json_1.id)
})
})
})

View File

@@ -1183,6 +1183,13 @@ export interface IndexedField {
export interface JsonField {
id: string;
json?: {
array?: {
object?: {
array?: number[];
text?: string;
};
text?: string;
}[];
foo?: 'bar' | 'foobar';
number?: 10 | 5;
[k: string]: unknown;