chore: wires up contains in pg, skips point tests

James
2023-09-18 13:00:00 -04:00
parent 2442144c98
commit 029e319c7b
10 changed files with 401 additions and 374 deletions

View File

@@ -94,6 +94,7 @@ export function mongooseAdapter({
    globals: undefined,
    init,
    ...(migrationDir && { migrationDir }),
    name: 'mongoose',
    mongoMemoryServer: undefined,
    payload,
    queryDrafts,

View File

@@ -1,19 +1,20 @@
import type { SQL } from 'drizzle-orm'
import type { PgSelect } from 'drizzle-orm/pg-core'
import type { Find } from 'payload/database'
import type { PayloadRequest, SanitizedCollectionConfig, TypeWithID } from 'payload/types'

import { asc, desc, inArray, sql } from 'drizzle-orm'
import toSnakeCase from 'to-snake-case'

import type { GenericColumn, PostgresAdapter } from './types'

import { buildFindManyArgs } from './find/buildFindManyArgs'
import buildQuery from './queries/buildQuery'
import { transform } from './transform/read'

export const find: Find = async function find(
  this: PostgresAdapter,
  {
    collection,
    limit: limitArg,
    locale,
@@ -24,44 +25,40 @@ export const find: Find = async function find(
    where: whereArg,
  },
) {
  const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
  const tableName = toSnakeCase(collection)
  const table = this.tables[tableName]
  const sort = typeof sortArg === 'string' ? sortArg : collectionConfig.defaultSort

  let limit = limitArg
  let totalDocs: number
  let totalPages: number
  let hasPrevPage: boolean
  let hasNextPage: boolean
  let pagingCounter: number
  let selectDistinctResult

  const { joins, orderBy, selectFields, where } = await buildQuery({
    adapter: this,
    fields: collectionConfig.fields,
    locale,
    sort,
    tableName,
    where: whereArg,
  })

  const db = this.sessions?.[req.transactionID] || this.db
  const orderedIDMap: Record<number | string, number> = {}

  let selectCount: PgSelect<string, { count: SQL<number> }, 'partial', Record<string, 'not-null'>>

  const methodsToCall: {
    args: unknown[]
    method: string
  }[] = []

  if (orderBy?.order && orderBy?.column) {
    methodsToCall.push({
      args: [orderBy.order(orderBy.column)],
      method: 'orderBy',
    })
  }

  const findManyArgs = buildFindManyArgs({
@@ -69,23 +66,28 @@ export const find: Find = async function find(
    depth: 0,
    fields: collectionConfig.fields,
    tableName,
  })

  // only fetch IDs when a sort or where query is used that needs to be done on join tables, otherwise these can be done directly on the table in findMany
  if (Object.keys(joins).length > 0) {
    if (where) {
      methodsToCall.push({ args: [where], method: 'where' })
    }

    Object.entries(joins).forEach(([joinTable, condition]) => {
      if (joinTable) {
        methodsToCall.push({
          args: [this.tables[joinTable], condition],
          method: 'leftJoin',
        })
      }
    })

    methodsToCall.push({ args: [(page - 1) * limit], method: 'offset' })
    methodsToCall.push({ args: [limit === 0 ? undefined : limit], method: 'limit' })

    selectDistinctResult = await methodsToCall.reduce((query, { args, method }) => {
      return query[method](...args)
    }, db.selectDistinct(selectFields).from(table))

    if (selectDistinctResult.length === 0) {
      return {
@@ -99,62 +101,62 @@ export const find: Find = async function find(
        prevPage: null,
        totalDocs: 0,
        totalPages: 0,
      }
    }

    // set the id in an object for sorting later
    selectDistinctResult.forEach(({ id }, i) => {
      orderedIDMap[id as number | string] = i
    })
    findManyArgs.where = inArray(this.tables[tableName].id, Object.keys(orderedIDMap))
  } else {
    findManyArgs.limit = limitArg === 0 ? undefined : limitArg
    findManyArgs.offset = (page - 1) * limitArg

    if (where) {
      findManyArgs.where = where
    }

    // orderBy will only be set if a complex sort is needed on a relation
    if (sort) {
      if (sort[0] === '-') {
        findManyArgs.orderBy = desc(this.tables[tableName][sort.substring(1)])
      } else {
        findManyArgs.orderBy = asc(this.tables[tableName][sort])
      }
    }
  }

  const findPromise = db.query[tableName].findMany(findManyArgs)

  if (pagination !== false || selectDistinctResult?.length > limit) {
    selectCount = db
      .select({ count: sql<number>`count(*)` })
      .from(table)
      .where(where)
    Object.entries(joins).forEach(([joinTable, condition]) => {
      if (joinTable) {
        selectCount.leftJoin(this.tables[joinTable], condition)
      }
    })

    const countResult = await selectCount
    totalDocs = Number(countResult[0].count)
    totalPages = typeof limit === 'number' ? Math.ceil(totalDocs / limit) : 1
    hasPrevPage = page > 1
    hasNextPage = totalPages > page
    pagingCounter = (page - 1) * limit + 1
  }

  const rawDocs = await findPromise
  // sort rawDocs from selectQuery
  if (Object.keys(orderedIDMap).length > 0) {
    rawDocs.sort((a, b) => orderedIDMap[a.id] - orderedIDMap[b.id])
  }

  if (pagination === false) {
    totalDocs = rawDocs.length
    limit = totalDocs
    totalPages = 1
    pagingCounter = 1
    hasPrevPage = false
    hasNextPage = false
  }

  const docs = rawDocs.map((data: TypeWithID) => {
@@ -162,8 +164,8 @@ export const find: Find = async function find(
      config: this.payload.config,
      data,
      fields: collectionConfig.fields,
    })
  })

  return {
    docs,
@@ -176,5 +178,5 @@ export const find: Find = async function find(
    prevPage: hasPrevPage ? page - 1 : null,
    totalDocs,
    totalPages,
  }
}
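The refactor above replaces direct chaining on the drizzle select builder with a queue of { method, args } entries that is replayed via reduce, so orderBy/where/leftJoin/offset/limit are only applied when actually needed. A minimal, self-contained sketch of that pattern (illustrative only, not part of this commit; FakeBuilder is a hypothetical stand-in for a drizzle select builder):

// Sketch of the queue-and-reduce pattern used in find.ts above.
type MethodCall = { args: unknown[]; method: string }

class FakeBuilder {
  calls: string[] = []
  orderBy(clause: string) {
    this.calls.push(`orderBy(${clause})`)
    return this
  }
  offset(n: number) {
    this.calls.push(`offset(${n})`)
    return this
  }
  limit(n?: number) {
    this.calls.push(`limit(${n})`)
    return this
  }
}

const methodsToCall: MethodCall[] = []
methodsToCall.push({ args: ['title ASC'], method: 'orderBy' })
methodsToCall.push({ args: [0], method: 'offset' })
methodsToCall.push({ args: [10], method: 'limit' })

// Replay the queued calls onto the builder, as the refactored find() does.
const query = methodsToCall.reduce(
  (builder, { args, method }) => (builder as any)[method](...args),
  new FakeBuilder(),
)

console.log(query.calls) // ['orderBy(title ASC)', 'offset(0)', 'limit(10)']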

View File

@@ -47,6 +47,7 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
    db: undefined,
    defaultIDType: 'number',
    // destroy,
    name: 'postgres',
    deleteMany,
    deleteOne,
    enums: {},

View File

@@ -1,8 +1,23 @@
import {
  and,
  eq,
  gt,
  gte,
  ilike,
  inArray,
  isNotNull,
  isNull,
  lt,
  lte,
  ne,
  notInArray,
  or,
} from 'drizzle-orm'

export const operatorMap = {
  // intersects: intersects,
  and,
  contains: ilike,
  equals: eq,
  exists: isNotNull,
  greater_than: gt,
@@ -19,4 +34,4 @@ export const operatorMap = {
  // all: all,
  not_in: notInArray,
  or,
}
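For context, a minimal sketch (not part of this diff) of what the new contains mapping resolves to. Combined with the %value% wrapping added in sanitizeQueryValue further down, a contains query becomes a case-insensitive ILIKE substring match; the posts table here is hypothetical:

import { ilike } from 'drizzle-orm'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'

// Hypothetical table, for illustration only.
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

// A Payload where clause of { title: { contains: 'hello' } } is sanitized to
// '%hello%' and mapped through operatorMap.contains (ilike), yielding:
const whereClause = ilike(posts.title, '%hello%') // SQL: "title" ILIKE '%hello%'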

View File

@@ -1,17 +1,17 @@
/* eslint-disable no-await-in-loop */
import type { SQL } from 'drizzle-orm'
import type { Field, Operator, Where } from 'payload/types'

import { and } from 'drizzle-orm'
import { validOperators } from 'payload/types'

import type { GenericColumn, PostgresAdapter } from '../types'
import type { BuildQueryJoins } from './buildQuery'

import { buildAndOrConditions } from './buildAndOrConditions'
import { getTableColumnFromPath } from './getTableColumnFromPath'
import { operatorMap } from './operatorMap'
import { sanitizeQueryValue } from './sanitizeQueryValue'

type Args = {
  adapter: PostgresAdapter
@@ -32,19 +32,19 @@ export async function parseParams({
  tableName,
  where,
}: Args): Promise<SQL> {
  let result: SQL
  const constraints: SQL[] = []

  if (typeof where === 'object' && Object.keys(where).length > 0) {
    // We need to determine if the whereKey is an AND, OR, or a schema path
    for (const relationOrPath of Object.keys(where)) {
      if (relationOrPath) {
        const condition = where[relationOrPath]
        let conditionOperator: 'and' | 'or'
        if (relationOrPath.toLowerCase() === 'and') {
          conditionOperator = 'and'
        } else if (relationOrPath.toLowerCase() === 'or') {
          conditionOperator = 'or'
        }
        if (Array.isArray(condition)) {
          const builtConditions = await buildAndOrConditions({
@@ -55,19 +55,19 @@ export async function parseParams({
            selectFields,
            tableName,
            where: condition,
          })
          if (builtConditions.length > 0) {
            if (result) {
              result = operatorMap[conditionOperator](result, ...builtConditions)
            } else {
              result = operatorMap[conditionOperator](...builtConditions)
            }
          }
        } else {
          // It's a path - and there can be multiple comparisons on a single path.
          // For example - title like 'test' and title not equal to 'tester'
          // So we need to loop on keys again here to handle each operator independently
          const pathOperators = where[relationOrPath]
          if (typeof pathOperators === 'object') {
            for (const operator of Object.keys(pathOperators)) {
              if (validOperators.includes(operator as Operator)) {
@@ -86,22 +86,23 @@ export async function parseParams({
                  pathSegments: relationOrPath.split('.'),
                  selectFields,
                  tableName,
                })

                const { operator: queryOperator, value: queryValue } = sanitizeQueryValue({
                  field,
                  operator,
                  val: where[relationOrPath][operator],
                })

                queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => {
                  constraints.push(operatorMap.equals(constraintTable[col], value))
                })
                if (!operatorMap[queryOperator]) {
                  console.log('found it')
                }
                constraints.push(
                  operatorMap[queryOperator](rawColumn || table[columnName], queryValue),
                )
              }
            }
          }
@@ -111,14 +112,14 @@ export async function parseParams({
  }

  if (constraints.length > 0) {
    if (result) {
      result = and(result, ...constraints)
    } else {
      result = and(...constraints)
    }
  }
  if (constraints.length === 1 && !result) {
    ;[result] = constraints
  }
  return result
}
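To tie the pieces together, a sketch (hypothetical field names) of a where clause that exercises the paths handled above: an or-group plus a path using the newly wired contains operator.

import type { Where } from 'payload/types'

// Each path/operator pair resolves through sanitizeQueryValue and operatorMap
// into a drizzle SQL constraint, then the or-group combines them.
const where: Where = {
  or: [
    { title: { contains: 'payload' } }, // -> ilike(posts.title, '%payload%')
    { title: { equals: 'Untitled' } }, // -> eq(posts.title, 'Untitled')
  ],
}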

View File

@@ -1,7 +1,6 @@
import { APIError } from 'payload/errors'
import { type Field, type TabAsField, fieldAffectsData } from 'payload/types'
import { createArrayFromCommaDelineated } from 'payload/utilities'

type SanitizeQueryValueArgs = {
  field: Field | TabAsField
@@ -9,9 +8,13 @@ type SanitizeQueryValueArgs = {
  val: any
}

export const sanitizeQueryValue = ({
  field,
  operator: operatorArg,
  val,
}: SanitizeQueryValueArgs): { operator: string; value: unknown } => {
  let operator = operatorArg
  let formattedValue = val

  // // Disregard invalid _ids
  // if (path === '_id' && typeof val === 'string' && val.split(',').length === 1) {
@@ -32,35 +35,36 @@ export const sanitizeQueryValue = ({ field, operator: operatorArg, val }: Saniti
  //   }
  // }

  if (!fieldAffectsData(field)) return { operator, value: formattedValue }

  // Cast incoming values as proper searchable types
  if (field.type === 'checkbox' && typeof val === 'string') {
    if (val.toLowerCase() === 'true') formattedValue = true
    if (val.toLowerCase() === 'false') formattedValue = false
  }

  if (['all', 'in', 'not_in'].includes(operator) && typeof formattedValue === 'string') {
    formattedValue = createArrayFromCommaDelineated(formattedValue)

    if (field.type === 'number') {
      formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal))
    }
  }

  if (field.type === 'number' && typeof formattedValue === 'string') {
    formattedValue = Number(val)
  }

  if (field.type === 'date' && typeof val === 'string') {
    formattedValue = new Date(val)
    if (Number.isNaN(Date.parse(formattedValue))) {
      return { operator, value: undefined }
    }
  }

  if (['relationship', 'upload'].includes(field.type)) {
    if (val === 'null') {
      formattedValue = null
    }

  // if (operator === 'in' && Array.isArray(formattedValue)) {
@@ -80,21 +84,21 @@ export const sanitizeQueryValue = ({ field, operator: operatorArg, val }: Saniti
  }

  if (operator === 'near' || operator === 'within' || operator === 'intersects') {
    throw new APIError(
      `Querying with '${operator}' is not supported with the postgres database adapter.`,
    )
  }

  if (operator === 'contains') {
    formattedValue = `%${formattedValue}%`
  }

  if (operator === 'exists') {
    formattedValue = formattedValue === 'true' || formattedValue === true
    if (formattedValue === false) {
      operator = 'isNull'
    }
  }

  return { operator, value: formattedValue }
}
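As a quick illustration of the two branches changed above (values hypothetical, field objects reduced to the relevant properties):

import type { Field } from 'payload/types'

import { sanitizeQueryValue } from './sanitizeQueryValue'

const titleField = { name: 'title', type: 'text' } as Field

// contains values are wrapped with SQL wildcards for the ilike mapping:
const containsResult = sanitizeQueryValue({ field: titleField, operator: 'contains', val: 'hello' })
// -> { operator: 'contains', value: '%hello%' }

// exists flips to isNull when the value is falsy, so the operator map can pick
// isNull instead of isNotNull:
const existsResult = sanitizeQueryValue({ field: titleField, operator: 'exists', val: 'false' })
// -> { operator: 'isNull', value: false }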

View File

@@ -1,42 +1,39 @@
import type { PgTransactionConfig } from 'drizzle-orm/pg-core'
import type { BeginTransaction } from 'payload/database'

import { sql } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { Client, Pool } from 'pg'
import { v4 as uuid } from 'uuid'

import type { DrizzleDB } from '../types'

export const beginTransaction: BeginTransaction = async function beginTransaction(
  options: PgTransactionConfig = {},
) {
  let id
  try {
    let db: DrizzleDB
    id = uuid()

    // if ('pool' in this && this.pool !== false) {
    //   const pool = new Pool(this.pool);
    //   db = drizzle(pool, { schema: this.schema });
    //   await pool.connect();
    // }

    // if ('client' in this && this.client !== false) {
    //   const client = new Client(this.client);
    //   db = drizzle(client, { schema: this.schema });
    //   await client.connect();
    // }

    this.sessions[id] = this.db
    // await this.sessions[id].execute(sql`BEGIN;`);
  } catch (err) {
    this.payload.logger.error(`Error: cannot begin transaction: ${err.message}`, err)
    process.exit(1)
  }

  return id
}

View File

@@ -99,6 +99,10 @@ export interface DatabaseAdapter {
   * Path to read and write migration files from
   */
  migrationDir?: string
  /**
   * The name of the database adapter
   */
  name: string
  /**
   * reference to the instance of payload
   */
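A short sketch of why the new name field is useful: callers can branch on the active adapter without sniffing adapter-specific internals, which is how the updated tests below skip the point-field suites (illustrative snippet, not from this commit):

import payload from 'payload'

// Skip geo assertions when running against the postgres adapter, which does not
// support the near/within/intersects operators (see sanitizeQueryValue above).
if (payload.db.name !== 'mongoose') {
  // skip point-field tests
}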

View File

@@ -8,7 +8,7 @@ import { mongooseAdapter } from '../packages/db-mongodb/src/index'
import { postgresAdapter } from '../packages/db-postgres/src/index'
import { buildConfig as buildPayloadConfig } from '../packages/payload/src/config/build'

process.env.PAYLOAD_DATABASE = 'postgres'

const databaseAdapters = {
  mongoose: mongooseAdapter({

View File

@@ -1,11 +1,11 @@
import { GraphQLClient } from 'graphql-request'

import type { Post } from './payload-types'

import payload from '../../packages/payload/src'
import { mapAsync } from '../../packages/payload/src/utilities/mapAsync'
import { initPayloadTest } from '../helpers/configHelpers'
import configPromise, { pointSlug, slug } from './config'

const title = 'title'
@@ -20,13 +20,13 @@ describe('collections-graphql', () => {
    // Wait for indexes to be created,
    // as we need them to query by point
    if (payload.db.name === 'mongoose') {
      await new Promise((resolve, reject) => {
        payload.db?.collections?.point?.ensureIndexes(function (err) {
          if (err) reject(err)
          resolve(true)
        })
      })
    }
  })
@@ -402,225 +402,227 @@ describe('collections-graphql', () => {
      expect(docs).toContainEqual(expect.objectContaining({ id: specialPost.id }))
    })

    if (['mongoose'].includes(process.env.PAYLOAD_DATABASE)) {
      describe('near', () => {
        const point = [10, 20]
        const [lat, lng] = point

        it('should return a document near a point', async () => {
          const nearQuery = `
          query {
            Points(
              where: {
                point: {
                  near: [${lat + 0.01}, ${lng + 0.01}, 10000]
                }
              }
            ) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(nearQuery)
          const { docs } = response.Points

          expect(docs).toHaveLength(1)
        })

        it('should not return a point far away', async () => {
          const nearQuery = `
          query {
            Points(
              where: {
                point: {
                  near: [${lng + 1}, ${lat - 1}, 5000]
                }
              }
            ) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(nearQuery)
          const { docs } = response.Points

          expect(docs).toHaveLength(0)
        })

        it('should sort find results by nearest distance', async () => {
          // creating twice as many records as we are querying to get a random sample
          await mapAsync([...Array(10)], async () => {
            // setTimeout used to randomize the creation timestamp
            setTimeout(async () => {
              await payload.create({
                collection: pointSlug,
                data: {
                  // only randomize longitude to make distance comparison easy
                  point: [Math.random(), 0],
                },
              })
            }, Math.random())
          })

          const nearQuery = `
          query {
            Points(
              where: {
                point: {
                  near: [0, 0, 100000, 0]
                }
              },
              limit: 5
            ) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(nearQuery)
          const { docs } = response.Points

          let previous = 0
          docs.forEach(({ point: coordinates }) => {
            // The next document point should always be greater than the one before
            expect(previous).toBeLessThanOrEqual(coordinates[0])
            ;[previous] = coordinates
          })
        })
      })

      describe('within', () => {
        type Point = [number, number]
        const polygon: Point[] = [
          [9.0, 19.0], // bottom-left
          [9.0, 21.0], // top-left
          [11.0, 21.0], // top-right
          [11.0, 19.0], // bottom-right
          [9.0, 19.0], // back to starting point to close the polygon
        ]

        it('should return a document with the point inside the polygon', async () => {
          const query = `
          query {
            Points(
              where: {
                point: {
                  within: {
                    type: "Polygon",
                    coordinates: ${JSON.stringify([polygon])}
                  }
                }
              }) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(query)
          const { docs } = response.Points

          expect(docs).toHaveLength(1)
          expect(docs[0].point).toEqual([10, 20])
        })

        it('should not return a document with the point outside the polygon', async () => {
          const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1))
          const query = `
          query {
            Points(
              where: {
                point: {
                  within: {
                    type: "Polygon",
                    coordinates: ${JSON.stringify([reducedPolygon])}
                  }
                }
              }) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(query)
          const { docs } = response.Points

          expect(docs).toHaveLength(0)
        })
      })

      describe('intersects', () => {
        type Point = [number, number]
        const polygon: Point[] = [
          [9.0, 19.0], // bottom-left
          [9.0, 21.0], // top-left
          [11.0, 21.0], // top-right
          [11.0, 19.0], // bottom-right
          [9.0, 19.0], // back to starting point to close the polygon
        ]

        it('should return a document with the point intersecting the polygon', async () => {
          const query = `
          query {
            Points(
              where: {
                point: {
                  intersects: {
                    type: "Polygon",
                    coordinates: ${JSON.stringify([polygon])}
                  }
                }
              }) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(query)
          const { docs } = response.Points

          expect(docs).toHaveLength(1)
          expect(docs[0].point).toEqual([10, 20])
        })

        it('should not return a document with the point not intersecting a smaller polygon', async () => {
          const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1))
          const query = `
          query {
            Points(
              where: {
                point: {
                  within: {
                    type: "Polygon",
                    coordinates: ${JSON.stringify([reducedPolygon])}
                  }
                }
              }) {
              docs {
                id
                point
              }
            }
          }`

          const response = await client.request(query)
          const { docs } = response.Points

          expect(docs).toHaveLength(0)
        })
      })
    }

    it('can query deeply nested fields within rows, tabs, collapsibles', async () => {
      const withNestedField = await createPost({ D1: { D2: { D3: { D4: 'nested message' } } } })