diff --git a/packages/db-mongodb/src/index.ts b/packages/db-mongodb/src/index.ts
index 1d2e678c7e..d265635544 100644
--- a/packages/db-mongodb/src/index.ts
+++ b/packages/db-mongodb/src/index.ts
@@ -94,6 +94,7 @@ export function mongooseAdapter({
     globals: undefined,
     init,
     ...(migrationDir && { migrationDir }),
+    name: 'mongoose',
     mongoMemoryServer: undefined,
     payload,
     queryDrafts,
diff --git a/packages/db-postgres/src/find.ts b/packages/db-postgres/src/find.ts
index 9725e8acb1..d7c349b73e 100644
--- a/packages/db-postgres/src/find.ts
+++ b/packages/db-postgres/src/find.ts
@@ -1,19 +1,20 @@
-import type { SQL } from 'drizzle-orm';
-import type { PgSelect } from 'drizzle-orm/pg-core';
-import type { Find } from 'payload/database';
-import type { PayloadRequest, SanitizedCollectionConfig, TypeWithID } from 'payload/types';
+import type { SQL } from 'drizzle-orm'
+import type { PgSelect } from 'drizzle-orm/pg-core'
+import type { Find } from 'payload/database'
+import type { PayloadRequest, SanitizedCollectionConfig, TypeWithID } from 'payload/types'
 
-import { asc, desc, inArray, sql } from 'drizzle-orm';
-import toSnakeCase from 'to-snake-case';
+import { asc, desc, inArray, sql } from 'drizzle-orm'
+import toSnakeCase from 'to-snake-case'
 
-import type { GenericColumn, PostgresAdapter } from './types';
+import type { GenericColumn, PostgresAdapter } from './types'
 
-import { buildFindManyArgs } from './find/buildFindManyArgs';
-import buildQuery from './queries/buildQuery';
-import { transform } from './transform/read';
+import { buildFindManyArgs } from './find/buildFindManyArgs'
+import buildQuery from './queries/buildQuery'
+import { transform } from './transform/read'
 
 export const find: Find = async function find(
-  this: PostgresAdapter, {
+  this: PostgresAdapter,
+  {
     collection,
     limit: limitArg,
     locale,
@@ -24,44 +25,40 @@ export const find: Find = async function find(
     where: whereArg,
   },
 ) {
-  const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config;
-  const tableName = toSnakeCase(collection);
-  const table = this.tables[tableName];
-  const sort = typeof sortArg === 'string' ? sortArg : collectionConfig.defaultSort;
-  let limit = limitArg;
-  let totalDocs: number;
-  let totalPages: number;
-  let hasPrevPage: boolean;
-  let hasNextPage: boolean;
-  let pagingCounter: number;
-  let selectDistinctResult;
+  const collectionConfig: SanitizedCollectionConfig = this.payload.collections[collection].config
+  const tableName = toSnakeCase(collection)
+  const table = this.tables[tableName]
+  const sort = typeof sortArg === 'string' ? sortArg : collectionConfig.defaultSort
+  let limit = limitArg
+  let totalDocs: number
+  let totalPages: number
+  let hasPrevPage: boolean
+  let hasNextPage: boolean
+  let pagingCounter: number
+  let selectDistinctResult
 
-  const {
-    joins,
-    orderBy,
-    selectFields,
-    where,
-  } = await buildQuery({
+  const { joins, orderBy, selectFields, where } = await buildQuery({
     adapter: this,
     fields: collectionConfig.fields,
     locale,
     sort,
     tableName,
     where: whereArg,
-  });
-  const db = req.transactionID ? this.sessions[req.transactionID] : this.db;
+  })
+  const db = this.sessions?.[req.transactionID] || this.db
+  const orderedIDMap: Record<number | string, number> = {}
+  let selectCount: PgSelect }, 'partial', Record>
 
-  const orderedIDMap: Record<number | string, number> = {};
-  let selectQuery: PgSelect, 'partial', Record>;
-  let selectCount: PgSelect; }, "partial", Record>;
+  const methodsToCall: {
+    args: unknown[]
+    method: string
+  }[] = []
 
   if (orderBy?.order && orderBy?.column) {
-    selectQuery = db.selectDistinct(selectFields)
-      .from(table)
-      .orderBy(orderBy.order(orderBy.column));
-  } else {
-    selectQuery = db.selectDistinct(selectFields)
-      .from(table)
+    methodsToCall.push({
+      args: [orderBy.order(orderBy.column)],
+      method: 'orderBy',
+    })
   }
 
   const findManyArgs = buildFindManyArgs({
@@ -69,23 +66,28 @@ export const find: Find = async function find(
     depth: 0,
     fields: collectionConfig.fields,
     tableName,
-  });
+  })
 
   // only fetch IDs when a sort or where query is used that needs to be done on join tables, otherwise these can be done directly on the table in findMany
   if (Object.keys(joins).length > 0) {
     if (where) {
-      selectQuery.where(where);
+      methodsToCall.push({ args: [where], method: 'where' })
     }
-    Object.entries(joins)
-      .forEach(([joinTable, condition]) => {
-        if (joinTable) {
-          selectQuery.leftJoin(this.tables[joinTable], condition);
-        }
-      });
+    Object.entries(joins).forEach(([joinTable, condition]) => {
+      if (joinTable) {
+        methodsToCall.push({
+          args: [this.tables[joinTable], condition],
+          method: 'leftJoin',
+        })
+      }
+    })
 
-    selectDistinctResult = await selectQuery
-      .offset((page - 1) * limit)
-      .limit(limit === 0 ? undefined : limit);
+    methodsToCall.push({ args: [(page - 1) * limit], method: 'offset' })
+    methodsToCall.push({ args: [limit === 0 ? undefined : limit], method: 'limit' })
+
+    selectDistinctResult = await methodsToCall.reduce((query, { args, method }) => {
+      return query[method](...args)
+    }, db.selectDistinct(selectFields).from(table))
 
     if (selectDistinctResult.length === 0) {
       return {
@@ -99,62 +101,62 @@ export const find: Find = async function find(
         prevPage: null,
         totalDocs: 0,
         totalPages: 0,
-      };
+      }
     }
 
     // set the id in an object for sorting later
     selectDistinctResult.forEach(({ id }, i) => {
-      orderedIDMap[id as (number | string)] = i;
-    });
-    findManyArgs.where = inArray(this.tables[tableName].id, Object.keys(orderedIDMap));
+      orderedIDMap[id as number | string] = i
+    })
+    findManyArgs.where = inArray(this.tables[tableName].id, Object.keys(orderedIDMap))
   } else {
-    findManyArgs.limit = limitArg === 0 ? undefined : limitArg;
-    findManyArgs.offset = (page - 1) * limitArg;
+    findManyArgs.limit = limitArg === 0 ? undefined : limitArg
+    findManyArgs.offset = (page - 1) * limitArg
 
     if (where) {
-      findManyArgs.where = where;
+      findManyArgs.where = where
     }
 
     // orderBy will only be set if a complex sort is needed on a relation
     if (sort) {
       if (sort[0] === '-') {
-        findManyArgs.orderBy = desc(this.tables[tableName][sort.substring(1)]);
+        findManyArgs.orderBy = desc(this.tables[tableName][sort.substring(1)])
       } else {
-        findManyArgs.orderBy = asc(this.tables[tableName][sort]);
+        findManyArgs.orderBy = asc(this.tables[tableName][sort])
      }
    }
  }
 
-  const findPromise = db.query[tableName].findMany(findManyArgs);
+  const findPromise = db.query[tableName].findMany(findManyArgs)
 
   if (pagination !== false || selectDistinctResult?.length > limit) {
-    selectCount = db.select({ count: sql`count(*)` })
+    selectCount = db
+      .select({ count: sql`count(*)` })
       .from(table)
-      .where(where);
-    Object.entries(joins)
-      .forEach(([joinTable, condition]) => {
-        if (joinTable) {
-          selectCount.leftJoin(this.tables[joinTable], condition);
-        }
-      });
-    const countResult = await selectCount;
-    totalDocs = Number(countResult[0].count);
-    totalPages = typeof limit === 'number' ? Math.ceil(totalDocs / limit) : 1;
-    hasPrevPage = page > 1;
-    hasNextPage = totalPages > page;
-    pagingCounter = ((page - 1) * limit) + 1;
+      .where(where)
+    Object.entries(joins).forEach(([joinTable, condition]) => {
+      if (joinTable) {
+        selectCount.leftJoin(this.tables[joinTable], condition)
+      }
+    })
+    const countResult = await selectCount
+    totalDocs = Number(countResult[0].count)
+    totalPages = typeof limit === 'number' ? Math.ceil(totalDocs / limit) : 1
+    hasPrevPage = page > 1
+    hasNextPage = totalPages > page
+    pagingCounter = (page - 1) * limit + 1
   }
 
-  const rawDocs = await findPromise;
+  const rawDocs = await findPromise
 
   // sort rawDocs from selectQuery
   if (Object.keys(orderedIDMap).length > 0) {
-    rawDocs.sort((a, b) => (orderedIDMap[a.id] - orderedIDMap[b.id]));
+    rawDocs.sort((a, b) => orderedIDMap[a.id] - orderedIDMap[b.id])
   }
 
   if (pagination === false) {
-    totalDocs = rawDocs.length;
-    limit = totalDocs;
-    totalPages = 1;
-    pagingCounter = 1;
-    hasPrevPage = false;
-    hasNextPage = false;
+    totalDocs = rawDocs.length
+    limit = totalDocs
+    totalPages = 1
+    pagingCounter = 1
+    hasPrevPage = false
+    hasNextPage = false
   }
 
   const docs = rawDocs.map((data: TypeWithID) => {
@@ -162,8 +164,8 @@ export const find: Find = async function find(
       config: this.payload.config,
       data,
       fields: collectionConfig.fields,
-    });
-  });
+    })
+  })
 
   return {
     docs,
@@ -176,5 +178,5 @@ export const find: Find = async function find(
     prevPage: hasPrevPage ? page - 1 : null,
     totalDocs,
     totalPages,
-  };
-};
+  }
+}
diff --git a/packages/db-postgres/src/index.ts b/packages/db-postgres/src/index.ts
index 00b750b2dd..2238b94397 100644
--- a/packages/db-postgres/src/index.ts
+++ b/packages/db-postgres/src/index.ts
@@ -47,6 +47,7 @@ export function postgresAdapter(args: Args): PostgresAdapterResult {
     db: undefined,
     defaultIDType: 'number',
     // destroy,
+    name: 'postgres',
     deleteMany,
     deleteOne,
     enums: {},
diff --git a/packages/db-postgres/src/queries/operatorMap.ts b/packages/db-postgres/src/queries/operatorMap.ts
index 6f21c40a07..75475bba40 100644
--- a/packages/db-postgres/src/queries/operatorMap.ts
+++ b/packages/db-postgres/src/queries/operatorMap.ts
@@ -1,8 +1,23 @@
-import { and, eq, gt, gte, ilike, inArray, isNotNull, isNull, lt, lte, ne, notInArray, or } from 'drizzle-orm';
+import {
+  and,
+  eq,
+  gt,
+  gte,
+  ilike,
+  inArray,
+  isNotNull,
+  isNull,
+  lt,
+  lte,
+  ne,
+  notInArray,
+  or,
+} from 'drizzle-orm'
 
 export const operatorMap = {
   // intersects: intersects,
   and,
+  contains: ilike,
   equals: eq,
   exists: isNotNull,
   greater_than: gt,
@@ -19,4 +34,4 @@ export const operatorMap = {
   // all: all,
   not_in: notInArray,
   or,
-};
+}
diff --git a/packages/db-postgres/src/queries/parseParams.ts b/packages/db-postgres/src/queries/parseParams.ts
index 80cc39bfb7..3899de76ea 100644
--- a/packages/db-postgres/src/queries/parseParams.ts
+++ b/packages/db-postgres/src/queries/parseParams.ts
@@ -1,17 +1,17 @@
 /* eslint-disable no-await-in-loop */
-import type { SQL } from 'drizzle-orm';
-import type { Field, Operator, Where } from 'payload/types';
+import type { SQL } from 'drizzle-orm'
+import type { Field, Operator, Where } from 'payload/types'
 
-import { and } from 'drizzle-orm';
-import { validOperators } from 'payload/types';
+import { and } from 'drizzle-orm'
+import { validOperators } from 'payload/types'
 
-import type { GenericColumn, PostgresAdapter } from '../types';
-import type { BuildQueryJoins } from './buildQuery';
+import type { GenericColumn, PostgresAdapter } from '../types'
+import type { BuildQueryJoins } from './buildQuery'
 
-import { buildAndOrConditions } from './buildAndOrConditions';
-import { getTableColumnFromPath } from './getTableColumnFromPath';
-import { operatorMap } from './operatorMap';
-import { sanitizeQueryValue } from './sanitizeQueryValue';
+import { buildAndOrConditions } from './buildAndOrConditions'
+import { getTableColumnFromPath } from './getTableColumnFromPath'
+import { operatorMap } from './operatorMap'
+import { sanitizeQueryValue } from './sanitizeQueryValue'
 
 type Args = {
   adapter: PostgresAdapter
@@ -32,19 +32,19 @@ export async function parseParams({
   tableName,
   where,
 }: Args): Promise<SQL> {
-  let result: SQL;
-  const constraints: SQL[] = [];
+  let result: SQL
+  const constraints: SQL[] = []
 
   if (typeof where === 'object' && Object.keys(where).length > 0) {
     // We need to determine if the whereKey is an AND, OR, or a schema path
     for (const relationOrPath of Object.keys(where)) {
       if (relationOrPath) {
-        const condition = where[relationOrPath];
-        let conditionOperator: 'and' | 'or';
+        const condition = where[relationOrPath]
+        let conditionOperator: 'and' | 'or'
         if (relationOrPath.toLowerCase() === 'and') {
-          conditionOperator = 'and';
+          conditionOperator = 'and'
         } else if (relationOrPath.toLowerCase() === 'or') {
-          conditionOperator = 'or';
+          conditionOperator = 'or'
         }
         if (Array.isArray(condition)) {
           const builtConditions = await buildAndOrConditions({
             adapter,
             fields,
             joins,
             locale,
selectFields, tableName, where: condition, - }); + }) if (builtConditions.length > 0) { if (result) { - result = operatorMap[conditionOperator](result, ...builtConditions); + result = operatorMap[conditionOperator](result, ...builtConditions) } else { - result = operatorMap[conditionOperator](...builtConditions); + result = operatorMap[conditionOperator](...builtConditions) } } } else { // It's a path - and there can be multiple comparisons on a single path. // For example - title like 'test' and title not equal to 'tester' // So we need to loop on keys again here to handle each operator independently - const pathOperators = where[relationOrPath]; + const pathOperators = where[relationOrPath] if (typeof pathOperators === 'object') { for (const operator of Object.keys(pathOperators)) { if (validOperators.includes(operator as Operator)) { @@ -86,22 +86,23 @@ export async function parseParams({ pathSegments: relationOrPath.split('.'), selectFields, tableName, - }); + }) const { operator: queryOperator, value: queryValue } = sanitizeQueryValue({ field, operator, val: where[relationOrPath][operator], - }); + }) - queryConstraints.forEach(({ - columnName: col, - table: constraintTable, - value, - }) => { - constraints.push(operatorMap.equals(constraintTable[col], value)); - }); - constraints.push(operatorMap[queryOperator](rawColumn || table[columnName], queryValue)); + queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => { + constraints.push(operatorMap.equals(constraintTable[col], value)) + }) + if (!operatorMap[queryOperator]) { + console.log('found it') + } + constraints.push( + operatorMap[queryOperator](rawColumn || table[columnName], queryValue), + ) } } } @@ -111,14 +112,14 @@ export async function parseParams({ } if (constraints.length > 0) { if (result) { - result = and(result, ...constraints); + result = and(result, ...constraints) } else { - result = and(...constraints); + result = and(...constraints) } } if (constraints.length === 1 && !result) { - [result] = constraints; + ;[result] = constraints } - return result; + return result } diff --git a/packages/db-postgres/src/queries/sanitizeQueryValue.ts b/packages/db-postgres/src/queries/sanitizeQueryValue.ts index 9ae605e91c..03abad4e38 100644 --- a/packages/db-postgres/src/queries/sanitizeQueryValue.ts +++ b/packages/db-postgres/src/queries/sanitizeQueryValue.ts @@ -1,7 +1,6 @@ -import type { Field, TabAsField } from 'payload/types'; - -import { APIError } from 'payload/errors'; -import { createArrayFromCommaDelineated } from 'payload/utilities'; +import { APIError } from 'payload/errors' +import { type Field, type TabAsField, fieldAffectsData } from 'payload/types' +import { createArrayFromCommaDelineated } from 'payload/utilities' type SanitizeQueryValueArgs = { field: Field | TabAsField @@ -9,9 +8,13 @@ type SanitizeQueryValueArgs = { val: any } -export const sanitizeQueryValue = ({ field, operator: operatorArg, val }: SanitizeQueryValueArgs): { operator: string, value: unknown } => { - let operator = operatorArg; - let formattedValue = val; +export const sanitizeQueryValue = ({ + field, + operator: operatorArg, + val, +}: SanitizeQueryValueArgs): { operator: string; value: unknown } => { + let operator = operatorArg + let formattedValue = val // // Disregard invalid _ids // if (path === '_id' && typeof val === 'string' && val.split(',').length === 1) { @@ -32,35 +35,36 @@ export const sanitizeQueryValue = ({ field, operator: operatorArg, val }: Saniti // } // } + if (!fieldAffectsData(field)) return { 
operator, value: formattedValue } + // Cast incoming values as proper searchable types if (field.type === 'checkbox' && typeof val === 'string') { - if (val.toLowerCase() === 'true') formattedValue = true; - if (val.toLowerCase() === 'false') formattedValue = false; + if (val.toLowerCase() === 'true') formattedValue = true + if (val.toLowerCase() === 'false') formattedValue = false } if (['all', 'in', 'not_in'].includes(operator) && typeof formattedValue === 'string') { - formattedValue = createArrayFromCommaDelineated(formattedValue); + formattedValue = createArrayFromCommaDelineated(formattedValue) if (field.type === 'number') { - formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal)); + formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal)) } } if (field.type === 'number' && typeof formattedValue === 'string') { - formattedValue = Number(val); + formattedValue = Number(val) } if (field.type === 'date' && typeof val === 'string') { - formattedValue = new Date(val); + formattedValue = new Date(val) if (Number.isNaN(Date.parse(formattedValue))) { - return { operator, value: undefined }; + return { operator, value: undefined } } } - if (['relationship', 'upload'].includes(field.type)) { if (val === 'null') { - formattedValue = null; + formattedValue = null } // if (operator === 'in' && Array.isArray(formattedValue)) { @@ -80,21 +84,21 @@ export const sanitizeQueryValue = ({ field, operator: operatorArg, val }: Saniti } if (operator === 'near' || operator === 'within' || operator === 'intersects') { - throw new APIError(`Querying with '${operator}' is not supported with the postgres database adapter.`); + throw new APIError( + `Querying with '${operator}' is not supported with the postgres database adapter.`, + ) } - // if (path !== '_id' || (path === '_id' && hasCustomID && field.type === 'text')) { - // if (operator === 'contains') { - // formattedValue = { $regex: formattedValue, $options: 'i' }; - // } - // } + if (operator === 'contains') { + formattedValue = `%${formattedValue}%` + } if (operator === 'exists') { - formattedValue = (formattedValue === 'true' || formattedValue === true); + formattedValue = formattedValue === 'true' || formattedValue === true if (formattedValue === false) { - operator = 'isNull'; + operator = 'isNull' } } - return { operator, value: formattedValue }; -}; + return { operator, value: formattedValue } +} diff --git a/packages/db-postgres/src/transactions/beginTransaction.ts b/packages/db-postgres/src/transactions/beginTransaction.ts index f9cd2c3dc3..98b2837335 100644 --- a/packages/db-postgres/src/transactions/beginTransaction.ts +++ b/packages/db-postgres/src/transactions/beginTransaction.ts @@ -1,42 +1,39 @@ -import type { PgTransactionConfig } from 'drizzle-orm/pg-core'; -import type { BeginTransaction } from 'payload/database'; +import type { PgTransactionConfig } from 'drizzle-orm/pg-core' +import type { BeginTransaction } from 'payload/database' -import { sql } from 'drizzle-orm'; -import { drizzle } from 'drizzle-orm/node-postgres'; -import { Client, Pool } from 'pg'; -import { v4 as uuid } from 'uuid'; +import { sql } from 'drizzle-orm' +import { drizzle } from 'drizzle-orm/node-postgres' +import { Client, Pool } from 'pg' +import { v4 as uuid } from 'uuid' -import type { DrizzleDB } from '../types'; +import type { DrizzleDB } from '../types' export const beginTransaction: BeginTransaction = async function beginTransaction( options: PgTransactionConfig = {}, ) { - let id; + let id try { - let db: DrizzleDB; - id = 
uuid(); - if ('pool' in this && this.pool !== false) { - const pool = new Pool(this.pool); - db = drizzle(pool, { schema: this.schema }); - await pool.connect(); - } + let db: DrizzleDB + id = uuid() + // if ('pool' in this && this.pool !== false) { + // const pool = new Pool(this.pool); + // db = drizzle(pool, { schema: this.schema }); + // await pool.connect(); + // } - if ('client' in this && this.client !== false) { - const client = new Client(this.client); - db = drizzle(client, { schema: this.schema }); - await client.connect(); - } + // if ('client' in this && this.client !== false) { + // const client = new Client(this.client); + // db = drizzle(client, { schema: this.schema }); + // await client.connect(); + // } - this.sessions[id] = db; + this.sessions[id] = this.db - await this.sessions[id].execute(sql`BEGIN;`); + // await this.sessions[id].execute(sql`BEGIN;`); } catch (err) { - this.payload.logger.error( - `Error: cannot begin transaction: ${err.message}`, - err, - ); - process.exit(1); + this.payload.logger.error(`Error: cannot begin transaction: ${err.message}`, err) + process.exit(1) } - return id; -}; + return id +} diff --git a/packages/payload/src/database/types.ts b/packages/payload/src/database/types.ts index ea72957c79..d1d4e0fbe2 100644 --- a/packages/payload/src/database/types.ts +++ b/packages/payload/src/database/types.ts @@ -99,6 +99,10 @@ export interface DatabaseAdapter { * Path to read and write migration files from */ migrationDir?: string + /** + * The name of the database adapter + */ + name: string /** * reference to the instance of payload */ diff --git a/test/buildConfigWithDefaults.ts b/test/buildConfigWithDefaults.ts index fe986ef59f..da109f5dc7 100644 --- a/test/buildConfigWithDefaults.ts +++ b/test/buildConfigWithDefaults.ts @@ -8,7 +8,7 @@ import { mongooseAdapter } from '../packages/db-mongodb/src/index' import { postgresAdapter } from '../packages/db-postgres/src/index' import { buildConfig as buildPayloadConfig } from '../packages/payload/src/config/build' -// process.env.PAYLOAD_DATABASE = 'postgres' +process.env.PAYLOAD_DATABASE = 'postgres' const databaseAdapters = { mongoose: mongooseAdapter({ diff --git a/test/collections-graphql/int.spec.ts b/test/collections-graphql/int.spec.ts index d9e2e46911..c513a5b0f2 100644 --- a/test/collections-graphql/int.spec.ts +++ b/test/collections-graphql/int.spec.ts @@ -1,11 +1,11 @@ -import { GraphQLClient } from 'graphql-request'; +import { GraphQLClient } from 'graphql-request' -import type { Post } from './payload-types'; +import type { Post } from './payload-types' -import payload from '../../packages/payload/src'; -import { mapAsync } from '../../packages/payload/src/utilities/mapAsync'; -import { initPayloadTest } from '../helpers/configHelpers'; -import configPromise, { pointSlug, slug } from './config'; +import payload from '../../packages/payload/src' +import { mapAsync } from '../../packages/payload/src/utilities/mapAsync' +import { initPayloadTest } from '../helpers/configHelpers' +import configPromise, { pointSlug, slug } from './config' const title = 'title' @@ -20,13 +20,13 @@ describe('collections-graphql', () => { // Wait for indexes to be created, // as we need them to query by point - if (payload.db?.collections?.point?.ensureIndexes) { + if (payload.db.name === 'mongoose') { await new Promise((resolve, reject) => { payload.db?.collections?.point?.ensureIndexes(function (err) { - if (err) reject(err); - resolve(true); - }); - }); + if (err) reject(err) + resolve(true) + }) + }) } }) @@ 
-402,225 +402,227 @@ describe('collections-graphql', () => { expect(docs).toContainEqual(expect.objectContaining({ id: specialPost.id })) }) - describe('near', () => { - const point = [10, 20] - const [lat, lng] = point + if (['mongoose'].includes(process.env.PAYLOAD_DATABASE)) { + describe('near', () => { + const point = [10, 20] + const [lat, lng] = point - it('should return a document near a point', async () => { - const nearQuery = ` - query { - Points( - where: { - point: { - near: [${lat + 0.01}, ${lng + 0.01}, 10000] + it('should return a document near a point', async () => { + const nearQuery = ` + query { + Points( + where: { + point: { + near: [${lat + 0.01}, ${lng + 0.01}, 10000] + } + } + ) { + docs { + id + point } } - ) { - docs { - id - point - } - } - }` + }` - const response = await client.request(nearQuery) - const { docs } = response.Points + const response = await client.request(nearQuery) + const { docs } = response.Points - expect(docs).toHaveLength(1) - }) - - it('should not return a point far away', async () => { - const nearQuery = ` - query { - Points( - where: { - point: { - near: [${lng + 1}, ${lat - 1}, 5000] - } - } - ) { - docs { - id - point - } - } - }` - - const response = await client.request(nearQuery) - const { docs } = response.Points - - expect(docs).toHaveLength(0) - }) - - it('should sort find results by nearest distance', async () => { - // creating twice as many records as we are querying to get a random sample - await mapAsync([...Array(10)], async () => { - // setTimeout used to randomize the creation timestamp - setTimeout(async () => { - await payload.create({ - collection: pointSlug, - data: { - // only randomize longitude to make distance comparison easy - point: [Math.random(), 0], - }, - }) - }, Math.random()) + expect(docs).toHaveLength(1) }) - const nearQuery = ` - query { - Points( - where: { - point: { - near: [0, 0, 100000, 0] + it('should not return a point far away', async () => { + const nearQuery = ` + query { + Points( + where: { + point: { + near: [${lng + 1}, ${lat - 1}, 5000] + } + } + ) { + docs { + id + point } - }, - limit: 5 - ) { - docs { - id - point } - } - }` + }` - const response = await client.request(nearQuery) - const { docs } = response.Points + const response = await client.request(nearQuery) + const { docs } = response.Points - let previous = 0 - docs.forEach(({ point: coordinates }) => { - // The next document point should always be greater than the one before - expect(previous).toBeLessThanOrEqual(coordinates[0]) - ;[previous] = coordinates + expect(docs).toHaveLength(0) + }) + + it('should sort find results by nearest distance', async () => { + // creating twice as many records as we are querying to get a random sample + await mapAsync([...Array(10)], async () => { + // setTimeout used to randomize the creation timestamp + setTimeout(async () => { + await payload.create({ + collection: pointSlug, + data: { + // only randomize longitude to make distance comparison easy + point: [Math.random(), 0], + }, + }) + }, Math.random()) + }) + + const nearQuery = ` + query { + Points( + where: { + point: { + near: [0, 0, 100000, 0] + } + }, + limit: 5 + ) { + docs { + id + point + } + } + }` + + const response = await client.request(nearQuery) + const { docs } = response.Points + + let previous = 0 + docs.forEach(({ point: coordinates }) => { + // The next document point should always be greater than the one before + expect(previous).toBeLessThanOrEqual(coordinates[0]) + ;[previous] = coordinates + }) }) }) - }) - 
describe('within', () => { - type Point = [number, number] - const polygon: Point[] = [ - [9.0, 19.0], // bottom-left - [9.0, 21.0], // top-left - [11.0, 21.0], // top-right - [11.0, 19.0], // bottom-right - [9.0, 19.0], // back to starting point to close the polygon - ] + describe('within', () => { + type Point = [number, number] + const polygon: Point[] = [ + [9.0, 19.0], // bottom-left + [9.0, 21.0], // top-left + [11.0, 21.0], // top-right + [11.0, 19.0], // bottom-right + [9.0, 19.0], // back to starting point to close the polygon + ] - it('should return a document with the point inside the polygon', async () => { - const query = ` - query { - Points( - where: { - point: { - within: { - type: "Polygon", - coordinates: ${JSON.stringify([polygon])} + it('should return a document with the point inside the polygon', async () => { + const query = ` + query { + Points( + where: { + point: { + within: { + type: "Polygon", + coordinates: ${JSON.stringify([polygon])} + } } + }) { + docs { + id + point } - }) { - docs { - id - point } - } - }` + }` - const response = await client.request(query) - const { docs } = response.Points + const response = await client.request(query) + const { docs } = response.Points - expect(docs).toHaveLength(1) - expect(docs[0].point).toEqual([10, 20]) + expect(docs).toHaveLength(1) + expect(docs[0].point).toEqual([10, 20]) + }) + + it('should not return a document with the point outside the polygon', async () => { + const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1)) + const query = ` + query { + Points( + where: { + point: { + within: { + type: "Polygon", + coordinates: ${JSON.stringify([reducedPolygon])} + } + } + }) { + docs { + id + point + } + } + }` + + const response = await client.request(query) + const { docs } = response.Points + + expect(docs).toHaveLength(0) + }) }) - it('should not return a document with the point outside the polygon', async () => { - const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1)) - const query = ` - query { - Points( - where: { - point: { - within: { - type: "Polygon", - coordinates: ${JSON.stringify([reducedPolygon])} + describe('intersects', () => { + type Point = [number, number] + const polygon: Point[] = [ + [9.0, 19.0], // bottom-left + [9.0, 21.0], // top-left + [11.0, 21.0], // top-right + [11.0, 19.0], // bottom-right + [9.0, 19.0], // back to starting point to close the polygon + ] + + it('should return a document with the point intersecting the polygon', async () => { + const query = ` + query { + Points( + where: { + point: { + intersects: { + type: "Polygon", + coordinates: ${JSON.stringify([polygon])} + } } + }) { + docs { + id + point } - }) { - docs { - id - point } - } - }` + }` - const response = await client.request(query) - const { docs } = response.Points + const response = await client.request(query) + const { docs } = response.Points - expect(docs).toHaveLength(0) - }) - }) + expect(docs).toHaveLength(1) + expect(docs[0].point).toEqual([10, 20]) + }) - describe('intersects', () => { - type Point = [number, number] - const polygon: Point[] = [ - [9.0, 19.0], // bottom-left - [9.0, 21.0], // top-left - [11.0, 21.0], // top-right - [11.0, 19.0], // bottom-right - [9.0, 19.0], // back to starting point to close the polygon - ] - - it('should return a document with the point intersecting the polygon', async () => { - const query = ` - query { - Points( - where: { - point: { - intersects: { - type: "Polygon", - coordinates: ${JSON.stringify([polygon])} 
+ it('should not return a document with the point not intersecting a smaller polygon', async () => { + const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1)) + const query = ` + query { + Points( + where: { + point: { + within: { + type: "Polygon", + coordinates: ${JSON.stringify([reducedPolygon])} + } } + }) { + docs { + id + point } - }) { - docs { - id - point } - } - }` + }` - const response = await client.request(query) - const { docs } = response.Points + const response = await client.request(query) + const { docs } = response.Points - expect(docs).toHaveLength(1) - expect(docs[0].point).toEqual([10, 20]) + expect(docs).toHaveLength(0) + }) }) - - it('should not return a document with the point not intersecting a smaller polygon', async () => { - const reducedPolygon = polygon.map((vertex) => vertex.map((coord) => coord * 0.1)) - const query = ` - query { - Points( - where: { - point: { - within: { - type: "Polygon", - coordinates: ${JSON.stringify([reducedPolygon])} - } - } - }) { - docs { - id - point - } - } - }` - - const response = await client.request(query) - const { docs } = response.Points - - expect(docs).toHaveLength(0) - }) - }) + } it('can query deeply nested fields within rows, tabs, collapsibles', async () => { const withNestedField = await createPost({ D1: { D2: { D3: { D4: 'nested message' } } } })