chore: pulls mongodb from main

Author: James
Date: 2024-02-16 11:27:23 -05:00
parent abf0f7111d
commit 12c5100bc8
28 changed files with 567 additions and 507 deletions

View File

@@ -1,9 +1,9 @@
-const nextJest = require('next/jest.js')
-const createJestConfig = nextJest({
-  // Provide the path to your Next.js app to load next.config.js and .env files in your test environment
-  dir: './',
-})
+// const nextJest = require('next/jest.js')
+// const createJestConfig = nextJest({
+//   // Provide the path to your Next.js app to load next.config.js and .env files in your test environment
+//   dir: './',
+// })
 const customJestConfig = {
   globalSetup: './test/jest.setup.ts',
@@ -12,10 +12,14 @@ const customJestConfig = {
     '\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$':
       '<rootDir>/packages/payload/src/bundlers/mocks/fileMock.js',
   },
-  // testEnvironment: 'node',
+  testEnvironment: 'node',
   testMatch: ['<rootDir>/packages/payload/src/**/*.spec.ts', '<rootDir>/test/**/*int.spec.ts'],
   testTimeout: 90000,
+  transform: {
+    '^.+\\.(t|j)sx?$': ['@swc/jest'],
+  },
   verbose: true,
 }
-module.exports = createJestConfig(customJestConfig)
+// module.exports = createJestConfig(customJestConfig)
+module.exports = customJestConfig

View File

@@ -1,5 +1,9 @@
 const path = require('path')
+const withBundleAnalyzer = require('@next/bundle-analyzer')({
+  enabled: process.env.ANALYZE === 'true',
+})
 /** @type {import('next').NextConfig} */
 const nextConfig = {
   experimental: {
@@ -13,6 +17,9 @@ const nextConfig = {
       },
     },
   },
+  typescript: {
+    ignoreBuildErrors: true,
+  },
   webpack: (config) => {
     return {
       ...config,
@@ -53,4 +60,4 @@ const nextConfig = {
   },
 }
-module.exports = nextConfig
+module.exports = withBundleAnalyzer(nextConfig)

View File

@@ -34,6 +34,7 @@
"prepare": "husky install" "prepare": "husky install"
}, },
"devDependencies": { "devDependencies": {
"@next/bundle-analyzer": "^14.1.0",
"@payloadcms/eslint-config": "workspace:*", "@payloadcms/eslint-config": "workspace:*",
"@playwright/test": "1.40.1", "@playwright/test": "1.40.1",
"@swc/cli": "^0.1.62", "@swc/cli": "^0.1.62",
@@ -81,7 +82,7 @@
"lexical": "0.12.5", "lexical": "0.12.5",
"lint-staged": "^14.0.1", "lint-staged": "^14.0.1",
"minimist": "1.2.8", "minimist": "1.2.8",
"mongodb-memory-server": "8.13.0", "mongodb-memory-server": "^9",
"next": "14.1.1-canary.26", "next": "14.1.1-canary.26",
"node-fetch": "2.6.12", "node-fetch": "2.6.12",
"node-mocks-http": "^1.14.1", "node-mocks-http": "^1.14.1",
@@ -126,7 +127,9 @@
}, },
"pnpm": { "pnpm": {
"overrides": { "overrides": {
"graphql": "^16.8.1" "graphql": "^16.8.1",
"react": "^18.2.0",
"react-dom": "^18.2.0"
} }
} }
} }

View File

@@ -1 +0,0 @@
-export const mongooseAdapter = () => ({})

View File

@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-mongodb",
-  "version": "1.1.0",
+  "version": "1.4.1",
   "description": "The officially supported MongoDB database adapter for Payload",
   "repository": "https://github.com/payloadcms/payload",
   "license": "MIT",
@@ -11,13 +11,6 @@
     "url": "https://payloadcms.com"
   },
   "main": "./dist/index.js",
-  "exports": {
-    ".": {
-      "import": "./src/index.ts",
-      "require": "./src/index.ts",
-      "types": "./src/index.ts"
-    }
-  },
   "types": "./dist/index.d.ts",
   "scripts": {
     "build": "pnpm build:swc && pnpm build:types",
@@ -27,18 +20,17 @@
     "prepublishOnly": "pnpm clean && pnpm build"
   },
   "dependencies": {
-    "bson-ext": "^4.0.3",
     "bson-objectid": "2.0.4",
     "deepmerge": "4.3.1",
     "get-port": "5.1.1",
-    "mongoose": "7.6.8",
+    "mongoose": "6.12.3",
     "mongoose-paginate-v2": "1.7.22",
     "prompts": "2.4.2",
+    "http-status": "1.6.2",
     "uuid": "9.0.0"
   },
   "devDependencies": {
     "@payloadcms/eslint-config": "workspace:*",
-    "mongodb-memory-server": "8.13.0",
     "payload": "workspace:*"
   },
   "peerDependencies": {

View File

@@ -27,6 +27,13 @@ export const connect: Connect = async function connect(this: MongooseAdapter, pa
   try {
     this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
+    const client = this.connection.getClient()
+
+    if (!client.options.replicaSet) {
+      this.transactionOptions = false
+      this.beginTransaction = undefined
+    }
+
     if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
       this.payload.logger.info('---- DROPPING DATABASE ----')
       await mongoose.connection.dropDatabase()

View File

@@ -1,10 +1,9 @@
 import type { Create } from 'payload/database'
 import type { Document, PayloadRequest } from 'payload/types'
-import { ValidationError } from 'payload/errors'
 import type { MongooseAdapter } from '.'
+import handleError from './utilities/handleError'
 import { withSession } from './withSession'
 export const create: Create = async function create(
@@ -17,18 +16,7 @@ export const create: Create = async function create(
   try {
     ;[doc] = await Model.create([data], options)
   } catch (error) {
-    // Handle uniqueness error from MongoDB
-    throw error.code === 11000 && error.keyValue
-      ? new ValidationError(
-          [
-            {
-              field: Object.keys(error.keyValue)[0],
-              message: req.t('error:valueMustBeUnique'),
-            },
-          ],
-          req.t,
-        )
-      : error
+    handleError(error, req)
   }
   // doc.toJSON does not do stuff like converting ObjectIds to string, or date strings to date objects. That's why we use JSON.parse/stringify here

View File

@@ -49,6 +49,7 @@ export const createGlobalVersion: CreateGlobalVersion = async function createGlo
       ],
     },
     { $unset: { latest: 1 } },
+    options,
   )
   const result: Document = JSON.parse(JSON.stringify(doc))

View File

@@ -1,5 +1,5 @@
 /* eslint-disable no-restricted-syntax, no-await-in-loop */
-// import type { CreateMigration } from 'payload/database'
+import type { CreateMigration } from 'payload/database'
 import fs from 'fs'
 import path from 'path'
@@ -18,7 +18,11 @@ ${downSQL ?? ` // Migration code`}
 };
 `
-export const createMigration = async function createMigration({ file, migrationName, payload }) {
+export const createMigration: CreateMigration = async function createMigration({
+  file,
+  migrationName,
+  payload,
+}) {
   const dir = payload.db.migrationDir
   if (!fs.existsSync(dir)) {
     fs.mkdirSync(dir)
@@ -28,7 +32,7 @@
   // Check for predefined migration.
   // Either passed in via --file or prefixed with @payloadcms/db-mongodb/
-  if (file || migrationName.startsWith('@payloadcms/db-mongodb/')) {
+  if (file || migrationName?.startsWith('@payloadcms/db-mongodb/')) {
     if (!file) file = migrationName
     const predefinedMigrationName = file.replace('@payloadcms/db-mongodb/', '')
@@ -37,8 +41,8 @@
     // Check if predefined migration exists
     if (fs.existsSync(cleanPath)) {
-      // const { down, up } = require(cleanPath)
-      // migrationFileContent = migrationTemplate(up, down)
+      const { down, up } = require(cleanPath)
+      migrationFileContent = migrationTemplate(up, down)
     } else {
       payload.logger.error({
         msg: `Canned migration ${predefinedMigrationName} not found.`,
@@ -55,8 +59,8 @@
   const timestamp = `${formattedDate}_${formattedTime}`
-  const formattedName = migrationName.replace(/\W/g, '_')
-  const fileName = `${timestamp}_${formattedName}.ts`
+  const formattedName = migrationName?.replace(/\W/g, '_')
+  const fileName = migrationName ? `${timestamp}_${formattedName}.ts` : `${timestamp}_migration.ts`
   const filePath = `${dir}/${fileName}`
   fs.writeFileSync(filePath, migrationFileContent)
   payload.logger.info({ msg: `Migration created at ${filePath}` })

View File

@@ -57,6 +57,7 @@ export const createVersion: CreateVersion = async function createVersion(
       ],
     },
     { $unset: { latest: 1 } },
+    options,
   )
   const result: Document = JSON.parse(JSON.stringify(doc))

View File

@@ -55,21 +55,30 @@ export const find: Find = async function find(
     useEstimatedCount,
   }
-  if (!useEstimatedCount && this.disableIndexHints !== true) {
-    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding a hint.
+  if (!useEstimatedCount && Object.keys(query).length === 0 && this.disableIndexHints !== true) {
+    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding
+    // a hint. By default, if no hint is provided, MongoDB does not use an indexed field to count the returned documents,
+    // which makes queries very slow. This only happens when no query (filter) is provided. If one is provided, it uses
+    // the correct indexed field
     paginationOptions.useCustomCountFn = () => {
       return Promise.resolve(
         Model.countDocuments(query, {
+          ...options,
           hint: { _id: 1 },
         }),
       )
     }
   }
-  if (limit > 0) {
+  if (limit >= 0) {
     paginationOptions.limit = limit
     // limit must also be set here, it's ignored when pagination is false
     paginationOptions.options.limit = limit
+
+    // Disable pagination if limit is 0
+    if (limit === 0) {
+      paginationOptions.pagination = false
+    }
   }
   const result = await Model.paginate(query, paginationOptions)
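
Note on the change above: the limit check is now `limit >= 0`, and a limit of exactly 0 disables pagination so every matching document is returned. A minimal sketch of how that surfaces through the Payload Local API; the collection slug is hypothetical:

// Sketch only: with this change, limit: 0 turns pagination off and returns all matching docs.
const everyPost = await payload.find({
  collection: 'posts', // hypothetical collection slug
  limit: 0,
})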

View File

@@ -74,21 +74,30 @@ export const findGlobalVersions: FindGlobalVersions = async function findGlobalV
     useEstimatedCount,
   }
-  if (!useEstimatedCount && this.disableIndexHints !== true) {
-    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding a hint.
+  if (!useEstimatedCount && Object.keys(query).length === 0 && this.disableIndexHints !== true) {
+    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding
+    // a hint. By default, if no hint is provided, MongoDB does not use an indexed field to count the returned documents,
+    // which makes queries very slow. This only happens when no query (filter) is provided. If one is provided, it uses
+    // the correct indexed field
     paginationOptions.useCustomCountFn = () => {
       return Promise.resolve(
         Model.countDocuments(query, {
+          ...options,
           hint: { _id: 1 },
         }),
       )
     }
   }
-  if (limit > 0) {
+  if (limit >= 0) {
     paginationOptions.limit = limit
     // limit must also be set here, it's ignored when pagination is false
     paginationOptions.options.limit = limit
+
+    // Disable pagination if limit is 0
+    if (limit === 0) {
+      paginationOptions.pagination = false
+    }
   }
   const result = await Model.paginate(query, paginationOptions)

View File

@@ -63,7 +63,6 @@ export const findVersions: FindVersions = async function findVersions(
     lean: true,
     leanWithId: true,
     limit,
-    offset: skip || 0,
     options,
     page,
     pagination,
@@ -71,21 +70,30 @@
     useEstimatedCount,
   }
-  if (!useEstimatedCount && this.disableIndexHints !== true) {
-    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding a hint.
+  if (!useEstimatedCount && Object.keys(query).length === 0 && this.disableIndexHints !== true) {
+    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding
+    // a hint. By default, if no hint is provided, MongoDB does not use an indexed field to count the returned documents,
+    // which makes queries very slow. This only happens when no query (filter) is provided. If one is provided, it uses
+    // the correct indexed field
     paginationOptions.useCustomCountFn = () => {
       return Promise.resolve(
         Model.countDocuments(query, {
+          ...options,
           hint: { _id: 1 },
         }),
      )
     }
   }
-  if (limit > 0) {
+  if (limit >= 0) {
     paginationOptions.limit = limit
     // limit must also be set here, it's ignored when pagination is false
     paginationOptions.options.limit = limit
+
+    // Disable pagination if limit is 0
+    if (limit === 0) {
+      paginationOptions.pagination = false
+    }
   }
   const result = await Model.paginate(query, paginationOptions)

View File

@@ -1,14 +1,13 @@
+import type { TransactionOptions } from 'mongodb'
 import type { ClientSession, ConnectOptions, Connection } from 'mongoose'
 import type { Payload } from 'payload'
-import type { BaseDatabaseAdapter, DatabaseAdapterObj } from 'payload/database'
+import type { BaseDatabaseAdapter } from 'payload/database'
 import fs from 'fs'
 import mongoose from 'mongoose'
 import path from 'path'
 import { createDatabaseAdapter } from 'payload/database'
-export type { MigrateDownArgs, MigrateUpArgs } from './types'
 import type { CollectionModel, GlobalModel } from './types'
 import { connect } from './connect'
@@ -37,6 +36,9 @@ import { updateGlobalVersion } from './updateGlobalVersion'
 import { updateOne } from './updateOne'
 import { updateVersion } from './updateVersion'
+export type { MigrateDownArgs, MigrateUpArgs } from './types'
+
+import type { DatabaseAdapterObj } from 'payload/database'
 export interface Args {
   /** Set to false to disable auto-pluralization of collection names, Defaults to true */
   autoPluralization?: boolean
@@ -48,6 +50,7 @@ export interface Args {
   /** Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false */
   disableIndexHints?: boolean
   migrationDir?: string
+  transactionOptions?: TransactionOptions | false
   /** The URL to connect to MongoDB or false to start payload and prevent connecting */
   url: false | string
 }
@@ -76,7 +79,8 @@ declare module 'payload' {
     connection: Connection
     globals: GlobalModel
     mongoMemoryServer: any
-    // sessions: Record<number | string, ClientSession>
+    sessions: Record<number | string, ClientSession>
+    transactionOptions: TransactionOptions
     versions: {
       [slug: string]: CollectionModel
     }
@@ -88,8 +92,9 @@ export function mongooseAdapter({
   connectOptions,
   disableIndexHints = false,
   migrationDir: migrationDirArg,
+  transactionOptions = {},
   url,
-}: Args): DatabaseAdapterObj<MongooseAdapter> {
+}: Args): DatabaseAdapterObj {
   function adapter({ payload }: { payload: Payload }) {
     const migrationDir = findMigrationDir(migrationDirArg)
     mongoose.set('strictQuery', false)
@@ -106,11 +111,12 @@ export function mongooseAdapter({
       globals: undefined,
       mongoMemoryServer: undefined,
       sessions: {},
+      transactionOptions: transactionOptions === false ? undefined : transactionOptions,
       url,
       versions: {},
       // DatabaseAdapter
-      beginTransaction,
+      beginTransaction: transactionOptions ? beginTransaction : undefined,
      commitTransaction,
      connect,
      create,
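
The new `transactionOptions` argument flows into the adapter state and gates `beginTransaction` above. A rough sketch of how an adapter config might use it; the `DATABASE_URI` variable and the write-concern value are assumptions, not part of this commit:

import { mongooseAdapter } from '@payloadcms/db-mongodb'

// Standalone MongoDB (no replica set): opt out of transactions entirely.
const dbWithoutTransactions = mongooseAdapter({
  url: process.env.DATABASE_URI || false,
  transactionOptions: false,
})

// Replica set: pass MongoDB TransactionOptions to use when sessions start transactions.
const dbWithTransactions = mongooseAdapter({
  url: process.env.DATABASE_URI || false,
  transactionOptions: { writeConcern: { w: 'majority' } },
})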

View File

@@ -48,7 +48,7 @@ export const init: Init = async function init(this: MongooseAdapter) {
       versionModelName,
       versionSchema,
       this.autoPluralization === true ? undefined : versionModelName,
-    ) as unknown as CollectionModel
+    ) as CollectionModel
     // this.payload.versions[collection.slug] = model;
     this.versions[collection.slug] = model
   }
@@ -57,9 +57,11 @@
     collection.slug,
     schema,
     this.autoPluralization === true ? undefined : collection.slug,
-  ) as unknown as CollectionModel
+  ) as CollectionModel
   this.collections[collection.slug] = model
+  // TS expect error only needed until we launch 2.0.0
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
   this.payload.collections[collection.slug] = {
     config: collection,
   }
@@ -92,7 +94,7 @@
     versionModelName,
     versionSchema,
     versionModelName,
-  ) as unknown as CollectionModel
+  ) as CollectionModel
   this.versions[global.slug] = versionsModel
   }
 })

View File

@@ -1,6 +1,9 @@
 import type { PayloadRequest } from 'payload/types'
 import { readMigrationFiles } from 'payload/database'
+import { commitTransaction } from 'payload/dist/utilities/commitTransaction'
+import { initTransaction } from 'payload/dist/utilities/initTransaction'
+import { killTransaction } from 'payload/dist/utilities/killTransaction'
 import prompts from 'prompts'
 import type { MongooseAdapter } from '.'
@@ -8,25 +11,30 @@ import type { MongooseAdapter } from '.'
 /**
  * Drop the current database and run all migrate up functions
  */
-export async function migrateFresh(this: MongooseAdapter): Promise<void> {
+export async function migrateFresh(
+  this: MongooseAdapter,
+  { forceAcceptWarning = false }: { forceAcceptWarning?: boolean },
+): Promise<void> {
   const { payload } = this
-  const { confirm: acceptWarning } = await prompts(
-    {
-      name: 'confirm',
-      initial: false,
-      message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
-      type: 'confirm',
-    },
-    {
-      onCancel: () => {
-        process.exit(0)
-      },
-    },
-  )
-  if (!acceptWarning) {
-    process.exit(0)
+  if (!forceAcceptWarning) {
+    const { confirm: acceptWarning } = await prompts(
+      {
+        name: 'confirm',
+        type: 'confirm',
+        initial: false,
+        message: `WARNING: This will drop your database and run all migrations. Are you sure you want to proceed?`,
+      },
+      {
+        onCancel: () => {
+          process.exit(0)
+        },
+      },
+    )
+    if (!acceptWarning) {
+      process.exit(0)
+    }
   }
   payload.logger.info({
@@ -40,29 +48,29 @@ export async function migrateFresh(this: MongooseAdapter): Promise<void> {
     msg: `Found ${migrationFiles.length} migration files.`,
   })
-  let transactionID
+  const req = { payload } as PayloadRequest
   // Run all migrate up
   for (const migration of migrationFiles) {
     payload.logger.info({ msg: `Migrating: ${migration.name}` })
     try {
       const start = Date.now()
-      transactionID = await this.beginTransaction()
-      await migration.up({ payload })
+      await initTransaction(req)
+      await migration.up({ payload, req })
       await payload.create({
         collection: 'payload-migrations',
         data: {
           name: migration.name,
           batch: 1,
         },
-        req: {
-          transactionID,
-        } as PayloadRequest,
+        req,
       })
-      await this.commitTransaction(transactionID)
+      await commitTransaction(req)
       payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })
     } catch (err: unknown) {
-      await this.rollbackTransaction(transactionID)
+      await killTransaction(req)
      payload.logger.error({
        err,
        msg: `Error running migration ${migration.name}. Rolling back.`,
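
`migrateFresh` now accepts a `forceAcceptWarning` flag so the destructive-action prompt can be skipped when running non-interactively. A hedged sketch of a possible call site (the exact invocation path is an assumption):

// Sketch: skip the confirmation prompt, e.g. in CI or a test bootstrap script.
await payload.db.migrateFresh({ forceAcceptWarning: true })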

View File

@@ -1,4 +1,5 @@
-import type { Field, Payload, Where } from 'payload/types'
+import type { Payload } from 'payload'
+import type { Field, Where } from 'payload/types'
 import { QueryError } from 'payload/errors'

View File

@@ -1,5 +1,6 @@
+import type { Payload } from 'payload'
 import type { PathToQuery } from 'payload/database'
-import type { Field, Payload } from 'payload/types'
+import type { Field } from 'payload/types'
 import type { Operator } from 'payload/types'
 import objectID from 'bson-objectid'
@@ -15,7 +16,8 @@ import { sanitizeQueryValue } from './sanitizeQueryValue'
 type SearchParam = {
   path?: string
-  value: unknown
+  rawQuery?: unknown
+  value?: unknown
 }
 const subQueryOptions = {
@@ -91,7 +93,11 @@ export async function buildSearchParam({
   const [{ field, path }] = paths
   if (path) {
-    const { operator: formattedOperator, val: formattedValue } = sanitizeQueryValue({
+    const {
+      operator: formattedOperator,
+      rawQuery,
+      val: formattedValue,
+    } = sanitizeQueryValue({
       field,
       hasCustomID,
       operator,
@@ -99,6 +105,8 @@
       val,
     })
+    if (rawQuery) return { value: rawQuery }
+
     // If there are multiple collections to search through,
     // Recursively build up a list of query constraints
     if (paths.length > 1) {

View File

@@ -1,7 +1,8 @@
 /* eslint-disable no-restricted-syntax */
 /* eslint-disable no-await-in-loop */
 import type { FilterQuery } from 'mongoose'
-import type { Operator, Payload, Where } from 'payload/types'
+import type { Payload } from 'payload'
+import type { Operator, Where } from 'payload/types'
 import type { Field } from 'payload/types'
 import deepmerge from 'deepmerge'

View File

@@ -17,7 +17,11 @@ export const sanitizeQueryValue = ({
   operator,
   path,
   val,
-}: SanitizeQueryValueArgs): { operator: string; val: unknown } => {
+}: SanitizeQueryValueArgs): {
+  operator?: string
+  rawQuery?: unknown
+  val?: unknown
+} => {
   let formattedValue = val
   let formattedOperator = operator
@@ -70,6 +74,24 @@ export const sanitizeQueryValue = ({
     formattedValue = null
   }
+
+  // Object equality requires the value to be the first key in the object that is being queried.
+  if (
+    operator === 'equals' &&
+    formattedValue &&
+    typeof formattedValue === 'object' &&
+    formattedValue.value &&
+    formattedValue.relationTo
+  ) {
+    return {
+      rawQuery: {
+        $and: [
+          { [`${path}.value`]: { $eq: formattedValue.value } },
+          { [`${path}.relationTo`]: { $eq: formattedValue.relationTo } },
+        ],
+      },
+    }
+  }
+
   if (operator === 'in' && Array.isArray(formattedValue)) {
     formattedValue = formattedValue.reduce((formattedValues, inVal) => {
       const newValues = [inVal]
@@ -104,7 +126,7 @@ export const sanitizeQueryValue = ({
       formattedValue = undefined
     } else {
       formattedValue = {
-        $geometry: { coordinates: [parseFloat(lng), parseFloat(lat)], type: 'Point' },
+        $geometry: { type: 'Point', coordinates: [parseFloat(lng), parseFloat(lat)] },
       }
       if (maxDistance) formattedValue.$maxDistance = parseFloat(maxDistance)
@@ -135,6 +157,23 @@ export const sanitizeQueryValue = ({
   if (operator === 'exists') {
     formattedValue = formattedValue === 'true' || formattedValue === true
+
+    // Clearable fields
+    if (['relationship', 'select', 'upload'].includes(field.type)) {
+      if (formattedValue) {
+        return {
+          rawQuery: {
+            $and: [{ [path]: { $exists: true } }, { [path]: { $ne: null } }],
+          },
+        }
+      } else {
+        return {
+          rawQuery: {
+            $or: [{ [path]: { $exists: false } }, { [path]: { $eq: null } }],
+          },
+        }
+      }
+    }
   }
   return { operator: formattedOperator, val: formattedValue }
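
For reference, the new `rawQuery` branch above short-circuits normal query building for equality checks against polymorphic relationship values. A sketch of the input and the Mongo filter it yields; the field name and ID are made up:

// A `where` clause like this...
const where = {
  owner: {
    equals: { relationTo: 'users', value: '65cf4f3b9d2a4a0012345678' },
  },
}

// ...now resolves to a raw Mongo filter roughly equivalent to:
const rawQuery = {
  $and: [
    { 'owner.value': { $eq: '65cf4f3b9d2a4a0012345678' } },
    { 'owner.relationTo': { $eq: 'users' } },
  ],
}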

View File

@@ -58,8 +58,15 @@ export const queryDrafts: QueryDrafts = async function queryDrafts(
     useEstimatedCount,
   }
-  if (!useEstimatedCount && this.disableIndexHints !== true) {
-    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding a hint.
+  if (
+    !useEstimatedCount &&
+    Object.keys(versionQuery).length === 0 &&
+    this.disableIndexHints !== true
+  ) {
+    // Improve the performance of the countDocuments query which is used if useEstimatedCount is set to false by adding
+    // a hint. By default, if no hint is provided, MongoDB does not use an indexed field to count the returned documents,
+    // which makes queries very slow. This only happens when no query (filter) is provided. If one is provided, it uses
+    // the correct indexed field
     paginationOptions.useCustomCountFn = () => {
       return Promise.resolve(
         VersionModel.countDocuments(versionQuery, {

View File

@@ -1,34 +1,30 @@
-// @ts-expect-error // TODO: Fix this import
 import type { TransactionOptions } from 'mongodb'
 import type { BeginTransaction } from 'payload/database'
 import { APIError } from 'payload/errors'
 import { v4 as uuid } from 'uuid'
-let transactionsNotAvailable: boolean
+import type { MongooseAdapter } from '../index'
 export const beginTransaction: BeginTransaction = async function beginTransaction(
-  options: TransactionOptions = {},
+  this: MongooseAdapter,
+  options: TransactionOptions,
 ) {
-  let id = null
   if (!this.connection) {
     throw new APIError('beginTransaction called while no connection to the database exists')
   }
-  if (transactionsNotAvailable) return id
   const client = this.connection.getClient()
-  if (!client.options.replicaSet) {
-    transactionsNotAvailable = true
-  } else {
-    id = uuid()
-    if (!this.sessions[id]) {
-      this.sessions[id] = await client.startSession()
-    }
-    if (this.sessions[id].inTransaction()) {
-      this.payload.logger.warn('beginTransaction called while transaction already exists')
-    } else {
-      await this.sessions[id].startTransaction(options)
-    }
+  const id = uuid()
+
+  if (!this.sessions[id]) {
+    this.sessions[id] = client.startSession()
+  }
+  if (this.sessions[id].inTransaction()) {
+    this.payload.logger.warn('beginTransaction called while transaction already exists')
+  } else {
+    this.sessions[id].startTransaction(options || (this.transactionOptions as TransactionOptions))
   }
   return id
 }

View File

@@ -1,10 +1,9 @@
 import type { UpdateOne } from 'payload/database'
 import type { PayloadRequest } from 'payload/types'
-import { ValidationError } from 'payload/errors'
 import type { MongooseAdapter } from '.'
+import handleError from './utilities/handleError'
 import sanitizeInternalFields from './utilities/sanitizeInternalFields'
 import { withSession } from './withSession'
@@ -30,18 +29,7 @@ export const updateOne: UpdateOne = async function updateOne(
   try {
     result = await Model.findOneAndUpdate(query, data, options)
   } catch (error) {
-    // Handle uniqueness error from MongoDB
-    throw error.code === 11000 && error.keyValue
-      ? new ValidationError(
-          [
-            {
-              field: Object.keys(error.keyValue)[0],
-              message: 'Value must be unique',
-            },
-          ],
-          req.t,
-        )
-      : error
+    handleError(error, req)
   }
   result = JSON.parse(JSON.stringify(result))

View File

@@ -0,0 +1,23 @@
+import httpStatus from 'http-status'
+import { APIError, ValidationError } from 'payload/errors'
+
+const handleError = (error, req) => {
+  // Handle uniqueness error from MongoDB
+  if (error.code === 11000 && error.keyValue) {
+    throw new ValidationError(
+      [
+        {
+          field: Object.keys(error.keyValue)[0],
+          message: req.t('error:valueMustBeUnique'),
+        },
+      ],
+      req.t,
+    )
+  } else if (error.code === 11000) {
+    throw new APIError(req.t('error:valueMustBeUnique'), httpStatus.BAD_REQUEST)
+  } else {
+    throw error
+  }
+}
+
+export default handleError
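
This new utility centralizes the duplicate-key handling that the create and updateOne files previously inlined. A sketch of the MongoDB E11000 error shape it branches on; the field and value are illustrative:

// Duplicate-key error as surfaced by the MongoDB driver:
const duplicateKeyError = {
  code: 11000,
  keyValue: { email: 'dev@payloadcms.com' },
}
// handleError(duplicateKeyError, req) throws a ValidationError on the `email` field;
// an 11000 error without keyValue becomes a 400 APIError, and anything else is rethrown.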

pnpm-lock.yaml (generated, 650 lines changed)

File diff suppressed because it is too large

View File

@@ -35,12 +35,8 @@ export default buildConfigWithDefaults({
           name: 'adminOnlyField',
           type: 'text',
           access: {
-            read: ({
-              req: {
-                user: { roles = [] },
-              },
-            }) => {
-              return roles.includes('admin')
+            read: ({ req: { user } }) => {
+              return user?.roles?.includes('admin')
             },
           },
         },
@@ -177,7 +173,8 @@
       },
       access: {
         read: ({ req: { user } }) => {
-          if (user.collection === 'api-keys') {
+          if (!user) return false
+          if (user?.collection === 'api-keys') {
             return {
               id: {
                 equals: user.id,

View File

@@ -6,7 +6,7 @@ import type { User } from '../../packages/payload/src/auth'
 import { getPayload } from '../../packages/payload/src'
 import { devUser } from '../credentials'
 import { NextRESTClient } from '../helpers/NextRESTClient'
-import { startMemoryDB } from '../startMemoryDB'
+// import { startMemoryDB } from '../startMemoryDB'
 import configPromise from './config'
 import { namedSaveToJWTValue, saveToJWTKey, slug } from './shared'
@@ -18,8 +18,8 @@ const { email, password } = devUser
 describe('Auth', () => {
   beforeAll(async () => {
-    const config = await startMemoryDB(configPromise)
-    payload = await getPayload({ config })
+    // const config = await startMemoryDB(configPromise)
+    payload = await getPayload({ config: configPromise })
     restClient = new NextRESTClient(payload.config)
   })
@@ -33,24 +33,32 @@ describe('Auth', () => {
     let token
     let user
     beforeAll(async () => {
-      const { data } = await restClient
-        .GRAPHQL_POST({
-          body: JSON.stringify({
-            query: `mutation {
-              loginUser(email: "${devUser.email}", password: "${devUser.password}") {
-                token
-                user {
-                  id
-                  email
-                }
-              }
-            }`,
-          }),
-        })
-        .then((res) => res.json())
-      user = data.loginUser.user
-      token = data.loginUser.token
+      const result = await payload.login({
+        collection: 'users',
+        data: {
+          email: devUser.email,
+          password: devUser.password,
+        },
+      })
+
+      // const { data } = await restClient
+      //   .GRAPHQL_POST({
+      //     body: JSON.stringify({
+      //       query: `mutation {
+      //         loginUser(email: "${devUser.email}", password: "${devUser.password}") {
+      //           token
+      //           user {
+      //             id
+      //             email
+      //           }
+      //         }
+      //       }`,
+      //     }),
+      //   })
+      //   .then((res) => res.json())
+      user = result.user
+      token = result.token
     })
     it('should login', async () => {

View File

@@ -41,7 +41,7 @@
"@payloadcms/translations/api": ["./packages/translations/src/all"], "@payloadcms/translations/api": ["./packages/translations/src/all"],
"@payloadcms/next/*": ["./packages/next/src/*"], "@payloadcms/next/*": ["./packages/next/src/*"],
"@payloadcms/graphql": ["./packages/graphql/src"], "@payloadcms/graphql": ["./packages/graphql/src"],
"payload-config": ["./test/_community/config.ts"] "payload-config": ["./test/auth/config.ts"]
} }
}, },
"exclude": ["dist", "build", "temp", "node_modules"], "exclude": ["dist", "build", "temp", "node_modules"],