feat: allow multiple, different payload instances using getPayload in same process (#13603)

Fixes https://github.com/payloadcms/payload/issues/13433. Testing
release: `3.54.0-internal.90cf7d5`

Previously, when calling `getPayload`, you would always use the same,
cached payload instance within a single process, regardless of the
arguments passed to the `getPayload` function. This resulted in the
following issues - both are fixed by this PR:

- If your frontend calls `getPayload` without `cron: true` while the
Payload Admin Panel is hosted in the same process, crons will not be
enabled even after you visit the admin panel (which calls `getPayload`
with `cron: true`). This breaks jobs autorun depending on which page is
visited first — the admin panel or the frontend
- Within the same process, you are unable to use `getPayload` twice for
different instances of Payload with different Payload Configs.
On postgres, you can get around this by manually calling `new
BasePayload()`, which skips the cache. This did not work on mongoose,
though, as mongoose was caching the models on a global singleton (this
PR addresses that).
In order to bust the cache for different Payload Configs, this PR
introduces a new, optional `key` property to `getPayload`.


## Mongoose - disable using global singleton

This PR refactors the Payload Mongoose adapter to stop relying on the
global mongoose singleton. Instead, each adapter instance now creates
and manages its own scoped Connection object.

### Motivation

Previously, calling `getPayload()` more than once in the same process
would throw `Cannot overwrite model` errors because models were compiled
into the global singleton. This prevented running multiple Payload
instances side-by-side, even when pointing at different databases.

### Changes
- Replace usage of `mongoose.connect()` / `mongoose.model()` with
instance-scoped `createConnection()` and `connection.model()`.
- Ensure models, globals, and versions are compiled per connection, not
globally.
- Add proper `close()` handling on `this.connection` instead of
`mongoose.disconnect()`.

---
- To see the specific tasks where the Asana app for GitHub is being
used, see below:
  - https://app.asana.com/0/0/1211114366468745
This commit is contained in:
Alessio Gravili
2025-08-27 10:24:37 -07:00
committed by GitHub
parent 03a00ca37d
commit 13c24afa63
8 changed files with 288 additions and 110 deletions

View File

@@ -35,7 +35,12 @@ export const connect: Connect = async function connect(
}
try {
this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection
if (!this.connection) {
this.connection = await mongoose.createConnection(urlToConnect, connectionOptions).asPromise()
}
await this.connection.openUri(urlToConnect, connectionOptions)
if (this.useAlternativeDropDatabase) {
if (this.connection.db) {
// Firestore doesn't support dropDatabase, so we monkey patch
@@ -75,7 +80,8 @@ export const connect: Connect = async function connect(
if (!hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----')
await mongoose.connection.dropDatabase()
await this.connection.dropDatabase()
this.payload.logger.info('---- DROPPED DATABASE ----')
}
}

View File

@@ -1,11 +1,11 @@
import type { Destroy } from 'payload'
import mongoose from 'mongoose'
import type { MongooseAdapter } from './index.js'
export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
await mongoose.disconnect()
await this.connection.close()
Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
for (const name of Object.keys(this.connection.models)) {
this.connection.deleteModel(name)
}
}

View File

@@ -19,11 +19,14 @@ import { getBuildQueryPlugin } from './queries/getBuildQueryPlugin.js'
import { getDBName } from './utilities/getDBName.js'
export const init: Init = function init(this: MongooseAdapter) {
// Always create a scoped, **unopened** connection object
// (no URI here; models compile per-connection and do not require an open socket)
this.connection ??= mongoose.createConnection()
this.payload.config.collections.forEach((collection: SanitizedCollectionConfig) => {
const schemaOptions = this.collectionsSchemaOptions?.[collection.slug]
const schema = buildCollectionSchema(collection, this.payload, schemaOptions)
if (collection.versions) {
const versionModelName = getDBName({ config: collection, versions: true })
@@ -55,7 +58,7 @@ export const init: Init = function init(this: MongooseAdapter) {
const versionCollectionName =
this.autoPluralization === true && !collection.dbName ? undefined : versionModelName
this.versions[collection.slug] = mongoose.model(
this.versions[collection.slug] = this.connection.model(
versionModelName,
versionSchema,
versionCollectionName,
@@ -66,14 +69,14 @@ export const init: Init = function init(this: MongooseAdapter) {
const collectionName =
this.autoPluralization === true && !collection.dbName ? undefined : modelName
this.collections[collection.slug] = mongoose.model<any>(
this.collections[collection.slug] = this.connection.model<any>(
modelName,
schema,
collectionName,
) as CollectionModel
})
this.globals = buildGlobalModel(this.payload) as GlobalModel
this.globals = buildGlobalModel(this) as GlobalModel
this.payload.config.globals.forEach((global) => {
if (global.versions) {
@@ -101,7 +104,7 @@ export const init: Init = function init(this: MongooseAdapter) {
}),
)
this.versions[global.slug] = mongoose.model<any>(
this.versions[global.slug] = this.connection.model<any>(
versionModelName,
versionSchema,
versionModelName,

View File

@@ -1,14 +1,13 @@
import type { Payload } from 'payload'
import mongoose from 'mongoose'
import type { MongooseAdapter } from '../index.js'
import type { GlobalModel } from '../types.js'
import { getBuildQueryPlugin } from '../queries/getBuildQueryPlugin.js'
import { buildSchema } from './buildSchema.js'
export const buildGlobalModel = (payload: Payload): GlobalModel | null => {
if (payload.config.globals && payload.config.globals.length > 0) {
export const buildGlobalModel = (adapter: MongooseAdapter): GlobalModel | null => {
if (adapter.payload.config.globals && adapter.payload.config.globals.length > 0) {
const globalsSchema = new mongoose.Schema(
{},
{ discriminatorKey: 'globalType', minimize: false, timestamps: true },
@@ -16,9 +15,13 @@ export const buildGlobalModel = (payload: Payload): GlobalModel | null => {
globalsSchema.plugin(getBuildQueryPlugin())
const Globals = mongoose.model('globals', globalsSchema, 'globals') as unknown as GlobalModel
const Globals = adapter.connection.model(
'globals',
globalsSchema,
'globals',
) as unknown as GlobalModel
Object.values(payload.config.globals).forEach((globalConfig) => {
Object.values(adapter.payload.config.globals).forEach((globalConfig) => {
const globalSchema = buildSchema({
buildSchemaOptions: {
options: {
@@ -26,7 +29,7 @@ export const buildGlobalModel = (payload: Payload): GlobalModel | null => {
},
},
configFields: globalConfig.fields,
payload,
payload: adapter.payload,
})
Globals.discriminator(globalConfig.slug, globalSchema)
})

View File

@@ -625,6 +625,57 @@ export class BasePayload {
[slug: string]: any // TODO: Type this
} = {}
async _initializeCrons() {
if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild()) {
const DEFAULT_CRON = '* * * * *'
const DEFAULT_LIMIT = 10
const cronJobs =
typeof this.config.jobs.autoRun === 'function'
? await this.config.jobs.autoRun(this)
: this.config.jobs.autoRun
await Promise.all(
cronJobs.map((cronConfig) => {
const jobAutorunCron = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => {
if (
_internal_jobSystemGlobals.shouldAutoSchedule &&
!cronConfig.disableScheduling &&
this.config.jobs.scheduling
) {
await this.jobs.handleSchedules({
allQueues: cronConfig.allQueues,
queue: cronConfig.queue,
})
}
if (!_internal_jobSystemGlobals.shouldAutoRun) {
return
}
if (typeof this.config.jobs.shouldAutoRun === 'function') {
const shouldAutoRun = await this.config.jobs.shouldAutoRun(this)
if (!shouldAutoRun) {
jobAutorunCron.stop()
return
}
}
await this.jobs.run({
allQueues: cronConfig.allQueues,
limit: cronConfig.limit ?? DEFAULT_LIMIT,
queue: cronConfig.queue,
silent: cronConfig.silent,
})
})
this.crons.push(jobAutorunCron)
}),
)
}
}
async bin({
args,
cwd,
@@ -855,53 +906,8 @@ export class BasePayload {
throw error
}
if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild() && options.cron) {
const DEFAULT_CRON = '* * * * *'
const DEFAULT_LIMIT = 10
const cronJobs =
typeof this.config.jobs.autoRun === 'function'
? await this.config.jobs.autoRun(this)
: this.config.jobs.autoRun
await Promise.all(
cronJobs.map((cronConfig) => {
const jobAutorunCron = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => {
if (
_internal_jobSystemGlobals.shouldAutoSchedule &&
!cronConfig.disableScheduling &&
this.config.jobs.scheduling
) {
await this.jobs.handleSchedules({
allQueues: cronConfig.allQueues,
queue: cronConfig.queue,
})
}
if (!_internal_jobSystemGlobals.shouldAutoRun) {
return
}
if (typeof this.config.jobs.shouldAutoRun === 'function') {
const shouldAutoRun = await this.config.jobs.shouldAutoRun(this)
if (!shouldAutoRun) {
jobAutorunCron.stop()
return
}
}
await this.jobs.run({
allQueues: cronConfig.allQueues,
limit: cronConfig.limit ?? DEFAULT_LIMIT,
queue: cronConfig.queue,
silent: cronConfig.silent,
})
})
this.crons.push(jobAutorunCron)
}),
)
if (options.cron) {
await this._initializeCrons()
}
return this
@@ -932,17 +938,6 @@ const initialized = new BasePayload()
// eslint-disable-next-line no-restricted-exports
export default initialized
let cached: {
payload: null | Payload
promise: null | Promise<Payload>
reload: boolean | Promise<void>
ws: null | WebSocket
} = (global as any)._payload
if (!cached) {
cached = (global as any)._payload = { payload: null, promise: null, reload: false, ws: null }
}
export const reload = async (
config: SanitizedConfig,
payload: Payload,
@@ -1010,14 +1005,73 @@ export const reload = async (
;(global as any)._payload_doNotCacheClientSchemaMap = true
}
let _cached: Map<
string,
{
initializedCrons: boolean
payload: null | Payload
promise: null | Promise<Payload>
reload: boolean | Promise<void>
ws: null | WebSocket
}
> = (global as any)._payload
if (!_cached) {
_cached = (global as any)._payload = new Map()
}
/**
* Get a payload instance.
* This function is a wrapper around new BasePayload().init() that adds the following functionality on top of that:
*
* - smartly caches Payload instance on the module scope. That way, we prevent unnecessarily initializing Payload over and over again
* when calling getPayload multiple times or from multiple locations.
* - adds HMR support and reloads the payload instance when the config changes.
*/
export const getPayload = async (
options: Pick<InitOptions, 'config' | 'cron' | 'importMap'>,
options: {
/**
* A unique key to identify the payload instance. You can pass your own key if you want to cache this payload instance separately.
* This is useful if you pass a different payload config for each instance.
*
* @default 'default'
*/
key?: string
} & Pick<InitOptions, 'config' | 'cron' | 'disableOnInit' | 'importMap'>,
): Promise<Payload> => {
if (!options?.config) {
throw new Error('Error: the payload config is required for getPayload to work.')
}
let alreadyCachedSameConfig = false
let cached = _cached.get(options.key ?? 'default')
if (!cached) {
cached = {
initializedCrons: Boolean(options.cron),
payload: null,
promise: null,
reload: false,
ws: null,
}
_cached.set(options.key ?? 'default', cached)
} else {
alreadyCachedSameConfig = true
}
if (alreadyCachedSameConfig) {
// alreadyCachedSameConfig => already called onInit once, but same config => no need to call onInit again.
// calling onInit again would only make sense if a different config was passed.
options.disableOnInit = true
}
if (cached.payload) {
if (options.cron && !cached.initializedCrons) {
// getPayload called with crons enabled, but existing cached version does not have crons initialized. => Initialize crons in existing cached version
cached.initializedCrons = true
await cached.payload._initializeCrons()
}
if (cached.reload === true) {
let resolve!: () => void

View File

@@ -1,12 +1,12 @@
import type { BlockField, Payload } from 'payload'
import { execSync } from 'child_process'
import { existsSync, readFileSync, rmSync } from 'fs'
import path from 'path'
import { type BlocksField, getPayload, type Payload } from 'payload'
import { fileURLToPath } from 'url'
import type { NextRESTClient } from '../helpers/NextRESTClient.js'
import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
import { initPayloadInt } from '../helpers/initPayloadInt.js'
import { testFilePath } from './testFilePath.js'
@@ -40,6 +40,56 @@ describe('Config', () => {
description: 'Get the sanitized payload config',
})
})
it('should allow multiple getPayload calls using different configs in same process', async () => {
const payload2 = await getPayload({
key: 'payload2',
config: await buildConfigWithDefaults({
collections: [
{
slug: 'payload2',
fields: [{ name: 'title2', type: 'text' }],
},
],
}),
})
// Use payload2 instance before creating payload3 instance, as we share the same db connection => each instance
// creation will reset the db schema.
const result2: any = await payload2.create({
collection: 'payload2',
data: {
title2: 'Payload 2',
},
} as any)
expect(result2.title2).toBe('Payload 2')
const payload3 = await getPayload({
key: 'payload3',
config: await buildConfigWithDefaults({
collections: [
{
slug: 'payload3',
fields: [{ name: 'title3', type: 'text' }],
},
],
}),
})
// If payload was still incorrectly cached, this would fail, as the old payload config would still be used
const result3: any = await payload3.create({
collection: 'payload3',
data: {
title3: 'Payload 3',
},
} as any)
expect(result3.title3).toBe('Payload 3')
await payload2.destroy()
await payload3.destroy()
})
})
describe('collection config', () => {
@@ -72,7 +122,7 @@ describe('Config', () => {
const [collection] = payload.config.collections
const [, blocksField] = collection.fields
expect((blocksField as BlockField).blocks[0].custom).toEqual({
expect((blocksField as BlocksField).blocks[0].custom).toEqual({
description: 'The blockOne of this page',
})
})

View File

@@ -0,0 +1,85 @@
import path from 'path'
import {
_internal_jobSystemGlobals,
_internal_resetJobSystemGlobals,
getPayload,
migrateCLI,
type SanitizedConfig,
} from 'payload'
import { wait } from 'payload/shared'
import { fileURLToPath } from 'url'
import { initPayloadInt } from '../helpers/initPayloadInt.js'
import { waitUntilAutorunIsDone } from './utilities.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
describe('Queues - CLI', () => {
let config: SanitizedConfig
beforeAll(async () => {
;({ config } = await initPayloadInt(dirname, undefined, false))
})
it('ensure consecutive getPayload call with cron: true will autorun jobs', async () => {
const payload = await getPayload({
config,
})
await payload.jobs.queue({
workflow: 'inlineTaskTest',
queue: 'autorunSecond',
input: {
message: 'hello!',
},
})
process.env.PAYLOAD_DROP_DATABASE = 'false'
// Second instance of payload with the only purpose of running cron jobs
const _payload2 = await getPayload({
config,
cron: true,
})
await waitUntilAutorunIsDone({
payload,
queue: 'autorunSecond',
})
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
expect(allSimples?.docs?.[0]?.title).toBe('hello!')
// Shut down safely:
// Ensure no new crons are scheduled
_internal_jobSystemGlobals.shouldAutoRun = false
_internal_jobSystemGlobals.shouldAutoSchedule = false
// Wait 3 seconds to ensure all currently-running crons are done. If we shut down the db while a function is running, it can cause issues
// Cron function runs may persist after a test has finished
await wait(3000)
// Now we can destroy the payload instance
await _payload2.destroy()
await payload.destroy()
_internal_resetJobSystemGlobals()
})
it('can run migrate CLI without jobs attempting to run', async () => {
await migrateCLI({
config,
parsedArgs: {
_: ['migrate'],
},
})
// Wait 3 seconds to let potential autorun crons trigger
await new Promise((resolve) => setTimeout(resolve, 3000))
// Expect no errors. Previously, this would throw an "error: relation "payload_jobs" does not exist" error
expect(true).toBe(true)
})
})

View File

@@ -4,9 +4,7 @@ import {
_internal_resetJobSystemGlobals,
type JobTaskStatus,
type Payload,
type SanitizedConfig,
} from 'payload'
import { migrateCLI } from 'payload'
import { wait } from 'payload/shared'
import { fileURLToPath } from 'url'
@@ -17,15 +15,15 @@ import { initPayloadInt } from '../helpers/initPayloadInt.js'
import { clearAndSeedEverything } from './seed.js'
import { waitUntilAutorunIsDone } from './utilities.js'
let payload: Payload
let restClient: NextRESTClient
let token: string
const { email, password } = devUser
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
describe('Queues', () => {
describe('Queues - Payload', () => {
let payload: Payload
let restClient: NextRESTClient
let token: string
beforeAll(async () => {
process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
;({ payload, restClient } = await initPayloadInt(dirname))
@@ -1056,7 +1054,7 @@ describe('Queues', () => {
it('ensure payload.jobs.runByID works and only runs the specified job', async () => {
payload.config.jobs.deleteJobOnComplete = false
let lastJobID: null | string = null
let lastJobID: null | number | string = null
for (let i = 0; i < 3; i++) {
const job = await payload.jobs.queue({
task: 'CreateSimple',
@@ -1100,7 +1098,7 @@ describe('Queues', () => {
it('ensure where query for id in payload.jobs.run works and only runs the specified job', async () => {
payload.config.jobs.deleteJobOnComplete = false
let lastJobID: null | string = null
let lastJobID: null | number | string = null
for (let i = 0; i < 3; i++) {
const job = await payload.jobs.queue({
task: 'CreateSimple',
@@ -1467,24 +1465,3 @@ describe('Queues', () => {
expect(allSimples?.docs?.[0]?.title).toBe('hello!')
})
})
describe('Queues - CLI', () => {
let config: SanitizedConfig
beforeAll(async () => {
;({ config } = await initPayloadInt(dirname, undefined, false))
})
it('can run migrate CLI without jobs attempting to run', async () => {
await migrateCLI({
config,
parsedArgs: {
_: ['migrate'],
},
})
// Wait 3 seconds to let potential autorun crons trigger
await new Promise((resolve) => setTimeout(resolve, 3000))
// Expect no errors. Previously, this would throw an "error: relation "payload_jobs" does not exist" error
expect(true).toBe(true)
})
})