chore: fix various e2e test setup issues (#12670)

I noticed a few issues when running e2e tests that will be resolved by
this PR:

- Most important: for some test suites (fields, fields-relationship,
versions, queues, lexical), the database was cleared and seeded
**twice** in between each test run. This is because the onInit function
was running the clear-and-seed script, when it should only have been
running the seed script. Clearing the database and the snapshot workflow
are handled by the reInit endpoint, which then calls onInit to seed
the actual data.
- The slowest part of `clearAndSeedEverything` is recreating indexes on
MongoDB. This PR slightly improves performance here by:
- Skipping this process for the built-in `['payload-migrations',
'payload-preferences', 'payload-locked-documents']` collections
- Previously we were calling both `createIndexes` and `ensureIndexes`.
This was unnecessary - `ensureIndexes` is a deprecated alias of
`createIndexes`. This PR changes it to only call `createIndexes`
- Makes the reInit endpoint accept GET requests instead of POST requests
- this makes it easier to debug right in the browser
- Some typescript fixes
- Adds a `dev:memorydb` script to the package.json. For some reason,
`dev` is super unreliable on MongoDB locally when running e2e tests - it
frequently fails during index creation. Using the memory DB fixes this
issue, with the bonus of more closely resembling the CI environment
- Previously, you were unable to run test suites using turbopack +
postgres. This fixes it by explicitly installing `pg` as a devDependency
in our monorepo
- Fixes the Jest open-handles warning
This commit is contained in:
Alessio Gravili
2025-06-04 13:34:37 -07:00
committed by GitHub
parent 337f6188da
commit 545d870650
28 changed files with 139 additions and 126 deletions

View File

@@ -2,9 +2,7 @@ import type { Payload } from 'payload'
import { devUser } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import {
collectionSlugs,
customViews1CollectionSlug,
customViews2CollectionSlug,
geoCollectionSlug,
@@ -14,7 +12,7 @@ import {
with300DocumentsSlug,
} from './slugs.js'
export const seed = async (_payload) => {
export const seed = async (_payload: Payload) => {
await executePromises(
[
() =>
@@ -139,12 +137,3 @@ export const seed = async (_payload) => {
await Promise.all([...manyDocumentsPromises])
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -32,7 +32,7 @@ import { es } from 'payload/i18n/es'
import sharp from 'sharp'
import { databaseAdapter } from './databaseAdapter.js'
import { reInitEndpoint } from './helpers/reInit.js'
import { reInitEndpoint } from './helpers/reInitEndpoint.js'
import { localAPIEndpoint } from './helpers/sdk/endpoint.js'
import { testEmailAdapter } from './testEmailAdapter.js'

View File

@@ -18,7 +18,7 @@ import { Restricted } from './collections/Restricted/index.js'
import { RelationshipUpdatedExternally } from './collections/UpdatedExternally/index.js'
import { Versions } from './collections/Versions/index.js'
import { Video } from './collections/Video/index.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
export default buildConfigWithDefaults({
admin: {
@@ -49,7 +49,7 @@ export default buildConfigWithDefaults({
},
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -1,14 +1,9 @@
import type { Payload } from 'payload'
import path from 'path'
import { fileURLToPath } from 'url'
import { devUser } from '../credentials.js'
import { seedDB } from '../helpers/seed.js'
import {
collection1Slug,
collection2Slug,
collectionSlugs,
podcastCollectionSlug,
relationOneSlug,
relationRestrictedSlug,
@@ -18,9 +13,6 @@ import {
videoCollectionSlug,
} from './slugs.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export const seed = async (_payload: Payload) => {
await _payload.create({
collection: 'users',
@@ -179,13 +171,3 @@ export const seed = async (_payload: Payload) => {
})
}
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'fieldsTest',
uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
}

View File

@@ -38,7 +38,7 @@ import UploadsMultiPoly from './collections/UploadMultiPoly/index.js'
import UploadsPoly from './collections/UploadPoly/index.js'
import UploadRestricted from './collections/UploadRestricted/index.js'
import Uploads3 from './collections/Uploads3/index.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
export const collectionSlugs: CollectionConfig[] = [
{
@@ -157,7 +157,7 @@ export const baseConfig: Partial<Config> = {
},
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -11,6 +11,8 @@ export const allDatabaseAdapters = {
export const databaseAdapter = mongooseAdapter({
ensureIndexes: true,
// required for connect to detect that we are using a memory server
mongoMemoryServer: global._mongoMemoryServer,
url:
process.env.MONGODB_MEMORY_SERVER_URI ||
process.env.DATABASE_URI ||

View File

@@ -0,0 +1,9 @@
export function isErrorWithCode(err: unknown, code?: string): err is NodeJS.ErrnoException {
return (
typeof err === 'object' &&
err !== null &&
'code' in err &&
typeof (err as any).code === 'string' &&
(!code || (err as NodeJS.ErrnoException).code === code)
)
}

View File

@@ -1,25 +1,38 @@
import type { Endpoint, PayloadHandler } from 'payload'
import { status as httpStatus } from 'http-status'
import { addDataAndFileToRequest } from 'payload'
import * as qs from 'qs-esm'
import { path } from './reInitializeDB.js'
import { seedDB } from './seed.js'
const handler: PayloadHandler = async (req) => {
process.env.SEED_IN_CONFIG_ONINIT = 'true'
await addDataAndFileToRequest(req)
const { data, payload } = req
const { payload } = req
if (!req.url) {
throw new Error('Request URL is required')
}
const query: {
deleteOnly?: boolean
snapshotKey?: string
uploadsDir?: string | string[]
} = qs.parse(req.url.split('?')[1] ?? '', {
depth: 10,
ignoreQueryPrefix: true,
})
try {
console.log('Calling seedDB')
await seedDB({
_payload: payload,
collectionSlugs: payload.config.collections.map(({ slug }) => slug),
seedFunction: payload.config.onInit,
snapshotKey: String(data.snapshotKey),
snapshotKey: String(query.snapshotKey),
// uploadsDir can be string or stringlist
uploadsDir: data.uploadsDir as string | string[],
deleteOnly: data.deleteOnly,
uploadsDir: query.uploadsDir as string | string[],
deleteOnly: query.deleteOnly,
})
return Response.json(
@@ -40,6 +53,6 @@ const handler: PayloadHandler = async (req) => {
export const reInitEndpoint: Endpoint = {
path,
method: 'post',
method: 'get',
handler,
}

View File

@@ -1,3 +1,5 @@
import * as qs from 'qs-esm'
export const path = '/re-initialize'
export const reInitializeDB = async ({
@@ -19,13 +21,19 @@ export const reInitializeDB = async ({
try {
console.log(`Attempting to reinitialize DB (attempt ${attempt}/${maxAttempts})...`)
const response = await fetch(`${serverURL}/api${path}`, {
method: 'post',
body: JSON.stringify({
const queryParams = qs.stringify(
{
snapshotKey,
uploadsDir,
deleteOnly,
}),
},
{
addQueryPrefix: true,
},
)
const response = await fetch(`${serverURL}/api${path}${queryParams}`, {
method: 'get',
headers: {
'Content-Type': 'application/json',
},
@@ -39,7 +47,7 @@ export const reInitializeDB = async ({
console.log(`Successfully reinitialized DB (took ${timeTaken}ms)`)
return
} catch (error) {
console.error(`Failed to reinitialize DB: ${error.message}`)
console.error(`Failed to reinitialize DB`, error)
if (attempt === maxAttempts) {
console.error('Max retry attempts reached. Giving up.')

View File

@@ -5,7 +5,12 @@ import { isMongoose } from './isMongoose.js'
export async function resetDB(_payload: Payload, collectionSlugs: string[]) {
if (isMongoose(_payload) && 'collections' in _payload.db && collectionSlugs.length > 0) {
await _payload.db.collections[collectionSlugs[0]].db.dropDatabase()
const firstCollectionSlug = collectionSlugs?.[0]
if (!firstCollectionSlug?.length) {
throw new Error('No collection slugs provided to reset the database.')
}
await _payload.db.collections[firstCollectionSlug]?.db.dropDatabase()
} else if ('drizzle' in _payload.db) {
const db = _payload.db as unknown as DrizzleAdapter

View File

@@ -3,6 +3,7 @@ import * as os from 'node:os'
import path from 'path'
import { type Payload } from 'payload'
import { isErrorWithCode } from './isErrorWithCode.js'
import { isMongoose } from './isMongoose.js'
import { resetDB } from './reset.js'
import { createSnapshot, dbSnapshot, restoreFromSnapshot, uploadsDirCache } from './snapshot.js'
@@ -47,15 +48,18 @@ export async function seedDB({
const uploadsDirs = Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
for (const dir of uploadsDirs) {
try {
// Attempt to clear the uploads directory if it exists
await fs.promises.access(dir)
const files = await fs.promises.readdir(dir)
for (const file of files) {
await fs.promises.rm(path.join(dir, file))
const filePath = path.join(dir, file)
await fs.promises.rm(filePath, { recursive: true, force: true })
}
} catch (error) {
if (error.code !== 'ENOENT') {
// If the error is not because the directory doesn't exist
if (isErrorWithCode(error, 'ENOENT')) {
// Directory does not exist - that's okay, skip it
continue
} else {
// Some other error occurred - rethrow it
console.error('Error in operation (deleting uploads dir):', dir, error)
throw error
}
@@ -124,16 +128,20 @@ export async function seedDB({
try {
if (isMongoose(_payload)) {
await Promise.all([
...collectionSlugs.map(async (collectionSlug) => {
await _payload.db.collections[collectionSlug].createIndexes()
}),
...collectionSlugs
.filter(
(collectionSlug) =>
['payload-migrations', 'payload-preferences', 'payload-locked-documents'].indexOf(
collectionSlug,
) === -1,
)
.map(async (collectionSlug) => {
await _payload.db.collections[collectionSlug]?.createIndexes({
// Blocks writes (doesn't matter here) but faster
background: false,
})
}),
])
await Promise.all(
_payload.config.collections.map(async (coll) => {
await _payload.db?.collections[coll.slug]?.ensureIndexes()
}),
)
}
} catch (e) {
console.error('Error in operation (re-creating indexes):', e)
@@ -170,7 +178,7 @@ export async function seedDB({
let newObj: {
cacheDir: string
originalDir: string
} = null
} | null = null
if (!uploadsDirCache[snapshotKey].find((cache) => cache.originalDir === dir)) {
// Define new cache folder path to the OS temp directory (well a random folder inside it)
newObj = {

View File

@@ -116,7 +116,13 @@ export async function createSnapshot(
collectionSlugs: string[],
) {
if (isMongoose(_payload) && 'collections' in _payload.db) {
const mongooseCollections = _payload.db.collections[collectionSlugs[0]].db.collections
const firstCollectionSlug = collectionSlugs?.[0]
if (!firstCollectionSlug?.length) {
throw new Error('No collection slugs provided to reset the database.')
}
const mongooseCollections = _payload.db.collections[firstCollectionSlug]?.db.collections
await createMongooseSnapshot(mongooseCollections, snapshotKey)
} else {

View File

@@ -1,8 +1,7 @@
import { MongoMemoryReplSet } from 'mongodb-memory-server'
import dotenv from 'dotenv'
import { MongoMemoryReplSet } from 'mongodb-memory-server'
dotenv.config()
// eslint-disable-next-line no-restricted-exports
export default async () => {
// @ts-expect-error
@@ -23,8 +22,11 @@ export default async () => {
},
})
await db.waitUntilRunning()
global._mongoMemoryServer = db
process.env.MONGODB_MEMORY_SERVER_URI = `${global._mongoMemoryServer.getUri()}&retryWrites=true`
console.log('Started memory db')
}
}

View File

@@ -5,4 +5,5 @@ export default async () => {
await global._mongoMemoryServer.stop()
console.log('Stopped memorydb')
}
process.exit(0)
}

View File

@@ -5,12 +5,10 @@ import { getFileByPath } from 'payload'
import { fileURLToPath } from 'url'
import { devUser } from '../credentials.js'
import { seedDB } from '../helpers/seed.js'
import {
categoriesJoinRestrictedSlug,
categoriesSlug,
collectionRestrictedSlug,
collectionSlugs,
hiddenPostsSlug,
postsSlug,
uploadsSlug,
@@ -215,12 +213,3 @@ export const seed = async (_payload: Payload) => {
data: { title: 'post 5', description: 'This is post 5', folder: sub_folder_2 },
})
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'joinsTest',
})
}

View File

@@ -20,7 +20,7 @@ import RichTextFields from './collections/RichText/index.js'
import TextFields from './collections/Text/index.js'
import Uploads from './collections/Upload/index.js'
import TabsWithRichText from './globals/TabsWithRichText.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
@@ -60,8 +60,9 @@ export const baseConfig: Partial<Config> = {
},
},
onInit: async (payload) => {
// IMPORTANT: This should only seed, not clear the database.
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
localization: {

View File

@@ -958,6 +958,7 @@ describe('lexicalMain', () => {
test('ensure internal links can be created', async () => {
await navigateToLexicalFields()
await wait(200)
const richTextField = page.locator('.rich-text-lexical').first()
await richTextField.scrollIntoViewIfNeeded()
await expect(richTextField).toBeVisible()
@@ -970,11 +971,15 @@ describe('lexicalMain', () => {
const paragraph = richTextField.locator('.LexicalEditorTheme__paragraph').first()
await paragraph.scrollIntoViewIfNeeded()
await expect(paragraph).toBeVisible()
await wait(200)
/**
* Type some text
*/
await paragraph.click()
await wait(200)
await page.keyboard.type('Link')
await wait(200)
// Select "Link" by pressing shift + arrow left
for (let i = 0; i < 4; i++) {
@@ -986,6 +991,7 @@ describe('lexicalMain', () => {
const linkButton = inlineToolbar.locator('.toolbar-popup__button-link')
await expect(linkButton).toBeVisible()
await wait(200)
await linkButton.click()
/**
@@ -1005,16 +1011,20 @@ describe('lexicalMain', () => {
.locator('.radio-input__styled-radio')
await radioInternalLink.click()
await wait(200)
const internalLinkSelect = linkDrawer
.locator('#field-doc .rs__control .value-container')
.first()
await internalLinkSelect.click()
await wait(200)
await expect(linkDrawer.locator('.rs__option').nth(0)).toBeVisible()
await expect(linkDrawer.locator('.rs__option').nth(0)).toContainText('Rich Text') // Link to itself - that way we can also test if depth 0 works
await linkDrawer.locator('.rs__option').nth(0).click()
await expect(internalLinkSelect).toContainText('Rich Text')
await wait(200)
await linkDrawer.locator('button').getByText('Save').first().click()
await expect(linkDrawer).toBeHidden()

View File

@@ -2,8 +2,7 @@ import type { Payload } from 'payload'
import { devUser, regularUser } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import { collectionSlugs, pagesSlug, postsSlug } from './slugs.js'
import { pagesSlug, postsSlug } from './slugs.js'
export const seed = async (_payload: Payload) => {
await executePromises(
@@ -46,12 +45,3 @@ export const seed = async (_payload: Payload) => {
false,
)
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -83,6 +83,7 @@
"next": "15.3.2",
"nodemailer": "6.9.16",
"payload": "workspace:*",
"pg": "8.11.3",
"qs-esm": "7.0.2",
"react": "19.1.0",
"react-dom": "19.1.0",

View File

@@ -2,8 +2,7 @@ import type { Payload, QueryPreset } from 'payload'
import { devUser as devCredentials, regularUser as regularCredentials } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import { collectionSlugs, pagesSlug, usersSlug } from './slugs.js'
import { pagesSlug, usersSlug } from './slugs.js'
type SeededQueryPreset = {
relatedCollection: 'pages'
@@ -187,12 +186,3 @@ export const seed = async (_payload: Payload) => {
false,
)
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -7,7 +7,7 @@ import path from 'path'
import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
import { devUser } from '../credentials.js'
import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
import { externalWorkflow } from './workflows/externalWorkflow.js'
import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js'
import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js'
@@ -394,7 +394,7 @@ export default buildConfigWithDefaults({
editor: lexicalEditor(),
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -114,7 +114,6 @@ describe('Versions', () => {
})
await ensureCompilationIsDone({ page, serverURL })
//await clearAndSeedEverything(payload)
})
describe('draft collections', () => {