chore: fix various e2e test setup issues (#12670)

I noticed a few issues when running e2e tests that this PR resolves:

- Most important: for some test suites (fields, fields-relationship,
versions, queues, lexical), the database was cleared and seeded
**twice** between test runs. This happened because the onInit function
was running the clear-and-seed script when it should only have been
running the seed script. Clearing the database (the snapshot workflow)
is handled by the reInit endpoint, which then calls onInit to seed the
actual data (see the seeding sketch after this list).
- The slowest part of `clearAndSeedEverything` is recreating indexes on
MongoDB. This PR slightly improves performance here by:
  - Skipping this step for the built-in `['payload-migrations',
    'payload-preferences', 'payload-locked-documents']` collections
  - Calling only `createIndexes`. Previously we were calling both
    `createIndexes` and `ensureIndexes`, which was unnecessary -
    `ensureIndexes` is a deprecated alias of `createIndexes` (see the
    index sketch after this list)
- Makes the reinit endpoint accept GET requests instead of POST requests
  - This makes it easier to debug right in the browser (see the example
    request after this list)
- Some TypeScript fixes
- Adds a `dev:memorydb` script to the package.json. For some reason,
`dev` is very unreliable against a local MongoDB when running e2e tests -
it frequently fails during index creation. Using the memory DB fixes this
issue, with the bonus of more closely resembling the CI environment
- Previously, you were unable to run test suites using Turbopack +
Postgres. This PR fixes that by explicitly installing `pg` as a
devDependency in our monorepo
- Fixes the Jest open handles warning (see the teardown sketch after
this list)
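
For clarity, here is a minimal sketch of the intended seeding split. The `seed`, `seedDB`, and `SEED_IN_CONFIG_ONINIT` names and the shape of the `seedDB` call come from the test helpers changed in this diff; the wrapper functions and import paths are simplified for illustration.

```ts
import type { Payload } from 'payload'

import { seedDB } from '../helpers/seed.js'
import { seed } from './seed.js'

// Per-suite config: onInit only seeds. It never clears the database.
export const onInit = async (payload: Payload) => {
  if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
    await seed(payload) // previously clearAndSeedEverything(payload), which cleared and re-seeded
  }
}

// reInit endpoint: the clear/snapshot workflow lives here, and the actual data
// is seeded by delegating to the config's onInit (i.e. seed above).
export const reInit = async (payload: Payload, snapshotKey: string) => {
  await seedDB({
    _payload: payload,
    collectionSlugs: payload.config.collections.map(({ slug }) => slug),
    seedFunction: payload.config.onInit,
    snapshotKey,
  })
}
```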
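
The index-recreation change boils down to roughly the following. The built-in slugs and the `background: false` option are taken from the `seedDB` hunk further down; the `recreateIndexes` wrapper is a hypothetical name used only for this sketch.

```ts
import type { Payload } from 'payload'

const builtIns = ['payload-migrations', 'payload-preferences', 'payload-locked-documents']

// Recreate indexes for a suite's collections on MongoDB, skipping Payload's
// built-in collections. Only createIndexes() is called; ensureIndexes() was a
// redundant, deprecated alias and is no longer used.
async function recreateIndexes(_payload: Payload, collectionSlugs: string[]): Promise<void> {
  await Promise.all(
    collectionSlugs
      .filter((slug) => !builtIns.includes(slug))
      .map((slug) =>
        // On the mongoose adapter, payload.db.collections holds the mongoose models
        (_payload.db as any).collections?.[slug]?.createIndexes({
          // Blocks writes (irrelevant while seeding) but is faster
          background: false,
        }),
      ),
  )
}
```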
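
Because the endpoint now answers GET, a re-seed can be triggered straight from the browser address bar or a one-off script. A sketch based on the `reInitializeDB` helper in this diff: the `/api/re-initialize` path, the `qs-esm` usage, and the `adminTests` snapshot key all appear below, while `localhost:3000` is only an assumed dev server URL.

```ts
import * as qs from 'qs-esm'

// Build the query string the same way reInitializeDB now does
const query = qs.stringify({ snapshotKey: 'adminTests' }, { addQueryPrefix: true })

// Equivalent to opening http://localhost:3000/api/re-initialize?snapshotKey=adminTests
// in the browser while the dev server is running
const res = await fetch(`http://localhost:3000/api/re-initialize${query}`, { method: 'get' })
console.log(res.status)
```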
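
The open-handles fix lives in Jest's global teardown: stop the in-memory MongoDB and exit explicitly. This is a sketch mirroring the teardown hunk near the end of this diff; the file path is not shown there, so none is claimed here, and the global declaration is an assumed shape.

```ts
import type { MongoMemoryReplSet } from 'mongodb-memory-server'

declare global {
  // Set by the global setup when --start-memory-db is used (assumed shape)
  var _mongoMemoryServer: MongoMemoryReplSet | undefined
}

// Jest global teardown (sketch): stop the memory server, then exit explicitly
// so Jest does not warn about lingering open handles.
export default async () => {
  if (global._mongoMemoryServer) {
    await global._mongoMemoryServer.stop()
    console.log('Stopped memorydb')
  }
  process.exit(0)
}
```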
Author: Alessio Gravili
Date: 2025-06-04 13:34:37 -07:00 (committed via GitHub)
Parent: 337f6188da
Commit: 545d870650
28 changed files with 139 additions and 126 deletions

View File

@@ -65,6 +65,7 @@
"dev:generate-graphql-schema": "pnpm runts ./test/generateGraphQLSchema.ts",
"dev:generate-importmap": "pnpm runts ./test/generateImportMap.ts",
"dev:generate-types": "pnpm runts ./test/generateTypes.ts",
"dev:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --start-memory-db",
"dev:postgres": "cross-env PAYLOAD_DATABASE=postgres pnpm runts ./test/dev.ts",
"dev:prod": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod",
"dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
@@ -155,10 +156,11 @@
"jest": "29.7.0",
"lint-staged": "15.2.7",
"minimist": "1.2.8",
"mongodb-memory-server": "^10",
"mongodb-memory-server": "10.1.4",
"next": "15.3.2",
"open": "^10.1.0",
"p-limit": "^5.0.0",
"pg": "8.11.3",
"playwright": "1.50.0",
"playwright-core": "1.50.0",
"prettier": "3.5.3",

View File

@@ -17,8 +17,8 @@
"url": "https://payloadcms.com"
}
],
"type": "module",
"sideEffects": false,
"type": "module",
"exports": {
".": {
"import": "./src/index.ts",
@@ -58,7 +58,7 @@
"@types/prompts": "^2.4.5",
"@types/uuid": "10.0.0",
"mongodb": "6.12.0",
"mongodb-memory-server": "^10",
"mongodb-memory-server": "10.1.4",
"payload": "workspace:*"
},
"peerDependencies": {

View File

@@ -40,6 +40,9 @@ export const connect: Connect = async function connect(
// If we are running a replica set with MongoDB Memory Server,
// wait until the replica set elects a primary before proceeding
if (this.mongoMemoryServer) {
this.payload.logger.info(
'Waiting for MongoDB Memory Server replica set to elect a primary...',
)
await new Promise((resolve) => setTimeout(resolve, 2000))
}
@@ -50,7 +53,7 @@ export const connect: Connect = async function connect(
this.beginTransaction = defaultBeginTransaction()
}
if (!this.mongoMemoryServer && !hotReload) {
if (!hotReload) {
if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
this.payload.logger.info('---- DROPPING DATABASE ----')
await mongoose.connection.dropDatabase()

View File

@@ -5,11 +5,7 @@ import mongoose from 'mongoose'
import type { MongooseAdapter } from './index.js'
export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
if (this.mongoMemoryServer) {
await this.mongoMemoryServer.stop()
} else {
await mongoose.disconnect()
}
await mongoose.disconnect()
Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
}

pnpm-lock.yaml (generated, 33 changed lines)
View File

@@ -9,7 +9,7 @@ overrides:
cross-env: 7.0.3
dotenv: 16.4.7
graphql: ^16.8.1
mongodb-memory-server: ^10
mongodb-memory-server: 10.1.4
react: 19.1.0
react-dom: 19.1.0
typescript: 5.7.3
@@ -124,8 +124,8 @@ importers:
specifier: 1.2.8
version: 1.2.8
mongodb-memory-server:
specifier: ^10
version: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
specifier: 10.1.4
version: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
next:
specifier: 15.3.2
version: 15.3.2(@opentelemetry/api@1.9.0)(@playwright/test@1.50.0)(babel-plugin-macros@3.1.0)(babel-plugin-react-compiler@19.1.0-rc.2)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4)
@@ -135,6 +135,9 @@ importers:
p-limit:
specifier: ^5.0.0
version: 5.0.0
pg:
specifier: 8.11.3
version: 8.11.3
playwright:
specifier: 1.50.0
version: 1.50.0
@@ -292,8 +295,8 @@ importers:
specifier: 6.12.0
version: 6.12.0(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
mongodb-memory-server:
specifier: ^10
version: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
specifier: 10.1.4
version: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
payload:
specifier: workspace:*
version: link:../payload
@@ -1852,6 +1855,9 @@ importers:
payload:
specifier: workspace:*
version: link:../packages/payload
pg:
specifier: 8.11.3
version: 8.11.3
qs-esm:
specifier: 7.0.2
version: 7.0.2
@@ -8503,12 +8509,12 @@ packages:
mongodb-connection-string-url@3.0.1:
resolution: {integrity: sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==}
mongodb-memory-server-core@10.1.3:
resolution: {integrity: sha512-ayBQHeV74wRHhgcAKpxHYI4th9Ufidy/m3XhJnLFRufKsOyDsyHYU3Zxv5Fm4hxsWE6wVd0GAVcQ7t7XNkivOg==}
mongodb-memory-server-core@10.1.4:
resolution: {integrity: sha512-o8fgY7ZalEd8pGps43fFPr/hkQu1L8i6HFEGbsTfA2zDOW0TopgpswaBCqDr0qD7ptibyPfB5DmC+UlIxbThzA==}
engines: {node: '>=16.20.1'}
mongodb-memory-server@10.1.3:
resolution: {integrity: sha512-QCUjsIIXSYv/EgkpDAjfhlqRKo6N+qR6DD43q4lyrCVn24xQmvlArdWHW/Um5RS4LkC9YWC3XveSncJqht2Hbg==}
mongodb-memory-server@10.1.4:
resolution: {integrity: sha512-+oKQ/kc3CX+816oPFRtaF0CN4vNcGKNjpOQe4bHo/21A3pMD+lC7Xz1EX5HP7siCX4iCpVchDMmCOFXVQSGkUg==}
engines: {node: '>=16.20.1'}
mongodb@6.12.0:
@@ -8641,6 +8647,7 @@ packages:
node-domexception@1.0.0:
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
engines: {node: '>=10.5.0'}
deprecated: Use your platform's native DOMException instead
node-fetch-native@1.6.4:
resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==}
@@ -18796,7 +18803,7 @@ snapshots:
'@types/whatwg-url': 11.0.5
whatwg-url: 13.0.0
mongodb-memory-server-core@10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
mongodb-memory-server-core@10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
dependencies:
async-mutex: 0.5.0
camelcase: 6.3.0
@@ -18806,7 +18813,7 @@ snapshots:
https-proxy-agent: 7.0.5
mongodb: 6.12.0(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
new-find-package-json: 2.0.0
semver: 7.6.3
semver: 7.7.1
tar-stream: 3.1.7
tslib: 2.8.1
yauzl: 3.2.0
@@ -18820,9 +18827,9 @@ snapshots:
- socks
- supports-color
mongodb-memory-server@10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
mongodb-memory-server@10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
dependencies:
mongodb-memory-server-core: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
mongodb-memory-server-core: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
tslib: 2.8.1
transitivePeerDependencies:
- '@aws-sdk/credential-providers'

View File

@@ -63,7 +63,7 @@
"eslint": "^9.23.0",
"eslint-config-next": "15.3.0",
"graphql": "^16.8.1",
"mongodb-memory-server": "^10.1.2",
"mongodb-memory-server": "10.1.4",
"next": "15.3.0",
"open": "^10.1.0",
"payload": "3.37.0",

View File

@@ -2,9 +2,7 @@ import type { Payload } from 'payload'
import { devUser } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import {
collectionSlugs,
customViews1CollectionSlug,
customViews2CollectionSlug,
geoCollectionSlug,
@@ -14,7 +12,7 @@ import {
with300DocumentsSlug,
} from './slugs.js'
export const seed = async (_payload) => {
export const seed = async (_payload: Payload) => {
await executePromises(
[
() =>
@@ -139,12 +137,3 @@ export const seed = async (_payload) => {
await Promise.all([...manyDocumentsPromises])
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -32,7 +32,7 @@ import { es } from 'payload/i18n/es'
import sharp from 'sharp'
import { databaseAdapter } from './databaseAdapter.js'
import { reInitEndpoint } from './helpers/reInit.js'
import { reInitEndpoint } from './helpers/reInitEndpoint.js'
import { localAPIEndpoint } from './helpers/sdk/endpoint.js'
import { testEmailAdapter } from './testEmailAdapter.js'

View File

@@ -18,7 +18,7 @@ import { Restricted } from './collections/Restricted/index.js'
import { RelationshipUpdatedExternally } from './collections/UpdatedExternally/index.js'
import { Versions } from './collections/Versions/index.js'
import { Video } from './collections/Video/index.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
export default buildConfigWithDefaults({
admin: {
@@ -49,7 +49,7 @@ export default buildConfigWithDefaults({
},
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -1,14 +1,9 @@
import type { Payload } from 'payload'
import path from 'path'
import { fileURLToPath } from 'url'
import { devUser } from '../credentials.js'
import { seedDB } from '../helpers/seed.js'
import {
collection1Slug,
collection2Slug,
collectionSlugs,
podcastCollectionSlug,
relationOneSlug,
relationRestrictedSlug,
@@ -18,9 +13,6 @@ import {
videoCollectionSlug,
} from './slugs.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export const seed = async (_payload: Payload) => {
await _payload.create({
collection: 'users',
@@ -179,13 +171,3 @@ export const seed = async (_payload: Payload) => {
})
}
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'fieldsTest',
uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
}

View File

@@ -38,7 +38,7 @@ import UploadsMultiPoly from './collections/UploadMultiPoly/index.js'
import UploadsPoly from './collections/UploadPoly/index.js'
import UploadRestricted from './collections/UploadRestricted/index.js'
import Uploads3 from './collections/Uploads3/index.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
export const collectionSlugs: CollectionConfig[] = [
{
@@ -157,7 +157,7 @@ export const baseConfig: Partial<Config> = {
},
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -11,6 +11,8 @@ export const allDatabaseAdapters = {
export const databaseAdapter = mongooseAdapter({
ensureIndexes: true,
// required for connect to detect that we are using a memory server
mongoMemoryServer: global._mongoMemoryServer,
url:
process.env.MONGODB_MEMORY_SERVER_URI ||
process.env.DATABASE_URI ||

View File

@@ -0,0 +1,9 @@
export function isErrorWithCode(err: unknown, code?: string): err is NodeJS.ErrnoException {
return (
typeof err === 'object' &&
err !== null &&
'code' in err &&
typeof (err as any).code === 'string' &&
(!code || (err as NodeJS.ErrnoException).code === code)
)
}

View File

@@ -1,25 +1,38 @@
import type { Endpoint, PayloadHandler } from 'payload'
import { status as httpStatus } from 'http-status'
import { addDataAndFileToRequest } from 'payload'
import * as qs from 'qs-esm'
import { path } from './reInitializeDB.js'
import { seedDB } from './seed.js'
const handler: PayloadHandler = async (req) => {
process.env.SEED_IN_CONFIG_ONINIT = 'true'
await addDataAndFileToRequest(req)
const { data, payload } = req
const { payload } = req
if (!req.url) {
throw new Error('Request URL is required')
}
const query: {
deleteOnly?: boolean
snapshotKey?: string
uploadsDir?: string | string[]
} = qs.parse(req.url.split('?')[1] ?? '', {
depth: 10,
ignoreQueryPrefix: true,
})
try {
console.log('Calling seedDB')
await seedDB({
_payload: payload,
collectionSlugs: payload.config.collections.map(({ slug }) => slug),
seedFunction: payload.config.onInit,
snapshotKey: String(data.snapshotKey),
snapshotKey: String(query.snapshotKey),
// uploadsDir can be string or stringlist
uploadsDir: data.uploadsDir as string | string[],
deleteOnly: data.deleteOnly,
uploadsDir: query.uploadsDir as string | string[],
deleteOnly: query.deleteOnly,
})
return Response.json(
@@ -40,6 +53,6 @@ const handler: PayloadHandler = async (req) => {
export const reInitEndpoint: Endpoint = {
path,
method: 'post',
method: 'get',
handler,
}

View File

@@ -1,3 +1,5 @@
import * as qs from 'qs-esm'
export const path = '/re-initialize'
export const reInitializeDB = async ({
@@ -19,13 +21,19 @@ export const reInitializeDB = async ({
try {
console.log(`Attempting to reinitialize DB (attempt ${attempt}/${maxAttempts})...`)
const response = await fetch(`${serverURL}/api${path}`, {
method: 'post',
body: JSON.stringify({
const queryParams = qs.stringify(
{
snapshotKey,
uploadsDir,
deleteOnly,
}),
},
{
addQueryPrefix: true,
},
)
const response = await fetch(`${serverURL}/api${path}${queryParams}`, {
method: 'get',
headers: {
'Content-Type': 'application/json',
},
@@ -39,7 +47,7 @@ export const reInitializeDB = async ({
console.log(`Successfully reinitialized DB (took ${timeTaken}ms)`)
return
} catch (error) {
console.error(`Failed to reinitialize DB: ${error.message}`)
console.error(`Failed to reinitialize DB`, error)
if (attempt === maxAttempts) {
console.error('Max retry attempts reached. Giving up.')

View File

@@ -5,7 +5,12 @@ import { isMongoose } from './isMongoose.js'
export async function resetDB(_payload: Payload, collectionSlugs: string[]) {
if (isMongoose(_payload) && 'collections' in _payload.db && collectionSlugs.length > 0) {
await _payload.db.collections[collectionSlugs[0]].db.dropDatabase()
const firstCollectionSlug = collectionSlugs?.[0]
if (!firstCollectionSlug?.length) {
throw new Error('No collection slugs provided to reset the database.')
}
await _payload.db.collections[firstCollectionSlug]?.db.dropDatabase()
} else if ('drizzle' in _payload.db) {
const db = _payload.db as unknown as DrizzleAdapter

View File

@@ -3,6 +3,7 @@ import * as os from 'node:os'
import path from 'path'
import { type Payload } from 'payload'
import { isErrorWithCode } from './isErrorWithCode.js'
import { isMongoose } from './isMongoose.js'
import { resetDB } from './reset.js'
import { createSnapshot, dbSnapshot, restoreFromSnapshot, uploadsDirCache } from './snapshot.js'
@@ -47,15 +48,18 @@ export async function seedDB({
const uploadsDirs = Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
for (const dir of uploadsDirs) {
try {
// Attempt to clear the uploads directory if it exists
await fs.promises.access(dir)
const files = await fs.promises.readdir(dir)
for (const file of files) {
await fs.promises.rm(path.join(dir, file))
const filePath = path.join(dir, file)
await fs.promises.rm(filePath, { recursive: true, force: true })
}
} catch (error) {
if (error.code !== 'ENOENT') {
// If the error is not because the directory doesn't exist
if (isErrorWithCode(error, 'ENOENT')) {
// Directory does not exist - that's okay, skip it
continue
} else {
// Some other error occurred - rethrow it
console.error('Error in operation (deleting uploads dir):', dir, error)
throw error
}
@@ -124,16 +128,20 @@ export async function seedDB({
try {
if (isMongoose(_payload)) {
await Promise.all([
...collectionSlugs.map(async (collectionSlug) => {
await _payload.db.collections[collectionSlug].createIndexes()
}),
...collectionSlugs
.filter(
(collectionSlug) =>
['payload-migrations', 'payload-preferences', 'payload-locked-documents'].indexOf(
collectionSlug,
) === -1,
)
.map(async (collectionSlug) => {
await _payload.db.collections[collectionSlug]?.createIndexes({
// Blocks writes (doesn't matter here) but faster
background: false,
})
}),
])
await Promise.all(
_payload.config.collections.map(async (coll) => {
await _payload.db?.collections[coll.slug]?.ensureIndexes()
}),
)
}
} catch (e) {
console.error('Error in operation (re-creating indexes):', e)
@@ -170,7 +178,7 @@ export async function seedDB({
let newObj: {
cacheDir: string
originalDir: string
} = null
} | null = null
if (!uploadsDirCache[snapshotKey].find((cache) => cache.originalDir === dir)) {
// Define new cache folder path to the OS temp directory (well a random folder inside it)
newObj = {

View File

@@ -116,7 +116,13 @@ export async function createSnapshot(
collectionSlugs: string[],
) {
if (isMongoose(_payload) && 'collections' in _payload.db) {
const mongooseCollections = _payload.db.collections[collectionSlugs[0]].db.collections
const firstCollectionSlug = collectionSlugs?.[0]
if (!firstCollectionSlug?.length) {
throw new Error('No collection slugs provided to reset the database.')
}
const mongooseCollections = _payload.db.collections[firstCollectionSlug]?.db.collections
await createMongooseSnapshot(mongooseCollections, snapshotKey)
} else {

View File

@@ -1,8 +1,7 @@
import { MongoMemoryReplSet } from 'mongodb-memory-server'
import dotenv from 'dotenv'
import { MongoMemoryReplSet } from 'mongodb-memory-server'
dotenv.config()
// eslint-disable-next-line no-restricted-exports
export default async () => {
// @ts-expect-error
@@ -23,8 +22,11 @@ export default async () => {
},
})
await db.waitUntilRunning()
global._mongoMemoryServer = db
process.env.MONGODB_MEMORY_SERVER_URI = `${global._mongoMemoryServer.getUri()}&retryWrites=true`
console.log('Started memory db')
}
}

View File

@@ -5,4 +5,5 @@ export default async () => {
await global._mongoMemoryServer.stop()
console.log('Stopped memorydb')
}
process.exit(0)
}

View File

@@ -5,12 +5,10 @@ import { getFileByPath } from 'payload'
import { fileURLToPath } from 'url'
import { devUser } from '../credentials.js'
import { seedDB } from '../helpers/seed.js'
import {
categoriesJoinRestrictedSlug,
categoriesSlug,
collectionRestrictedSlug,
collectionSlugs,
hiddenPostsSlug,
postsSlug,
uploadsSlug,
@@ -215,12 +213,3 @@ export const seed = async (_payload: Payload) => {
data: { title: 'post 5', description: 'This is post 5', folder: sub_folder_2 },
})
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'joinsTest',
})
}

View File

@@ -20,7 +20,7 @@ import RichTextFields from './collections/RichText/index.js'
import TextFields from './collections/Text/index.js'
import Uploads from './collections/Upload/index.js'
import TabsWithRichText from './globals/TabsWithRichText.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
@@ -60,8 +60,9 @@ export const baseConfig: Partial<Config> = {
},
},
onInit: async (payload) => {
// IMPORTANT: This should only seed, not clear the database.
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
localization: {

View File

@@ -958,6 +958,7 @@ describe('lexicalMain', () => {
test('ensure internal links can be created', async () => {
await navigateToLexicalFields()
await wait(200)
const richTextField = page.locator('.rich-text-lexical').first()
await richTextField.scrollIntoViewIfNeeded()
await expect(richTextField).toBeVisible()
@@ -970,11 +971,15 @@ describe('lexicalMain', () => {
const paragraph = richTextField.locator('.LexicalEditorTheme__paragraph').first()
await paragraph.scrollIntoViewIfNeeded()
await expect(paragraph).toBeVisible()
await wait(200)
/**
* Type some text
*/
await paragraph.click()
await wait(200)
await page.keyboard.type('Link')
await wait(200)
// Select "Link" by pressing shift + arrow left
for (let i = 0; i < 4; i++) {
@@ -986,6 +991,7 @@ describe('lexicalMain', () => {
const linkButton = inlineToolbar.locator('.toolbar-popup__button-link')
await expect(linkButton).toBeVisible()
await wait(200)
await linkButton.click()
/**
@@ -1005,16 +1011,20 @@ describe('lexicalMain', () => {
.locator('.radio-input__styled-radio')
await radioInternalLink.click()
await wait(200)
const internalLinkSelect = linkDrawer
.locator('#field-doc .rs__control .value-container')
.first()
await internalLinkSelect.click()
await wait(200)
await expect(linkDrawer.locator('.rs__option').nth(0)).toBeVisible()
await expect(linkDrawer.locator('.rs__option').nth(0)).toContainText('Rich Text') // Link to itself - that way we can also test if depth 0 works
await linkDrawer.locator('.rs__option').nth(0).click()
await expect(internalLinkSelect).toContainText('Rich Text')
await wait(200)
await linkDrawer.locator('button').getByText('Save').first().click()
await expect(linkDrawer).toBeHidden()

View File

@@ -2,8 +2,7 @@ import type { Payload } from 'payload'
import { devUser, regularUser } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import { collectionSlugs, pagesSlug, postsSlug } from './slugs.js'
import { pagesSlug, postsSlug } from './slugs.js'
export const seed = async (_payload: Payload) => {
await executePromises(
@@ -46,12 +45,3 @@ export const seed = async (_payload: Payload) => {
false,
)
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -83,6 +83,7 @@
"next": "15.3.2",
"nodemailer": "6.9.16",
"payload": "workspace:*",
"pg": "8.11.3",
"qs-esm": "7.0.2",
"react": "19.1.0",
"react-dom": "19.1.0",

View File

@@ -2,8 +2,7 @@ import type { Payload, QueryPreset } from 'payload'
import { devUser as devCredentials, regularUser as regularCredentials } from '../credentials.js'
import { executePromises } from '../helpers/executePromises.js'
import { seedDB } from '../helpers/seed.js'
import { collectionSlugs, pagesSlug, usersSlug } from './slugs.js'
import { pagesSlug, usersSlug } from './slugs.js'
type SeededQueryPreset = {
relatedCollection: 'pages'
@@ -187,12 +186,3 @@ export const seed = async (_payload: Payload) => {
false,
)
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs,
seedFunction: seed,
snapshotKey: 'adminTests',
})
}

View File

@@ -7,7 +7,7 @@ import path from 'path'
import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
import { devUser } from '../credentials.js'
import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js'
import { clearAndSeedEverything } from './seed.js'
import { seed } from './seed.js'
import { externalWorkflow } from './workflows/externalWorkflow.js'
import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js'
import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js'
@@ -394,7 +394,7 @@ export default buildConfigWithDefaults({
editor: lexicalEditor(),
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
await seed(payload)
}
},
typescript: {

View File

@@ -114,7 +114,6 @@ describe('Versions', () => {
})
await ensureCompilationIsDone({ page, serverURL })
//await clearAndSeedEverything(payload)
})
describe('draft collections', () => {