chore: fix various e2e test setup issues (#12670)
I noticed a few issues when running e2e tests that this PR resolves:

- Most important: for some test suites (fields, fields-relationship, versions, queues, lexical), the database was cleared and seeded **twice** between each test run. This is because the onInit function was running the clear-and-seed script when it should only have been running the seed script. Clearing the database / the snapshot workflow is handled by the reInit endpoint, which then calls onInit to seed the actual data.
- The slowest part of `clearAndSeedEverything` is recreating indexes on mongodb. This PR slightly improves performance here by:
  - Skipping this process for the built-in `['payload-migrations', 'payload-preferences', 'payload-locked-documents']` collections
  - Only calling `createIndexes`. Previously we were calling both `createIndexes` and `ensureIndexes`, which was unnecessary: `ensureIndexes` is a deprecated alias of `createIndexes`.
- Makes the reinit endpoint accept GET requests instead of POST requests, which makes it easier to debug right in the browser (see the sketch after this list)
- Some TypeScript fixes
- Adds a `dev:memorydb` script to the package.json. For some reason, `dev` is super unreliable on mongodb locally when running e2e tests: it frequently fails during index creation. Using the memory db fixes this issue, with the bonus of more closely resembling the CI environment
- Previously, you were unable to run test suites using turbopack + postgres. This fixes it by explicitly installing `pg` as a devDependency in our monorepo
- Fixes the Jest open handles warning
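Because the endpoint now responds to GET, a re-seed can be triggered straight from the browser address bar or a one-off script. A minimal sketch of such a call, assuming a Payload test server listening locally on port 3000; the path and query parameters mirror the `reInitializeDB` helper changed below, and the concrete values are only illustrative:

```ts
// Hypothetical debugging snippet - not part of this PR.
// Assumes a Payload test server is running on http://localhost:3000.
import * as qs from 'qs-esm'

const query = qs.stringify(
  {
    snapshotKey: 'adminTests', // which snapshot to restore/create (illustrative value)
    deleteOnly: false, // set to true to only clear data without re-seeding
  },
  { addQueryPrefix: true },
)

// A plain GET, so the same URL can also be pasted into the browser.
const res = await fetch(`http://localhost:3000/api/re-initialize${query}`, { method: 'get' })
console.log(res.status, await res.json())
```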
@@ -65,6 +65,7 @@
     "dev:generate-graphql-schema": "pnpm runts ./test/generateGraphQLSchema.ts",
     "dev:generate-importmap": "pnpm runts ./test/generateImportMap.ts",
     "dev:generate-types": "pnpm runts ./test/generateTypes.ts",
+    "dev:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --start-memory-db",
     "dev:postgres": "cross-env PAYLOAD_DATABASE=postgres pnpm runts ./test/dev.ts",
     "dev:prod": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod",
     "dev:prod:memorydb": "cross-env NODE_OPTIONS=--no-deprecation tsx ./test/dev.ts --prod --start-memory-db",
@@ -155,10 +156,11 @@
     "jest": "29.7.0",
     "lint-staged": "15.2.7",
     "minimist": "1.2.8",
-    "mongodb-memory-server": "^10",
+    "mongodb-memory-server": "10.1.4",
     "next": "15.3.2",
     "open": "^10.1.0",
     "p-limit": "^5.0.0",
+    "pg": "8.11.3",
     "playwright": "1.50.0",
     "playwright-core": "1.50.0",
     "prettier": "3.5.3",
@@ -17,8 +17,8 @@
       "url": "https://payloadcms.com"
     }
   ],
-  "type": "module",
   "sideEffects": false,
+  "type": "module",
   "exports": {
     ".": {
       "import": "./src/index.ts",
@@ -58,7 +58,7 @@
     "@types/prompts": "^2.4.5",
     "@types/uuid": "10.0.0",
     "mongodb": "6.12.0",
-    "mongodb-memory-server": "^10",
+    "mongodb-memory-server": "10.1.4",
     "payload": "workspace:*"
   },
   "peerDependencies": {
@@ -40,6 +40,9 @@ export const connect: Connect = async function connect(
   // If we are running a replica set with MongoDB Memory Server,
   // wait until the replica set elects a primary before proceeding
   if (this.mongoMemoryServer) {
+    this.payload.logger.info(
+      'Waiting for MongoDB Memory Server replica set to elect a primary...',
+    )
     await new Promise((resolve) => setTimeout(resolve, 2000))
   }

@@ -50,7 +53,7 @@ export const connect: Connect = async function connect(
     this.beginTransaction = defaultBeginTransaction()
   }

-  if (!this.mongoMemoryServer && !hotReload) {
+  if (!hotReload) {
     if (process.env.PAYLOAD_DROP_DATABASE === 'true') {
       this.payload.logger.info('---- DROPPING DATABASE ----')
       await mongoose.connection.dropDatabase()
@@ -5,11 +5,7 @@ import mongoose from 'mongoose'
 import type { MongooseAdapter } from './index.js'

 export const destroy: Destroy = async function destroy(this: MongooseAdapter) {
-  if (this.mongoMemoryServer) {
-    await this.mongoMemoryServer.stop()
-  } else {
-    await mongoose.disconnect()
-  }
+  await mongoose.disconnect()

   Object.keys(mongoose.models).map((model) => mongoose.deleteModel(model))
 }
pnpm-lock.yaml (generated, 33 lines changed)
@@ -9,7 +9,7 @@ overrides:
   cross-env: 7.0.3
   dotenv: 16.4.7
   graphql: ^16.8.1
-  mongodb-memory-server: ^10
+  mongodb-memory-server: 10.1.4
   react: 19.1.0
   react-dom: 19.1.0
   typescript: 5.7.3
@@ -124,8 +124,8 @@ importers:
       specifier: 1.2.8
       version: 1.2.8
     mongodb-memory-server:
-      specifier: ^10
-      version: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
+      specifier: 10.1.4
+      version: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
     next:
       specifier: 15.3.2
       version: 15.3.2(@opentelemetry/api@1.9.0)(@playwright/test@1.50.0)(babel-plugin-macros@3.1.0)(babel-plugin-react-compiler@19.1.0-rc.2)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(sass@1.77.4)
@@ -135,6 +135,9 @@ importers:
     p-limit:
       specifier: ^5.0.0
       version: 5.0.0
+    pg:
+      specifier: 8.11.3
+      version: 8.11.3
     playwright:
       specifier: 1.50.0
       version: 1.50.0
@@ -292,8 +295,8 @@ importers:
       specifier: 6.12.0
       version: 6.12.0(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
     mongodb-memory-server:
-      specifier: ^10
-      version: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
+      specifier: 10.1.4
+      version: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
     payload:
       specifier: workspace:*
       version: link:../payload
@@ -1852,6 +1855,9 @@ importers:
     payload:
       specifier: workspace:*
       version: link:../packages/payload
+    pg:
+      specifier: 8.11.3
+      version: 8.11.3
     qs-esm:
       specifier: 7.0.2
       version: 7.0.2
@@ -8503,12 +8509,12 @@ packages:
   mongodb-connection-string-url@3.0.1:
     resolution: {integrity: sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==}

-  mongodb-memory-server-core@10.1.3:
-    resolution: {integrity: sha512-ayBQHeV74wRHhgcAKpxHYI4th9Ufidy/m3XhJnLFRufKsOyDsyHYU3Zxv5Fm4hxsWE6wVd0GAVcQ7t7XNkivOg==}
+  mongodb-memory-server-core@10.1.4:
+    resolution: {integrity: sha512-o8fgY7ZalEd8pGps43fFPr/hkQu1L8i6HFEGbsTfA2zDOW0TopgpswaBCqDr0qD7ptibyPfB5DmC+UlIxbThzA==}
     engines: {node: '>=16.20.1'}

-  mongodb-memory-server@10.1.3:
-    resolution: {integrity: sha512-QCUjsIIXSYv/EgkpDAjfhlqRKo6N+qR6DD43q4lyrCVn24xQmvlArdWHW/Um5RS4LkC9YWC3XveSncJqht2Hbg==}
+  mongodb-memory-server@10.1.4:
+    resolution: {integrity: sha512-+oKQ/kc3CX+816oPFRtaF0CN4vNcGKNjpOQe4bHo/21A3pMD+lC7Xz1EX5HP7siCX4iCpVchDMmCOFXVQSGkUg==}
     engines: {node: '>=16.20.1'}

   mongodb@6.12.0:
@@ -8641,6 +8647,7 @@ packages:
   node-domexception@1.0.0:
     resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
     engines: {node: '>=10.5.0'}
+    deprecated: Use your platform's native DOMException instead

   node-fetch-native@1.6.4:
     resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==}
@@ -18796,7 +18803,7 @@ snapshots:
       '@types/whatwg-url': 11.0.5
       whatwg-url: 13.0.0

-  mongodb-memory-server-core@10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
+  mongodb-memory-server-core@10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
     dependencies:
       async-mutex: 0.5.0
       camelcase: 6.3.0
@@ -18806,7 +18813,7 @@ snapshots:
       https-proxy-agent: 7.0.5
       mongodb: 6.12.0(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
       new-find-package-json: 2.0.0
-      semver: 7.6.3
+      semver: 7.7.1
       tar-stream: 3.1.7
       tslib: 2.8.1
       yauzl: 3.2.0
@@ -18820,9 +18827,9 @@ snapshots:
       - socks
       - supports-color

-  mongodb-memory-server@10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
+  mongodb-memory-server@10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3):
     dependencies:
-      mongodb-memory-server-core: 10.1.3(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
+      mongodb-memory-server-core: 10.1.4(@aws-sdk/credential-providers@3.687.0(@aws-sdk/client-sso-oidc@3.687.0(@aws-sdk/client-sts@3.687.0)))(socks@2.8.3)
       tslib: 2.8.1
     transitivePeerDependencies:
       - '@aws-sdk/credential-providers'
@@ -63,7 +63,7 @@
     "eslint": "^9.23.0",
     "eslint-config-next": "15.3.0",
     "graphql": "^16.8.1",
-    "mongodb-memory-server": "^10.1.2",
+    "mongodb-memory-server": "10.1.4",
     "next": "15.3.0",
     "open": "^10.1.0",
     "payload": "3.37.0",
@@ -2,9 +2,7 @@ import type { Payload } from 'payload'

 import { devUser } from '../credentials.js'
 import { executePromises } from '../helpers/executePromises.js'
-import { seedDB } from '../helpers/seed.js'
 import {
-  collectionSlugs,
   customViews1CollectionSlug,
   customViews2CollectionSlug,
   geoCollectionSlug,
@@ -14,7 +12,7 @@ import {
   with300DocumentsSlug,
 } from './slugs.js'

-export const seed = async (_payload) => {
+export const seed = async (_payload: Payload) => {
   await executePromises(
     [
       () =>
@@ -139,12 +137,3 @@ export const seed = async (_payload) => {

   await Promise.all([...manyDocumentsPromises])
 }
-
-export async function clearAndSeedEverything(_payload: Payload) {
-  return await seedDB({
-    _payload,
-    collectionSlugs,
-    seedFunction: seed,
-    snapshotKey: 'adminTests',
-  })
-}
@@ -32,7 +32,7 @@ import { es } from 'payload/i18n/es'
 import sharp from 'sharp'

 import { databaseAdapter } from './databaseAdapter.js'
-import { reInitEndpoint } from './helpers/reInit.js'
+import { reInitEndpoint } from './helpers/reInitEndpoint.js'
 import { localAPIEndpoint } from './helpers/sdk/endpoint.js'
 import { testEmailAdapter } from './testEmailAdapter.js'

@@ -18,7 +18,7 @@ import { Restricted } from './collections/Restricted/index.js'
 import { RelationshipUpdatedExternally } from './collections/UpdatedExternally/index.js'
 import { Versions } from './collections/Versions/index.js'
 import { Video } from './collections/Video/index.js'
-import { clearAndSeedEverything } from './seed.js'
+import { seed } from './seed.js'

 export default buildConfigWithDefaults({
   admin: {
@@ -49,7 +49,7 @@ export default buildConfigWithDefaults({
   },
   onInit: async (payload) => {
     if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
-      await clearAndSeedEverything(payload)
+      await seed(payload)
     }
   },
   typescript: {
@@ -1,14 +1,9 @@
 import type { Payload } from 'payload'

-import path from 'path'
-import { fileURLToPath } from 'url'
-
 import { devUser } from '../credentials.js'
-import { seedDB } from '../helpers/seed.js'
 import {
   collection1Slug,
   collection2Slug,
-  collectionSlugs,
   podcastCollectionSlug,
   relationOneSlug,
   relationRestrictedSlug,
@@ -18,9 +13,6 @@ import {
   videoCollectionSlug,
 } from './slugs.js'

-const filename = fileURLToPath(import.meta.url)
-const dirname = path.dirname(filename)
-
 export const seed = async (_payload: Payload) => {
   await _payload.create({
     collection: 'users',
@@ -179,13 +171,3 @@ export const seed = async (_payload: Payload) => {
     })
   }
 }
-
-export async function clearAndSeedEverything(_payload: Payload) {
-  return await seedDB({
-    _payload,
-    collectionSlugs,
-    seedFunction: seed,
-    snapshotKey: 'fieldsTest',
-    uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
-  })
-}
@@ -38,7 +38,7 @@ import UploadsMultiPoly from './collections/UploadMultiPoly/index.js'
 import UploadsPoly from './collections/UploadPoly/index.js'
 import UploadRestricted from './collections/UploadRestricted/index.js'
 import Uploads3 from './collections/Uploads3/index.js'
-import { clearAndSeedEverything } from './seed.js'
+import { seed } from './seed.js'

 export const collectionSlugs: CollectionConfig[] = [
   {
@@ -157,7 +157,7 @@ export const baseConfig: Partial<Config> = {
   },
   onInit: async (payload) => {
     if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
-      await clearAndSeedEverything(payload)
+      await seed(payload)
     }
   },
   typescript: {
@@ -11,6 +11,8 @@ export const allDatabaseAdapters = {

 export const databaseAdapter = mongooseAdapter({
   ensureIndexes: true,
+  // required for connect to detect that we are using a memory server
+  mongoMemoryServer: global._mongoMemoryServer,
   url:
     process.env.MONGODB_MEMORY_SERVER_URI ||
     process.env.DATABASE_URI ||
test/helpers/isErrorWithCode.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
+export function isErrorWithCode(err: unknown, code?: string): err is NodeJS.ErrnoException {
+  return (
+    typeof err === 'object' &&
+    err !== null &&
+    'code' in err &&
+    typeof (err as any).code === 'string' &&
+    (!code || (err as NodeJS.ErrnoException).code === code)
+  )
+}
@@ -1,25 +1,38 @@
 import type { Endpoint, PayloadHandler } from 'payload'

 import { status as httpStatus } from 'http-status'
-import { addDataAndFileToRequest } from 'payload'
+import * as qs from 'qs-esm'

 import { path } from './reInitializeDB.js'
 import { seedDB } from './seed.js'

 const handler: PayloadHandler = async (req) => {
   process.env.SEED_IN_CONFIG_ONINIT = 'true'
-  await addDataAndFileToRequest(req)
-  const { data, payload } = req
+  const { payload } = req
+
+  if (!req.url) {
+    throw new Error('Request URL is required')
+  }
+
+  const query: {
+    deleteOnly?: boolean
+    snapshotKey?: string
+    uploadsDir?: string | string[]
+  } = qs.parse(req.url.split('?')[1] ?? '', {
+    depth: 10,
+    ignoreQueryPrefix: true,
+  })

   try {
+    console.log('Calling seedDB')
     await seedDB({
       _payload: payload,
       collectionSlugs: payload.config.collections.map(({ slug }) => slug),
       seedFunction: payload.config.onInit,
-      snapshotKey: String(data.snapshotKey),
+      snapshotKey: String(query.snapshotKey),
       // uploadsDir can be string or stringlist
-      uploadsDir: data.uploadsDir as string | string[],
-      deleteOnly: data.deleteOnly,
+      uploadsDir: query.uploadsDir as string | string[],
+      deleteOnly: query.deleteOnly,
     })

     return Response.json(
@@ -40,6 +53,6 @@ const handler: PayloadHandler = async (req) => {

 export const reInitEndpoint: Endpoint = {
   path,
-  method: 'post',
+  method: 'get',
   handler,
 }
@@ -1,3 +1,5 @@
+import * as qs from 'qs-esm'
+
 export const path = '/re-initialize'

 export const reInitializeDB = async ({
@@ -19,13 +21,19 @@ export const reInitializeDB = async ({
     try {
       console.log(`Attempting to reinitialize DB (attempt ${attempt}/${maxAttempts})...`)

-      const response = await fetch(`${serverURL}/api${path}`, {
-        method: 'post',
-        body: JSON.stringify({
+      const queryParams = qs.stringify(
+        {
           snapshotKey,
           uploadsDir,
           deleteOnly,
-        }),
+        },
+        {
+          addQueryPrefix: true,
+        },
+      )
+
+      const response = await fetch(`${serverURL}/api${path}${queryParams}`, {
+        method: 'get',
         headers: {
           'Content-Type': 'application/json',
         },
@@ -39,7 +47,7 @@
       console.log(`Successfully reinitialized DB (took ${timeTaken}ms)`)
       return
     } catch (error) {
-      console.error(`Failed to reinitialize DB: ${error.message}`)
+      console.error(`Failed to reinitialize DB`, error)

       if (attempt === maxAttempts) {
         console.error('Max retry attempts reached. Giving up.')
@@ -5,7 +5,12 @@ import { isMongoose } from './isMongoose.js'

 export async function resetDB(_payload: Payload, collectionSlugs: string[]) {
   if (isMongoose(_payload) && 'collections' in _payload.db && collectionSlugs.length > 0) {
-    await _payload.db.collections[collectionSlugs[0]].db.dropDatabase()
+    const firstCollectionSlug = collectionSlugs?.[0]
+
+    if (!firstCollectionSlug?.length) {
+      throw new Error('No collection slugs provided to reset the database.')
+    }
+    await _payload.db.collections[firstCollectionSlug]?.db.dropDatabase()
   } else if ('drizzle' in _payload.db) {
     const db = _payload.db as unknown as DrizzleAdapter

@@ -3,6 +3,7 @@ import * as os from 'node:os'
 import path from 'path'
 import { type Payload } from 'payload'

+import { isErrorWithCode } from './isErrorWithCode.js'
 import { isMongoose } from './isMongoose.js'
 import { resetDB } from './reset.js'
 import { createSnapshot, dbSnapshot, restoreFromSnapshot, uploadsDirCache } from './snapshot.js'
@@ -47,15 +48,18 @@ export async function seedDB({
   const uploadsDirs = Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
   for (const dir of uploadsDirs) {
     try {
-      // Attempt to clear the uploads directory if it exists
       await fs.promises.access(dir)
       const files = await fs.promises.readdir(dir)
       for (const file of files) {
-        await fs.promises.rm(path.join(dir, file))
+        const filePath = path.join(dir, file)
+        await fs.promises.rm(filePath, { recursive: true, force: true })
       }
     } catch (error) {
-      if (error.code !== 'ENOENT') {
-        // If the error is not because the directory doesn't exist
+      if (isErrorWithCode(error, 'ENOENT')) {
+        // Directory does not exist - that's okay, skip it
+        continue
+      } else {
+        // Some other error occurred - rethrow it
         console.error('Error in operation (deleting uploads dir):', dir, error)
         throw error
       }
@@ -124,16 +128,20 @@ export async function seedDB({
   try {
     if (isMongoose(_payload)) {
       await Promise.all([
-        ...collectionSlugs.map(async (collectionSlug) => {
-          await _payload.db.collections[collectionSlug].createIndexes()
-        }),
+        ...collectionSlugs
+          .filter(
+            (collectionSlug) =>
+              ['payload-migrations', 'payload-preferences', 'payload-locked-documents'].indexOf(
+                collectionSlug,
+              ) === -1,
+          )
+          .map(async (collectionSlug) => {
+            await _payload.db.collections[collectionSlug]?.createIndexes({
+              // Blocks writes (doesn't matter here) but faster
+              background: false,
+            })
+          }),
       ])
-
-      await Promise.all(
-        _payload.config.collections.map(async (coll) => {
-          await _payload.db?.collections[coll.slug]?.ensureIndexes()
-        }),
-      )
     }
   } catch (e) {
     console.error('Error in operation (re-creating indexes):', e)
@@ -170,7 +178,7 @@ export async function seedDB({
     let newObj: {
       cacheDir: string
       originalDir: string
-    } = null
+    } | null = null
     if (!uploadsDirCache[snapshotKey].find((cache) => cache.originalDir === dir)) {
       // Define new cache folder path to the OS temp directory (well a random folder inside it)
       newObj = {
@@ -116,7 +116,13 @@ export async function createSnapshot(
   collectionSlugs: string[],
 ) {
   if (isMongoose(_payload) && 'collections' in _payload.db) {
-    const mongooseCollections = _payload.db.collections[collectionSlugs[0]].db.collections
+    const firstCollectionSlug = collectionSlugs?.[0]
+
+    if (!firstCollectionSlug?.length) {
+      throw new Error('No collection slugs provided to reset the database.')
+    }
+
+    const mongooseCollections = _payload.db.collections[firstCollectionSlug]?.db.collections

     await createMongooseSnapshot(mongooseCollections, snapshotKey)
   } else {
@@ -1,8 +1,7 @@
-import { MongoMemoryReplSet } from 'mongodb-memory-server'
 import dotenv from 'dotenv'
+import { MongoMemoryReplSet } from 'mongodb-memory-server'
 dotenv.config()

-
 // eslint-disable-next-line no-restricted-exports
 export default async () => {
   // @ts-expect-error
@@ -23,8 +22,11 @@ export default async () => {
       },
     })

+    await db.waitUntilRunning()
+
     global._mongoMemoryServer = db

     process.env.MONGODB_MEMORY_SERVER_URI = `${global._mongoMemoryServer.getUri()}&retryWrites=true`
+    console.log('Started memory db')
   }
 }
@@ -5,4 +5,5 @@ export default async () => {
     await global._mongoMemoryServer.stop()
     console.log('Stopped memorydb')
   }
+  process.exit(0)
 }
@@ -5,12 +5,10 @@ import { getFileByPath } from 'payload'
 import { fileURLToPath } from 'url'

 import { devUser } from '../credentials.js'
-import { seedDB } from '../helpers/seed.js'
 import {
   categoriesJoinRestrictedSlug,
   categoriesSlug,
   collectionRestrictedSlug,
-  collectionSlugs,
   hiddenPostsSlug,
   postsSlug,
   uploadsSlug,
@@ -215,12 +213,3 @@ export const seed = async (_payload: Payload) => {
     data: { title: 'post 5', description: 'This is post 5', folder: sub_folder_2 },
   })
 }
-
-export async function clearAndSeedEverything(_payload: Payload) {
-  return await seedDB({
-    _payload,
-    collectionSlugs,
-    seedFunction: seed,
-    snapshotKey: 'joinsTest',
-  })
-}
@@ -20,7 +20,7 @@ import RichTextFields from './collections/RichText/index.js'
 import TextFields from './collections/Text/index.js'
 import Uploads from './collections/Upload/index.js'
 import TabsWithRichText from './globals/TabsWithRichText.js'
-import { clearAndSeedEverything } from './seed.js'
+import { seed } from './seed.js'

 const filename = fileURLToPath(import.meta.url)
 const dirname = path.dirname(filename)
@@ -60,8 +60,9 @@ export const baseConfig: Partial<Config> = {
     },
   },
   onInit: async (payload) => {
+    // IMPORTANT: This should only seed, not clear the database.
     if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
-      await clearAndSeedEverything(payload)
+      await seed(payload)
     }
   },
   localization: {
@@ -958,6 +958,7 @@ describe('lexicalMain', () => {

   test('ensure internal links can be created', async () => {
     await navigateToLexicalFields()
+    await wait(200)
     const richTextField = page.locator('.rich-text-lexical').first()
     await richTextField.scrollIntoViewIfNeeded()
     await expect(richTextField).toBeVisible()
@@ -970,11 +971,15 @@ describe('lexicalMain', () => {
     const paragraph = richTextField.locator('.LexicalEditorTheme__paragraph').first()
     await paragraph.scrollIntoViewIfNeeded()
     await expect(paragraph).toBeVisible()
+    await wait(200)
+
     /**
      * Type some text
      */
     await paragraph.click()
+    await wait(200)
     await page.keyboard.type('Link')
+    await wait(200)

     // Select "Link" by pressing shift + arrow left
     for (let i = 0; i < 4; i++) {
@@ -986,6 +991,7 @@ describe('lexicalMain', () => {

     const linkButton = inlineToolbar.locator('.toolbar-popup__button-link')
     await expect(linkButton).toBeVisible()
+    await wait(200)
     await linkButton.click()

     /**
@@ -1005,16 +1011,20 @@ describe('lexicalMain', () => {
       .locator('.radio-input__styled-radio')

     await radioInternalLink.click()
+    await wait(200)

     const internalLinkSelect = linkDrawer
       .locator('#field-doc .rs__control .value-container')
       .first()
     await internalLinkSelect.click()
+    await wait(200)
+
     await expect(linkDrawer.locator('.rs__option').nth(0)).toBeVisible()
     await expect(linkDrawer.locator('.rs__option').nth(0)).toContainText('Rich Text') // Link to itself - that way we can also test if depth 0 works
     await linkDrawer.locator('.rs__option').nth(0).click()

     await expect(internalLinkSelect).toContainText('Rich Text')
+    await wait(200)

     await linkDrawer.locator('button').getByText('Save').first().click()
     await expect(linkDrawer).toBeHidden()
@@ -2,8 +2,7 @@ import type { Payload } from 'payload'

 import { devUser, regularUser } from '../credentials.js'
 import { executePromises } from '../helpers/executePromises.js'
-import { seedDB } from '../helpers/seed.js'
-import { collectionSlugs, pagesSlug, postsSlug } from './slugs.js'
+import { pagesSlug, postsSlug } from './slugs.js'

 export const seed = async (_payload: Payload) => {
   await executePromises(
@@ -46,12 +45,3 @@ export const seed = async (_payload: Payload) => {
     false,
   )
 }
-
-export async function clearAndSeedEverything(_payload: Payload) {
-  return await seedDB({
-    _payload,
-    collectionSlugs,
-    seedFunction: seed,
-    snapshotKey: 'adminTests',
-  })
-}
@@ -83,6 +83,7 @@
     "next": "15.3.2",
     "nodemailer": "6.9.16",
     "payload": "workspace:*",
+    "pg": "8.11.3",
     "qs-esm": "7.0.2",
     "react": "19.1.0",
     "react-dom": "19.1.0",
@@ -2,9 +2,7 @@ import type { Payload, QueryPreset } from 'payload'

 import { devUser as devCredentials, regularUser as regularCredentials } from '../credentials.js'
 import { executePromises } from '../helpers/executePromises.js'
-import { seedDB } from '../helpers/seed.js'
-import { collectionSlugs, pagesSlug, usersSlug } from './slugs.js'
+import { pagesSlug, usersSlug } from './slugs.js'

 type SeededQueryPreset = {
   relatedCollection: 'pages'
@@ -187,12 +186,3 @@ export const seed = async (_payload: Payload) => {
     false,
   )
 }
-
-export async function clearAndSeedEverything(_payload: Payload) {
-  return await seedDB({
-    _payload,
-    collectionSlugs,
-    seedFunction: seed,
-    snapshotKey: 'adminTests',
-  })
-}
@@ -7,7 +7,7 @@ import path from 'path'
 import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
 import { devUser } from '../credentials.js'
 import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js'
-import { clearAndSeedEverything } from './seed.js'
+import { seed } from './seed.js'
 import { externalWorkflow } from './workflows/externalWorkflow.js'
 import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js'
 import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js'
@@ -394,7 +394,7 @@ export default buildConfigWithDefaults({
   editor: lexicalEditor(),
   onInit: async (payload) => {
     if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
-      await clearAndSeedEverything(payload)
+      await seed(payload)
     }
   },
   typescript: {
@@ -114,7 +114,6 @@ describe('Versions', () => {
     })

     await ensureCompilationIsDone({ page, serverURL })
-    //await clearAndSeedEverything(payload)
   })

   describe('draft collections', () => {