fix(richtext-lexical): upload, relationship and block node insertion fails sometimes

Alessio Gravili
2024-05-16 16:02:53 -04:00
committed by GitHub
parent 9556d1bd42
commit 608387084c
14 changed files with 220 additions and 112 deletions
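Most of the test-suite changes below follow one pattern: the e2e helpers' uploadsDir option now accepts either a single directory or an array of directories, and the uploads paths move under each suite's ./collections folder. A minimal sketch of how a suite might call the updated helper; the wrapper name resetForSuite, the helper import path, and the serverURL value are illustrative assumptions, not taken from this commit:

import path from 'path'
import { fileURLToPath } from 'url'
// Assumption: module path of the helper shown later in this diff (it exports reInitializeDB)
import { reInitializeDB } from '../helpers/reInitializeDB.js'

const dirname = path.dirname(fileURLToPath(import.meta.url))
const serverURL = 'http://localhost:3000' // assumption: normally provided by the suite's setup

async function resetForSuite(): Promise<void> {
  // Single uploads directory: previous behavior, still supported
  await reInitializeDB({
    serverURL,
    snapshotKey: 'fieldsLexicalTest',
    uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
  })

  // Array of uploads directories: new in this commit, used by the lexical suite below
  await reInitializeDB({
    serverURL,
    snapshotKey: 'fieldsLexicalTest',
    uploadsDir: [
      path.resolve(dirname, './collections/Upload/uploads'),
      path.resolve(dirname, './collections/Upload2/uploads2'),
    ],
  })
}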

View File

@@ -46,7 +46,7 @@ describe('Array', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsArrayTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
await ensureAutoLoginAndCompilationIsDone({ page, serverURL })
})
@@ -54,7 +54,7 @@ describe('Array', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsArrayTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
if (client) {

View File

@@ -41,7 +41,7 @@ describe('Block fields', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'blockFieldsTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
await ensureAutoLoginAndCompilationIsDone({ page, serverURL })
})
@@ -49,7 +49,7 @@ describe('Block fields', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'blockFieldsTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
if (client) {

View File

@@ -73,7 +73,7 @@ describe('lexical', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsLexicalTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
await ensureAutoLoginAndCompilationIsDone({ page, serverURL })
})
@@ -86,7 +86,10 @@ describe('lexical', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsLexicalTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: [
+ path.resolve(dirname, './collections/Upload/uploads'),
+ path.resolve(dirname, './collections/Upload2/uploads2'),
+ ],
})
if (client) {
@@ -356,6 +359,85 @@ describe('lexical', () => {
await expect(reactSelect.locator('.rs__value-container').first()).toHaveText('Option 3')
})
+ // Reproduces an issue where creating an upload node opens the list drawer; selecting a collection other than the default one, creating a NEW upload document there, and saving then threw a lexical error
+ test('ensure creation of new upload document within upload node works', async () => {
+ await navigateToLexicalFields()
+ const richTextField = page.locator('.rich-text-lexical').nth(1) // second
+ await richTextField.scrollIntoViewIfNeeded()
+ await expect(richTextField).toBeVisible()
+ const lastParagraph = richTextField.locator('p').last()
+ await lastParagraph.scrollIntoViewIfNeeded()
+ await expect(lastParagraph).toBeVisible()
+ /**
+ * Create new upload node
+ */
+ // type / to open the slash menu
+ await lastParagraph.click()
+ await page.keyboard.press('/')
+ await page.keyboard.type('Upload')
+ // Create Upload node
+ const slashMenuPopover = page.locator('#slash-menu .slash-menu-popup')
+ await expect(slashMenuPopover).toBeVisible()
+ const uploadSelectButton = slashMenuPopover.locator('button').nth(1)
+ await expect(uploadSelectButton).toBeVisible()
+ await expect(uploadSelectButton).toContainText('Upload')
+ await uploadSelectButton.click()
+ await expect(slashMenuPopover).toBeHidden()
+ await wait(500) // wait for drawer form state to initialize (it's a flake)
+ const uploadListDrawer = page.locator('dialog[id^=list-drawer_1_]').first() // IDs starting with list-drawer_1_ (there's some other symbol after the underscore)
+ await expect(uploadListDrawer).toBeVisible()
+ await wait(500)
+ await uploadListDrawer.locator('.rs__control .value-container').first().click()
+ await wait(500)
+ await expect(uploadListDrawer.locator('.rs__option').nth(1)).toBeVisible()
+ await expect(uploadListDrawer.locator('.rs__option').nth(1)).toContainText('Upload 2')
+ await uploadListDrawer.locator('.rs__option').nth(1).click()
+ // wait till the text appears in uploadListDrawer: "No Uploads 2 found. Either no Uploads 2 exist yet or none match the filters you've specified above."
+ await expect(
+ uploadListDrawer.getByText(
+ "No Uploads 2 found. Either no Uploads 2 exist yet or none match the filters you've specified above.",
+ ),
+ ).toBeVisible()
+ await uploadListDrawer.getByText('Create New').first().click()
+ const createUploadDrawer = page.locator('dialog[id^=doc-drawer_uploads2_]').first() // IDs starting with doc-drawer_uploads2_ (there's some other symbol after the underscore)
+ await expect(createUploadDrawer).toBeVisible()
+ await wait(500)
+ const input = createUploadDrawer.locator('.file-field__upload input[type="file"]').first()
+ await expect(input).toBeAttached()
+ await input.setInputFiles(path.resolve(dirname, './collections/Upload/payload.jpg'))
+ await expect(createUploadDrawer.locator('.file-field .file-field__filename')).toHaveValue(
+ 'payload.jpg',
+ )
+ await wait(500)
+ await createUploadDrawer.getByText('Save').first().click()
+ await expect(createUploadDrawer).toBeHidden()
+ await expect(uploadListDrawer).toBeHidden()
+ await wait(500)
+ await saveDocAndAssert(page)
+ // second one should be the newly created one
+ const secondUploadNode = richTextField.locator('.lexical-upload').nth(1)
+ await secondUploadNode.scrollIntoViewIfNeeded()
+ await expect(secondUploadNode).toBeVisible()
+ await expect(secondUploadNode.locator('.lexical-upload__bottomRow')).toContainText(
+ 'payload.jpg',
+ )
+ await expect(secondUploadNode.locator('.lexical-upload__collectionLabel')).toContainText(
+ 'Upload 2',
+ )
+ })
describe('nested lexical editor in block', () => {
test('should type and save typed text', async () => {
await navigateToLexicalFields()
@@ -850,7 +932,7 @@ describe('lexical', () => {
const lexicalField: SerializedEditorState = lexicalDoc.lexicalWithBlocks
const richTextBlock: SerializedBlockNode = lexicalField.root
- .children[12] as SerializedBlockNode
+ .children[13] as SerializedBlockNode
const subRichTextBlock: SerializedBlockNode = richTextBlock.fields.richTextField.root
.children[1] as SerializedBlockNode // index 0 and 2 are paragraphs created by default around the block node when a new block is added via slash command
@@ -894,7 +976,7 @@ describe('lexical', () => {
const lexicalField2: SerializedEditorState = lexicalDocDepth1.lexicalWithBlocks
const richTextBlock2: SerializedBlockNode = lexicalField2.root
- .children[12] as SerializedBlockNode
+ .children[13] as SerializedBlockNode
const subRichTextBlock2: SerializedBlockNode = richTextBlock2.fields.richTextField.root
.children[1] as SerializedBlockNode // index 0 and 2 are paragraphs created by default around the block node when a new block is added via slash command

View File

@@ -48,7 +48,7 @@ describe('relationship', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsRelationshipTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
await ensureAutoLoginAndCompilationIsDone({ page, serverURL })
})
@@ -56,7 +56,7 @@ describe('relationship', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsRelationshipTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
if (client) {

View File

@@ -41,7 +41,7 @@ describe('Rich Text', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsRichTextTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
await ensureAutoLoginAndCompilationIsDone({ page, serverURL })
})
@@ -49,7 +49,7 @@ describe('Rich Text', () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsRichTextTest',
- uploadsDir: path.resolve(dirname, '../Upload/uploads'),
+ uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
if (client) {

View File

@@ -17,7 +17,8 @@ const handler: PayloadHandler = async (req) => {
collectionSlugs: payload.config.collections.map(({ slug }) => slug),
seedFunction: payload.config.onInit,
snapshotKey: String(data.snapshotKey),
- uploadsDir: String(data.uploadsDir),
+ // uploadsDir can be a string or an array of strings
+ uploadsDir: data.uploadsDir as string | string[],
})
return Response.json(

View File

@@ -7,7 +7,7 @@ export const reInitializeDB = async ({
}: {
serverURL: string
snapshotKey: string
- uploadsDir?: string
+ uploadsDir?: string | string[]
}) => {
await fetch(`${serverURL}/api${path}`, {
method: 'post',

View File

@@ -28,7 +28,7 @@ export async function seedDB({
* Key to uniquely identify the kind of snapshot. Each test suite should pass in a unique key
*/
snapshotKey: string
- uploadsDir?: string
+ uploadsDir?: string | string[]
}) {
/**
* Reset database
@@ -38,18 +38,21 @@ export async function seedDB({
* Delete uploads directory if it exists
*/
if (uploadsDir) {
- try {
- // Attempt to clear the uploads directory if it exists
- await fs.promises.access(uploadsDir)
- const files = await fs.promises.readdir(uploadsDir)
- for (const file of files) {
- await fs.promises.rm(path.join(uploadsDir, file))
- }
- } catch (error) {
- if (error.code !== 'ENOENT') {
- // If the error is not because the directory doesn't exist
- console.error('Error in operation (deleting uploads dir):', error)
- throw error
+ const uploadsDirs = Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
+ for (const dir of uploadsDirs) {
+ try {
+ // Attempt to clear the uploads directory if it exists
+ await fs.promises.access(dir)
+ const files = await fs.promises.readdir(dir)
+ for (const file of files) {
+ await fs.promises.rm(path.join(dir, file))
+ }
+ } catch (error) {
+ if (error.code !== 'ENOENT') {
+ // If the error is not because the directory doesn't exist
+ console.error('Error in operation (deleting uploads dir):', dir, error)
+ throw error
+ }
}
}
}
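The block above also shows the pattern the rest of seedDB repeats for multi-directory support: coerce the string | string[] union to an array and iterate. As a standalone sketch (the helper name normalizeUploadsDirs is hypothetical; the committed code inlines the Array.isArray ternary instead):

// Hypothetical helper illustrating the normalization used throughout seedDB
function normalizeUploadsDirs(uploadsDir?: string | string[]): string[] {
  if (!uploadsDir) {
    return []
  }
  return Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
}

// normalizeUploadsDirs('/a/uploads')   => ['/a/uploads']
// normalizeUploadsDirs(['/a', '/b'])   => ['/a', '/b']
// normalizeUploadsDirs(undefined)      => []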
@@ -67,32 +70,36 @@ export async function seedDB({
/**
* Restore uploads dir if it exists
*/
- if (uploadsDir && fs.existsSync(uploadsDirCache[snapshotKey])) {
- // move all files from inside uploadsDirCacheFolder to uploadsDir
- await fs.promises
- .readdir(uploadsDirCache[snapshotKey], { withFileTypes: true })
- .then(async (files) => {
- for (const file of files) {
- if (file.isDirectory()) {
- await fs.promises.mkdir(path.join(uploadsDir, file.name), {
- recursive: true,
- })
- await fs.promises.copyFile(
- path.join(uploadsDirCache[snapshotKey], file.name),
- path.join(uploadsDir, file.name),
- )
- } else {
- await fs.promises.copyFile(
- path.join(uploadsDirCache[snapshotKey], file.name),
- path.join(uploadsDir, file.name),
- )
- }
- }
- })
- .catch((err) => {
- console.error('Error in operation (restoring uploads dir):', err)
- throw err
- })
+ if (uploadsDirCache[snapshotKey]) {
+ for (const cache of uploadsDirCache[snapshotKey]) {
+ if (cache.originalDir && fs.existsSync(cache.cacheDir)) {
+ // move all files from inside uploadsDirCacheFolder to uploadsDir
+ await fs.promises
+ .readdir(cache.cacheDir, { withFileTypes: true })
+ .then(async (files) => {
+ for (const file of files) {
+ if (file.isDirectory()) {
+ await fs.promises.mkdir(path.join(cache.originalDir, file.name), {
+ recursive: true,
+ })
+ await fs.promises.copyFile(
+ path.join(cache.cacheDir, file.name),
+ path.join(cache.originalDir, file.name),
+ )
+ } else {
+ await fs.promises.copyFile(
+ path.join(cache.cacheDir, file.name),
+ path.join(cache.originalDir, file.name),
+ )
+ }
+ }
+ })
+ .catch((err) => {
+ console.error('Error in operation (restoring uploads dir):', err)
+ throw err
+ })
+ }
+ }
}
restored = true
@@ -132,45 +139,61 @@ export async function seedDB({
/**
* Cache uploads dir to a cache folder if uploadsDir exists
*/
- if (!alwaysSeed && uploadsDir && fs.existsSync(uploadsDir)) {
- if (!uploadsDirCache[snapshotKey]) {
- // Define new cache folder path to the OS temp directory (well a random folder inside it)
- uploadsDirCache[snapshotKey] = path.join(
- os.tmpdir(),
- `${snapshotKey}`,
- `payload-e2e-tests-uploads-cache`,
- )
- }
- // delete the cache folder if it exists
- if (fs.existsSync(uploadsDirCache[snapshotKey])) {
- await fs.promises.rm(uploadsDirCache[snapshotKey], { recursive: true })
- }
- await fs.promises.mkdir(uploadsDirCache[snapshotKey], { recursive: true })
- // recursively move all files and directories from uploadsDir to uploadsDirCacheFolder
- await fs.promises
- .readdir(uploadsDir, { withFileTypes: true })
- .then(async (files) => {
- for (const file of files) {
- if (file.isDirectory()) {
- await fs.promises.mkdir(path.join(uploadsDirCache[snapshotKey], file.name), {
- recursive: true,
- })
- await fs.promises.copyFile(
- path.join(uploadsDir, file.name),
- path.join(uploadsDirCache[snapshotKey], file.name),
- )
- } else {
- await fs.promises.copyFile(
- path.join(uploadsDir, file.name),
- path.join(uploadsDirCache[snapshotKey], file.name),
- )
+ if (!alwaysSeed && uploadsDir) {
+ const uploadsDirs = Array.isArray(uploadsDir) ? uploadsDir : [uploadsDir]
+ for (const dir of uploadsDirs) {
+ if (dir && fs.existsSync(dir)) {
+ if (!uploadsDirCache[snapshotKey]) {
+ uploadsDirCache[snapshotKey] = []
+ }
+ let newObj: {
+ cacheDir: string
+ originalDir: string
+ } = null
+ if (!uploadsDirCache[snapshotKey].find((cache) => cache.originalDir === dir)) {
+ // Define new cache folder path to the OS temp directory (well a random folder inside it)
+ newObj = {
+ originalDir: dir,
+ cacheDir: path.join(os.tmpdir(), `${snapshotKey}`, `payload-e2e-tests-uploads-cache`),
+ }
+ }
- })
- .catch((err) => {
- console.error('Error in operation (creating snapshot of uploads dir):', err)
- throw err
- })
+ if (!newObj) {
+ continue
+ }
+ // delete the cache folder if it exists
+ if (fs.existsSync(newObj.cacheDir)) {
+ await fs.promises.rm(newObj.cacheDir, { recursive: true })
+ }
+ await fs.promises.mkdir(newObj.cacheDir, { recursive: true })
+ // recursively move all files and directories from uploadsDir to uploadsDirCacheFolder
+ try {
+ const files = await fs.promises.readdir(newObj.originalDir, { withFileTypes: true })
+ for (const file of files) {
+ if (file.isDirectory()) {
+ await fs.promises.mkdir(path.join(newObj.cacheDir, file.name), {
+ recursive: true,
+ })
+ await fs.promises.copyFile(
+ path.join(newObj.originalDir, file.name),
+ path.join(newObj.cacheDir, file.name),
+ )
+ } else {
+ await fs.promises.copyFile(
+ path.join(newObj.originalDir, file.name),
+ path.join(newObj.cacheDir, file.name),
+ )
+ }
+ }
+ uploadsDirCache[snapshotKey].push(newObj)
+ } catch (e) {
+ console.error('Error in operation (creating snapshot of uploads dir):', e)
+ throw e
+ }
+ }
}
}
}

View File

@@ -7,7 +7,10 @@ import { sql } from 'drizzle-orm'
import { isMongoose } from './isMongoose.js'
export const uploadsDirCache: {
- [key: string]: string
+ [key: string]: {
+ cacheDir: string
+ originalDir: string
+ }[]
} = {}
export const dbSnapshot = {}
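
To make the new cache shape concrete: after the snapshot step runs for a suite that passes two uploads directories, uploadsDirCache holds one { originalDir, cacheDir } entry per original directory under the snapshot key, and the restore step walks those entries to copy files back. Note that, as written in the diff, every entry under a given snapshot key points at the same cacheDir under os.tmpdir(). A hypothetical example of the resulting value (paths are illustrative, not from this commit):

import os from 'os'
import path from 'path'

// Illustrative value for the new cache shape; originalDir paths are made up
const exampleCache: {
  [key: string]: {
    cacheDir: string
    originalDir: string
  }[]
} = {
  fieldsLexicalTest: [
    {
      originalDir: '/repo/test/fields/collections/Upload/uploads',
      cacheDir: path.join(os.tmpdir(), 'fieldsLexicalTest', 'payload-e2e-tests-uploads-cache'),
    },
    {
      originalDir: '/repo/test/fields/collections/Upload2/uploads2',
      cacheDir: path.join(os.tmpdir(), 'fieldsLexicalTest', 'payload-e2e-tests-uploads-cache'),
    },
  ],
}

// The restore loop iterates these entries and copies each cacheDir's files back into its originalDir
console.log(exampleCache.fieldsLexicalTest.map((entry) => entry.originalDir))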