feat(richtext-lexical): support copy & pasting and drag & dropping files/images into the editor (#13868)
This PR adds support for inserting images into the rich text editor via both **copy & paste** and **drag & drop**, whether from local files or image DOM nodes. It leverages the bulk uploads UI to provide a smooth workflow for:

- Selecting the target collection
- Filling in any required fields defined on the uploads collection
- Uploading multiple images at once

This significantly improves the UX for adding images to rich text, and also works seamlessly when pasting images from external editors like Google Docs or Microsoft Word.

Test pre-release: `3.57.0-internal.801ab5a`

## Showcase - drag & drop images from computer

https://github.com/user-attachments/assets/c558c034-d2e4-40d8-9035-c0681389fb7b

## Showcase - copy & paste images from computer

https://github.com/user-attachments/assets/f36faf94-5274-4151-b141-00aff2b0efa4

## Showcase - copy & paste image DOM nodes

https://github.com/user-attachments/assets/2839ed0f-3f28-4e8d-8b47-01d0cb947edc

---

- To see the specific tasks where the Asana app for GitHub is being used, see below:
  - https://app.asana.com/0/0/1211217132290841
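For orientation, here is a minimal sketch of the flow the new e2e tests below exercise: focus the editor, simulate pasting an image file, complete the bulk uploads drawer, and assert that an upload node was inserted. It is condensed from the new `db/e2e.spec.ts` in this diff and assumes a `LexicalHelpers` instance (`lexical`) and `dirname` are already set up as in that spec; it is not the full suite.

```ts
import { expect, test } from '@playwright/test'
import path from 'path'

// Assumes `lexical` (LexicalHelpers) and `dirname` are initialized as in the
// new db/e2e.spec.ts below; this sketch only shows the paste-to-upload flow.
test('pasting an image opens the bulk upload drawer and inserts an upload node', async () => {
  const filePath = path.resolve(dirname, './collections/Upload/payload.jpg')

  // Focus the rich text editor, then simulate pasting the image as a file blob.
  await lexical.editor.first().focus()
  await lexical.pasteFile({ filePath, mode: 'blob' })

  // The bulk uploads drawer opens so required fields can be filled before saving.
  await expect(lexical.drawer).toBeVisible()
  await lexical.drawer.locator('.bulk-upload--actions-bar').getByText('Save').click()
  await expect(lexical.drawer).toBeHidden()

  // The editor now contains a single upload node referencing the new document.
  await expect(lexical.editor.locator('.lexical-upload')).toHaveCount(1)
})
```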
```diff
@@ -22,7 +22,7 @@ import { OnDemandForm } from './collections/OnDemandForm/index.js'
 import { OnDemandOutsideForm } from './collections/OnDemandOutsideForm/index.js'
 import RichTextFields from './collections/RichText/index.js'
 import TextFields from './collections/Text/index.js'
-import Uploads from './collections/Upload/index.js'
+import { Uploads, Uploads2 } from './collections/Upload/index.js'
 import TabsWithRichText from './globals/TabsWithRichText.js'
 import { seed } from './seed.js'
@@ -49,6 +49,7 @@ export const baseConfig: Partial<Config> = {
     RichTextFields,
     TextFields,
     Uploads,
+    Uploads2,
     ArrayFields,
     OnDemandForm,
     OnDemandOutsideForm,
@@ -60,9 +61,15 @@ export const baseConfig: Partial<Config> = {
       baseDir: path.resolve(dirname),
     },
     components: {
+      views: {
+        custom: {
+          Component: './components/Image.js#Image',
+          path: '/custom-image',
+        },
+      },
       beforeDashboard: [
         {
-          path: './components/CollectionsExplained.tsx#CollectionsExplained',
+          path: './components/CollectionsExplained.js#CollectionsExplained',
         },
       ],
     },
```
```diff
@@ -1,6 +1,5 @@
import { expect, test } from '@playwright/test'
import { AdminUrlUtil } from 'helpers/adminUrlUtil.js'
import { reInitializeDB } from 'helpers/reInitializeDB.js'
import { lexicalHeadingFeatureSlug } from 'lexical/slugs.js'
import path from 'path'
import { fileURLToPath } from 'url'
```
```diff
@@ -3,11 +3,11 @@ import type { CollectionConfig } from 'payload'
 import path from 'path'
 import { fileURLToPath } from 'url'

-import { uploadsSlug } from '../../slugs.js'
+import { uploads2Slug, uploadsSlug } from '../../slugs.js'
 const filename = fileURLToPath(import.meta.url)
 const dirname = path.dirname(filename)

-const Uploads: CollectionConfig = {
+export const Uploads: CollectionConfig = {
   slug: uploadsSlug,
   fields: [
     {
@@ -34,4 +34,14 @@ const Uploads: CollectionConfig = {
   },
 }

-export default Uploads
+export const Uploads2: CollectionConfig = {
+  ...Uploads,
+  slug: uploads2Slug,
+  fields: [
+    ...Uploads.fields,
+    {
+      name: 'altText',
+      type: 'text',
+    },
+  ],
+}
```
test/lexical/collections/_LexicalFullyFeatured/db/e2e.spec.ts (new file, 127 lines)

```ts
import { expect, type Page, test } from '@playwright/test'
import { lexicalFullyFeaturedSlug } from 'lexical/slugs.js'
import path from 'path'
import { fileURLToPath } from 'url'

import type { PayloadTestSDK } from '../../../../helpers/sdk/index.js'
import type { Config } from '../../../payload-types.js'

import { ensureCompilationIsDone, saveDocAndAssert } from '../../../../helpers.js'
import { AdminUrlUtil } from '../../../../helpers/adminUrlUtil.js'
import { initPayloadE2ENoConfig } from '../../../../helpers/initPayloadE2ENoConfig.js'
import { reInitializeDB } from '../../../../helpers/reInitializeDB.js'
import { TEST_TIMEOUT_LONG } from '../../../../playwright.config.js'
import { LexicalHelpers, type PasteMode } from '../../utils.js'

const filename = fileURLToPath(import.meta.url)
const currentFolder = path.dirname(filename)
const dirname = path.resolve(currentFolder, '../../../')

let payload: PayloadTestSDK<Config>
let serverURL: string

const { beforeAll, beforeEach, describe } = test

// This test suite resets the database before each test to ensure a clean state and cannot be run in parallel.
// Use this for tests that modify the database.
describe('Lexical Fully Featured - database', () => {
  let lexical: LexicalHelpers
  let url: AdminUrlUtil
  beforeAll(async ({ browser }, testInfo) => {
    testInfo.setTimeout(TEST_TIMEOUT_LONG)
    process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
    ;({ payload, serverURL } = await initPayloadE2ENoConfig<Config>({ dirname }))

    const page = await browser.newPage()
    await ensureCompilationIsDone({ page, serverURL })
    await page.close()
  })
  beforeEach(async ({ page }) => {
    await reInitializeDB({
      serverURL,
      snapshotKey: 'lexicalTest',
      uploadsDir: [path.resolve(dirname, './collections/Upload/uploads')],
    })
    url = new AdminUrlUtil(serverURL, lexicalFullyFeaturedSlug)
    lexical = new LexicalHelpers(page)
    await page.goto(url.create)
    await lexical.editor.first().focus()
  })

  describe('auto upload', () => {
    const filePath = path.resolve(dirname, './collections/Upload/payload.jpg')

    async function uploadsTest(page: Page, mode: 'cmd+v' | PasteMode, expectedFileName?: string) {
      if (mode === 'cmd+v') {
        await page.keyboard.press('Meta+V')
        await page.keyboard.press('Control+V')
      } else {
        await lexical.pasteFile({ filePath, mode })
      }

      await expect(lexical.drawer).toBeVisible()
      await lexical.drawer.locator('.bulk-upload--actions-bar').getByText('Save').click()
      await expect(lexical.drawer).toBeHidden()

      await expect(lexical.editor.locator('.lexical-upload')).toHaveCount(1)
      await expect(lexical.editor.locator('.lexical-upload__doc-drawer-toggler')).toHaveText(
        expectedFileName || 'payload-1.jpg',
      )

      const uploadedImage = await payload.find({
        collection: 'uploads',
        where: { filename: { equals: expectedFileName || 'payload-1.jpg' } },
      })
      expect(uploadedImage.totalDocs).toBe(1)
    }

    // eslint-disable-next-line playwright/expect-expect
    test('ensure auto upload by copy & pasting image works when pasting a blob', async ({
      page,
    }) => {
      await uploadsTest(page, 'blob')
    })

    // eslint-disable-next-line playwright/expect-expect
    test('ensure auto upload by copy & pasting image works when pasting as html', async ({
      page,
    }) => {
      // blob will be put in src of img tag => cannot infer file name
      await uploadsTest(page, 'html', 'pasted-image.jpeg')
    })

    test('ensure auto upload by copy & pasting image works when pasting from website', async ({
      page,
    }) => {
      await page.goto(url.admin + '/custom-image')
      await page.keyboard.press('Meta+A')
      await page.keyboard.press('Control+A')

      await page.keyboard.press('Meta+C')
      await page.keyboard.press('Control+C')

      await page.goto(url.create)
      await lexical.editor.first().focus()
      await expect(lexical.editor).toBeFocused()

      await uploadsTest(page, 'cmd+v')

      // Save page
      await saveDocAndAssert(page)

      const lexicalFullyFeatured = await payload.find({
        collection: lexicalFullyFeaturedSlug,
        limit: 1,
      })
      const richText = lexicalFullyFeatured?.docs?.[0]?.richText

      const headingNode = richText?.root?.children[0]
      expect(headingNode).toBeDefined()
      expect(headingNode?.children?.[1]?.text).toBe('This is an image:')

      const uploadNode = richText?.root?.children?.[1]?.children?.[0]
      // @ts-expect-error unsafe access is fine in tests
      expect(uploadNode.value?.filename).toBe('payload-1.jpg')
    })
  })
})
```
```diff
@@ -1,33 +1,37 @@
 import { expect, test } from '@playwright/test'
-import { AdminUrlUtil } from 'helpers/adminUrlUtil.js'
-import { reInitializeDB } from 'helpers/reInitializeDB.js'
-import { lexicalFullyFeaturedSlug } from 'lexical/slugs.js'
 import path from 'path'
 import { fileURLToPath } from 'url'

+import type { PayloadTestSDK } from '../../../helpers/sdk/index.js'
+import type { Config } from '../../payload-types.js'
+
 import { ensureCompilationIsDone } from '../../../helpers.js'
+import { AdminUrlUtil } from '../../../helpers/adminUrlUtil.js'
 import { initPayloadE2ENoConfig } from '../../../helpers/initPayloadE2ENoConfig.js'
+import { lexicalFullyFeaturedSlug } from '../../../lexical/slugs.js'
 import { TEST_TIMEOUT_LONG } from '../../../playwright.config.js'
 import { LexicalHelpers } from '../utils.js'

 const filename = fileURLToPath(import.meta.url)
 const currentFolder = path.dirname(filename)
 const dirname = path.resolve(currentFolder, '../../')

+let payload: PayloadTestSDK<Config>
+let serverURL: string
+
 const { beforeAll, beforeEach, describe } = test

+// Unlike the other suites, this one runs in parallel, as they run on the `lexical-fully-featured/create` URL and are "pure" tests
+// PLEASE do not reset the database or perform any operations that modify it in this file.
+test.describe.configure({ mode: 'parallel' })

-const { serverURL } = await initPayloadE2ENoConfig({
-  dirname,
-})
-
 describe('Lexical Fully Featured', () => {
   let lexical: LexicalHelpers
   beforeAll(async ({ browser }, testInfo) => {
     testInfo.setTimeout(TEST_TIMEOUT_LONG)
     process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
+    ;({ payload, serverURL } = await initPayloadE2ENoConfig<Config>({ dirname }))

     const page = await browser.newPage()
     await ensureCompilationIsDone({ page, serverURL })
     await page.close()
```
```diff
@@ -1,8 +1,35 @@
 import type { Locator, Page } from 'playwright'

 import { expect } from '@playwright/test'
+import fs from 'fs'
+import path from 'path'
 import { wait } from 'payload/shared'

+export type PasteMode = 'blob' | 'html'
+
+function inferMimeFromExt(ext: string): string {
+  switch (ext.toLowerCase()) {
+    case '.gif':
+      return 'image/gif'
+    case '.jpeg':
+    case '.jpg':
+      return 'image/jpeg'
+    case '.png':
+      return 'image/png'
+    case '.svg':
+      return 'image/svg+xml'
+    case '.webp':
+      return 'image/webp'
+    default:
+      return 'application/octet-stream'
+  }
+}
+
+async function readAsBase64(filePath: string): Promise<string> {
+  const buf = await fs.promises.readFile(filePath)
+  return Buffer.from(buf).toString('base64')
+}
+
 export class LexicalHelpers {
   page: Page
   constructor(page: Page) {
@@ -89,6 +116,8 @@ export class LexicalHelpers {
   }

   async paste(type: 'html' | 'markdown', text: string) {
+    await this.page.context().grantPermissions(['clipboard-read', 'clipboard-write'])
+
     await this.page.evaluate(
       async ([text, type]) => {
         const blob = new Blob([text!], { type: type === 'html' ? 'text/html' : 'text/markdown' })
@@ -100,6 +129,54 @@ export class LexicalHelpers {
     await this.page.keyboard.press(`ControlOrMeta+v`)
   }

+  async pasteFile({ filePath, mode: modeFromArgs }: { filePath: string; mode?: PasteMode }) {
+    const mode: PasteMode = modeFromArgs ?? 'blob'
+    const name = path.basename(filePath)
+    const mime = inferMimeFromExt(path.extname(name))
+
+    // Build payloads per mode
+    let payload:
+      | { bytes: number[]; kind: 'blob'; mime: string; name: string }
+      | { html: string; kind: 'html' } = { html: '', kind: 'html' }
+
+    if (mode === 'blob') {
+      const buf = await fs.promises.readFile(filePath)
+      payload = { kind: 'blob', bytes: Array.from(buf), name, mime }
+    } else if (mode === 'html') {
+      const b64 = await readAsBase64(filePath)
+      const src = `data:${mime};base64,${b64}`
+      const html = `<img src="${src}" alt="${name}">`
+      payload = { kind: 'html', html }
+    }
+
+    await this.page.evaluate((p) => {
+      const target =
+        (document.activeElement as HTMLElement | null) ||
+        document.querySelector('[contenteditable="true"]') ||
+        document.body
+
+      const dt = new DataTransfer()
+
+      if (p.kind === 'blob') {
+        const file = new File([new Uint8Array(p.bytes)], p.name, { type: p.mime })
+        dt.items.add(file)
+      } else if (p.kind === 'html') {
+        dt.setData('text/html', p.html)
+      }
+
+      try {
+        const evt = new ClipboardEvent('paste', {
+          clipboardData: dt,
+          bubbles: true,
+          cancelable: true,
+        })
+        target.dispatchEvent(evt)
+      } catch {
+        /* ignore */
+      }
+    }, payload)
+  }
+
   async save(container: 'document' | 'drawer') {
     if (container === 'drawer') {
       await this.drawer.getByText('Save').click()
```
test/lexical/components/Image.tsx (new file, 22 lines)

```tsx
import type { AdminViewServerProps } from 'payload'

import React from 'react'

export const Image: React.FC<AdminViewServerProps> = async ({ payload }) => {
  const images = await payload.find({
    collection: 'uploads',
    limit: 1,
  })

  if (!images?.docs?.length) {
    return null
  }

  return (
    <div>
      <h2>This is an image:</h2>
      {/* eslint-disable-next-line jsx-a11y/alt-text */}
      <img src={images?.docs?.[0]?.url as string} />
    </div>
  )
}
```
```diff
@@ -97,6 +97,7 @@ export interface Config {
     'rich-text-fields': RichTextField;
     'text-fields': TextField;
     uploads: Upload;
+    uploads2: Uploads2;
     'array-fields': ArrayField;
     OnDemandForm: OnDemandForm;
     OnDemandOutsideForm: OnDemandOutsideForm;
@@ -121,6 +122,7 @@ export interface Config {
     'rich-text-fields': RichTextFieldsSelect<false> | RichTextFieldsSelect<true>;
     'text-fields': TextFieldsSelect<false> | TextFieldsSelect<true>;
     uploads: UploadsSelect<false> | UploadsSelect<true>;
+    uploads2: Uploads2Select<false> | Uploads2Select<true>;
     'array-fields': ArrayFieldsSelect<false> | ArrayFieldsSelect<true>;
     OnDemandForm: OnDemandFormSelect<false> | OnDemandFormSelect<true>;
     OnDemandOutsideForm: OnDemandOutsideFormSelect<false> | OnDemandOutsideFormSelect<true>;
@@ -760,6 +762,27 @@ export interface Upload {
   focalX?: number | null;
   focalY?: number | null;
 }
+/**
+ * This interface was referenced by `Config`'s JSON-Schema
+ * via the `definition` "uploads2".
+ */
+export interface Uploads2 {
+  id: string;
+  text?: string | null;
+  media?: (string | null) | Upload;
+  altText?: string | null;
+  updatedAt: string;
+  createdAt: string;
+  url?: string | null;
+  thumbnailURL?: string | null;
+  filename?: string | null;
+  mimeType?: string | null;
+  filesize?: number | null;
+  width?: number | null;
+  height?: number | null;
+  focalX?: number | null;
+  focalY?: number | null;
+}
 /**
  * This interface was referenced by `Config`'s JSON-Schema
  * via the `definition` "array-fields".
@@ -996,6 +1019,10 @@ export interface PayloadLockedDocument {
         relationTo: 'uploads';
         value: number | Upload;
       } | null)
+    | ({
+        relationTo: 'uploads2';
+        value: string | Uploads2;
+      } | null)
     | ({
         relationTo: 'array-fields';
         value: number | ArrayField;
@@ -1288,6 +1315,26 @@ export interface UploadsSelect<T extends boolean = true> {
   focalX?: T;
   focalY?: T;
 }
+/**
+ * This interface was referenced by `Config`'s JSON-Schema
+ * via the `definition` "uploads2_select".
+ */
+export interface Uploads2Select<T extends boolean = true> {
+  text?: T;
+  media?: T;
+  altText?: T;
+  updatedAt?: T;
+  createdAt?: T;
+  url?: T;
+  thumbnailURL?: T;
+  filename?: T;
+  mimeType?: T;
+  filesize?: T;
+  width?: T;
+  height?: T;
+  focalX?: T;
+  focalY?: T;
+}
 /**
  * This interface was referenced by `Config`'s JSON-Schema
  * via the `definition` "array-fields_select".
```
```diff
@@ -16,6 +16,7 @@ import {
   lexicalRelationshipFieldsSlug,
   richTextFieldsSlug,
   textFieldsSlug,
+  uploads2Slug,
   uploadsSlug,
   usersSlug,
 } from './slugs.js'
@@ -125,6 +126,14 @@ export const seed = async (_payload: Payload) => {
     overrideAccess: true,
   })

+  const createdPNGDoc2 = await _payload.create({
+    collection: uploads2Slug,
+    data: {},
+    file: pngFile,
+    depth: 0,
+    overrideAccess: true,
+  })
+
   const createdJPGDoc = await _payload.create({
     collection: uploadsSlug,
     data: {
```
```diff
@@ -15,6 +15,8 @@ export const richTextFieldsSlug = 'rich-text-fields'
 // Auxiliary slugs
 export const textFieldsSlug = 'text-fields'
 export const uploadsSlug = 'uploads'
+export const uploads2Slug = 'uploads2'
+
 export const arrayFieldsSlug = 'array-fields'

 export const collectionSlugs = [
```
```diff
@@ -82,17 +82,26 @@ if (!suiteName) {

   // Run specific suite
   clearWebpackCache()
-  const suitePath: string | undefined = path
-    .resolve(dirname, inputSuitePath, 'e2e.spec.ts')
+  const suiteFolderPath: string | undefined = path
+    .resolve(dirname, inputSuitePath)
     .replaceAll('__', '/')

+  const allSuitesInFolder = await globby(`${suiteFolderPath.replace(/\\/g, '/')}/*e2e.spec.ts`)
+
   const baseTestFolder = inputSuitePath.split('__')[0]

-  if (!suitePath || !baseTestFolder) {
+  if (!baseTestFolder || !allSuitesInFolder?.length) {
     throw new Error(`No test suite found for ${suiteName}`)
   }

-  executePlaywright(suitePath, baseTestFolder, false, suiteConfigPath)
+  console.log(`\n\nExecuting all ${allSuitesInFolder.length} E2E tests...\n\n`)
+
+  console.log(`${allSuitesInFolder.join('\n')}\n`)
+
+  for (const file of allSuitesInFolder) {
+    clearWebpackCache()
+    executePlaywright(file, baseTestFolder, false, suiteConfigPath)
+  }
 }

 console.log('\nRESULTS:')
```