diff --git a/docs/admin/overview.mdx b/docs/admin/overview.mdx index ec8d4d55b..4a5766ab4 100644 --- a/docs/admin/overview.mdx +++ b/docs/admin/overview.mdx @@ -177,7 +177,7 @@ The following options are available: Tip: - You can easily add _new_ routes to the Admin Panel through [Custom Endpoints](../rest-api/overview#custom-endpoints) and [Custom Views](./views). + You can easily add _new_ routes to the Admin Panel through [Custom Endpoints](../rest-api/overview#custom-endpoints) and [Custom Views](./views). #### Customizing Root-level Routes diff --git a/docs/lexical/converters.mdx b/docs/lexical/converters.mdx index ca98ba169..bcea99a0a 100644 --- a/docs/lexical/converters.mdx +++ b/docs/lexical/converters.mdx @@ -334,8 +334,7 @@ This has been taken from the [lexical serialization & deserialization docs](http Convert markdown content to the Lexical editor format with the following: ```ts -import { $convertFromMarkdownString } from '@lexical/markdown' -import { sanitizeServerEditorConfig } from '@payloadcms/richtext-lexical' +import { sanitizeServerEditorConfig, $convertFromMarkdownString } from '@payloadcms/richtext-lexical' const yourSanitizedEditorConfig = sanitizeServerEditorConfig(yourEditorConfig, payloadConfig) // <= your editor config & Payload Config here const markdown = `# Hello World` diff --git a/eslint.config.js b/eslint.config.js index ae45e1d63..9141461fd 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -28,7 +28,7 @@ export const rootParserOptions = { ecmaVersion: 'latest', projectService: { maximumDefaultProjectFileMatchCount_THIS_WILL_SLOW_DOWN_LINTING: 40, - allowDefaultProject: ['scripts/*.ts', '*.js', '*.mjs', '*.spec.ts', '*.d.ts'], + allowDefaultProject: ['scripts/*.ts', '*.js', '*.mjs', '*.d.ts'], }, } diff --git a/packages/next/src/layouts/Root/index.tsx b/packages/next/src/layouts/Root/index.tsx index 6372f1bcf..afad998ac 100644 --- a/packages/next/src/layouts/Root/index.tsx +++ b/packages/next/src/layouts/Root/index.tsx @@ -40,6 +40,7 @@ let checkedDependencies = false export const RootLayout = async ({ children, config: configPromise, + importMap, serverFunction, }: { readonly children: React.ReactNode @@ -136,6 +137,7 @@ export const RootLayout = async ({ const clientConfig = await getClientConfig({ config, i18n, + importMap, }) return ( diff --git a/packages/next/src/utilities/getClientConfig.ts b/packages/next/src/utilities/getClientConfig.ts index e118d5937..1fde75460 100644 --- a/packages/next/src/utilities/getClientConfig.ts +++ b/packages/next/src/utilities/getClientConfig.ts @@ -1,16 +1,21 @@ import type { I18nClient } from '@payloadcms/translations' -import type { ClientConfig, SanitizedConfig } from 'payload' +import type { ClientConfig, ImportMap, SanitizedConfig } from 'payload' import { createClientConfig } from 'payload' import { cache } from 'react' export const getClientConfig = cache( - async (args: { config: SanitizedConfig; i18n: I18nClient }): Promise => { - const { config, i18n } = args + async (args: { + config: SanitizedConfig + i18n: I18nClient + importMap: ImportMap + }): Promise => { + const { config, i18n, importMap } = args const clientConfig = createClientConfig({ config, i18n, + importMap, }) return Promise.resolve(clientConfig) diff --git a/packages/next/src/views/Document/handleServerFunction.tsx b/packages/next/src/views/Document/handleServerFunction.tsx index 468b7f98d..187e8674f 100644 --- a/packages/next/src/views/Document/handleServerFunction.tsx +++ b/packages/next/src/views/Document/handleServerFunction.tsx @@ 
-4,6 +4,7 @@ import type { Data, DocumentPreferences, FormState, + ImportMap, PayloadRequest, SanitizedConfig, VisibleEntities, @@ -23,8 +24,9 @@ if (!cachedClientConfig) { export const getClientConfig = (args: { config: SanitizedConfig i18n: I18nClient + importMap: ImportMap }): ClientConfig => { - const { config, i18n } = args + const { config, i18n, importMap } = args if (cachedClientConfig && process.env.NODE_ENV !== 'development') { return cachedClientConfig @@ -33,6 +35,7 @@ export const getClientConfig = (args: { cachedClientConfig = createClientConfig({ config, i18n, + importMap, }) return cachedClientConfig @@ -112,6 +115,7 @@ export const renderDocumentHandler = async (args: { const clientConfig = getClientConfig({ config, i18n, + importMap: req.payload.importMap, }) let preferences: DocumentPreferences diff --git a/packages/next/src/views/List/handleServerFunction.tsx b/packages/next/src/views/List/handleServerFunction.tsx index 50293f58d..afe897ec7 100644 --- a/packages/next/src/views/List/handleServerFunction.tsx +++ b/packages/next/src/views/List/handleServerFunction.tsx @@ -2,6 +2,7 @@ import type { I18nClient } from '@payloadcms/translations' import type { ListPreferences } from '@payloadcms/ui' import type { ClientConfig, + ImportMap, ListQuery, PayloadRequest, SanitizedConfig, @@ -22,8 +23,9 @@ if (!cachedClientConfig) { export const getClientConfig = (args: { config: SanitizedConfig i18n: I18nClient + importMap: ImportMap }): ClientConfig => { - const { config, i18n } = args + const { config, i18n, importMap } = args if (cachedClientConfig && process.env.NODE_ENV !== 'development') { return cachedClientConfig @@ -32,6 +34,7 @@ export const getClientConfig = (args: { cachedClientConfig = createClientConfig({ config, i18n, + importMap, }) return cachedClientConfig @@ -114,6 +117,7 @@ export const renderListHandler = async (args: { const clientConfig = getClientConfig({ config, i18n, + importMap: payload.importMap, }) const preferencesKey = `${collectionSlug}-list` diff --git a/packages/next/src/views/Root/index.tsx b/packages/next/src/views/Root/index.tsx index 8b74c451a..b41410cb0 100644 --- a/packages/next/src/views/Root/index.tsx +++ b/packages/next/src/views/Root/index.tsx @@ -124,6 +124,7 @@ export const RootPage = async ({ const clientConfig = await getClientConfig({ config, i18n: initPageResult?.req.i18n, + importMap, }) const RenderedView = ( diff --git a/packages/payload/src/bin/generateImportMap/getFromImportMap.ts b/packages/payload/src/bin/generateImportMap/getFromImportMap.ts new file mode 100644 index 000000000..a517eaf33 --- /dev/null +++ b/packages/payload/src/bin/generateImportMap/getFromImportMap.ts @@ -0,0 +1,34 @@ +import type { PayloadComponent } from '../../config/types.js' +import type { ImportMap } from './index.js' + +import { parsePayloadComponent } from './parsePayloadComponent.js' + +export const getFromImportMap = (args: { + importMap: ImportMap + PayloadComponent: PayloadComponent + schemaPath?: string + silent?: boolean +}): TOutput => { + const { importMap, PayloadComponent, schemaPath, silent } = args + + const { exportName, path } = parsePayloadComponent(PayloadComponent) + + const key = path + '#' + exportName + + const importMapEntry = importMap[key] + + if (!importMapEntry && !silent) { + // eslint-disable-next-line no-console + console.error( + `getFromImportMap: PayloadComponent not found in importMap`, + { + key, + PayloadComponent, + schemaPath, + }, + 'You may need to run the `payload generate:importmap` command to generate the 
importMap ahead of runtime.', + ) + } + + return importMapEntry +} diff --git a/packages/payload/src/bin/generateImportMap/iterateConfig.ts b/packages/payload/src/bin/generateImportMap/iterateConfig.ts index 066b538dd..b36a3c88a 100644 --- a/packages/payload/src/bin/generateImportMap/iterateConfig.ts +++ b/packages/payload/src/bin/generateImportMap/iterateConfig.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-unused-expressions */ import type { AdminViewConfig } from '../../admin/views/types.js' import type { SanitizedConfig } from '../../config/types.js' import type { AddToImportMap, Imports, InternalImportMap } from './index.js' diff --git a/packages/payload/src/bin/generateImportMap/iterateFields.ts b/packages/payload/src/bin/generateImportMap/iterateFields.ts index 1dcc387fb..10cbb7129 100644 --- a/packages/payload/src/bin/generateImportMap/iterateFields.ts +++ b/packages/payload/src/bin/generateImportMap/iterateFields.ts @@ -86,6 +86,8 @@ export function genImportMapIterateFields({ } } + hasKey(field?.admin, 'jsx') && addToImportMap(field.admin.jsx) // For Blocks + hasKey(field?.admin?.components, 'Label') && addToImportMap(field.admin.components.Label) hasKey(field?.admin?.components, 'Block') && addToImportMap(field.admin.components.Block) diff --git a/packages/payload/src/collections/config/client.ts b/packages/payload/src/collections/config/client.ts index e01ad1f57..4404a7133 100644 --- a/packages/payload/src/collections/config/client.ts +++ b/packages/payload/src/collections/config/client.ts @@ -1,6 +1,7 @@ import type { I18nClient } from '@payloadcms/translations' import type { StaticDescription } from '../../admin/types.js' +import type { ImportMap } from '../../bin/generateImportMap/index.js' import type { LivePreviewConfig, ServerOnlyLivePreviewProperties, @@ -84,10 +85,12 @@ export const createClientCollectionConfig = ({ collection, defaultIDType, i18n, + importMap, }: { collection: SanitizedCollectionConfig defaultIDType: Payload['config']['db']['defaultIDType'] i18n: I18nClient + importMap: ImportMap }): ClientCollectionConfig => { const clientCollection = deepCopyObjectSimple( collection, @@ -99,6 +102,7 @@ export const createClientCollectionConfig = ({ defaultIDType, fields: collection.fields, i18n, + importMap, }) serverOnlyCollectionProperties.forEach((key) => { @@ -185,10 +189,12 @@ export const createClientCollectionConfigs = ({ collections, defaultIDType, i18n, + importMap, }: { collections: SanitizedCollectionConfig[] defaultIDType: Payload['config']['db']['defaultIDType'] i18n: I18nClient + importMap: ImportMap }): ClientCollectionConfig[] => { const clientCollections = new Array(collections.length) @@ -199,6 +205,7 @@ export const createClientCollectionConfigs = ({ collection, defaultIDType, i18n, + importMap, }) } diff --git a/packages/payload/src/config/client.ts b/packages/payload/src/config/client.ts index 031f12d32..a511fca50 100644 --- a/packages/payload/src/config/client.ts +++ b/packages/payload/src/config/client.ts @@ -1,5 +1,6 @@ import type { I18nClient } from '@payloadcms/translations' +import type { ImportMap } from '../bin/generateImportMap/index.js' import type { LivePreviewConfig, SanitizedConfig, @@ -74,9 +75,11 @@ export const serverOnlyConfigProperties: readonly Partial { // We can use deepCopySimple here, as the clientConfig should be JSON serializable anyways, since it will be sent from server => client const clientConfig = deepCopyObjectSimple(config, true) as unknown as ClientConfig @@ -119,12 +122,14 @@ export const 
createClientConfig = ({ collections: config.collections, defaultIDType: config.db.defaultIDType, i18n, + importMap, }) clientConfig.globals = createClientGlobalConfigs({ defaultIDType: config.db.defaultIDType, globals: config.globals, i18n, + importMap, }) return clientConfig diff --git a/packages/payload/src/exports/shared.ts b/packages/payload/src/exports/shared.ts index feeedac98..76ff98576 100644 --- a/packages/payload/src/exports/shared.ts +++ b/packages/payload/src/exports/shared.ts @@ -5,6 +5,7 @@ export { getCookieExpiration, parseCookies, } from '../auth/cookies.js' +export { getFromImportMap } from '../bin/generateImportMap/getFromImportMap.js' export { parsePayloadComponent } from '../bin/generateImportMap/parsePayloadComponent.js' export { defaults as collectionDefaults } from '../collections/config/defaults.js' diff --git a/packages/payload/src/fields/config/client.ts b/packages/payload/src/fields/config/client.ts index ac0f70b8f..4549bb179 100644 --- a/packages/payload/src/fields/config/client.ts +++ b/packages/payload/src/fields/config/client.ts @@ -2,6 +2,7 @@ import type { I18nClient } from '@payloadcms/translations' import type { AdminClient, + BlockJSX, BlocksFieldClient, ClientBlock, ClientField, @@ -15,9 +16,10 @@ import type { } from '../../fields/config/types.js' import type { Payload } from '../../types/index.js' +import { getFromImportMap } from '../../bin/generateImportMap/getFromImportMap.js' import { MissingEditorProp } from '../../errors/MissingEditorProp.js' import { fieldAffectsData } from '../../fields/config/types.js' -import { flattenTopLevelFields } from '../../index.js' +import { flattenTopLevelFields, type ImportMap } from '../../index.js' import { removeUndefined } from '../../utilities/removeUndefined.js' // Should not be used - ClientField should be used instead. 
This is why we don't export ClientField, we don't want people @@ -42,11 +44,13 @@ export const createClientField = ({ defaultIDType, field: incomingField, i18n, + importMap, }: { clientField?: ClientField defaultIDType: Payload['config']['db']['defaultIDType'] field: Field i18n: I18nClient + importMap: ImportMap }): ClientField => { const serverOnlyFieldProperties: Partial[] = [ 'hooks', @@ -128,6 +132,7 @@ export const createClientField = ({ disableAddingID: incomingField.type !== 'array', fields: incomingField.fields, i18n, + importMap, }) break @@ -154,6 +159,15 @@ export const createClientField = ({ } } + if (block?.admin?.jsx) { + const jsxResolved = getFromImportMap({ + importMap, + PayloadComponent: block.admin.jsx, + schemaPath: '', + }) + clientBlock.jsx = jsxResolved + } + if (block.labels) { clientBlock.labels = {} as unknown as LabelsClient @@ -176,6 +190,7 @@ export const createClientField = ({ defaultIDType, fields: block.fields, i18n, + importMap, }) if (!field.blocks) { @@ -190,8 +205,7 @@ export const createClientField = ({ } case 'radio': - - // eslint-disable-next-line no-fallthrough + // falls through case 'select': { const field = clientField as RadioFieldClient | SelectFieldClient @@ -246,6 +260,7 @@ export const createClientField = ({ disableAddingID: true, fields: tab.fields, i18n, + importMap, }) } } @@ -295,12 +310,14 @@ export const createClientFields = ({ disableAddingID, fields, i18n, + importMap, }: { clientFields: ClientField[] defaultIDType: Payload['config']['db']['defaultIDType'] disableAddingID?: boolean fields: Field[] i18n: I18nClient + importMap: ImportMap }): ClientField[] => { const newClientFields: ClientField[] = [] @@ -312,6 +329,7 @@ export const createClientFields = ({ defaultIDType, field, i18n, + importMap, }) if (newField) { diff --git a/packages/payload/src/fields/config/types.ts b/packages/payload/src/fields/config/types.ts index 0a455225c..2de9f66e7 100644 --- a/packages/payload/src/fields/config/types.ts +++ b/packages/payload/src/fields/config/types.ts @@ -1186,6 +1186,70 @@ export type RadioFieldClient = { } & FieldBaseClient & Pick +type BlockFields = { + [key: string]: any + blockName?: string + blockType?: string +} + +export type BlockJSX = { + /** + * Override the default regex used to search for the start of the block in the JSX. + * By default, it's + */ + customStartRegex?: RegExp + /** + * By default, all spaces at the beginning and end of every line of the + * children (text between the open and close match) are removed. + * Set this to true to disable this behavior. + */ + doNotTrimChildren?: boolean + /** + * Function that receives the data for a given block and returns a JSX representation of it. + * + * This is used to convert Lexical => JSX + */ + export: (props: { + fields: BlockFields + lexicalToMarkdown?: (props: { editorState: Record }) => string + }) => + | { + children?: string + props?: object + } + | false + | string + /** + * Function that receives the markdown string and parsed + * JSX props for a given matched block and returns a Lexical representation of it. + * + * This is used to convert JSX => Lexical + */ + import: (props: { + children: string + closeMatch: null | RegExpMatchArray // Only available when customEndRegex is set + htmlToLexical?: ((props: { html: string }) => any) | null + markdownToLexical?: (props: { markdown: string }) => Record + openMatch?: RegExpMatchArray + props: Record + }) => BlockFields | false +} + export type Block = { /** * Do not set this property manually. 
This is set to true during sanitization, to avoid @@ -1202,6 +1266,7 @@ export type Block = { } /** Extension point to add your custom data. Available in server and client. */ custom?: Record + jsx?: PayloadComponent } /** Extension point to add your custom data. Server only. */ custom?: Record @@ -1223,6 +1288,7 @@ export type Block = { * **Note**: Top level types can collide, ensure they are unique amongst collections, arrays, groups, blocks, tabs. */ interfaceName?: string + jsx?: BlockJSX labels?: Labels slug: string } @@ -1231,7 +1297,7 @@ export type ClientBlock = { admin?: Pick fields: ClientField[] labels?: LabelsClient -} & Pick +} & Pick export type BlocksField = { admin?: { diff --git a/packages/payload/src/globals/config/client.ts b/packages/payload/src/globals/config/client.ts index e9af523b7..bbc52649c 100644 --- a/packages/payload/src/globals/config/client.ts +++ b/packages/payload/src/globals/config/client.ts @@ -1,5 +1,6 @@ import type { I18nClient } from '@payloadcms/translations' +import type { ImportMap } from '../../bin/generateImportMap/index.js' import type { LivePreviewConfig, SanitizedConfig, @@ -44,10 +45,12 @@ export const createClientGlobalConfig = ({ defaultIDType, global, i18n, + importMap, }: { defaultIDType: Payload['config']['db']['defaultIDType'] global: SanitizedConfig['globals'][0] i18n: I18nClient + importMap: ImportMap }): ClientGlobalConfig => { const clientGlobal = deepCopyObjectSimple(global, true) as unknown as ClientGlobalConfig @@ -56,6 +59,7 @@ export const createClientGlobalConfig = ({ defaultIDType, fields: global.fields, i18n, + importMap, }) serverOnlyProperties.forEach((key) => { @@ -95,10 +99,12 @@ export const createClientGlobalConfigs = ({ defaultIDType, globals, i18n, + importMap, }: { defaultIDType: Payload['config']['db']['defaultIDType'] globals: SanitizedConfig['globals'] i18n: I18nClient + importMap: ImportMap }): ClientGlobalConfig[] => { const clientGlobals = new Array(globals.length) @@ -109,6 +115,7 @@ export const createClientGlobalConfigs = ({ defaultIDType, global, i18n, + importMap, }) } diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 49fc380f2..add9a101d 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -1105,6 +1105,7 @@ export type { ArrayFieldClient, BaseValidateOptions, Block, + BlockJSX, BlocksField, BlocksFieldClient, CheckboxField, diff --git a/packages/payload/src/utilities/deepCopyObject.ts b/packages/payload/src/utilities/deepCopyObject.ts index 34dff51c9..d368769c3 100644 --- a/packages/payload/src/utilities/deepCopyObject.ts +++ b/packages/payload/src/utilities/deepCopyObject.ts @@ -32,6 +32,8 @@ const constructorHandlers = new Map() constructorHandlers.set(Date, (o) => new Date(o)) constructorHandlers.set(Map, (o, fn) => new Map(cloneArray(Array.from(o), fn))) constructorHandlers.set(Set, (o, fn) => new Set(cloneArray(Array.from(o), fn))) +constructorHandlers.set(RegExp, (regex: RegExp) => new RegExp(regex.source, regex.flags)) + let handler = null function cloneArray(a: T, fn): T { @@ -42,6 +44,8 @@ function cloneArray(a: T, fn): T { const cur = a[k] if (typeof cur !== 'object' || cur === null) { a2[k] = cur + } else if (cur instanceof RegExp) { + a2[k] = new RegExp(cur.source, cur.flags) } else if (cur.constructor !== Object && (handler = constructorHandlers.get(cur.constructor))) { a2[k] = handler(cur, fn) } else if (ArrayBuffer.isView(cur)) { @@ -60,6 +64,10 @@ export const deepCopyObject = (o: T): T => { if (Array.isArray(o)) { return 
cloneArray(o, deepCopyObject) } + if (o instanceof RegExp) { + return new RegExp(o.source, o.flags) as T + } + if (o.constructor !== Object && (handler = constructorHandlers.get(o.constructor))) { return handler(o, deepCopyObject) } @@ -71,6 +79,8 @@ export const deepCopyObject = (o: T): T => { const cur = o[k] if (typeof cur !== 'object' || cur === null) { o2[k as string] = cur + } else if (cur instanceof RegExp) { + o2[k as string] = new RegExp(cur.source, cur.flags) } else if (cur.constructor !== Object && (handler = constructorHandlers.get(cur.constructor))) { o2[k as string] = handler(cur, deepCopyObject) } else if (ArrayBuffer.isView(cur)) { diff --git a/packages/richtext-lexical/package.json b/packages/richtext-lexical/package.json index 831342996..70dab4257 100644 --- a/packages/richtext-lexical/package.json +++ b/packages/richtext-lexical/package.json @@ -39,6 +39,16 @@ "import": "./src/exports/server/migrate.ts", "types": "./src/exports/server/migrate.ts", "default": "./src/exports/server/migrate.ts" + }, + "./ast/mdx": { + "import": "./src/exports/server/ast/mdx.ts", + "types": "./src/exports/server/ast/mdx.ts", + "default": "./src/exports/server/ast/mdx.ts" + }, + "./shared": { + "import": "./src/exports/shared.ts", + "types": "./src/exports/shared.ts", + "default": "./src/exports/shared.ts" } }, "main": "./src/index.ts", @@ -62,6 +72,14 @@ "prepublishOnly": "pnpm clean && pnpm turbo build", "translateNewKeys": "node --no-deprecation --import @swc-node/register/esm-register scripts/translateNewKeys.ts" }, + "lint-staged": { + "**/package.json": "sort-package-json", + "*.{md,mdx,yml,json}": "prettier --write", + "*.{js,jsx,ts,tsx}": [ + "prettier --write", + "eslint --cache --fix" + ] + }, "dependencies": { "@lexical/headless": "0.20.0", "@lexical/link": "0.20.0", @@ -75,10 +93,14 @@ "@payloadcms/translations": "workspace:*", "@payloadcms/ui": "workspace:*", "@types/uuid": "10.0.0", + "acorn": "8.12.1", "bson-objectid": "2.0.4", "dequal": "2.0.3", "escape-html": "1.0.3", "lexical": "0.20.0", + "mdast-util-from-markdown": "2.0.2", + "mdast-util-mdx-jsx": "3.1.3", + "micromark-extension-mdx-jsx": "3.0.1", "react-error-boundary": "4.0.13", "ts-essentials": "10.0.3", "uuid": "10.0.0" @@ -147,6 +169,16 @@ "import": "./dist/exports/server/migrate.js", "types": "./dist/exports/server/migrate.d.ts", "default": "./dist/exports/server/migrate.js" + }, + "./ast/mdx": { + "import": "./dist/exports/server/ast/mdx.js", + "types": "./dist/exports/server/ast/mdx.d.ts", + "default": "./dist/exports/server/ast/mdx.js" + }, + "./shared": { + "import": "./dist/exports/shared.js", + "types": "./dist/exports/shared.d.ts", + "default": "./dist/exports/shared.js" } }, "main": "./dist/index.js", diff --git a/packages/richtext-lexical/src/exports/server/ast/mdx.ts b/packages/richtext-lexical/src/exports/server/ast/mdx.ts new file mode 100644 index 000000000..82005add6 --- /dev/null +++ b/packages/richtext-lexical/src/exports/server/ast/mdx.ts @@ -0,0 +1,43 @@ +import * as acorn from 'acorn' +import { fromMarkdown } from 'mdast-util-from-markdown' +import { mdxJsxFromMarkdown } from 'mdast-util-mdx-jsx' +import { mdxJsx } from 'micromark-extension-mdx-jsx' + +export type AST = ReturnType + +export function parseJSXToAST({ + jsxString, + keepPositions, +}: { + jsxString: string + keepPositions?: boolean +}): AST { + const treeComplex: AST = fromMarkdown(jsxString, { + // @ts-expect-error + extensions: [mdxJsx({ acorn, addResult: false })], + mdastExtensions: [mdxJsxFromMarkdown()], + }) + + // Remove 
"position" keys + const parseTree = (tree: object) => { + for (const key in tree) { + if (key === 'position' && tree[key].start && tree[key].end) { + delete tree[key] + } else if (typeof tree[key] === 'object') { + parseTree(tree[key]) + } else if (Array.isArray(tree[key])) { + for (const item of tree[key]) { + parseTree(item) + } + } + } + } + + const tree: AST = treeComplex + + if (keepPositions !== true) { + parseTree(tree) + } + + return tree +} diff --git a/packages/richtext-lexical/src/exports/shared.ts b/packages/richtext-lexical/src/exports/shared.ts new file mode 100644 index 000000000..b984f57f9 --- /dev/null +++ b/packages/richtext-lexical/src/exports/shared.ts @@ -0,0 +1,9 @@ +export { collectTopLevelJSXInLines } from '../utilities/jsx/collectTopLevelJSXInLines.js' + +export { extractPropsFromJSXPropsString } from '../utilities/jsx/extractPropsFromJSXPropsString.js' +export { + extractFrontmatter, + frontmatterToObject, + objectToFrontmatter, + propsToJSXString, +} from '../utilities/jsx/jsx.js' diff --git a/packages/richtext-lexical/src/features/blocks/client/index.tsx b/packages/richtext-lexical/src/features/blocks/client/index.tsx index 2f036bea2..b25f95ea9 100644 --- a/packages/richtext-lexical/src/features/blocks/client/index.tsx +++ b/packages/richtext-lexical/src/features/blocks/client/index.tsx @@ -1,10 +1,13 @@ 'use client' -import type { BlocksFieldClient } from 'payload' +import type { BlocksFieldClient, ClientBlock } from 'payload' import { getTranslation } from '@payloadcms/translations' -import type { SlashMenuItem } from '../../../lexical/plugins/SlashMenu/LexicalTypeaheadMenuPlugin/types.js' +import type { + SlashMenuGroup, + SlashMenuItem, +} from '../../../lexical/plugins/SlashMenu/LexicalTypeaheadMenuPlugin/types.js' import type { ToolbarGroup, ToolbarGroupItem } from '../../toolbars/types.js' import { BlockIcon } from '../../../lexical/ui/icons/Block/index.js' @@ -15,184 +18,167 @@ import { InlineBlockNode } from './nodes/InlineBlocksNode.js' import { INSERT_BLOCK_COMMAND, INSERT_INLINE_BLOCK_COMMAND } from './plugin/commands.js' import { BlocksPlugin } from './plugin/index.js' -export type BlocksFeatureClientProps = { - clientBlockSlugs: string[] - clientInlineBlockSlugs: string[] -} -// @ts-expect-error - TODO: fix this -export const BlocksFeatureClient = createClientFeature(({ props }) => ({ - nodes: [BlockNode, InlineBlockNode], - plugins: [ - { - Component: BlocksPlugin, - position: 'normal', - }, - ], - sanitizedClientFeatureProps: props, - slashMenu: { - groups: [ - props.clientBlockSlugs?.length - ? 
{ - items: props.clientBlockSlugs.map((blockSlug) => { - return { - Icon: BlockIcon, - key: 'block-' + blockSlug, - keywords: ['block', 'blocks', blockSlug], - label: ({ featureClientSchemaMap, i18n, schemaPath }) => { - if (!featureClientSchemaMap) { - return blockSlug - } +export const BlocksFeatureClient = createClientFeature( + ({ featureClientSchemaMap, props, schemaPath }) => { + const schemaMapRenderedBlockPathPrefix = `${schemaPath}.lexical_internal_feature.blocks.lexical_blocks` + const schemaMapRenderedInlineBlockPathPrefix = `${schemaPath}.lexical_internal_feature.blocks.lexical_inline_blocks` + const clientSchema = featureClientSchemaMap['blocks'] - const componentMapRenderedBlockPath = `${schemaPath}.lexical_internal_feature.blocks.lexical_blocks.${blockSlug}` - const clientSchemaMap = featureClientSchemaMap['blocks'] + const blocksFields: BlocksFieldClient[] = Object.entries(clientSchema) + .filter( + ([key]) => + key.startsWith(schemaMapRenderedBlockPathPrefix + '.') && + !key.replace(schemaMapRenderedBlockPathPrefix + '.', '').includes('.'), + ) + .map(([key, value]) => value[0] as BlocksFieldClient) - const blocksField: BlocksFieldClient = clientSchemaMap[ - componentMapRenderedBlockPath - ][0] as BlocksFieldClient + const inlineBlocksFields: BlocksFieldClient[] = Object.entries(clientSchema) + .filter( + ([key]) => + key.startsWith(schemaMapRenderedInlineBlockPathPrefix + '.') && + !key.replace(schemaMapRenderedInlineBlockPathPrefix + '.', '').includes('.'), + ) + .map(([key, value]) => value[0] as BlocksFieldClient) - const clientBlock = blocksField.blocks[0] + const clientBlocks: ClientBlock[] = blocksFields.map((field) => { + return field.blocks[0] + }) - const blockDisplayName = clientBlock?.labels?.singular - ? getTranslation(clientBlock.labels.singular, i18n) - : clientBlock?.slug + const clientInlineBlocks: ClientBlock[] = inlineBlocksFields.map((field) => { + return field.blocks[0] + }) - return blockDisplayName + return { + nodes: [BlockNode, InlineBlockNode], + plugins: [ + { + Component: BlocksPlugin, + position: 'normal', + }, + ], + sanitizedClientFeatureProps: props, + slashMenu: { + groups: [ + clientBlocks?.length + ? { + items: clientBlocks.map((block) => { + return { + Icon: BlockIcon, + key: 'block-' + block.slug, + keywords: ['block', 'blocks', block.slug], + label: ({ i18n }) => { + const blockDisplayName = block?.labels?.singular + ? getTranslation(block.labels.singular, i18n) + : block?.slug + + return blockDisplayName + }, + onSelect: ({ editor }) => { + editor.dispatchCommand(INSERT_BLOCK_COMMAND, { + blockName: '', + blockType: block.slug, + }) + }, + } as SlashMenuItem + }), + key: 'blocks', + label: ({ i18n }) => { + return i18n.t('lexical:blocks:label') }, - onSelect: ({ editor }) => { - editor.dispatchCommand(INSERT_BLOCK_COMMAND, { - blockName: '', - blockType: blockSlug, - }) + } + : null, + clientInlineBlocks?.length + ? { + items: clientInlineBlocks.map((inlineBlock) => { + return { + Icon: InlineBlocksIcon, + key: 'inlineBlocks-' + inlineBlock.slug, + keywords: ['inlineBlock', 'inline block', inlineBlock.slug], + label: ({ i18n }) => { + const blockDisplayName = inlineBlock?.labels?.singular + ? 
getTranslation(inlineBlock.labels.singular, i18n) + : inlineBlock?.slug + + return blockDisplayName + }, + onSelect: ({ editor }) => { + editor.dispatchCommand(INSERT_INLINE_BLOCK_COMMAND, { + blockName: '', + blockType: inlineBlock.slug, + }) + }, + } as SlashMenuItem + }), + key: 'inlineBlocks', + label: ({ i18n }) => { + return i18n.t('lexical:blocks:inlineBlocks:label') }, - } as SlashMenuItem - }), - key: 'blocks', - label: ({ i18n }) => { - return i18n.t('lexical:blocks:label') - }, - } - : null, - props.clientInlineBlockSlugs?.length - ? { - items: props.clientInlineBlockSlugs.map((inlineBlockSlug) => { - return { - Icon: InlineBlocksIcon, - key: 'inlineBlocks-' + inlineBlockSlug, - keywords: ['inlineBlock', 'inline block', inlineBlockSlug], - label: ({ featureClientSchemaMap, i18n, schemaPath }) => { - const componentMapRenderedBlockPath = `${schemaPath}.lexical_internal_feature.blocks.lexical_inline_blocks.${inlineBlockSlug}` - const clientSchemaMap = featureClientSchemaMap['blocks'] - - const blocksField: BlocksFieldClient = clientSchemaMap[ - componentMapRenderedBlockPath - ][0] as BlocksFieldClient - - const clientBlock = blocksField.blocks[0] - - const blockDisplayName = clientBlock?.labels?.singular - ? getTranslation(clientBlock.labels.singular, i18n) - : clientBlock?.slug - - return blockDisplayName - }, - onSelect: ({ editor }) => { - editor.dispatchCommand(INSERT_INLINE_BLOCK_COMMAND, { - blockName: '', - blockType: inlineBlockSlug, - }) - }, - } as SlashMenuItem - }), - key: 'inlineBlocks', - label: ({ i18n }) => { - return i18n.t('lexical:blocks:inlineBlocks:label') - }, - } - : null, - ].filter(Boolean), - }, - toolbarFixed: { - groups: [ - props.clientBlockSlugs?.length - ? { - type: 'dropdown', - ChildComponent: BlockIcon, - items: props.clientBlockSlugs.map((blockSlug, index) => { - return { + } + : null, + ].filter(Boolean) as SlashMenuGroup[], + }, + toolbarFixed: { + groups: [ + clientBlocks.length + ? { + type: 'dropdown', ChildComponent: BlockIcon, - isActive: undefined, // At this point, we would be inside a sub-richtext-editor. And at this point this will be run against the focused sub-editor, not the parent editor which has the actual block. Thus, no point in running this - key: 'block-' + blockSlug, - label: ({ featureClientSchemaMap, i18n, schemaPath }) => { - const componentMapRenderedBlockPath = `${schemaPath}.lexical_internal_feature.blocks.lexical_blocks.${blockSlug}` - const clientSchemaMap = featureClientSchemaMap['blocks'] + items: clientBlocks.map((block, index) => { + return { + ChildComponent: BlockIcon, + isActive: undefined, // At this point, we would be inside a sub-richtext-editor. And at this point this will be run against the focused sub-editor, not the parent editor which has the actual block. Thus, no point in running this + key: 'block-' + block.slug, + label: ({ i18n }) => { + const blockDisplayName = block?.labels?.singular + ? getTranslation(block.labels.singular, i18n) + : block?.slug - const blocksField: BlocksFieldClient = clientSchemaMap[ - componentMapRenderedBlockPath - ][0] as BlocksFieldClient - - const clientBlock = blocksField.blocks[0] - - const blockDisplayName = clientBlock?.labels?.singular - ? 
getTranslation(clientBlock.labels.singular, i18n) - : clientBlock?.slug - - return blockDisplayName - }, - onSelect: ({ editor }) => { - editor.dispatchCommand(INSERT_BLOCK_COMMAND, { - blockName: '', - blockType: blockSlug, - }) - }, - order: index, - } as ToolbarGroupItem - }), - key: 'blocks', - order: 20, - } - : null, - props.clientInlineBlockSlugs?.length - ? { - type: 'dropdown', - ChildComponent: InlineBlocksIcon, - items: props.clientInlineBlockSlugs.map((inlineBlockSlug, index) => { - return { + return blockDisplayName + }, + onSelect: ({ editor }) => { + editor.dispatchCommand(INSERT_BLOCK_COMMAND, { + blockName: '', + blockType: block.slug, + }) + }, + order: index, + } as ToolbarGroupItem + }), + key: 'blocks', + order: 20, + } + : null, + clientInlineBlocks?.length + ? { + type: 'dropdown', ChildComponent: InlineBlocksIcon, - isActive: undefined, - key: 'inlineBlock-' + inlineBlockSlug, - label: ({ featureClientSchemaMap, i18n, schemaPath }) => { - if (!featureClientSchemaMap) { - return inlineBlockSlug - } + items: clientInlineBlocks.map((inlineBlock, index) => { + return { + ChildComponent: InlineBlocksIcon, + isActive: undefined, + key: 'inlineBlock-' + inlineBlock.slug, + label: ({ i18n }) => { + const blockDisplayName = inlineBlock?.labels?.singular + ? getTranslation(inlineBlock.labels.singular, i18n) + : inlineBlock?.slug - const componentMapRenderedBlockPath = `${schemaPath}.lexical_internal_feature.blocks.lexical_inline_blocks.${inlineBlockSlug}` - const clientSchemaMap = featureClientSchemaMap['blocks'] - - const blocksField: BlocksFieldClient = clientSchemaMap[ - componentMapRenderedBlockPath - ][0] as BlocksFieldClient - - const clientBlock = blocksField.blocks[0] - - const blockDisplayName = clientBlock?.labels?.singular - ? 
getTranslation(clientBlock.labels.singular, i18n) - : clientBlock?.slug - - return blockDisplayName - }, - onSelect: ({ editor }) => { - editor.dispatchCommand(INSERT_INLINE_BLOCK_COMMAND, { - blockName: '', - blockType: inlineBlockSlug, - }) - }, - order: index, - } as ToolbarGroupItem - }), - key: 'inlineBlocks', - order: 25, - } - : null, - ].filter(Boolean) as ToolbarGroup[], + return blockDisplayName + }, + onSelect: ({ editor }) => { + editor.dispatchCommand(INSERT_INLINE_BLOCK_COMMAND, { + blockName: '', + blockType: inlineBlock.slug, + }) + }, + order: index, + } as ToolbarGroupItem + }), + key: 'inlineBlocks', + order: 25, + } + : null, + ].filter(Boolean) as ToolbarGroup[], + }, + } }, -})) +) diff --git a/packages/richtext-lexical/src/features/blocks/client/markdownTransformer.ts b/packages/richtext-lexical/src/features/blocks/client/markdownTransformer.ts new file mode 100644 index 000000000..3f10fb470 --- /dev/null +++ b/packages/richtext-lexical/src/features/blocks/client/markdownTransformer.ts @@ -0,0 +1,175 @@ +import type { MultilineElementTransformer, Transformer } from '@lexical/markdown' +import type { Klass, LexicalNode, LexicalNodeReplacement, SerializedEditorState } from 'lexical' +import type { ClientBlock } from 'payload' + +import { createHeadlessEditor } from '@lexical/headless' +import { $convertFromMarkdownString, $convertToMarkdownString } from '@lexical/markdown' + +import { extractPropsFromJSXPropsString } from '../../../utilities/jsx/extractPropsFromJSXPropsString.js' +import { propsToJSXString } from '../../../utilities/jsx/jsx.js' +import { $createBlockNode, $isBlockNode, BlockNode } from './nodes/BlocksNode.js' + +function createTagRegexes(tagName: string) { + const escapedTagName = tagName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + return { + regExpEnd: new RegExp(`|<${escapedTagName}[^>]*?/>`, 'i'), + regExpStart: new RegExp(`<(${escapedTagName})([^>]*?)\\s*(/?)>`, 'i'), + } +} +export const getBlockMarkdownTransformers = ({ + blocks, +}: { + blocks: ClientBlock[] +}): ((props: { + allNodes: Array | LexicalNodeReplacement> + allTransformers: Transformer[] +}) => MultilineElementTransformer)[] => { + if (!blocks?.length) { + return [] + } + + const transformers: ((props: { + allNodes: Array | LexicalNodeReplacement> + allTransformers: Transformer[] + }) => MultilineElementTransformer)[] = [] + + for (const block of blocks) { + if (!block.jsx) { + continue + } + const regex = createTagRegexes(block.slug) + transformers.push(({ allNodes, allTransformers }) => ({ + type: 'multiline-element', + dependencies: [BlockNode], + export: (node) => { + if (!$isBlockNode(node)) { + return null + } + if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) { + return null + } + + const nodeFields = node.getFields() + const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers) + + const exportResult = block.jsx!.export({ + fields: nodeFields, + lexicalToMarkdown, + }) + if (exportResult === false) { + return null + } + if (typeof exportResult === 'string') { + return exportResult + } + + if (exportResult?.children?.length) { + return `<${nodeFields.blockType}${exportResult.props ? ' ' + propsToJSXString({ props: exportResult.props }) : ''}>\n ${exportResult.children}\n` + } + + return `<${nodeFields.blockType}${exportResult.props ? ' ' + propsToJSXString({ props: exportResult.props }) : ''}/>` + }, + regExpEnd: block.jsx?.customEndRegex ?? regex.regExpEnd, + regExpStart: block.jsx?.customStartRegex ?? 
regex.regExpStart, + replace: (rootNode, children, openMatch, closeMatch, linesInBetween) => { + if (block?.jsx?.import) { + if (!linesInBetween) { + // convert children to linesInBetween + let line = '' + if (children) { + for (const child of children) { + line += child.getTextContent() + } + } + + linesInBetween = [line] + } + + const childrenString = linesInBetween.join('\n').trim() + + const propsString: null | string = openMatch?.length > 2 ? openMatch[2]?.trim() : null + + const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers) + + const blockFields = block.jsx.import({ + children: childrenString, + closeMatch: closeMatch as RegExpMatchArray, + htmlToLexical: null, // TODO + markdownToLexical, + openMatch: openMatch as RegExpMatchArray, + props: propsString + ? extractPropsFromJSXPropsString({ + propsString, + }) + : {}, + }) + if (blockFields === false) { + return false + } + + const node = $createBlockNode({ + blockType: block.slug, + ...blockFields, + blockName: blockFields.blockName || '', + }) + if (node) { + rootNode.append(node) + } + + return + } + return false // Run next transformer + }, + })) + } + + return transformers +} + +export function getMarkdownToLexical( + allNodes: Array | LexicalNodeReplacement>, + allTransformers: Transformer[], +): (args: { markdown: string }) => SerializedEditorState { + const markdownToLexical = ({ markdown }: { markdown: string }): SerializedEditorState => { + const headlessEditor = createHeadlessEditor({ + nodes: allNodes, + }) + + headlessEditor.update( + () => { + $convertFromMarkdownString(markdown, allTransformers) + }, + { discrete: true }, + ) + + const editorJSON = headlessEditor.getEditorState().toJSON() + + return editorJSON + } + return markdownToLexical +} + +export function getLexicalToMarkdown( + allNodes: Array | LexicalNodeReplacement>, + allTransformers: Transformer[], +): (args: { editorState: Record }) => string { + const lexicalToMarkdown = ({ editorState }: { editorState: Record }): string => { + const headlessEditor = createHeadlessEditor({ + nodes: allNodes, + }) + + try { + headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState as any)) // This should commit the editor state immediately + } catch (e) { + console.error('getLexicalToMarkdown: ERROR parsing editor state', e) + } + + let markdown: string = '' + headlessEditor.getEditorState().read(() => { + markdown = $convertToMarkdownString(allTransformers) + }) + + return markdown + } + return lexicalToMarkdown +} diff --git a/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx b/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx index e8016f3b2..829315c8e 100644 --- a/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx +++ b/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx @@ -18,7 +18,6 @@ import { useEffect, useState } from 'react' import type { PluginComponent } from '../../../typesClient.js' import type { BlockFields, BlockFieldsOptionalID } from '../../server/nodes/BlocksNode.js' -import type { BlocksFeatureClientProps } from '../index.js' import { useEditorConfigContext } from '../../../../lexical/config/client/EditorConfigProvider.js' import { useLexicalDrawer } from '../../../../utilities/fieldsDrawer/useLexicalDrawer.js' @@ -28,7 +27,7 @@ import { INSERT_BLOCK_COMMAND, INSERT_INLINE_BLOCK_COMMAND } from './commands.js export type InsertBlockPayload = BlockFieldsOptionalID -export const BlocksPlugin: PluginComponent = () => { +export const 
BlocksPlugin: PluginComponent = () => { const [editor] = useLexicalComposerContext() const [targetNodeKey, setTargetNodeKey] = useState(null) diff --git a/packages/richtext-lexical/src/features/blocks/server/index.ts b/packages/richtext-lexical/src/features/blocks/server/index.ts index c357693ff..d7005b1f0 100644 --- a/packages/richtext-lexical/src/features/blocks/server/index.ts +++ b/packages/richtext-lexical/src/features/blocks/server/index.ts @@ -1,13 +1,12 @@ -import type { Block, BlocksField, Config, Field, FieldSchemaMap } from 'payload' +import type { Block, BlocksField, Config, FieldSchemaMap } from 'payload' import { fieldsToJSONSchema, sanitizeFields } from 'payload' -import type { BlocksFeatureClientProps } from '../client/index.js' - import { createServerFeature } from '../../../utilities/createServerFeature.js' import { createNode } from '../../typeUtilities.js' import { blockPopulationPromiseHOC } from './graphQLPopulationPromise.js' import { i18n } from './i18n.js' +import { getBlockMarkdownTransformers } from './markdownTransformer.js' import { ServerBlockNode } from './nodes/BlocksNode.js' import { ServerInlineBlockNode } from './nodes/InlineBlocksNode.js' import { blockValidationHOC } from './validate.js' @@ -17,17 +16,8 @@ export type BlocksFeatureProps = { inlineBlocks?: Block[] } -export const BlocksFeature = createServerFeature< - BlocksFeatureProps, - BlocksFeatureProps, - BlocksFeatureClientProps ->({ +export const BlocksFeature = createServerFeature({ feature: async ({ config: _config, isRoot, parentIsLocalized, props }) => { - // Build clientProps - const clientProps: BlocksFeatureClientProps = { - clientBlockSlugs: [], - clientInlineBlockSlugs: [], - } const validRelationships = _config.collections.map((c) => c.slug) || [] const sanitized = await sanitizeFields({ @@ -52,12 +42,8 @@ export const BlocksFeature = createServerFeature< props.blocks = (sanitized[0] as BlocksField).blocks props.inlineBlocks = (sanitized[1] as BlocksField).blocks - clientProps.clientBlockSlugs = props.blocks.map((block) => block.slug) - clientProps.clientInlineBlockSlugs = props.inlineBlocks.map((block) => block.slug) - return { ClientFeature: '@payloadcms/richtext-lexical/client#BlocksFeatureClient', - clientFeatureProps: clientProps, generatedTypes: { modifyOutputSchema: ({ collectionIDFieldTypes, @@ -163,6 +149,11 @@ export const BlocksFeature = createServerFeature< return schemaMap }, i18n, + markdownTransformers: getBlockMarkdownTransformers({ + blocks: props.blocks, + inlineBlocks: props.inlineBlocks, + }), + nodes: [ createNode({ // @ts-expect-error - TODO: fix this diff --git a/packages/richtext-lexical/src/features/blocks/server/linesFromMatchToContentAndPropsString.ts b/packages/richtext-lexical/src/features/blocks/server/linesFromMatchToContentAndPropsString.ts new file mode 100644 index 000000000..ef3c81961 --- /dev/null +++ b/packages/richtext-lexical/src/features/blocks/server/linesFromMatchToContentAndPropsString.ts @@ -0,0 +1,175 @@ +export function linesFromStartToContentAndPropsString({ + isEndOptional, + lines, + regexpEndRegex, + startLineIndex, + startMatch, + trimChildren, +}: { + isEndOptional?: boolean + lines: string[] + regexpEndRegex?: RegExp + startLineIndex: number + startMatch: RegExpMatchArray + trimChildren?: boolean +}): { + /** + * The matched string after the end match, in the same line as the end match. Useful for inline matches. + */ + afterEndLine: string + /** + * The matched string before the start match, in the same line as the start match. 
Useful for inline matches. + */ + beforeStartLine: string + content: string + endLineIndex: number + endlineLastCharIndex: number + propsString: string +} { + let propsString = '' + let content = '' + const linesCopy = lines.slice(startLineIndex) + + let isWithinContent = false // If false => is within prop + let contentSubTagStartAmount = 0 + + let bracketCount = 0 + let quoteChar: null | string = null + let isSelfClosing = false + let isWithinCodeBlockAmount = 0 + + const beforeStartLine = linesCopy[0].slice(0, startMatch.index) + let endlineLastCharIndex = 0 + + let endLineIndex = startLineIndex + + mainLoop: for (let lineIndex = 0; lineIndex < linesCopy.length; lineIndex++) { + const line = trimChildren ? linesCopy[lineIndex].trim() : linesCopy[lineIndex] + let amountOfBeginningSpacesRemoved = 0 + if (trimChildren) { + for (let i = 0; i < linesCopy[lineIndex].length; i++) { + if (linesCopy[lineIndex][i] === ' ') { + amountOfBeginningSpacesRemoved++ + } else { + break + } + } + } + + let charIndex = 0 + + if (lineIndex === 0) { + charIndex = (startMatch.index ?? 0) + startMatch[0].length - amountOfBeginningSpacesRemoved // We need to also loop over the ">" in something like "" in order to later set isWithinContent to true + } + + while (charIndex < line.length) { + const char = line[charIndex] + const nextChar = line[charIndex + 1] + + if (!isWithinContent) { + if (char === '{' && !quoteChar) { + bracketCount++ + } else if (char === '}' && !quoteChar) { + bracketCount-- + } else if ((char === '"' || char === "'") && !quoteChar) { + quoteChar = char + } else if (char === quoteChar) { + quoteChar = null + } + + if (char === '/' && nextChar === '>' && bracketCount === 0 && !quoteChar) { + isSelfClosing = true + endLineIndex = lineIndex + endlineLastCharIndex = charIndex + 2 + + break mainLoop + } else if (char === '>' && bracketCount === 0 && !quoteChar) { + isWithinContent = true + charIndex++ + continue + } + + propsString += char + } else { + if (char === '`') { + isWithinCodeBlockAmount++ + } + + if (isWithinCodeBlockAmount % 2 === 0) { + if (char === '<' && nextChar === '/') { + contentSubTagStartAmount-- + + if (contentSubTagStartAmount < 0) { + if (content[content.length - 1] === '\n') { + content = content.slice(0, -1) // Remove the last newline + } + endLineIndex = lineIndex + // Calculate endlineLastCharIndex by finding ">" in line + for (let i = charIndex; i < line.length; i++) { + if (line[i] === '>') { + endlineLastCharIndex = i + 1 + + break + } + } + break mainLoop + } + } else if (char === '/' && nextChar === '>') { + contentSubTagStartAmount-- + + if (contentSubTagStartAmount < 0) { + if (content[content.length - 1] === '\n') { + content = content.slice(0, -1) // Remove the last newline + } + endLineIndex = lineIndex + endlineLastCharIndex = charIndex + 2 + break mainLoop + } + } else if (char === '<' && nextChar !== '/') { + contentSubTagStartAmount++ + } + } + + content += char + } + + charIndex++ + } + + if (isWithinContent) { + if (content?.length > 0 && lineIndex > 0) { + content += '\n' + } + } else { + propsString += '\n' + } + + if (regexpEndRegex && contentSubTagStartAmount < 0) { + // If 0 and in same line where it got lowered to 0 then this is not the match we are looking for + const match = line.match(regexpEndRegex) + if (match?.index !== undefined) { + endLineIndex = lineIndex + endlineLastCharIndex = match.index + match[0].length - 1 + break + } + } + + if (lineIndex === linesCopy.length - 1 && !isEndOptional && !isSelfClosing) { + throw new Error('End match 
not found for lines ' + lines.join('\n')) + } + } + + // Replace all \n with spaces + propsString = propsString.replace(/\n/g, ' ').trim() + + const afterEndLine = linesCopy[endLineIndex].trim().slice(endlineLastCharIndex) + + return { + afterEndLine, + beforeStartLine, + content, + endLineIndex: startLineIndex + endLineIndex, + endlineLastCharIndex, + propsString, + } +} diff --git a/packages/richtext-lexical/src/features/blocks/server/markdownTransformer.ts b/packages/richtext-lexical/src/features/blocks/server/markdownTransformer.ts new file mode 100644 index 000000000..61d22dd5c --- /dev/null +++ b/packages/richtext-lexical/src/features/blocks/server/markdownTransformer.ts @@ -0,0 +1,498 @@ +import type { + MultilineElementTransformer, + TextMatchTransformer, + Transformer, +} from '@lexical/markdown' +import type { ElementNode, SerializedEditorState, SerializedLexicalNode } from 'lexical' +import type { Block } from 'payload' + +import { createHeadlessEditor } from '@lexical/headless' +import { $convertToMarkdownString } from '@lexical/markdown' +import { $parseSerializedNode } from 'lexical' + +import type { NodeWithHooks } from '../../typesServer.js' + +import { getEnabledNodesFromServerNodes } from '../../../lexical/nodes/index.js' +import { extractPropsFromJSXPropsString } from '../../../utilities/jsx/extractPropsFromJSXPropsString.js' +import { propsToJSXString } from '../../../utilities/jsx/jsx.js' +import { $convertFromMarkdownString } from '../../../utilities/jsx/lexicalMarkdownCopy.js' +import { linesFromStartToContentAndPropsString } from './linesFromMatchToContentAndPropsString.js' +import { $createServerBlockNode, $isServerBlockNode, ServerBlockNode } from './nodes/BlocksNode.js' +import { + $createServerInlineBlockNode, + $isServerInlineBlockNode, + ServerInlineBlockNode, +} from './nodes/InlineBlocksNode.js' + +export function createTagRegexes(tagName: string) { + const escapedTagName = tagName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + + // Regex components + const openingTag = `<${escapedTagName}` + const closingTag = `` + + // Assembled regex patterns + const startPattern = `${openingTag}(?=\\s|>|$)` // Only match the tag name + const endPattern = `${closingTag}${optionalWhitespace}${mandatoryClosingBracket}` + + return { + regExpEnd: new RegExp(endPattern, 'i'), + regExpStart: new RegExp(startPattern, 'i'), + } +} +export const getBlockMarkdownTransformers = ({ + blocks, + inlineBlocks, +}: { + blocks: Block[] + inlineBlocks: Block[] +}): ((props: { + allNodes: Array + allTransformers: Transformer[] +}) => MultilineElementTransformer | TextMatchTransformer)[] => { + if (!blocks?.length && !inlineBlocks?.length) { + return [] + } + + let transformers: ((props: { + allNodes: Array + allTransformers: Transformer[] + }) => MultilineElementTransformer | TextMatchTransformer)[] = [] + + if (blocks?.length) { + for (const block of blocks) { + const transformer = getMarkdownTransformerForBlock(block, false) + + if (transformer) { + transformers = transformers.concat(transformer) + } + } + } + + if (inlineBlocks?.length) { + for (const block of inlineBlocks) { + const transformer = getMarkdownTransformerForBlock(block, true) + + if (transformer) { + transformers = transformers.concat(transformer) + } + } + } + + return transformers +} + +function getMarkdownTransformerForBlock( + block: Block, + isInlineBlock: boolean, +): Array< + (props: { + allNodes: Array + allTransformers: Transformer[] + }) => MultilineElementTransformer | TextMatchTransformer +> | null { + if 
(!block.jsx) { + return null + } + const regex = createTagRegexes(block.slug) + const toReturn: Array< + (props: { + allNodes: Array + allTransformers: Transformer[] + }) => MultilineElementTransformer | TextMatchTransformer + > = [] + + if (isInlineBlock) { + toReturn.push(({ allNodes, allTransformers }) => ({ + type: 'text-match', + dependencies: [ServerInlineBlockNode], + export: (node) => { + if (!$isServerInlineBlockNode(node)) { + return null + } + + if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) { + return null + } + + const nodeFields = node.getFields() + const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers) + + const exportResult = block.jsx!.export({ + fields: nodeFields, + lexicalToMarkdown, + }) + if (exportResult === false) { + return null + } + if (typeof exportResult === 'string') { + return exportResult + } + + const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0 + const props = exportResult.props ?? {} + + if (exportResult?.children?.length) { + return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>${exportResult.children}` + } + + return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>` + }, + getEndIndex: (node, match) => { + const { endlineLastCharIndex } = linesFromStartToContentAndPropsString({ + isEndOptional: false, + lines: [node.getTextContent()], + regexpEndRegex: regex.regExpEnd, + startLineIndex: 0, + startMatch: match, + trimChildren: false, + }) + + return endlineLastCharIndex + }, + importRegExp: block.jsx?.customStartRegex ?? regex.regExpStart, + regExp: /___ignoreignoreignore___/g, + replace(node, match) { + const { content, propsString } = linesFromStartToContentAndPropsString({ + isEndOptional: false, + lines: [node.getTextContent()], + regexpEndRegex: regex.regExpEnd, + startLineIndex: 0, + startMatch: { + ...match, + index: 0, + }, + trimChildren: false, + }) + + if (!block?.jsx?.import) { + // No multiline transformer handled this line successfully + return + } + + const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers) + + const blockFields = block.jsx.import({ + children: content, + closeMatch: null, + htmlToLexical: null, // TODO + markdownToLexical, + openMatch: match, + props: propsString + ? extractPropsFromJSXPropsString({ + propsString, + }) + : {}, + }) + if (blockFields === false) { + return + } + + const inlineBlockNode = $createServerInlineBlockNode({ + blockType: block.slug, + ...(blockFields as any), + }) + + node.replace(inlineBlockNode) + }, + })) + + return toReturn + } + + toReturn.push(({ allNodes, allTransformers }) => ({ + dependencies: [ServerBlockNode], + export: (node) => { + if (!$isServerBlockNode(node)) { + return null + } + + if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) { + return null + } + + const nodeFields = node.getFields() + const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers) + + const exportResult = block.jsx!.export({ + fields: nodeFields, + lexicalToMarkdown, + }) + if (exportResult === false) { + return null + } + if (typeof exportResult === 'string') { + return exportResult + } + + const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0 + const props = exportResult.props ?? {} + + if (exportResult?.children?.length) { + const children = exportResult.children + let sanitizedChildren = '' + + // Ensure it has a leftpad of at least 2 spaces. 
The data is saved without those spaces, so we can just blindly add it to every child + if (children.includes('\n')) { + for (const child of children.split('\n')) { + let sanitizedChild = '' + if (!block?.jsx?.doNotTrimChildren && child !== '') { + sanitizedChild = ' ' + } + sanitizedChild += child + '\n' + + sanitizedChildren += sanitizedChild + } + } else { + sanitizedChildren = (block?.jsx?.doNotTrimChildren ? '' : ' ') + children + '\n' + } + + return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>\n${sanitizedChildren}` + } + + return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>` + }, + handleImportAfterStartMatch: block.jsx?.customEndRegex + ? undefined + : ({ lines, rootNode, startLineIndex, startMatch, transformer }) => { + const regexpEndRegex: RegExp | undefined = + typeof transformer.regExpEnd === 'object' && 'regExp' in transformer.regExpEnd + ? transformer.regExpEnd.regExp + : transformer.regExpEnd + + const isEndOptional = + transformer.regExpEnd && + typeof transformer.regExpEnd === 'object' && + 'optional' in transformer.regExpEnd + ? transformer.regExpEnd.optional + : !transformer.regExpEnd + + const { + afterEndLine, + beforeStartLine, + content: unsanitizedContent, + endLineIndex, + propsString, + } = linesFromStartToContentAndPropsString({ + isEndOptional, + lines, + regexpEndRegex, + startLineIndex, + startMatch, + trimChildren: false, + }) + + let content = '' + + if (block?.jsx?.doNotTrimChildren) { + content = unsanitizedContent.endsWith('\n') + ? unsanitizedContent.slice(0, -1) + : unsanitizedContent + } else { + // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child + if (unsanitizedContent.includes('\n')) { + const split = unsanitizedContent.split('\n') + let index = 0 + for (const child of split) { + index++ + + if (child.startsWith(' ')) { + content += child.slice(2) + } else { + // If one child is misaligned, skip aligning completely, unless it's just empty + if (child === '') { + content += child + } else { + content = unsanitizedContent.endsWith('\n') + ? unsanitizedContent.slice(0, -1) + : unsanitizedContent + break + } + } + + content += index === split.length ? '' : '\n' + } + } else { + content = + (!unsanitizedContent.startsWith(' ') + ? unsanitizedContent + : unsanitizedContent.slice(2)) + '\n' + } + } + + if (!block?.jsx?.import) { + // No multiline transformer handled this line successfully + return [false, startLineIndex] + } + + const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers) + + const blockFields = block.jsx.import({ + children: content, + closeMatch: null, + htmlToLexical: null, // TODO + markdownToLexical, + openMatch: startMatch, + props: propsString + ? extractPropsFromJSXPropsString({ + propsString, + }) + : {}, + }) + if (blockFields === false) { + return [false, startLineIndex] + } + + const node = $createServerBlockNode({ + blockType: block.slug, + ...blockFields, + } as any) + + if (node) { + // Now handle beforeStartLine and afterEndLine. If those are not empty, we need to add them as text nodes before and after the block node. + // However, those themselves can contain other markdown matches, so we need to parse them as well. + // Example where this is needed: "Hello inline code test." 
+ let prevNodes: null | SerializedLexicalNode[] = null + let nextNodes: null | SerializedLexicalNode[] = null + // TODO: Might not need this prevNodes and nextNodes handling if inline nodes are handled by textmatch transformers + + if (beforeStartLine?.length) { + prevNodes = markdownToLexical({ markdown: beforeStartLine })?.root?.children ?? [] + + if (prevNodes?.length) { + rootNode.append($parseSerializedNode(prevNodes[0])) + } + } + + rootNode.append(node) + + if (afterEndLine?.length) { + nextNodes = markdownToLexical({ markdown: afterEndLine })?.root?.children ?? [] + const lastChild = rootNode.getChildren()[rootNode.getChildren().length - 1] + + const children = ($parseSerializedNode(nextNodes[0]) as ElementNode)?.getChildren() + if (children?.length) { + for (const child of children) { + ;(lastChild as ElementNode).append(child) + } + } + } + } + + return [true, endLineIndex] + }, + regExpEnd: block.jsx?.customEndRegex ?? regex.regExpEnd, + regExpStart: block.jsx?.customStartRegex ?? regex.regExpStart, + // This replace is ONLY run for ``` code blocks (so any blocks with custom start and end regexes). For others, we use the special JSX handling above: + type: 'multiline-element', + replace: (rootNode, children, openMatch, closeMatch, linesInBetween) => { + if (block?.jsx?.import) { + if (!linesInBetween) { + // convert children to linesInBetween + let line = '' + if (children) { + for (const child of children) { + line += child.getTextContent() + } + } + + linesInBetween = [line] + } + + let childrenString = '' + if (block?.jsx?.doNotTrimChildren) { + // Do not trim, but remove empty lines + childrenString = linesInBetween.filter((line) => line.trim().length > 0).join('\n') + } else { + childrenString = linesInBetween.join('\n').trim() + } + + const propsString: null | string = openMatch?.length > 1 ? openMatch[1]?.trim() : null + + const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers) + + const blockFields = block.jsx.import({ + children: childrenString, + closeMatch: closeMatch as RegExpMatchArray, + htmlToLexical: null, // TODO + markdownToLexical, + openMatch: openMatch as RegExpMatchArray, + props: propsString + ? 
extractPropsFromJSXPropsString({ + propsString, + }) + : {}, + }) + if (blockFields === false) { + return false + } + + const node = $createServerBlockNode({ + blockType: block.slug, + ...blockFields, + } as any) + + if (node) { + rootNode.append(node) + } + + return + } + return false // Run next transformer + }, + })) + + return toReturn +} + +export function getMarkdownToLexical( + allNodes: Array, + allTransformers: Transformer[], +): (args: { markdown: string }) => SerializedEditorState { + const markdownToLexical = ({ markdown }: { markdown: string }): SerializedEditorState => { + const headlessEditor = createHeadlessEditor({ + nodes: getEnabledNodesFromServerNodes({ + nodes: allNodes, + }), + }) + + headlessEditor.update( + () => { + $convertFromMarkdownString(markdown, allTransformers) + }, + { discrete: true }, + ) + + return headlessEditor.getEditorState().toJSON() + } + return markdownToLexical +} + +export function getLexicalToMarkdown( + allNodes: Array, + allTransformers: Transformer[], +): (args: { editorState: Record }) => string { + const lexicalToMarkdown = ({ editorState }: { editorState: Record }): string => { + const headlessEditor = createHeadlessEditor({ + nodes: getEnabledNodesFromServerNodes({ + nodes: allNodes, + }), + }) + + try { + headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState as any)) // This should commit the editor state immediately + } catch (e) { + console.error('getLexicalToMarkdown: ERROR parsing editor state', e) + } + + let markdown: string = '' + headlessEditor.getEditorState().read(() => { + markdown = $convertToMarkdownString(allTransformers) + }) + + return markdown + } + return lexicalToMarkdown +} diff --git a/packages/richtext-lexical/src/features/experimental_table/client/index.ts b/packages/richtext-lexical/src/features/experimental_table/client/index.ts index ec397a7f4..454cccfc0 100644 --- a/packages/richtext-lexical/src/features/experimental_table/client/index.ts +++ b/packages/richtext-lexical/src/features/experimental_table/client/index.ts @@ -6,6 +6,7 @@ import { TableIcon } from '../../../lexical/ui/icons/Table/index.js' import { createClientFeature } from '../../../utilities/createClientFeature.js' import { slashMenuBasicGroupWithItems } from '../../shared/slashMenu/basicGroup.js' import { toolbarAddDropdownGroupWithItems } from '../../shared/toolbar/addDropdownGroup.js' +import { TableMarkdownTransformer } from '../markdownTransformer.js' import { TableActionMenuPlugin } from './plugins/TableActionMenuPlugin/index.js' import { TableCellResizerPlugin } from './plugins/TableCellResizerPlugin/index.js' import { TableHoverActionsPlugin } from './plugins/TableHoverActionsPlugin/index.js' @@ -16,6 +17,7 @@ import { } from './plugins/TablePlugin/index.js' export const TableFeatureClient = createClientFeature({ + markdownTransformers: [TableMarkdownTransformer], nodes: [TableNode, TableCellNode, TableRowNode], plugins: [ { diff --git a/packages/richtext-lexical/src/features/experimental_table/markdownTransformer.ts b/packages/richtext-lexical/src/features/experimental_table/markdownTransformer.ts new file mode 100644 index 000000000..baa83f05d --- /dev/null +++ b/packages/richtext-lexical/src/features/experimental_table/markdownTransformer.ts @@ -0,0 +1,171 @@ +import type { ElementTransformer, Transformer } from '@lexical/markdown' +import type { LexicalNode } from 'lexical' + +import { $convertFromMarkdownString, $convertToMarkdownString } from '@lexical/markdown' +import { + $createTableCellNode, + 
$createTableNode, + $createTableRowNode, + $isTableCellNode, + $isTableNode, + $isTableRowNode, + TableCellHeaderStates, + TableCellNode, + TableNode, + TableRowNode, +} from '@lexical/table' +import { $isParagraphNode, $isTextNode } from 'lexical' + +// Very primitive table setup +const TABLE_ROW_REG_EXP = /^\|(.+)\|\s?$/ +// eslint-disable-next-line regexp/no-unused-capturing-group +const TABLE_ROW_DIVIDER_REG_EXP = /^(\| ?:?-*:? ?)+\|\s?$/ + +export const TableMarkdownTransformer: (props: { + allTransformers: Transformer[] +}) => ElementTransformer = ({ allTransformers }) => ({ + type: 'element', + dependencies: [TableNode, TableRowNode, TableCellNode], + export: (node: LexicalNode) => { + if (!$isTableNode(node)) { + return null + } + + const output: string[] = [] + + for (const row of node.getChildren()) { + const rowOutput: string[] = [] + if (!$isTableRowNode(row)) { + continue + } + + let isHeaderRow = false + for (const cell of row.getChildren()) { + // It's TableCellNode, so it's just to make flow happy + if ($isTableCellNode(cell)) { + rowOutput.push($convertToMarkdownString(allTransformers, cell).replace(/\n/g, '\\n')) + if (cell.__headerState === TableCellHeaderStates.ROW) { + isHeaderRow = true + } + } + } + + output.push(`| ${rowOutput.join(' | ')} |`) + if (isHeaderRow) { + output.push(`| ${rowOutput.map((_) => '---').join(' | ')} |`) + } + } + + return output.join('\n') + }, + regExp: TABLE_ROW_REG_EXP, + replace: (parentNode, _1, match) => { + // Header row + if (TABLE_ROW_DIVIDER_REG_EXP.test(match[0])) { + const table = parentNode.getPreviousSibling() + if (!table || !$isTableNode(table)) { + return + } + + const rows = table.getChildren() + const lastRow = rows[rows.length - 1] + if (!lastRow || !$isTableRowNode(lastRow)) { + return + } + + // Add header state to row cells + lastRow.getChildren().forEach((cell) => { + if (!$isTableCellNode(cell)) { + return + } + cell.setHeaderStyles(TableCellHeaderStates.ROW, TableCellHeaderStates.ROW) + }) + + // Remove line + parentNode.remove() + return + } + + const matchCells = mapToTableCells(match[0], allTransformers) + + if (matchCells == null) { + return + } + + const rows = [matchCells] + let sibling = parentNode.getPreviousSibling() + let maxCells = matchCells.length + + while (sibling) { + if (!$isParagraphNode(sibling)) { + break + } + + if (sibling.getChildrenSize() !== 1) { + break + } + + const firstChild = sibling.getFirstChild() + + if (!$isTextNode(firstChild)) { + break + } + + const cells = mapToTableCells(firstChild.getTextContent(), allTransformers) + + if (cells == null) { + break + } + + maxCells = Math.max(maxCells, cells.length) + rows.unshift(cells) + const previousSibling = sibling.getPreviousSibling() + sibling.remove() + sibling = previousSibling + } + + const table = $createTableNode() + + for (const cells of rows) { + const tableRow = $createTableRowNode() + table.append(tableRow) + + for (let i = 0; i < maxCells; i++) { + tableRow.append(i < cells.length ? cells[i] : $createTableCell('', allTransformers)) + } + } + + const previousSibling = parentNode.getPreviousSibling() + if ($isTableNode(previousSibling) && getTableColumnsSize(previousSibling) === maxCells) { + previousSibling.append(...table.getChildren()) + parentNode.remove() + } else { + parentNode.replace(table) + } + + table.selectEnd() + }, +}) + +function getTableColumnsSize(table: TableNode) { + const row = table.getFirstChild() + return $isTableRowNode(row) ? 
row.getChildrenSize() : 0 +} + +const $createTableCell = (textContent: string, allTransformers: Transformer[]): TableCellNode => { + textContent = textContent.replace(/\\n/g, '\n') + const cell = $createTableCellNode(TableCellHeaderStates.NO_STATUS) + $convertFromMarkdownString(textContent, allTransformers, cell) + return cell +} + +const mapToTableCells = ( + textContent: string, + allTransformers: Transformer[], +): Array | null => { + const match = textContent.match(TABLE_ROW_REG_EXP) + if (!match || !match[1]) { + return null + } + return match[1].split('|').map((text) => $createTableCell(text, allTransformers)) +} diff --git a/packages/richtext-lexical/src/features/experimental_table/server/index.ts b/packages/richtext-lexical/src/features/experimental_table/server/index.ts index dc9916c2a..5579d1a28 100644 --- a/packages/richtext-lexical/src/features/experimental_table/server/index.ts +++ b/packages/richtext-lexical/src/features/experimental_table/server/index.ts @@ -12,6 +12,7 @@ import { sanitizeFields } from 'payload' import { createServerFeature } from '../../../utilities/createServerFeature.js' import { convertLexicalNodesToHTML } from '../../converters/html/converter/index.js' import { createNode } from '../../typeUtilities.js' +import { TableMarkdownTransformer } from '../markdownTransformer.js' const fields: Field[] = [ { @@ -70,6 +71,7 @@ export const EXPERIMENTAL_TableFeature = createServerFeature({ return schemaMap }, + markdownTransformers: [TableMarkdownTransformer], nodes: [ createNode({ converters: { diff --git a/packages/richtext-lexical/src/features/typesClient.ts b/packages/richtext-lexical/src/features/typesClient.ts index 7773c0431..f7b3bb35e 100644 --- a/packages/richtext-lexical/src/features/typesClient.ts +++ b/packages/richtext-lexical/src/features/typesClient.ts @@ -1,15 +1,11 @@ import type { Transformer } from '@lexical/markdown' -import type { - Klass, - LexicalEditor, - LexicalNode, - LexicalNodeReplacement, - SerializedEditorState, -} from 'lexical' +import type { Klass, LexicalEditor, LexicalNode, LexicalNodeReplacement } from 'lexical' +import type { RichTextFieldClient } from 'payload' import type React from 'react' import type { ClientEditorConfig } from '../lexical/config/types.js' import type { SlashMenuGroup } from '../lexical/plugins/SlashMenu/LexicalTypeaheadMenuPlugin/types.js' +import type { FeatureClientSchemaMap } from '../types.js' import type { ToolbarGroup } from './toolbars/types.js' export type FeatureProviderProviderClient< @@ -30,10 +26,13 @@ export type FeatureProviderClient< clientFeatureProps: BaseClientFeatureProps feature: | ((props: { + featureClientSchemaMap: FeatureClientSchemaMap /** unSanitizedEditorConfig.features, but mapped */ featureProviderMap: ClientFeatureProviderMap + field?: RichTextFieldClient // other resolved features, which have been loaded before this one. All features declared in 'dependencies' should be available here resolvedFeatures: ResolvedClientFeatureMap + schemaPath: string // unSanitized EditorConfig, unSanitizedEditorConfig: ClientEditorConfig }) => ClientFeature) @@ -49,7 +48,13 @@ export type PluginComponentWithAnchor = React.FC<{ }> export type ClientFeature = { - markdownTransformers?: Transformer[] + markdownTransformers?: ( + | ((props: { + allNodes: Array | LexicalNodeReplacement> + allTransformers: Transformer[] + }) => Transformer) + | Transformer + )[] nodes?: Array | LexicalNodeReplacement> /** * Plugins are react components which get added to the editor. 
You can use them to interact with lexical, e.g. to create a command which creates a node, or opens a modal, or some other more "outside" functionality @@ -198,6 +203,8 @@ export type SanitizedPlugin = export type SanitizedClientFeatures = { /** The keys of all enabled features */ enabledFeatures: string[] + markdownTransformers: Transformer[] + /** * Plugins are react components which get added to the editor. You can use them to interact with lexical, e.g. to create a command which creates a node, or opens a modal, or some other more "outside" functionality */ @@ -219,8 +226,5 @@ export type SanitizedClientFeatures = { groups: SlashMenuGroup[] } } & Required< - Pick< - ResolvedClientFeature, - 'markdownTransformers' | 'nodes' | 'providers' | 'toolbarFixed' | 'toolbarInline' - > + Pick, 'nodes' | 'providers' | 'toolbarFixed' | 'toolbarInline'> > diff --git a/packages/richtext-lexical/src/features/typesServer.ts b/packages/richtext-lexical/src/features/typesServer.ts index c002986ca..6684ba9d8 100644 --- a/packages/richtext-lexical/src/features/typesServer.ts +++ b/packages/richtext-lexical/src/features/typesServer.ts @@ -346,7 +346,10 @@ export type ServerFeature = { * In order to access these translations, you would use `i18n.t('lexical:horizontalRule:label')`. */ i18n?: Partial - markdownTransformers?: Transformer[] + markdownTransformers?: ( + | ((props: { allNodes: Array; allTransformers: Transformer[] }) => Transformer) + | Transformer + )[] nodes?: Array /** Props which were passed into your feature will have to be passed here. This will allow them to be used / read in other places of the code, e.g. wherever you can use useEditorConfigContext */ @@ -411,6 +414,7 @@ export type SanitizedServerFeatures = { > graphQLPopulationPromises: Map> hooks: RichTextHooks + markdownTransformers: Transformer[] nodeHooks?: { afterChange?: Map>> afterRead?: Map>> @@ -419,4 +423,4 @@ export type SanitizedServerFeatures = { } /** The node types mapped to their populationPromises */ /** The node types mapped to their validations */ validations: Map> -} & Required, 'i18n' | 'markdownTransformers' | 'nodes'>> +} & Required, 'i18n' | 'nodes'>> diff --git a/packages/richtext-lexical/src/field/index.tsx b/packages/richtext-lexical/src/field/index.tsx index 36d800554..271de5147 100644 --- a/packages/richtext-lexical/src/field/index.tsx +++ b/packages/richtext-lexical/src/field/index.tsx @@ -1,5 +1,7 @@ 'use client' +import type { RichTextFieldClient } from 'payload' + import { ShimmerEffect } from '@payloadcms/ui' import React, { lazy, Suspense, useEffect, useState } from 'react' @@ -16,7 +18,14 @@ const RichTextEditor = lazy(() => ) export const RichTextField: React.FC = (props) => { - const { admin = {}, clientFeatures, lexicalEditorConfig } = props + const { + admin = {}, + clientFeatures, + featureClientSchemaMap, + field, + lexicalEditorConfig, + schemaPath, + } = props const [finalSanitizedEditorConfig, setFinalSanitizedEditorConfig] = useState(null) @@ -41,6 +50,9 @@ export const RichTextField: React.FC = (props) => { : defaultEditorLexicalConfig const resolvedClientFeatures = loadClientFeatures({ + featureClientSchemaMap, + field: field as RichTextFieldClient, + schemaPath: schemaPath ?? 
field.name, unSanitizedEditorConfig: { features: featureProvidersLocal, lexical: finalLexicalEditorConfig, @@ -50,7 +62,15 @@ export const RichTextField: React.FC = (props) => { setFinalSanitizedEditorConfig( sanitizeClientEditorConfig(resolvedClientFeatures, finalLexicalEditorConfig, admin), ) - }, [lexicalEditorConfig, admin, finalSanitizedEditorConfig, clientFeatures]) // TODO: Optimize this and use useMemo for this in the future. This might break sub-richtext-blocks from the blocks feature. Need to investigate + }, [ + lexicalEditorConfig, + admin, + finalSanitizedEditorConfig, + clientFeatures, + featureClientSchemaMap, + field, + schemaPath, + ]) // TODO: Optimize this and use useMemo for this in the future. This might break sub-richtext-blocks from the blocks feature. Need to investigate return ( }> diff --git a/packages/richtext-lexical/src/index.ts b/packages/richtext-lexical/src/index.ts index f4004cf7c..3c1b84159 100644 --- a/packages/richtext-lexical/src/index.ts +++ b/packages/richtext-lexical/src/index.ts @@ -944,7 +944,7 @@ export type { SanitizedServerEditorConfig, ServerEditorConfig, } from './lexical/config/types.js' -export { getEnabledNodes } from './lexical/nodes/index.js' +export { getEnabledNodes, getEnabledNodesFromServerNodes } from './lexical/nodes/index.js' export type { AdapterProps } export type { @@ -977,4 +977,13 @@ export type { LexicalEditorProps, LexicalRichTextAdapter } from './types.js' export { createServerFeature } from './utilities/createServerFeature.js' export type { FieldsDrawerProps } from './utilities/fieldsDrawer/Drawer.js' + +export { extractPropsFromJSXPropsString } from './utilities/jsx/extractPropsFromJSXPropsString.js' +export { + extractFrontmatter, + frontmatterToObject, + objectToFrontmatter, + propsToJSXString, +} from './utilities/jsx/jsx.js' +export { $convertFromMarkdownString } from './utilities/jsx/lexicalMarkdownCopy.js' export { upgradeLexicalData } from './utilities/upgradeLexicalData/index.js' diff --git a/packages/richtext-lexical/src/lexical/config/client/loader.ts b/packages/richtext-lexical/src/lexical/config/client/loader.ts index cee6e7b32..0c90b1964 100644 --- a/packages/richtext-lexical/src/lexical/config/client/loader.ts +++ b/packages/richtext-lexical/src/lexical/config/client/loader.ts @@ -1,10 +1,13 @@ 'use client' +import type { RichTextFieldClient } from 'payload' + import type { ClientFeatureProviderMap, ResolvedClientFeature, ResolvedClientFeatureMap, } from '../../../features/typesClient.js' +import type { FeatureClientSchemaMap } from '../../../types.js' import type { ClientEditorConfig } from '../types.js' /** @@ -12,8 +15,14 @@ import type { ClientEditorConfig } from '../types.js' * @param unSanitizedEditorConfig */ export function loadClientFeatures({ + featureClientSchemaMap, + field, + schemaPath, unSanitizedEditorConfig, }: { + featureClientSchemaMap: FeatureClientSchemaMap + field?: RichTextFieldClient + schemaPath: string unSanitizedEditorConfig: ClientEditorConfig }): ResolvedClientFeatureMap { for (const featureProvider of unSanitizedEditorConfig.features) { @@ -46,8 +55,11 @@ export function loadClientFeatures({ const feature: Partial> = typeof featureProvider.feature === 'function' ? 
featureProvider.feature({ + featureClientSchemaMap, featureProviderMap, + field, resolvedFeatures, + schemaPath, unSanitizedEditorConfig, }) : featureProvider.feature diff --git a/packages/richtext-lexical/src/lexical/config/client/sanitize.ts b/packages/richtext-lexical/src/lexical/config/client/sanitize.ts index 9e242929c..b1c7d660f 100644 --- a/packages/richtext-lexical/src/lexical/config/client/sanitize.ts +++ b/packages/richtext-lexical/src/lexical/config/client/sanitize.ts @@ -40,7 +40,10 @@ export const sanitizeClientFeatures = ( } if (feature.nodes?.length) { - sanitized.nodes = sanitized.nodes.concat(feature.nodes) + // Important: do not use concat + for (const node of feature.nodes) { + sanitized.nodes.push(node) + } } if (feature.plugins?.length) { feature.plugins.forEach((plugin, i) => { @@ -133,9 +136,19 @@ export const sanitizeClientFeatures = ( } if (feature.markdownTransformers?.length) { - sanitized.markdownTransformers = sanitized.markdownTransformers.concat( - feature.markdownTransformers, - ) + // Important: do not use concat + for (const transformer of feature.markdownTransformers) { + if (typeof transformer === 'function') { + sanitized.markdownTransformers.push( + transformer({ + allNodes: sanitized.nodes, + allTransformers: sanitized.markdownTransformers, + }), + ) + } else { + sanitized.markdownTransformers.push(transformer) + } + } } sanitized.enabledFeatures.push(feature.key) }) diff --git a/packages/richtext-lexical/src/lexical/config/server/sanitize.ts b/packages/richtext-lexical/src/lexical/config/server/sanitize.ts index 7c035ce67..36a54bdb9 100644 --- a/packages/richtext-lexical/src/lexical/config/server/sanitize.ts +++ b/packages/richtext-lexical/src/lexical/config/server/sanitize.ts @@ -79,7 +79,10 @@ export const sanitizeServerFeatures = ( } if (feature.nodes?.length) { - sanitized.nodes = sanitized.nodes.concat(feature.nodes) + // Do not concat here. We need to keep the object reference of sanitized.nodes so that function markdown transformers of features automatically get the updated nodes + for (const node of feature.nodes) { + sanitized.nodes.push(node) + } feature.nodes.forEach((node) => { const nodeType = 'with' in node.node ? node.node.replace.getType() : node.node.getType() // TODO: Idk if this works for node replacements if (node?.graphQLPopulationPromises?.length) { @@ -113,9 +116,20 @@ export const sanitizeServerFeatures = ( } if (feature.markdownTransformers?.length) { - sanitized.markdownTransformers = sanitized.markdownTransformers.concat( - feature.markdownTransformers, - ) + // Do not concat here. 
We need to keep the object reference of feature.markdownTransformers + + for (const transformer of feature.markdownTransformers) { + if (typeof transformer === 'function') { + sanitized.markdownTransformers.push( + transformer({ + allNodes: sanitized.nodes, + allTransformers: sanitized.markdownTransformers, + }), + ) + } else { + sanitized.markdownTransformers.push(transformer) + } + } } sanitized.enabledFeatures.push(feature.key) diff --git a/packages/richtext-lexical/src/lexical/nodes/index.ts b/packages/richtext-lexical/src/lexical/nodes/index.ts index 69dc1b672..b0f69f0d5 100644 --- a/packages/richtext-lexical/src/lexical/nodes/index.ts +++ b/packages/richtext-lexical/src/lexical/nodes/index.ts @@ -1,5 +1,6 @@ import type { Klass, LexicalNode, LexicalNodeReplacement } from 'lexical' +import type { NodeWithHooks } from '../../features/typesServer.js' import type { SanitizedClientEditorConfig, SanitizedServerEditorConfig } from '../config/types.js' export function getEnabledNodes({ @@ -7,7 +8,17 @@ export function getEnabledNodes({ }: { editorConfig: SanitizedClientEditorConfig | SanitizedServerEditorConfig }): Array | LexicalNodeReplacement> { - return editorConfig.features.nodes.map((node) => { + return getEnabledNodesFromServerNodes({ + nodes: editorConfig.features.nodes, + }) +} + +export function getEnabledNodesFromServerNodes({ + nodes, +}: { + nodes: Array | LexicalNodeReplacement> | Array +}): Array | LexicalNodeReplacement> { + return nodes.map((node) => { if ('node' in node) { return node.node } diff --git a/packages/richtext-lexical/src/lexical/ui/ContentEditable.scss b/packages/richtext-lexical/src/lexical/ui/ContentEditable.scss index 0ab945b25..d2fb51f91 100644 --- a/packages/richtext-lexical/src/lexical/ui/ContentEditable.scss +++ b/packages/richtext-lexical/src/lexical/ui/ContentEditable.scss @@ -6,7 +6,6 @@ $lexical-contenteditable-bottom-padding: 8px; @layer payload-default { .ContentEditable__root { border: 0; - font-size: 15px; display: block; position: relative; tab-size: 1; diff --git a/packages/richtext-lexical/src/utilities/createClientFeature.ts b/packages/richtext-lexical/src/utilities/createClientFeature.ts index fe2584019..6e59b3059 100644 --- a/packages/richtext-lexical/src/utilities/createClientFeature.ts +++ b/packages/richtext-lexical/src/utilities/createClientFeature.ts @@ -1,3 +1,5 @@ +import type { RichTextFieldClient } from 'payload' + import type { BaseClientFeatureProps, ClientFeature, @@ -7,14 +9,18 @@ import type { ResolvedClientFeatureMap, } from '../features/typesClient.js' import type { ClientEditorConfig } from '../lexical/config/types.js' +import type { FeatureClientSchemaMap } from '../types.js' export type CreateClientFeatureArgs = | ((props: { + featureClientSchemaMap: FeatureClientSchemaMap /** unSanitizedEditorConfig.features, but mapped */ featureProviderMap: ClientFeatureProviderMap + field?: RichTextFieldClient props: BaseClientFeatureProps // other resolved features, which have been loaded before this one. 
All features declared in 'dependencies' should be available here resolvedFeatures: ResolvedClientFeatureMap + schemaPath: string // unSanitized EditorConfig, unSanitizedEditorConfig: ClientEditorConfig }) => ClientFeature) @@ -33,14 +39,20 @@ export const createClientFeature: < if (typeof feature === 'function') { featureProviderClient.feature = ({ + featureClientSchemaMap, featureProviderMap, + field, resolvedFeatures, + schemaPath, unSanitizedEditorConfig, }) => { const toReturn = feature({ + featureClientSchemaMap, featureProviderMap, + field, props, resolvedFeatures, + schemaPath, unSanitizedEditorConfig, }) diff --git a/packages/richtext-lexical/src/utilities/initLexicalFeatures.ts b/packages/richtext-lexical/src/utilities/initLexicalFeatures.ts index 406f875c1..58e9e2c05 100644 --- a/packages/richtext-lexical/src/utilities/initLexicalFeatures.ts +++ b/packages/richtext-lexical/src/utilities/initLexicalFeatures.ts @@ -1,6 +1,5 @@ import type { I18nClient } from '@payloadcms/translations' -import { getFromImportMap } from '@payloadcms/ui/elements/RenderServerComponent' import { type ClientField, createClientFields, @@ -8,6 +7,7 @@ import { type FieldSchemaMap, type Payload, } from 'payload' +import { getFromImportMap } from 'payload/shared' import type { FeatureProviderProviderClient } from '../features/typesClient.js' import type { SanitizedServerEditorConfig } from '../lexical/config/types.js' @@ -111,6 +111,7 @@ export function initLexicalFeatures(args: Args): { disableAddingID: true, fields: 'fields' in state ? state.fields : [state], i18n: args.i18n, + importMap: args.payload.importMap, }) featureClientSchemaMap[featureKey][key] = clientFields } diff --git a/packages/richtext-lexical/src/utilities/jsx/collectTopLevelJSXInLines.ts b/packages/richtext-lexical/src/utilities/jsx/collectTopLevelJSXInLines.ts new file mode 100644 index 000000000..60cf82361 --- /dev/null +++ b/packages/richtext-lexical/src/utilities/jsx/collectTopLevelJSXInLines.ts @@ -0,0 +1,90 @@ +import { linesFromStartToContentAndPropsString } from '../../features/blocks/server/linesFromMatchToContentAndPropsString.js' +import { createTagRegexes } from '../../features/blocks/server/markdownTransformer.js' + +/** + * Helpful utility for parsing out all matching top-level JSX tags in a given string. + * This will collect them in a list, that contains the content of the JSX tag and the props string. + * + * While this is not used within payload, this can be used for certain payload blocks that need to + * be serializable and deserializable to and from JSX. + * + * @example: + * + * Say you have Steps block that contains a steps array. Its JSX representation may look like this: + * + * + * + *

+ * <Steps>
+ *   <Step title="Step1">
+ *     Step 1
+ *   </Step>
+ *
+ *   <Step title="Step2">
+ *     Step 2
+ *   </Step>
+ * </Steps>
+ *
+ * In this case, the Steps block would have the following content as its children string:
+ *
+ *   <Step title="Step1">
+ *     Step 1
+ *   </Step>
+ *
+ *   <Step title="Step2">
+ *     Step 2
+ *   </Step>
+ *
+ * It could then use this function to collect all the top-level JSX tags (= the steps):
+ *
+ * collectTopLevelJSXInLines(children.split('\n'), 'Step')
+ *
+ * This will return:
+ *
+ * [
+ *   {
+ *     content: 'Step 1',
+ *     propsString: 'title="Step1"',
+ *   },
+ *   {
+ *     content: 'Step 2
', + * propsString: 'title="Step2"', + * }, + * ] + * + * You can then map this data to construct the data for this blocks array field. + */ +export function collectTopLevelJSXInLines( + lines: Array, + jsxToMatch: string, +): { + content: string + propsString: string +}[] { + const finds: { + content: string + propsString: string + }[] = [] + const regex = createTagRegexes(jsxToMatch) + + const linesLength = lines.length + + for (let i = 0; i < linesLength; i++) { + const startMatch = lines[i].match(regex.regExpStart) + if (!startMatch) { + continue // Try next transformer + } + + const { content, endLineIndex, propsString } = linesFromStartToContentAndPropsString({ + isEndOptional: false, + lines, + regexpEndRegex: regex.regExpEnd, + startLineIndex: i, + startMatch, + }) + + finds.push({ + content, + propsString, + }) + + i = endLineIndex + continue + } + + return finds +} diff --git a/packages/richtext-lexical/src/utilities/jsx/extractPropsFromJSXPropsString.ts b/packages/richtext-lexical/src/utilities/jsx/extractPropsFromJSXPropsString.ts new file mode 100644 index 000000000..52d23e977 --- /dev/null +++ b/packages/richtext-lexical/src/utilities/jsx/extractPropsFromJSXPropsString.ts @@ -0,0 +1,138 @@ +/** + * Turns a JSX props string into an object. + * + * @example + * + * Input: type="info" hello={{heyyy: 'test', someNumber: 2}} + * Output: { type: 'info', hello: { heyyy: 'test', someNumber: 2 } } + */ +export function extractPropsFromJSXPropsString({ + propsString, +}: { + propsString: string +}): Record { + const props: Record = {} + let key = '' + let collectingKey = true + + for (let i = 0; i < propsString.length; i++) { + const char = propsString[i] + + if (collectingKey) { + if (char === '=' || char === ' ') { + if (key) { + if (char === ' ') { + props[key] = true + key = '' + } else { + collectingKey = false + } + } + } else { + key += char + } + } else { + const result = handleValue(propsString, i) + props[key] = result.value + i = result.newIndex + key = '' + collectingKey = true + } + } + + if (key) { + props[key] = true + } + + return props +} + +function handleValue(propsString: string, startIndex: number): { newIndex: number; value: any } { + const char = propsString[startIndex] + + if (char === '"') { + return handleQuotedString(propsString, startIndex) + } else if (char === '{') { + return handleObject(propsString, startIndex) + } else if (char === '[') { + return handleArray(propsString, startIndex) + } else { + return handleUnquotedString(propsString, startIndex) + } +} + +function handleArray(propsString: string, startIndex: number): { newIndex: number; value: any } { + let bracketCount = 1 + let value = '' + let i = startIndex + 1 + + while (i < propsString.length && bracketCount > 0) { + if (propsString[i] === '[') { + bracketCount++ + } else if (propsString[i] === ']') { + bracketCount-- + } + if (bracketCount > 0) { + value += propsString[i] + } + i++ + } + + return { newIndex: i, value: JSON.parse(`[${value}]`) } +} + +function handleQuotedString( + propsString: string, + startIndex: number, +): { newIndex: number; value: string } { + let value = '' + let i = startIndex + 1 + while (i < propsString.length && (propsString[i] !== '"' || propsString[i - 1] === '\\')) { + value += propsString[i] + i++ + } + return { newIndex: i, value } +} + +function handleObject(propsString: string, startIndex: number): { newIndex: number; value: any } { + let bracketCount = 1 + let value = '' + let i = startIndex + 1 + + while (i < propsString.length && bracketCount > 0) { + if 
(propsString[i] === '{') { + bracketCount++ + } else if (propsString[i] === '}') { + bracketCount-- + } + if (bracketCount > 0) { + value += propsString[i] + } + i++ + } + + return { newIndex: i, value: parseObject(value) } +} + +function parseObject(objString: string): Record { + if (objString[0] !== '{') { + return JSON.parse(objString) + } + + const result = JSON.parse(objString.replace(/(\w+):/g, '"$1":')) + + return result +} + +function handleUnquotedString( + propsString: string, + startIndex: number, +): { newIndex: number; value: string } { + let value = '' + let i = startIndex + while (i < propsString.length && propsString[i] !== ' ') { + value += propsString[i] + i++ + } + return { newIndex: i - 1, value } +} diff --git a/packages/richtext-lexical/src/utilities/jsx/jsx.spec.ts b/packages/richtext-lexical/src/utilities/jsx/jsx.spec.ts new file mode 100644 index 000000000..decbf1d55 --- /dev/null +++ b/packages/richtext-lexical/src/utilities/jsx/jsx.spec.ts @@ -0,0 +1,136 @@ +import { extractPropsFromJSXPropsString } from './extractPropsFromJSXPropsString.js' +import { propsToJSXString } from './jsx.js' + +describe('jsx', () => { + describe('prop string to object', () => { + const INPUT_AND_OUTPUT = [ + { + input: 'key="value"', + output: { + key: 'value', + }, + }, + { + input: 'key={[1, 2, 3]}', + output: { + key: [1, 2, 3], + }, + }, + { + input: 'key={[1, 2, 3, [1, 2]]}', + output: { + key: [1, 2, 3, [1, 2]], + }, + }, + { + input: 'object={4}', + output: { + object: 4, + }, + }, + { + input: 'object={{"test": 1}}', + output: { + object: { test: 1 }, + }, + }, + { + input: 'object={[1, 2, 3, [1, 2]]}', + output: { + object: [1, 2, 3, [1, 2]], + }, + }, + { + input: 'object={[1, 2]}', + output: { + object: [1, 2], + }, + }, + { + input: 'key="value" object={{key: "value"}}', + inputFromOutput: 'key="value" object={{"key": "value"}}', + output: { + key: 'value', + object: { key: 'value' }, + }, + }, + { + input: 'global packageId="myId" uniqueId="some unique id!" update', + output: { + global: true, + packageId: 'myId', + uniqueId: 'some unique id!', + update: true, + }, + }, + { + input: + 'global key="value" object={{key: "value", something: "test", hello: 1}} packageId="myId" uniqueId="some unique id!" update', + inputFromOutput: + 'global key="value" object={{"hello": 1, "key": "value", "something": "test"}} packageId="myId" uniqueId="some unique id!" update', + output: { + global: true, + key: 'value', + object: { hello: 1, key: 'value', something: 'test' }, + packageId: 'myId', + uniqueId: 'some unique id!', + update: true, + }, + }, + { + input: + 'object={{hello: 1, key: "value", nested: { key: "value" }, something: "test", test: [1, 2, 3]}}', + inputFromOutput: + 'object={{"hello": 1, "key": "value", "nested": {"key": "value"}, "something": "test", "test": [1, 2, 3]}}', + output: { + object: { + hello: 1, + key: 'value', + nested: { key: 'value' }, + something: 'test', + test: [1, 2, 3], + }, + }, + }, + { + input: + 'global key="value" object={{hello: 1, key: "value", nested: { key: "value" }, something: "test", test: [1, 2, 3]}} packageId="myId" uniqueId="some unique id!" update', + inputFromOutput: + 'global key="value" object={{"hello": 1, "key": "value", "nested": { "key": "value" }, "something": "test", "test": [1, 2, 3]}} packageId="myId" uniqueId="some unique id!" 
update', + output: { + global: true, + key: 'value', + object: { + hello: 1, + key: 'value', + nested: { key: 'value' }, + something: 'test', + test: [1, 2, 3], + }, + packageId: 'myId', + uniqueId: 'some unique id!', + update: true, + }, + }, + ] + + for (const { input, output } of INPUT_AND_OUTPUT) { + it(`can correctly convert to object: "${input.replace(/\n/g, '\\n')}"`, () => { + const propsObject = extractPropsFromJSXPropsString({ propsString: input }) + console.log({ output, propsObject }) + + expect(propsObject).toStrictEqual(output) + }) + } + + for (const { input: originalInput, inputFromOutput, output } of INPUT_AND_OUTPUT) { + const input = inputFromOutput || originalInput + it(`can correctly convert from object: "${input.replace(/\n/g, '\\n')}"`, () => { + const propsString = propsToJSXString({ props: output }) + console.log({ input, propsString }) + + expect(propsString.replaceAll(' ', '')).toBe(input.replaceAll(' ', '')) + }) + } + }) +}) diff --git a/packages/richtext-lexical/src/utilities/jsx/jsx.ts b/packages/richtext-lexical/src/utilities/jsx/jsx.ts new file mode 100644 index 000000000..807f50c07 --- /dev/null +++ b/packages/richtext-lexical/src/utilities/jsx/jsx.ts @@ -0,0 +1,117 @@ +/** + * Converts an object of props to a JSX props string. + * + * This function is the inverse of `extractPropsFromJSXPropsString`. + */ +export function propsToJSXString({ props }: { props: Record }): string { + const propsArray: string[] = [] + + for (const [key, value] of Object.entries(props)) { + if (typeof value === 'string') { + // Handle simple string props + propsArray.push(`${key}="${escapeQuotes(value)}"`) + } else if (typeof value === 'number') { + // Handle number and boolean props + propsArray.push(`${key}={${value}}`) + } else if (typeof value === 'boolean') { + if (value) { + propsArray.push(`${key}`) + } + } else if (value !== null && typeof value === 'object') { + if (Array.isArray(value)) { + // Handle array props + propsArray.push(`${key}={[${value.map((v) => JSON.stringify(v, replacer)).join(', ')}]}`) + } else { + // Handle complex object props + propsArray.push(`${key}={${JSON.stringify(value, replacer)}}`) + } + } + } + + return propsArray.join(' ') +} + +// Helper function to escape quotes in string values +function escapeQuotes(str: string): string { + return str.replace(/"/g, '"') +} + +// Custom replacer function for JSON.stringify to handle single quotes +function replacer(key: string, value: any): any { + if (typeof value === 'string') { + return value.replace(/'/g, "\\'") + } + return value +} + +/** + * Converts a frontmatter string to an object. + */ +export function frontmatterToObject(frontmatter: string): Record { + const lines = frontmatter.trim().split('\n') + const result = {} + let inFrontmatter = false + + for (const line of lines) { + if (line.trim() === '---') { + inFrontmatter = !inFrontmatter + continue + } + + if (inFrontmatter) { + const [key, ...valueParts] = line.split(':') + const value = valueParts.join(':').trim() + + result[key.trim()] = value + } + } + + return result +} + +/** + * Converts an object to a frontmatter string. 
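+ *
+ * @example
+ * // e.g. an object such as { title: 'Hello', tags: ['a', 'b'] } produces:
+ * // ---
+ * // title: Hello
+ * // tags: a, b
+ * // ---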
+ */ +export function objectToFrontmatter(obj: Record): null | string { + if (!Object.entries(obj)?.length) { + return null + } + let frontmatter = '---\n' + + for (const [key, value] of Object.entries(obj)) { + if (Array.isArray(value)) { + frontmatter += `${key}: ${value.join(', ')}\n` + } else { + frontmatter += `${key}: ${value}\n` + } + } + + frontmatter += '---\n' + return frontmatter +} + +/** + * Takes an MDX content string and extracts the frontmatter and content. + * + * The resulting object contains the mdx content without the frontmatter and the frontmatter itself. + */ +export function extractFrontmatter(mdxContent: string) { + // eslint-disable-next-line regexp/no-super-linear-backtracking + const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/ + const match = mdxContent.match(frontmatterRegex) + + if (match) { + const frontmatter = match[0] + const contentWithoutFrontmatter = mdxContent.slice(frontmatter.length).trim() + return { + content: contentWithoutFrontmatter, + frontmatter: frontmatter.trim(), + } + } else { + // If no frontmatter is found, return the original content + return { + content: mdxContent.trim(), + frontmatter: '', + } + } +} diff --git a/packages/richtext-lexical/src/utilities/jsx/lexicalMarkdownCopy.ts b/packages/richtext-lexical/src/utilities/jsx/lexicalMarkdownCopy.ts new file mode 100644 index 000000000..e4d0e585e --- /dev/null +++ b/packages/richtext-lexical/src/utilities/jsx/lexicalMarkdownCopy.ts @@ -0,0 +1,109 @@ +/* eslint-disable regexp/no-unused-capturing-group */ + +import type { + MultilineElementTransformer as _MultilineElementTransformer, + Transformer, +} from '@lexical/markdown' +import type { ElementNode } from 'lexical' + +import { + $convertFromMarkdownString as $originalConvertFromMarkdownString, + TRANSFORMERS, +} from '@lexical/markdown' + +const EMPTY_OR_WHITESPACE_ONLY = /^[\t ]*$/ +const ORDERED_LIST_REGEX = /^(\s*)(\d+)\.\s/ +const UNORDERED_LIST_REGEX = /^(\s*)[-*+]\s/ +const CHECK_LIST_REGEX = /^(\s*)(?:-\s)?\s?(\[(\s|x)?\])\s/i +const HEADING_REGEX = /^(#{1,6})\s/ +const QUOTE_REGEX = /^>\s/ +const CODE_START_REGEX = /^[ \t]*```(\w+)?/ +const CODE_END_REGEX = /[ \t]*```$/ +const CODE_SINGLE_LINE_REGEX = /^[ \t]*```[^`]+(?:(?:`{1,2}|`{4,})[^`]+)*```(?:[^`]|$)/ +const TABLE_ROW_REG_EXP = /^\|(.+)\|\s?$/ +const TABLE_ROW_DIVIDER_REG_EXP = /^(\| ?:?-*:? ?)+\|\s?$/ +const TAG_START_REGEX = /^[ \t]*<[a-z_][\w-]*(?:\s[^<>]*)?\/?>/i +const TAG_END_REGEX = /^[ \t]*<\/[a-z_][\w-]*\s*>/i + +export function normalizeMarkdown(input: string, shouldMergeAdjacentLines = false): string { + const lines = input.split('\n') + let inCodeBlock = false + const sanitizedLines: string[] = [] + + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + const lastLine = sanitizedLines[sanitizedLines.length - 1] + + // Code blocks of ```single line``` don't toggle the inCodeBlock flag + if (CODE_SINGLE_LINE_REGEX.test(line)) { + sanitizedLines.push(line) + continue + } + + if ( + (CODE_START_REGEX.test(line) && !inCodeBlock) || + (CODE_END_REGEX.test(line) && inCodeBlock) + ) { + inCodeBlock = !inCodeBlock + } + + // Detect the start or end of a code block + if (CODE_START_REGEX.test(line) || CODE_END_REGEX.test(line)) { + sanitizedLines.push(line) + continue + } + + // If we are inside a code block, keep the line unchanged + if (inCodeBlock) { + sanitizedLines.push(line) + continue + } + + // In markdown the concept of "empty paragraphs" does not exist. + // Blocks must be separated by an empty line. Non-empty adjacent lines must be merged. 
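+ // e.g. two adjacent plain-text lines such as "foo" and "bar" are merged into "foo bar". A line is
+ // kept separate when either line is empty, a heading or a JSX tag, or when the line itself starts a
+ // quote, list item or table row.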
+ if ( + EMPTY_OR_WHITESPACE_ONLY.test(line) || + EMPTY_OR_WHITESPACE_ONLY.test(lastLine) || + !lastLine || + HEADING_REGEX.test(lastLine) || + HEADING_REGEX.test(line) || + QUOTE_REGEX.test(line) || + ORDERED_LIST_REGEX.test(line) || + UNORDERED_LIST_REGEX.test(line) || + CHECK_LIST_REGEX.test(line) || + TABLE_ROW_REG_EXP.test(line) || + TABLE_ROW_DIVIDER_REG_EXP.test(line) || + !shouldMergeAdjacentLines || + TAG_START_REGEX.test(line) || + TAG_END_REGEX.test(line) || + TAG_START_REGEX.test(lastLine) || + TAG_END_REGEX.test(lastLine) + ) { + sanitizedLines.push(line) + } else { + sanitizedLines[sanitizedLines.length - 1] = lastLine + ' ' + line.trim() + } + } + + return sanitizedLines.join('\n') +} + +/** + * Renders markdown from a string. The selection is moved to the start after the operation. + * + * @param {boolean} [shouldPreserveNewLines] By setting this to true, new lines will be preserved between conversions + * @param {boolean} [shouldMergeAdjacentLines] By setting this to true, adjacent non empty lines will be merged according to commonmark spec: https://spec.commonmark.org/0.24/#example-177. Not applicable if shouldPreserveNewLines = true. + */ +export function $convertFromMarkdownString( + markdown: string, + transformers: Array = TRANSFORMERS, + node?: ElementNode, + shouldPreserveNewLines = false, + shouldMergeAdjacentLines = true, // Changed from false to true here +): void { + const sanitizedMarkdown = shouldPreserveNewLines + ? markdown + : normalizeMarkdown(markdown, shouldMergeAdjacentLines) + + return $originalConvertFromMarkdownString(sanitizedMarkdown, transformers, node) // shouldPreserveNewLines to true, as we do our own, modified markdown normalization here. +} diff --git a/packages/richtext-lexical/tsconfig.json b/packages/richtext-lexical/tsconfig.json index 79ab3f11a..9ce206311 100644 --- a/packages/richtext-lexical/tsconfig.json +++ b/packages/richtext-lexical/tsconfig.json @@ -16,10 +16,6 @@ "test", "node_modules", "eslint.config.js", - "src/**/*.spec.js", - "src/**/*.spec.jsx", - "src/**/*.spec.ts", - "src/**/*.spec.tsx" ], "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.d.ts", "src/**/*.json"], "references": [ diff --git a/packages/richtext-slate/src/field/rscEntry.tsx b/packages/richtext-slate/src/field/rscEntry.tsx index 850d97311..f7f53eaf5 100644 --- a/packages/richtext-slate/src/field/rscEntry.tsx +++ b/packages/richtext-slate/src/field/rscEntry.tsx @@ -141,6 +141,7 @@ export const RscEntrySlateField: React.FC< defaultIDType: payload.config.db.defaultIDType, fields: args.admin?.link?.fields as Field[], i18n, + importMap: payload.importMap, }) componentMap.set(linkFieldsSchemaPath, clientFields) @@ -172,6 +173,7 @@ export const RscEntrySlateField: React.FC< defaultIDType: payload.config.db.defaultIDType, fields: args?.admin?.upload?.collections[collection.slug]?.fields, i18n, + importMap: payload.importMap, }) componentMap.set(`${uploadFieldsSchemaPath}.${collection.slug}`, clientFields) diff --git a/packages/ui/src/elements/RenderServerComponent/index.tsx b/packages/ui/src/elements/RenderServerComponent/index.tsx index 7f08d07db..a7978fd09 100644 --- a/packages/ui/src/elements/RenderServerComponent/index.tsx +++ b/packages/ui/src/elements/RenderServerComponent/index.tsx @@ -1,44 +1,10 @@ import type { ImportMap, PayloadComponent } from 'payload' -import { - isPlainObject, - isReactServerComponentOrFunction, - parsePayloadComponent, -} from 'payload/shared' +import { getFromImportMap, isPlainObject, isReactServerComponentOrFunction } from 
'payload/shared' import React from 'react' import { removeUndefined } from '../../utilities/removeUndefined.js' -export const getFromImportMap = (args: { - importMap: ImportMap - PayloadComponent: PayloadComponent - schemaPath?: string - silent?: boolean -}): TOutput => { - const { importMap, PayloadComponent, schemaPath, silent } = args - - const { exportName, path } = parsePayloadComponent(PayloadComponent) - - const key = path + '#' + exportName - - const importMapEntry = importMap[key] - - if (!importMapEntry && !silent) { - // eslint-disable-next-line no-console - console.error( - `getFromImportMap: PayloadComponent not found in importMap`, - { - key, - PayloadComponent, - schemaPath, - }, - 'You may need to run the `payload generate:importmap` command to generate the importMap ahead of runtime.', - ) - } - - return importMapEntry -} - /** * Can be used to render both MappedComponents and React Components. */ diff --git a/packages/ui/src/forms/fieldSchemasToFormState/renderField.tsx b/packages/ui/src/forms/fieldSchemasToFormState/renderField.tsx index 1363afff7..d1d5918c6 100644 --- a/packages/ui/src/forms/fieldSchemasToFormState/renderField.tsx +++ b/packages/ui/src/forms/fieldSchemasToFormState/renderField.tsx @@ -51,6 +51,7 @@ export const renderField: RenderFieldMethod = ({ defaultIDType: req.payload.config.db.defaultIDType, field: fieldConfig, i18n: req.i18n, + importMap: req.payload.importMap, }) const permissions = diff --git a/packages/ui/src/utilities/buildFormState.ts b/packages/ui/src/utilities/buildFormState.ts index 0207682a2..0726767ed 100644 --- a/packages/ui/src/utilities/buildFormState.ts +++ b/packages/ui/src/utilities/buildFormState.ts @@ -9,7 +9,7 @@ import type { SanitizedConfig, } from 'payload' -import { createClientConfig, formatErrors } from 'payload' +import { formatErrors } from 'payload' import { reduceFieldsToValues } from 'payload/shared' import { fieldSchemasToFormState } from '../forms/fieldSchemasToFormState/index.js' @@ -60,24 +60,6 @@ export const getFieldSchemaMap = (args: { return entityFieldMap } -export const getClientConfig = (args: { - config: SanitizedConfig - i18n: I18nClient -}): ClientConfig => { - const { config, i18n } = args - - if (cachedClientConfig && process.env.NODE_ENV !== 'development') { - return cachedClientConfig - } - - cachedClientConfig = createClientConfig({ - config, - i18n, - }) - - return cachedClientConfig -} - type BuildFormStateSuccessResult = { clientConfig?: ClientConfig errors?: never diff --git a/packages/ui/src/utilities/buildTableState.ts b/packages/ui/src/utilities/buildTableState.ts index 050100f3a..c299b4c67 100644 --- a/packages/ui/src/utilities/buildTableState.ts +++ b/packages/ui/src/utilities/buildTableState.ts @@ -4,6 +4,7 @@ import type { ClientCollectionConfig, ClientConfig, ErrorResult, + ImportMap, PaginatedDocs, SanitizedCollectionConfig, SanitizedConfig, @@ -26,8 +27,9 @@ if (!cachedClientConfig) { export const getClientConfig = (args: { config: SanitizedConfig i18n: I18nClient + importMap: ImportMap }): ClientConfig => { - const { config, i18n } = args + const { config, i18n, importMap } = args if (cachedClientConfig && process.env.NODE_ENV !== 'development') { return cachedClientConfig @@ -36,6 +38,7 @@ export const getClientConfig = (args: { cachedClientConfig = createClientConfig({ config, i18n, + importMap, }) return cachedClientConfig @@ -146,6 +149,7 @@ export const buildTableState = async ( const clientConfig = getClientConfig({ config, i18n, + importMap: payload.importMap, }) let 
collectionConfig: SanitizedCollectionConfig diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b10525969..0de439f24 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1228,6 +1228,9 @@ importers: '@types/uuid': specifier: 10.0.0 version: 10.0.0 + acorn: + specifier: 8.12.1 + version: 8.12.1 bson-objectid: specifier: 2.0.4 version: 2.0.4 @@ -1240,6 +1243,15 @@ importers: lexical: specifier: 0.20.0 version: 0.20.0 + mdast-util-from-markdown: + specifier: 2.0.2 + version: 2.0.2 + mdast-util-mdx-jsx: + specifier: 3.1.3 + version: 3.1.3 + micromark-extension-mdx-jsx: + specifier: 3.0.1 + version: 3.0.1 react: specifier: 19.0.0-rc-65a56d0e-20241020 version: 19.0.0-rc-65a56d0e-20241020 @@ -4726,6 +4738,9 @@ packages: '@tybys/wasm-util@0.9.0': resolution: {integrity: sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==} + '@types/acorn@4.0.6': + resolution: {integrity: sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -4753,6 +4768,9 @@ packages: '@types/connect@3.4.36': resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==} + '@types/debug@4.1.12': + resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + '@types/escape-html@1.0.4': resolution: {integrity: sha512-qZ72SFTgUAZ5a7Tj6kf2SHLetiH5S6f8G5frB2SPQ3EyF02kxdyBFf4Tz4banE3xCgGnKgWLt//a6VuYHKYJTg==} @@ -4768,6 +4786,9 @@ packages: '@types/esprima@4.0.6': resolution: {integrity: sha512-lIk+kSt9lGv5hxK6aZNjiUEGZqKmOTpmg0tKiJQI+Ow98fLillxsiZNik5+RcP7mXL929KiTH/D9jGtpDlMbVw==} + '@types/estree-jsx@1.0.5': + resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} + '@types/estree@1.0.6': resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} @@ -4786,6 +4807,9 @@ packages: '@types/graceful-fs@4.1.9': resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + '@types/http-cache-semantics@4.0.4': resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==} @@ -4825,6 +4849,9 @@ packages: '@types/lodash@4.17.13': resolution: {integrity: sha512-lfx+dftrEZcdBPczf9d0Qv0x+j/rfNCMuC6OcfXmO8gkfeNAY88PgKUbvG56whcN23gc27yenwF6oJZXGFpYxg==} + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + '@types/minimatch@5.1.2': resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} @@ -4837,6 +4864,9 @@ packages: '@types/mongoose-aggregate-paginate-v2@1.0.12': resolution: {integrity: sha512-wL8pgJQxqJagv5f5mR7aI8WgUu22nS6rVLoJm71W2Uu+iKfS8jgph2rRLfXrjo+dFt1s7ik5Zl+uGZ4f5GM6Vw==} + '@types/ms@0.7.34': + resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} + '@types/mysql@2.15.26': resolution: {integrity: 
sha512-DSLCOXhkvfS5WNNPbfn2KdICAmk8lLc+/PNvnPnF7gOdMZCxopXduqv0OQ13y/yA/zXTSikZZqVgybUxOEg6YQ==} @@ -4900,6 +4930,12 @@ packages: '@types/tough-cookie@4.0.5': resolution: {integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==} + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + '@types/uuid@10.0.0': resolution: {integrity: sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==} @@ -5140,6 +5176,11 @@ packages: resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} engines: {node: '>=0.4.0'} + acorn@8.12.1: + resolution: {integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==} + engines: {node: '>=0.4.0'} + hasBin: true + acorn@8.14.0: resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==} engines: {node: '>=0.4.0'} @@ -5512,6 +5553,9 @@ packages: caniuse-lite@1.0.30001678: resolution: {integrity: sha512-RR+4U/05gNtps58PEBDZcPWTgEO2MBeoPZ96aQcjmfkBWRIDfN451fW2qyDA9/+HohLLIL5GqiMwA+IB1pWarw==} + ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + chalk@3.0.0: resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} engines: {node: '>=8'} @@ -5532,6 +5576,18 @@ packages: resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} engines: {node: '>=10'} + character-entities-html4@2.1.0: + resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + character-reference-invalid@2.0.1: + resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + charenc@0.0.2: resolution: {integrity: sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==} @@ -5823,6 +5879,9 @@ packages: decimal.js@10.4.3: resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==} + decode-named-character-reference@1.0.2: + resolution: {integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==} + decompress-response@6.0.0: resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} engines: {node: '>=10'} @@ -5912,6 +5971,9 @@ packages: resolution: {integrity: sha512-qE3Veg1YXzGHQhlA6jzebZN2qVf6NX+A7m7qlhCGG30dJixrAQhYOsJjsnBjJkCSmuOPpCk30145fr8FV0bzog==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + diff-sequences@29.6.3: 
resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -6399,6 +6461,12 @@ packages: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} + estree-util-is-identifier-name@3.0.0: + resolution: {integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==} + + estree-util-visit@2.0.0: + resolution: {integrity: sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==} + estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} @@ -7014,6 +7082,12 @@ packages: resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} engines: {node: '>= 12'} + is-alphabetical@2.0.1: + resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} + + is-alphanumerical@2.0.1: + resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + is-array-buffer@3.0.4: resolution: {integrity: sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==} engines: {node: '>= 0.4'} @@ -7058,6 +7132,9 @@ packages: resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} engines: {node: '>= 0.4'} + is-decimal@2.0.1: + resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + is-docker@3.0.0: resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -7091,6 +7168,9 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} + is-hexadecimal@2.0.1: + resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} + is-hotkey@0.1.8: resolution: {integrity: sha512-qs3NZ1INIS+H+yeo7cD9pDfwYV/jqRh1JG9S9zYrNudkoUQg7OL7ziXqRKu+InFjUIDoP2o6HIkLYMh1pcWgyQ==} @@ -7601,6 +7681,9 @@ packages: resolution: {integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==} engines: {node: '>=18'} + longest-streak@3.1.0: + resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true @@ -7646,6 +7729,21 @@ packages: md5@2.3.0: resolution: {integrity: sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==} + mdast-util-from-markdown@2.0.2: + resolution: {integrity: sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==} + + mdast-util-mdx-jsx@3.1.3: + resolution: {integrity: sha512-bfOjvNt+1AcbPLTFMFWY149nJz0OjmewJs3LQQ5pIyVGxP4CdOqNVJL6kTaM5c68p8q82Xv3nCyFfUnuEcH3UQ==} + + mdast-util-phrasing@4.1.0: + resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + + 
mdast-util-to-markdown@2.1.2: + resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} + + mdast-util-to-string@4.0.0: + resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + memoize-one@6.0.0: resolution: {integrity: sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==} @@ -7662,6 +7760,78 @@ packages: merge@2.1.1: resolution: {integrity: sha512-jz+Cfrg9GWOZbQAnDQ4hlVnQky+341Yk5ru8bZSe6sIDTCIg8n9i/u7hSQGSVOF3C7lH6mGtqjkiT9G4wFLL0w==} + micromark-core-commonmark@2.0.2: + resolution: {integrity: sha512-FKjQKbxd1cibWMM1P9N+H8TwlgGgSkWZMmfuVucLCHaYqeSvJ0hFeHsIa65pA2nYbes0f8LDHPMrd9X7Ujxg9w==} + + micromark-extension-mdx-jsx@3.0.1: + resolution: {integrity: sha512-vNuFb9czP8QCtAQcEJn0UJQJZA8Dk6DXKBqx+bg/w0WGuSxDxNr7hErW89tHUY31dUW4NqEOWwmEUNhjTFmHkg==} + + micromark-factory-destination@2.0.1: + resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==} + + micromark-factory-label@2.0.1: + resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==} + + micromark-factory-mdx-expression@2.0.2: + resolution: {integrity: sha512-5E5I2pFzJyg2CtemqAbcyCktpHXuJbABnsb32wX2U8IQKhhVFBqkcZR5LRm1WVoFqa4kTueZK4abep7wdo9nrw==} + + micromark-factory-space@2.0.1: + resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==} + + micromark-factory-title@2.0.1: + resolution: {integrity: sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==} + + micromark-factory-whitespace@2.0.1: + resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==} + + micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} + + micromark-util-chunked@2.0.1: + resolution: {integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==} + + micromark-util-classify-character@2.0.1: + resolution: {integrity: sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==} + + micromark-util-combine-extensions@2.0.1: + resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==} + + micromark-util-decode-numeric-character-reference@2.0.2: + resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==} + + micromark-util-decode-string@2.0.1: + resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==} + + micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} + + micromark-util-events-to-acorn@2.0.2: + resolution: {integrity: sha512-Fk+xmBrOv9QZnEDguL9OI9/NQQp6Hz4FuQ4YmCb/5V7+9eAh1s6AYSvL20kHkD67YIg7EpE54TiSlcsf3vyZgA==} + + micromark-util-html-tag-name@2.0.1: + resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==} + + micromark-util-normalize-identifier@2.0.1: + resolution: {integrity: 
sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==} + + micromark-util-resolve-all@2.0.1: + resolution: {integrity: sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==} + + micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} + + micromark-util-subtokenize@2.0.2: + resolution: {integrity: sha512-xKxhkB62vwHUuuxHe9Xqty3UaAsizV2YKq5OV344u3hFBbf8zIYrhYOWhAQb94MtMPkjTOzzjJ/hid9/dR5vFA==} + + micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} + + micromark-util-types@2.0.1: + resolution: {integrity: sha512-534m2WhVTddrcKVepwmVEVnUAmtrx9bfIjNoQHRqfnvdaHQiFytEhJoTgpWJvDEXCO5gLTQh3wYC1PgOJA4NSQ==} + + micromark@4.0.1: + resolution: {integrity: sha512-eBPdkcoCNvYcxQOAKAlceo5SNdzZWfF+FcSupREAzdAh9rRmE239CEQAiTwIgblwnoM8zzj35sZ5ZwvSEOF6Kw==} + micromatch@4.0.8: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} @@ -8110,6 +8280,9 @@ packages: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} + parse-entities@4.0.1: + resolution: {integrity: sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==} + parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} @@ -9159,6 +9332,9 @@ packages: string_decoder@1.3.0: resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -9594,6 +9770,21 @@ packages: resolution: {integrity: sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==} engines: {node: '>=8'} + unist-util-is@6.0.0: + resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} + + unist-util-position-from-estree@2.0.0: + resolution: {integrity: sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.1: + resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==} + + unist-util-visit@5.0.0: + resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==} + universalify@0.2.0: resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} engines: {node: '>= 4.0.0'} @@ -9690,6 +9881,9 @@ packages: varint@6.0.0: resolution: {integrity: sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==} + vfile-message@4.0.2: + resolution: {integrity: 
sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} + vue@3.5.12: resolution: {integrity: sha512-CLVZtXtn2ItBIi/zHZ0Sg1Xkb7+PU32bJJ8Bmy7ts3jxXTcbfsEfBivFYYWz1Hur+lalqGAh65Coin0r+HRUfg==} peerDependencies: @@ -9918,6 +10112,9 @@ packages: zod@3.23.8: resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + snapshots: '@ampproject/remapping@2.3.0': @@ -13836,6 +14033,10 @@ snapshots: tslib: 2.8.1 optional: true + '@types/acorn@4.0.6': + dependencies: + '@types/estree': 1.0.6 + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.26.2 @@ -13876,6 +14077,10 @@ snapshots: dependencies: '@types/node': 22.5.4 + '@types/debug@4.1.12': + dependencies: + '@types/ms': 0.7.34 + '@types/escape-html@1.0.4': {} '@types/eslint-scope@3.7.7': @@ -13896,6 +14101,10 @@ snapshots: dependencies: '@types/estree': 1.0.6 + '@types/estree-jsx@1.0.5': + dependencies: + '@types/estree': 1.0.6 + '@types/estree@1.0.6': {} '@types/find-node-modules@2.1.2': {} @@ -13918,6 +14127,10 @@ snapshots: dependencies: '@types/node': 22.5.4 + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + '@types/http-cache-semantics@4.0.4': {} '@types/is-hotkey@0.1.10': {} @@ -13964,6 +14177,10 @@ snapshots: '@types/lodash@4.17.13': {} + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + '@types/minimatch@5.1.2': {} '@types/minimist@1.2.2': {} @@ -13984,6 +14201,8 @@ snapshots: - socks - supports-color + '@types/ms@0.7.34': {} + '@types/mysql@2.15.26': dependencies: '@types/node': 22.5.4 @@ -14068,6 +14287,10 @@ snapshots: '@types/tough-cookie@4.0.5': {} + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + '@types/uuid@10.0.0': {} '@types/webidl-conversions@7.0.3': {} @@ -14392,12 +14615,12 @@ snapshots: acorn-globals@7.0.1: dependencies: - acorn: 8.14.0 + acorn: 8.12.1 acorn-walk: 8.3.4 - acorn-import-attributes@1.9.5(acorn@8.14.0): + acorn-import-attributes@1.9.5(acorn@8.12.1): dependencies: - acorn: 8.14.0 + acorn: 8.12.1 acorn-jsx@5.3.2(acorn@8.14.0): dependencies: @@ -14405,7 +14628,9 @@ snapshots: acorn-walk@8.3.4: dependencies: - acorn: 8.14.0 + acorn: 8.12.1 + + acorn@8.12.1: {} acorn@8.14.0: {} @@ -14853,6 +15078,8 @@ snapshots: caniuse-lite@1.0.30001678: {} + ccount@2.0.1: {} + chalk@3.0.0: dependencies: ansi-styles: 4.3.0 @@ -14886,6 +15113,14 @@ snapshots: char-regex@1.0.2: {} + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + character-reference-invalid@2.0.1: {} + charenc@0.0.2: {} chokidar@3.6.0: @@ -15163,6 +15398,10 @@ snapshots: decimal.js@10.4.3: {} + decode-named-character-reference@1.0.2: + dependencies: + character-entities: 2.0.2 + decompress-response@6.0.0: dependencies: mimic-response: 3.1.0 @@ -15231,6 +15470,10 @@ snapshots: detect-newline@4.0.1: {} + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + diff-sequences@29.6.3: {} diff@5.2.0: {} @@ -15858,6 +16101,13 @@ snapshots: estraverse@5.3.0: {} + estree-util-is-identifier-name@3.0.0: {} + + estree-util-visit@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/unist': 3.0.3 + estree-walker@2.0.2: {} esutils@2.0.3: {} @@ -16503,8 +16753,8 @@ snapshots: import-in-the-middle@1.11.2: dependencies: - acorn: 8.14.0 - acorn-import-attributes: 1.9.5(acorn@8.14.0) + acorn: 8.12.1 + acorn-import-attributes: 
1.9.5(acorn@8.12.1) cjs-module-lexer: 1.4.1 module-details-from-path: 1.0.3 @@ -16543,6 +16793,13 @@ snapshots: jsbn: 1.1.0 sprintf-js: 1.1.3 + is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + is-array-buffer@3.0.4: dependencies: call-bind: 1.0.7 @@ -16583,6 +16840,8 @@ snapshots: dependencies: has-tostringtag: 1.0.2 + is-decimal@2.0.1: {} + is-docker@3.0.0: {} is-extendable@0.1.1: {} @@ -16603,6 +16862,8 @@ snapshots: dependencies: is-extglob: 2.1.1 + is-hexadecimal@2.0.1: {} + is-hotkey@0.1.8: {} is-hotkey@0.2.0: {} @@ -17124,7 +17385,7 @@ snapshots: jsdom@20.0.3(bufferutil@4.0.8): dependencies: abab: 2.0.6 - acorn: 8.14.0 + acorn: 8.12.1 acorn-globals: 7.0.1 cssom: 0.5.0 cssstyle: 2.3.0 @@ -17326,6 +17587,8 @@ snapshots: strip-ansi: 7.1.0 wrap-ansi: 9.0.0 + longest-streak@3.1.0: {} + loose-envify@1.4.0: dependencies: js-tokens: 4.0.0 @@ -17376,6 +17639,61 @@ snapshots: crypt: 0.0.2 is-buffer: 1.1.6 + mdast-util-from-markdown@2.0.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-decode-string: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.1.3: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.1 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.2 + transitivePeerDependencies: + - supports-color + + mdast-util-phrasing@4.1.0: + dependencies: + '@types/mdast': 4.0.4 + unist-util-is: 6.0.0 + + mdast-util-to-markdown@2.1.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-classify-character: 2.0.1 + micromark-util-decode-string: 2.0.1 + unist-util-visit: 5.0.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + '@types/mdast': 4.0.4 + memoize-one@6.0.0: {} memory-pager@1.5.0: {} @@ -17386,6 +17704,176 @@ snapshots: merge@2.1.1: {} + micromark-core-commonmark@2.0.2: + dependencies: + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + micromark-factory-destination: 2.0.1 + micromark-factory-label: 2.0.1 + micromark-factory-space: 2.0.1 + micromark-factory-title: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-html-tag-name: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-subtokenize: 2.0.2 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-extension-mdx-jsx@3.0.1: + dependencies: + '@types/acorn': 4.0.6 + '@types/estree': 1.0.6 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + micromark-factory-mdx-expression: 2.0.2 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + vfile-message: 4.0.2 + + micromark-factory-destination@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + 
micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-factory-label@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-factory-mdx-expression@2.0.2: + dependencies: + '@types/estree': 1.0.6 + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.2 + + micromark-factory-space@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-types: 2.0.1 + + micromark-factory-title@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-factory-whitespace@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-util-character@2.1.1: + dependencies: + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-util-chunked@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-classify-character@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-util-combine-extensions@2.0.1: + dependencies: + micromark-util-chunked: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-util-decode-numeric-character-reference@2.0.2: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-decode-string@2.0.1: + dependencies: + decode-named-character-reference: 1.0.2 + micromark-util-character: 2.1.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-symbol: 2.0.1 + + micromark-util-encode@2.0.1: {} + + micromark-util-events-to-acorn@2.0.2: + dependencies: + '@types/acorn': 4.0.6 + '@types/estree': 1.0.6 + '@types/unist': 3.0.3 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + vfile-message: 4.0.2 + + micromark-util-html-tag-name@2.0.1: {} + + micromark-util-normalize-identifier@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-resolve-all@2.0.1: + dependencies: + micromark-util-types: 2.0.1 + + micromark-util-sanitize-uri@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 + + micromark-util-subtokenize@2.0.2: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + + micromark-util-symbol@2.0.1: {} + + micromark-util-types@2.0.1: {} + + micromark@4.0.1: + dependencies: + '@types/debug': 4.1.12 + debug: 4.3.7 + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.2 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-combine-extensions: 2.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-encode: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-subtokenize: 2.0.2 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.1 + transitivePeerDependencies: + - supports-color + micromatch@4.0.8: dependencies: braces: 3.0.3 @@ -17465,7 +17953,7 @@ snapshots: mlly@1.7.2: dependencies: - acorn: 8.14.0 + acorn: 8.12.1 pathe: 1.1.2 
pkg-types: 1.2.1 ufo: 1.5.4 @@ -17843,6 +18331,17 @@ snapshots: dependencies: callsites: 3.1.0 + parse-entities@4.0.1: + dependencies: + '@types/unist': 2.0.11 + character-entities: 2.0.2 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.0.2 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + parse-json@5.2.0: dependencies: '@babel/code-frame': 7.26.2 @@ -18927,6 +19426,11 @@ snapshots: dependencies: safe-buffer: 5.2.1 + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -19088,7 +19592,7 @@ snapshots: terser@5.36.0: dependencies: '@jridgewell/source-map': 0.3.6 - acorn: 8.14.0 + acorn: 8.12.1 commander: 2.20.3 source-map-support: 0.5.21 @@ -19354,13 +19858,36 @@ snapshots: dependencies: crypto-random-string: 2.0.0 + unist-util-is@6.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position-from-estree@2.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-stringify-position@4.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-visit-parents@6.0.1: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 + + unist-util-visit@5.0.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 + unist-util-visit-parents: 6.0.1 + universalify@0.2.0: {} universalify@2.0.1: {} unplugin@1.0.1: dependencies: - acorn: 8.14.0 + acorn: 8.12.1 chokidar: 3.6.0 webpack-sources: 3.2.3 webpack-virtual-modules: 0.5.0 @@ -19426,6 +19953,11 @@ snapshots: varint@6.0.0: {} + vfile-message@4.0.2: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + vue@3.5.12(typescript@5.6.3): dependencies: '@vue/compiler-dom': 3.5.12 @@ -19458,7 +19990,7 @@ snapshots: webpack-bundle-analyzer@4.10.1(bufferutil@4.0.8): dependencies: '@discoveryjs/json-ext': 0.5.7 - acorn: 8.14.0 + acorn: 8.12.1 acorn-walk: 8.3.4 commander: 7.2.0 debounce: 1.2.1 @@ -19662,3 +20194,5 @@ snapshots: zod: 3.23.8 zod@3.23.8: {} + + zwitch@2.0.4: {} diff --git a/test/fields/payload-types.ts b/test/fields/payload-types.ts index a435feb4f..820ee88ee 100644 --- a/test/fields/payload-types.ts +++ b/test/fields/payload-types.ts @@ -3400,6 +3400,6 @@ export interface Auth { declare module 'payload' { - // @ts-ignore + // @ts-ignore export interface GeneratedTypes extends Config {} -} \ No newline at end of file +} diff --git a/test/helpers/initPayloadInt.ts b/test/helpers/initPayloadInt.ts index 87b8ca77f..0ed59d95a 100644 --- a/test/helpers/initPayloadInt.ts +++ b/test/helpers/initPayloadInt.ts @@ -12,11 +12,17 @@ import { NextRESTClient } from './NextRESTClient.js' export async function initPayloadInt( dirname: string, testSuiteNameOverride?: string, -): Promise<{ config: SanitizedConfig; payload: Payload; restClient: NextRESTClient }> { + initializePayload = true, +): Promise<{ config: SanitizedConfig; payload?: Payload; restClient?: NextRESTClient }> { const testSuiteName = testSuiteNameOverride ?? 
path.basename(dirname)
   await runInit(testSuiteName, false, true)
   console.log('importing config', path.resolve(dirname, 'config.ts'))
   const { default: config } = await import(path.resolve(dirname, 'config.ts'))
+
+  if (!initializePayload) {
+    return { config: await config }
+  }
+
   console.log('starting payload')
   const payload = await getPayload({ config })
diff --git a/test/lexical-mdx/.gitignore b/test/lexical-mdx/.gitignore
new file mode 100644
index 000000000..cce01755f
--- /dev/null
+++ b/test/lexical-mdx/.gitignore
@@ -0,0 +1,2 @@
+/media
+/media-gif
diff --git a/test/lexical-mdx/collections/Media/index.ts b/test/lexical-mdx/collections/Media/index.ts
new file mode 100644
index 000000000..bb5edd034
--- /dev/null
+++ b/test/lexical-mdx/collections/Media/index.ts
@@ -0,0 +1,33 @@
+import type { CollectionConfig } from 'payload'
+
+export const mediaSlug = 'media'
+
+export const MediaCollection: CollectionConfig = {
+  slug: mediaSlug,
+  access: {
+    create: () => true,
+    read: () => true,
+  },
+  fields: [],
+  upload: {
+    crop: true,
+    focalPoint: true,
+    imageSizes: [
+      {
+        name: 'thumbnail',
+        height: 200,
+        width: 200,
+      },
+      {
+        name: 'medium',
+        height: 800,
+        width: 800,
+      },
+      {
+        name: 'large',
+        height: 1200,
+        width: 1200,
+      },
+    ],
+  },
+}
diff --git a/test/lexical-mdx/collections/Posts/CodeFields.tsx b/test/lexical-mdx/collections/Posts/CodeFields.tsx
new file mode 100644
index 000000000..287d952e0
--- /dev/null
+++ b/test/lexical-mdx/collections/Posts/CodeFields.tsx
@@ -0,0 +1,41 @@
+'use client'
+
+import type { CodeFieldClientProps } from 'payload'
+
+import { CodeField, useFormFields } from '@payloadcms/ui'
+import React, { useMemo } from 'react'
+
+import { languages } from './shared.js'
+
+const languageKeyToMonacoLanguageMap = {
+  plaintext: 'plaintext',
+  ts: 'typescript',
+  tsx: 'typescript',
+}
+
+export const Code: React.FC<CodeFieldClientProps> = ({ field }) => {
+  const languageField = useFormFields(([fields]) => fields['language'])
+
+  const language: string =
+    (languageField?.value as string) || (languageField.initialValue as string) || 'typescript'
+
+  const label = languages[language as keyof typeof languages]
+
+  const props: typeof field = useMemo(
+    () => ({
+      ...field,
+      admin: {
+        ...field.admin,
+        components: field.admin?.components || {},
+        editorOptions: field.admin?.editorOptions || {},
+        label,
+        language: languageKeyToMonacoLanguageMap[language] || language,
+      },
+    }),
+    [field, language, label],
+  )
+
+  const key = `${field.name}-${language}-${label}`
+
+  return <CodeField {...props} key={key} />
+}
diff --git a/test/lexical-mdx/collections/Posts/index.ts b/test/lexical-mdx/collections/Posts/index.ts
new file mode 100644
index 000000000..895cc3890
--- /dev/null
+++ b/test/lexical-mdx/collections/Posts/index.ts
@@ -0,0 +1,107 @@
+import type { CollectionConfig } from 'payload'
+
+import {
+  BlocksFeature,
+  EXPERIMENTAL_TableFeature,
+  FixedToolbarFeature,
+  lexicalEditor,
+  TreeViewFeature,
+} from '@payloadcms/richtext-lexical'
+
+import { loadMDXAfterRead, saveMDXBeforeChange } from '../../mdx/hooks.js'
+import { BannerBlock } from '../../mdx/jsxBlocks/banner.js'
+import { CodeBlock } from '../../mdx/jsxBlocks/code/code.js'
+import { InlineCodeBlock } from '../../mdx/jsxBlocks/inlineCode.js'
+import { PackageInstallOptions } from '../../mdx/jsxBlocks/packageInstallOptions.js'
+import { TextContainerBlock } from '../../mdx/jsxBlocks/TextContainer.js'
+import { TextContainerNoTrimBlock } from '../../mdx/jsxBlocks/TextContainerNoTrim.js'
+
+export const postsSlug = 'posts'
+
+export const PostsCollection: 
CollectionConfig = { + slug: postsSlug, + admin: { + useAsTitle: 'docPath', + }, + hooks: { + beforeChange: [saveMDXBeforeChange], + afterRead: [loadMDXAfterRead], + }, + fields: [ + { + name: 'docPath', + type: 'text', + required: true, + }, + { + type: 'collapsible', + label: 'FrontMatter', + admin: { + position: 'sidebar', + }, + fields: [ + { + name: 'frontMatter', + type: 'array', + fields: [ + { + type: 'text', + name: 'key', + }, + { + type: 'text', + name: 'value', + }, + ], + }, + ], + }, + { + name: 'richText', + type: 'richText', + editor: lexicalEditor({ + features: ({ defaultFeatures }) => [ + ...defaultFeatures, + TreeViewFeature(), + EXPERIMENTAL_TableFeature(), + FixedToolbarFeature(), + BlocksFeature({ + blocks: [ + BannerBlock, + CodeBlock, + PackageInstallOptions, + TextContainerNoTrimBlock, + TextContainerBlock, + ], + inlineBlocks: [InlineCodeBlock], + }), + ], + }), + }, + { + name: 'richTextUnconverted', + type: 'richText', + editor: lexicalEditor({ + features: ({ defaultFeatures }) => [ + ...defaultFeatures, + TreeViewFeature(), + EXPERIMENTAL_TableFeature(), + FixedToolbarFeature(), + BlocksFeature({ + blocks: [ + BannerBlock, + CodeBlock, + PackageInstallOptions, + TextContainerNoTrimBlock, + TextContainerBlock, + ], + inlineBlocks: [InlineCodeBlock], + }), + ], + }), + }, + ], + versions: { + drafts: true, + }, +} diff --git a/test/lexical-mdx/collections/Posts/shared.ts b/test/lexical-mdx/collections/Posts/shared.ts new file mode 100644 index 000000000..f9ae83f42 --- /dev/null +++ b/test/lexical-mdx/collections/Posts/shared.ts @@ -0,0 +1,15 @@ +export const docsBasePath = '/Users/alessio/Documents/payloadcms-mdx-mock/docs' + +export const languages = { + ts: 'TypeScript', + plaintext: 'Plain Text', + tsx: 'TSX', + js: 'JavaScript', + jsx: 'JSX', +} + +export const bannerTypes = { + success: 'Success', + info: 'Info', + warning: 'Warning', +} diff --git a/test/lexical-mdx/config.ts b/test/lexical-mdx/config.ts new file mode 100644 index 000000000..4f027d2cc --- /dev/null +++ b/test/lexical-mdx/config.ts @@ -0,0 +1,81 @@ +import { lexicalEditor } from '@payloadcms/richtext-lexical' +import * as fs from 'node:fs' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { devUser } from '../credentials.js' +import { MediaCollection } from './collections/Media/index.js' +import { PostsCollection } from './collections/Posts/index.js' +import { docsBasePath } from './collections/Posts/shared.js' +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export default buildConfigWithDefaults({ + // ...extend config here + collections: [ + PostsCollection, + { + slug: 'simple', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + MediaCollection, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + }, + editor: lexicalEditor({}), + cors: ['http://localhost:3000', 'http://localhost:3001'], + globals: [], + onInit: async (payload) => { + await payload.create({ + collection: 'users', + data: { + email: devUser.email, + password: devUser.password, + }, + }) + + await payload.delete({ + collection: 'posts', + where: {}, + }) + + // Recursively collect all paths to .mdx files RELATIVE to basePath + const walkSync = (dir: string, filelist: string[] = []) => { + fs.readdirSync(dir).forEach((file) => { + filelist = fs.statSync(path.join(dir, file)).isDirectory() + ? 
walkSync(path.join(dir, file), filelist) + : filelist.concat(path.join(dir, file)) + }) + return filelist + } + + const mdxFiles = walkSync(docsBasePath) + .filter((file) => file.endsWith('.mdx')) + .map((file) => file.replace(docsBasePath, '')) + + for (const file of mdxFiles) { + await payload.create({ + collection: 'posts', + depth: 0, + context: { + seed: true, + }, + data: { + docPath: file, + }, + }) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/lexical-mdx/eslint.config.js b/test/lexical-mdx/eslint.config.js new file mode 100644 index 000000000..53d58f164 --- /dev/null +++ b/test/lexical-mdx/eslint.config.js @@ -0,0 +1,20 @@ +import { rootParserOptions } from '../../eslint.config.js' +import { testEslintConfig } from '../eslint.config.js' + +/** @typedef {import('eslint').Linter.FlatConfig} */ +let FlatConfig + +/** @type {FlatConfig[]} */ +export const index = [ + ...testEslintConfig, + { + languageOptions: { + parserOptions: { + ...rootParserOptions, + tsconfigRootDir: import.meta.dirname, + }, + }, + }, +] + +export default index diff --git a/test/lexical-mdx/int.spec.ts b/test/lexical-mdx/int.spec.ts new file mode 100644 index 000000000..27b7d5de5 --- /dev/null +++ b/test/lexical-mdx/int.spec.ts @@ -0,0 +1,1519 @@ +/* eslint jest/no-conditional-in-test: 0 */ +import type { + BlockFields, + LexicalRichTextAdapter, + SanitizedServerEditorConfig, + SerializedBlockNode, +} from '@payloadcms/richtext-lexical' +import type { RichTextField, SanitizedConfig } from 'payload' +import type { MarkOptional } from 'ts-essentials' + +import path from 'path' +import { fileURLToPath } from 'url' + +import { initPayloadInt } from '../helpers/initPayloadInt.js' +import { postsSlug } from './collections/Posts/index.js' +import { editorJSONToMDX, mdxToEditorJSON } from './mdx/hooks.js' +import { tableJson } from './tableJson.js' +import { textToRichText } from './textToRichText.js' + +let config: SanitizedConfig +let editorConfig: SanitizedServerEditorConfig + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +type Tests = Array<{ + blockNode?: { + fields: Omit + } & Omit< + MarkOptional, + 'fields' + > + debugFlag?: boolean + description?: string + ignoreSpacesAndNewlines?: boolean + input: string + inputAfterConvertFromEditorJSON?: string + rootChildren?: any[] +}> + +describe('Lexical MDX', () => { + // --__--__--__--__--__--__--__--__--__ + // Boilerplate test setup/teardown + // --__--__--__--__--__--__--__--__--__ + beforeAll(async () => { + const { config: incomingConfig } = await initPayloadInt(dirname, undefined, false) + config = incomingConfig + + const richTextField: RichTextField = config.collections + .find((collection) => collection.slug === postsSlug) + .fields.find( + (field) => 'name' in field && field.name === 'richText', + ) as unknown as RichTextField + + editorConfig = (richTextField.editor as LexicalRichTextAdapter).editorConfig + }) + + const INPUT_AND_OUTPUTBase: Tests = [ + { + inputAfterConvertFromEditorJSON: ``, + input: ` + + `, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + uniqueId: 'xxx', + }, + }, + }, + { + input: ` + + ignored + +`, + inputAfterConvertFromEditorJSON: ``, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + }, + }, + }, + { + input: ` + + ignored + +`, + inputAfterConvertFromEditorJSON: ``, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + 
packageId: '444', + update: true, + }, + }, + }, + { + input: ` + + ignored + +`, + inputAfterConvertFromEditorJSON: ``, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ``, + input: ` + + ignored + +`, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ``, // Not test - test is not part of the block + input: ` + + ignored + +`, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + someNestedObject: { test: 'hello' }, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ``, + + input: ` + + ignored + + ignoredi + + +`, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ``, + + input: ` + + ignored + + ignoredi + + + + +`, + blockNode: { + fields: { + blockType: 'PackageInstallOptions', + packageId: '444', + update: true, + }, + }, + }, + // TODO: Write test for this: + /* + + not ignored + + not ignored + + not ignored + + */ + { + input: ` +\`\`\`ts +hello\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts +hello +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'hello', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts + hello\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts + hello +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: ' hello', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts x\n hello\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts + x + hello +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: ' x\n hello', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts hello\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'ts hello', + language: '', + }, + }, + }, + { + input: ` +\`\`\`hello\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'hello', + language: '', + }, + }, + }, + { + input: ` +\`\`\`ts +hello +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'hello', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts hello +there1 +\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts + hello +there1 +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: ' hello\nthere1', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts hello +there2 +!!\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts + hello +there2 +!! +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: ' hello\nthere2\n!!', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts +Hello +there3\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts +Hello +there3 +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'Hello\nthere3', + language: 'ts', + }, + }, + }, + { + input: ` +\`\`\`ts +Hello +\`\`\`ts +nested +\`\`\`! +there4\`\`\` +`, + inputAfterConvertFromEditorJSON: ` +\`\`\`ts +Hello +\`\`\`ts +nested +\`\`\`! +there4 +\`\`\` +`, + blockNode: { + fields: { + blockType: 'Code', + code: 'Hello\n```ts\nnested\n```!\nthere4', + language: 'ts', + }, + }, + }, + { + ignoreSpacesAndNewlines: true, + input: ` +| Option | Default route | Description | +| ----------------- | ----------------------- | ----------------------------------------------- | +| \`account\` | | The user's account page. 
| +| \`createFirstUser\` | \`/create-first-user\` | The page to create the first user. | +`, + inputAfterConvertFromEditorJSON: ` +| Option | Default route | Description | +|---|---|---| +| \`account\` | | The user's account page. | +| \`createFirstUser\` | \`/create-first-user\` | The page to create the first user. | +`, + rootChildren: [tableJson], + }, + { + input: ` + + children text + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: textToRichText('children text'), + }, + }, + }, + { + input: `\`inline code\``, + rootChildren: [ + { + children: [ + { + detail: 0, + format: 16, // Format 16 => inline code + mode: 'normal', + style: '', + text: 'inline code', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + }, + { + // This test ensures that the JSX within the code block is does not disrupt the main JSX parsing + input: ` + + \`https://.payloadcms.com/page\` + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 16, // Format 16 => inline code + mode: 'normal', + style: '', + text: 'https://.payloadcms.com/page', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + input: 'Hello inline code test.', + rootChildren: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Hello ', + type: 'text', + version: 1, + }, + { + type: 'inlineBlock', + + fields: { + code: 'inline code', + blockType: 'InlineCode', + }, + version: 1, + }, + + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' test.', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + }, + { + input: ` + + Some text 1 code 1 some + + text 2 code 2 some text + + 3 code 3 some text 4code 4 + +`, + description: 'Banner with inline codes, each line a paragraph', + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Some text 1 ', + type: 'text', + version: 1, + }, + + { + type: 'inlineBlock', + + fields: { + code: 'code 1', + blockType: 'InlineCode', + }, + version: 1, + }, + + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'text 2 ', + type: 'text', + version: 1, + }, + + { + type: 'inlineBlock', + + fields: { + code: 'code 2', + blockType: 'InlineCode', + }, + version: 1, + }, + + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some text', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: '3 ', + type: 'text', + version: 1, + }, + + { + type: 'inlineBlock', + + fields: { + code: 'code 3', + blockType: 'InlineCode', + }, + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some text 4', + type: 'text', + version: 
1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 4', + blockType: 'InlineCode', + }, + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + input: ` + + Some text 1 code 1 some + + text 2 code 2 some text + + 3 code 3 some text 4code 4 + +`, + description: 'Banner with inline codes, three paragraphs', + + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Some text 1 ', + type: 'text', + version: 1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 1', + blockType: 'InlineCode', + }, + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'text 2 ', + type: 'text', + version: 1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 2', + blockType: 'InlineCode', + }, + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some text', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: '3 ', + type: 'text', + version: 1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 3', + blockType: 'InlineCode', + }, + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' some text 4', + type: 'text', + version: 1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 4', + blockType: 'InlineCode', + }, + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + input: ` +Text before banner + + + test + +`, + rootChildren: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Text before banner', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + { + type: 'block', + format: '', + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'test', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + textFormat: 0, + textStyle: '', + type: 'paragraph', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + version: 2, + }, + ], + }, + { + description: 'TextContainerNoTrim with nested, no-leftpad content', + input: ` + +no indent + + indent 2 + + indent 4 + +no indent + +`, + blockNode: { + fields: { + blockType: 'TextContainerNoTrim', + text: `no indent + + indent 2 + + indent 4 + +no indent`, + }, + }, + }, + { + description: 'TextContainer with nested, no-leftpad content', + + input: ` + +no indent + + indent 2 + + indent 4 + +no indent + +`, + inputAfterConvertFromEditorJSON: ` + + no indent + + indent 2 + + indent 4 + + no indent + +`, + blockNode: { + fields: { + blockType: 'TextContainer', + text: `no indent + + indent 2 + + indent 4 + +no 
indent`, + }, + }, + }, + { + description: 'TextContainerNoTrim with nested, leftpad content', + input: ` + + indent 2 + + indent 4 + + indent 6 + + indent 2 + +`, + blockNode: { + fields: { + blockType: 'TextContainerNoTrim', + text: ` indent 2 + + indent 4 + + indent 6 + + indent 2`, + }, + }, + }, + { + description: 'TextContainer with nested, leftpad content', + input: ` + + indent 2 + + indent 4 + + indent 6 + + indent 2 + +`, + blockNode: { + fields: { + blockType: 'TextContainer', + text: `indent 2 + + indent 4 + + indent 6 + +indent 2`, + }, + }, + }, + { + input: ` +Some text 1 +code 2 +`, + description: 'InlineCode after text, split by linebreak', + rootChildren: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Some text 1', + type: 'text', + version: 1, + }, + { + type: 'linebreak', + version: 1, + }, + { + type: 'inlineBlock', + fields: { + code: 'code 2', + blockType: 'InlineCode', + }, + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + }, + { + description: 'Code block with nested within Banner', + input: ` + + \`\`\`ts + indent 1; + \`\`\` + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + fields: { + blockType: 'Code', + code: ' indent 1;', + language: 'ts', + }, + format: '', + type: 'block', + version: 2, + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + description: 'Code block with nested within Banner 2', + input: ` + +\`\`\`ts + indent 1; +\`\`\` + +`, + inputAfterConvertFromEditorJSON: ` + + \`\`\`ts + indent 1; + \`\`\` + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + fields: { + blockType: 'Code', + code: ' indent 1;', + language: 'ts', + }, + format: '', + type: 'block', + version: 2, + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + description: 'One-line Banner', + input: ` +Hi +`, + inputAfterConvertFromEditorJSON: ` + + Hi + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Hi', + type: 'text', + version: 1, + }, + ], + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + description: 'Code block with nested within 2 Banners', + input: ` + + +\`\`\`ts + indent 1; +\`\`\` + + +`, + inputAfterConvertFromEditorJSON: ` + + + \`\`\`ts + indent 1; + \`\`\` + + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + type: 'block', + format: '', + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + fields: { + blockType: 'Code', + code: ' indent 1;', + language: 'ts', + }, + format: '', + type: 'block', + version: 2, + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + version: 2, + }, + ], + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ` + + Some line [Start of link line2](/some/link) + +`, + input: ` + +Some line [Start of link + line2](/some/link) + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 
'Some line ', + type: 'text', + version: 1, + }, + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Start of link line2', + type: 'text', + version: 1, + }, + ], + fields: { + linkType: 'custom', + newTab: false, + url: '/some/link', + }, + format: '', + indent: 0, + type: 'link', + version: 3, + }, + ], + direction: null, + format: '', + indent: 0, + textFormat: 0, + textStyle: '', + type: 'paragraph', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + { + inputAfterConvertFromEditorJSON: ` + + Text text [ Link ](/some/link) . + +`, + input: ` + + Text text [ Link + ](/some/link) . + +`, + blockNode: { + fields: { + blockType: 'Banner', + content: { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: 'Text text ', + type: 'text', + version: 1, + }, + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' Link ', + type: 'text', + version: 1, + }, + ], + fields: { + linkType: 'custom', + newTab: false, + url: '/some/link', + }, + format: '', + indent: 0, + type: 'link', + version: 3, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' .', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + textFormat: 0, + textStyle: '', + type: 'paragraph', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + }, + }, + }, + ] + + const INPUT_AND_OUTPUT: Tests = INPUT_AND_OUTPUTBase //.filter((test) => test.debugFlag) + + for (const { + input, + inputAfterConvertFromEditorJSON, + blockNode, + ignoreSpacesAndNewlines, + rootChildren, + description, + } of INPUT_AND_OUTPUT) { + let sanitizedInput = input + // Remove beginning and end newline of input if exists (since the input is a template string) + if (sanitizedInput.startsWith('\n')) { + sanitizedInput = sanitizedInput.slice(1) + } + if (sanitizedInput.endsWith('\n')) { + sanitizedInput = sanitizedInput.slice(0, -1) + } + + let sanitizedInputAfterConvertFromEditorJSON = inputAfterConvertFromEditorJSON + if (sanitizedInputAfterConvertFromEditorJSON) { + if (sanitizedInputAfterConvertFromEditorJSON.startsWith('\n')) { + sanitizedInputAfterConvertFromEditorJSON = sanitizedInputAfterConvertFromEditorJSON.slice(1) + } + if (sanitizedInputAfterConvertFromEditorJSON.endsWith('\n')) { + sanitizedInputAfterConvertFromEditorJSON = sanitizedInputAfterConvertFromEditorJSON.slice( + 0, + -1, + ) + } + } + + it(`can convert to editor JSON: ${description ?? sanitizedInput}"`, () => { + const result = mdxToEditorJSON({ + mdxWithFrontmatter: sanitizedInput, + editorConfig, + }) + + if (blockNode) { + const receivedBlockNode: SerializedBlockNode = result.editorState.root + .children[0] as unknown as SerializedBlockNode + expect(receivedBlockNode).not.toBeNull() + + // By doing it like this, the blockNode defined in the test does not need to have all the top-level properties. 
We only wanna compare keys that are defined in the test + const receivedBlockNodeToTest = {} + for (const key in blockNode) { + receivedBlockNodeToTest[key] = receivedBlockNode[key] + } + + removeUndefinedAndIDRecursively(receivedBlockNodeToTest) + removeUndefinedAndIDRecursively(blockNode) + + expect(receivedBlockNodeToTest).toStrictEqual(blockNode) + } else if (rootChildren) { + const receivedRootChildren = result.editorState.root.children + removeUndefinedAndIDRecursively(receivedRootChildren) + removeUndefinedAndIDRecursively(rootChildren) + + expect(receivedRootChildren).toStrictEqual(rootChildren) + } else { + throw new Error('Test not configured properly') + } + }) + + it(`can convert from editor JSON: ${description ?? sanitizedInput}"`, () => { + const editorState = { + root: { + children: blockNode + ? [ + { + format: '', + type: 'block', + version: 2, + ...blockNode, + }, + ] + : rootChildren, + format: '', + indent: 0, + type: 'root', + version: 1, + }, + } + const result = editorJSONToMDX({ + editorConfig, + editorState, + }) + // Remove all spaces and newlines + const resultNoSpace = ignoreSpacesAndNewlines ? result.replace(/\s/g, '') : result + const inputNoSpace = ignoreSpacesAndNewlines + ? (sanitizedInputAfterConvertFromEditorJSON ?? sanitizedInput).replace(/\s/g, '') + : (sanitizedInputAfterConvertFromEditorJSON ?? sanitizedInput) + + console.log('resultNoSpace', resultNoSpace) + console.log('inputNoSpace', inputNoSpace) + expect(resultNoSpace).toBe(inputNoSpace) + }) + } +}) + +function removeUndefinedAndIDRecursively(obj: object) { + for (const key in obj) { + const value = obj[key] + if (value && typeof value === 'object') { + removeUndefinedAndIDRecursively(value) + } else if (value === undefined) { + delete obj[key] + } else if (value === null) { + delete obj[key] + } else if (key === 'id') { + delete obj[key] + } + } +} diff --git a/test/lexical-mdx/mdx/hooks.ts b/test/lexical-mdx/mdx/hooks.ts new file mode 100644 index 000000000..6cae3dbda --- /dev/null +++ b/test/lexical-mdx/mdx/hooks.ts @@ -0,0 +1,170 @@ +import type { SerializedEditorState } from 'lexical' +import type { CollectionAfterReadHook, CollectionBeforeChangeHook, RichTextField } from 'payload' + +import { createHeadlessEditor } from '@lexical/headless' +import { $convertToMarkdownString } from '@lexical/markdown' +import { + $convertFromMarkdownString, + extractFrontmatter, + frontmatterToObject, + getEnabledNodes, + type LexicalRichTextAdapter, + objectToFrontmatter, + type SanitizedServerEditorConfig, +} from '@payloadcms/richtext-lexical' +import fs from 'node:fs' +import path from 'path' +import { deepCopyObjectSimple } from 'payload' + +import { docsBasePath } from '../collections/Posts/shared.js' + +export const editorJSONToMDX = ({ + editorState, + editorConfig, + frontMatterData, +}: { + editorConfig: SanitizedServerEditorConfig + editorState: any + frontMatterData?: any +}) => { + const headlessEditor = createHeadlessEditor({ + nodes: getEnabledNodes({ + editorConfig, + }), + }) + + // Convert lexical state to markdown + // Import editor state into your headless editor + try { + headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState)) // This should commit the editor state immediately + } catch (e) { + console.error('Error parsing editor state', e) + } + + // Export to markdown + let markdown: string + headlessEditor.getEditorState().read(() => { + markdown = $convertToMarkdownString(editorConfig?.features?.markdownTransformers) + }) + + if (!frontMatterData) { + return 
markdown + } + + const frontMatterOriginalData = deepCopyObjectSimple(frontMatterData) + + //Frontmatter + const frontmatterData = {} + + if (frontMatterOriginalData) { + for (const frontMatterArrayEntry of frontMatterOriginalData) { + frontmatterData[frontMatterArrayEntry.key] = frontMatterArrayEntry.value + } + + const frontmatterString = objectToFrontmatter(frontmatterData) + + if (frontmatterString?.length) { + markdown = frontmatterString + '\n' + markdown + } + } + + return markdown +} + +export const saveMDXBeforeChange: CollectionBeforeChangeHook = ({ collection, data, context }) => { + if (context.seed) { + return data + } + const docFilePath = path.join(docsBasePath, data.docPath) + + const field: RichTextField = collection.fields.find( + (field) => 'name' in field && field.name === 'richText', + ) as RichTextField + const value = data[field.name] + + const editorConfig: SanitizedServerEditorConfig = (field.editor as LexicalRichTextAdapter) + .editorConfig + + const markdown = editorJSONToMDX({ + editorState: value, + editorConfig, + frontMatterData: data.frontMatter, + }) + + if (markdown?.trim()?.length) { + // Write markdown to '../../../../docs/admin/overview.mdx' + fs.writeFileSync(docFilePath, markdown, { + encoding: 'utf-8', + }) + } + + return null // Do not save anything to database +} + +export function mdxToEditorJSON({ + mdxWithFrontmatter, + editorConfig, +}: { + editorConfig: SanitizedServerEditorConfig + mdxWithFrontmatter: string +}): { + editorState: SerializedEditorState + frontMatter: { key: string; value: string }[] +} { + const frontMatter = extractFrontmatter(mdxWithFrontmatter) + + const mdx = frontMatter.content + + const headlessEditor = createHeadlessEditor({ + nodes: getEnabledNodes({ + editorConfig, + }), + }) + + headlessEditor.update( + () => { + $convertFromMarkdownString(mdx, editorConfig.features.markdownTransformers) + }, + { discrete: true }, + ) + + const frontMatterArray = frontMatter?.frontmatter?.length + ? 
Object.entries(frontmatterToObject(frontMatter.frontmatter)).map(([key, value]) => ({ + key, + value, + })) + : [] + + return { + editorState: headlessEditor.getEditorState().toJSON(), + frontMatter: frontMatterArray, + } +} + +export const loadMDXAfterRead: CollectionAfterReadHook = ({ collection, doc, context }) => { + if (context.seed) { + return doc + } + const field: RichTextField = collection.fields.find( + (field) => 'name' in field && field.name === 'richText', + ) as RichTextField + + const docFilePath = path.join(docsBasePath, doc.docPath) + + const mdxWithFrontmatter = fs.readFileSync(docFilePath, { + encoding: 'utf-8', + }) + const editorConfig: SanitizedServerEditorConfig = (field.editor as LexicalRichTextAdapter) + .editorConfig + + const result = mdxToEditorJSON({ + mdxWithFrontmatter, + editorConfig, + }) + + return { + ...doc, + richText: result.editorState, + frontMatter: result.frontMatter, + } +} diff --git a/test/lexical-mdx/mdx/jsxBlocks/TextContainer.ts b/test/lexical-mdx/mdx/jsxBlocks/TextContainer.ts new file mode 100644 index 000000000..c79d5f458 --- /dev/null +++ b/test/lexical-mdx/mdx/jsxBlocks/TextContainer.ts @@ -0,0 +1,24 @@ +import type { Block } from 'payload' + +export const TextContainerBlock: Block = { + slug: 'TextContainer', + jsx: { + import: ({ children }) => { + return { + text: children, + } + }, + export: ({ fields }) => { + return { + props: {}, + children: fields.text, + } + }, + }, + fields: [ + { + name: 'text', + type: 'text', + }, + ], +} diff --git a/test/lexical-mdx/mdx/jsxBlocks/TextContainerNoTrim.ts b/test/lexical-mdx/mdx/jsxBlocks/TextContainerNoTrim.ts new file mode 100644 index 000000000..6fbd42ba0 --- /dev/null +++ b/test/lexical-mdx/mdx/jsxBlocks/TextContainerNoTrim.ts @@ -0,0 +1,25 @@ +import type { Block } from 'payload' + +export const TextContainerNoTrimBlock: Block = { + slug: 'TextContainerNoTrim', + jsx: { + import: ({ children }) => { + return { + text: children, + } + }, + export: ({ fields }) => { + return { + props: {}, + children: fields.text, + } + }, + doNotTrimChildren: true, + }, + fields: [ + { + name: 'text', + type: 'text', + }, + ], +} diff --git a/test/lexical-mdx/mdx/jsxBlocks/banner.ts b/test/lexical-mdx/mdx/jsxBlocks/banner.ts new file mode 100644 index 000000000..3f5165cd4 --- /dev/null +++ b/test/lexical-mdx/mdx/jsxBlocks/banner.ts @@ -0,0 +1,53 @@ +import type { Block } from 'payload' + +import { BlocksFeature, lexicalEditor, TreeViewFeature } from '@payloadcms/richtext-lexical' + +import { bannerTypes } from '../../collections/Posts/shared.js' +import { InlineCodeBlock } from './inlineCode.js' + +export const BannerBlock: Block = { + slug: 'Banner', + jsx: { + import: ({ props, children, markdownToLexical }) => { + return { + type: props?.type, + content: markdownToLexical({ markdown: children }), + } + }, + export: ({ fields, lexicalToMarkdown }) => { + const props: any = {} + if (fields.type) { + props.type = fields.type + } + + return { + props, + children: lexicalToMarkdown({ editorState: fields.content }), + } + }, + }, + fields: [ + { + type: 'select', + name: 'type', + options: Object.entries(bannerTypes).map(([key, value]) => ({ + label: value, + value: key, + })), + defaultValue: 'info', + }, + { + name: 'content', + type: 'richText', + editor: lexicalEditor({ + features: ({ defaultFeatures }) => [ + ...defaultFeatures, + TreeViewFeature(), + BlocksFeature({ + inlineBlocks: [InlineCodeBlock], + }), + ], + }), + }, + ], +} diff --git a/test/lexical-mdx/mdx/jsxBlocks/code/code.ts 
b/test/lexical-mdx/mdx/jsxBlocks/code/code.ts
new file mode 100644
index 000000000..e0b6f138b
--- /dev/null
+++ b/test/lexical-mdx/mdx/jsxBlocks/code/code.ts
@@ -0,0 +1,32 @@
+import type { Block } from 'payload'
+
+import { languages } from '../../../collections/Posts/shared.js'
+import { codeConverter } from './converter.js'
+
+export const CodeBlock: Block = {
+  slug: 'Code',
+  admin: {
+    jsx: './mdx/jsxBlocks/code/converterClient.js#codeConverterClient',
+  },
+  jsx: codeConverter,
+  fields: [
+    {
+      type: 'select',
+      name: 'language',
+      options: Object.entries(languages).map(([key, value]) => ({
+        label: value,
+        value: key,
+      })),
+      defaultValue: 'ts',
+    },
+    {
+      admin: {
+        components: {
+          Field: './collections/Posts/CodeFields.js#Code',
+        },
+      },
+      name: 'code',
+      type: 'code',
+    },
+  ],
+}
diff --git a/test/lexical-mdx/mdx/jsxBlocks/code/converter.ts b/test/lexical-mdx/mdx/jsxBlocks/code/converter.ts
new file mode 100644
index 000000000..b4fda5a85
--- /dev/null
+++ b/test/lexical-mdx/mdx/jsxBlocks/code/converter.ts
@@ -0,0 +1,36 @@
+import type { BlockJSX } from 'payload'
+
+export const codeConverter: BlockJSX = {
+  customStartRegex: /^[ \t]*```(\w+)?/,
+  customEndRegex: {
+    optional: true,
+    regExp: /[ \t]*```$/,
+  },
+  doNotTrimChildren: true,
+  import: ({ openMatch, children, closeMatch }) => {
+    const language = openMatch[1]
+
+    const isSingleLineAndComplete =
+      !!closeMatch && !children.includes('\n') && openMatch.input?.trim() !== '```' + language
+
+    if (isSingleLineAndComplete) {
+      return {
+        language: '',
+        code: language + (children?.length ? children : ''), // No need to add a space before children, as they are not trimmed
+      }
+    }
+
+    return {
+      language,
+      code: children,
+    }
+  },
+  export: ({ fields }) => {
+    const isSingleLine = !fields.code.includes('\n') && !fields.language?.length
+    if (isSingleLine) {
+      return '```' + fields.code + '```'
+    }
+
+    return '```' + (fields.language || '') + (fields.code ?
'\n' + fields.code : '') + '\n' + '```'
+  },
+}
diff --git a/test/lexical-mdx/mdx/jsxBlocks/code/converterClient.ts b/test/lexical-mdx/mdx/jsxBlocks/code/converterClient.ts
new file mode 100644
index 000000000..66943b8f5
--- /dev/null
+++ b/test/lexical-mdx/mdx/jsxBlocks/code/converterClient.ts
@@ -0,0 +1,3 @@
+'use client'
+
+export { codeConverter as codeConverterClient } from './converter.js'
diff --git a/test/lexical-mdx/mdx/jsxBlocks/inlineCode.ts b/test/lexical-mdx/mdx/jsxBlocks/inlineCode.ts
new file mode 100644
index 000000000..c1eb59a25
--- /dev/null
+++ b/test/lexical-mdx/mdx/jsxBlocks/inlineCode.ts
@@ -0,0 +1,24 @@
+import type { Block } from 'payload'
+
+export const InlineCodeBlock: Block = {
+  slug: 'InlineCode',
+  jsx: {
+    import: ({ children }) => {
+      return {
+        code: children,
+      }
+    },
+    export: ({ fields }) => {
+      return {
+        props: {},
+        children: fields.code,
+      }
+    },
+  },
+  fields: [
+    {
+      name: 'code',
+      type: 'code',
+    },
+  ],
+}
diff --git a/test/lexical-mdx/mdx/jsxBlocks/packageInstallOptions.ts b/test/lexical-mdx/mdx/jsxBlocks/packageInstallOptions.ts
new file mode 100644
index 000000000..d9b18e557
--- /dev/null
+++ b/test/lexical-mdx/mdx/jsxBlocks/packageInstallOptions.ts
@@ -0,0 +1,52 @@
+import type { Block } from 'payload'
+
+export const PackageInstallOptions: Block = {
+  slug: 'PackageInstallOptions',
+  jsx: {
+    import: ({ props, children, markdownToLexical }) => {
+      return {
+        global: props?.global,
+        packageId: props?.packageId,
+        someNestedObject: props?.someNestedObject,
+        uniqueId: props?.uniqueId,
+        update: props?.update,
+      }
+    },
+    export: ({ fields, lexicalToMarkdown }) => {
+      return {
+        props: {
+          global: fields?.global,
+          packageId: fields?.packageId,
+          someNestedObject: fields?.someNestedObject,
+          uniqueId: fields?.uniqueId,
+          update: fields?.update,
+        },
+      }
+    },
+  },
+  fields: [
+    {
+      name: 'packageId',
+      type: 'text',
+    },
+    {
+      name: 'global',
+      type: 'checkbox',
+    },
+    {
+      name: 'update',
+      type: 'checkbox',
+    },
+    {
+      name: 'uniqueId',
+      type: 'text',
+    },
+    {
+      name: 'someNestedObject',
+      type: 'code',
+      admin: {
+        hidden: true,
+      },
+    },
+  ],
+}
diff --git a/test/lexical-mdx/payload-types.ts b/test/lexical-mdx/payload-types.ts
new file mode 100644
index 000000000..b7360f0eb
--- /dev/null
+++ b/test/lexical-mdx/payload-types.ts
@@ -0,0 +1,231 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * This file was automatically generated by Payload.
+ * DO NOT MODIFY IT BY HAND. Instead, modify your source Payload config,
+ * and re-run `payload generate:types` to regenerate this file.
+ */
+
+export interface Config {
+  auth: {
+    users: UserAuthOperations;
+  };
+  collections: {
+    posts: Post;
+    simple: Simple;
+    media: Media;
+    users: User;
+    'payload-locked-documents': PayloadLockedDocument;
+    'payload-preferences': PayloadPreference;
+    'payload-migrations': PayloadMigration;
+  };
+  db: {
+    defaultIDType: string;
+  };
+  globals: {};
+  locale: null;
+  user: User & {
+    collection: 'users';
+  };
+}
+export interface UserAuthOperations {
+  forgotPassword: {
+    email: string;
+    password: string;
+  };
+  login: {
+    email: string;
+    password: string;
+  };
+  registerFirstUser: {
+    email: string;
+    password: string;
+  };
+  unlock: {
+    email: string;
+    password: string;
+  };
+}
+/**
+ * This interface was referenced by `Config`'s JSON-Schema
+ * via the `definition` "posts".
+ */ +export interface Post { + id: string; + docPath: string; + frontMatter?: + | { + key?: string | null; + value?: string | null; + id?: string | null; + }[] + | null; + richText?: { + root: { + type: string; + children: { + type: string; + version: number; + [k: string]: unknown; + }[]; + direction: ('ltr' | 'rtl') | null; + format: 'left' | 'start' | 'center' | 'right' | 'end' | 'justify' | ''; + indent: number; + version: number; + }; + [k: string]: unknown; + } | null; + updatedAt: string; + createdAt: string; + _status?: ('draft' | 'published') | null; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple". + */ +export interface Simple { + id: string; + text?: string | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "media". + */ +export interface Media { + id: string; + updatedAt: string; + createdAt: string; + url?: string | null; + thumbnailURL?: string | null; + filename?: string | null; + mimeType?: string | null; + filesize?: number | null; + width?: number | null; + height?: number | null; + focalX?: number | null; + focalY?: number | null; + sizes?: { + thumbnail?: { + url?: string | null; + width?: number | null; + height?: number | null; + mimeType?: string | null; + filesize?: number | null; + filename?: string | null; + }; + medium?: { + url?: string | null; + width?: number | null; + height?: number | null; + mimeType?: string | null; + filesize?: number | null; + filename?: string | null; + }; + large?: { + url?: string | null; + width?: number | null; + height?: number | null; + mimeType?: string | null; + filesize?: number | null; + filename?: string | null; + }; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "users". + */ +export interface User { + id: string; + updatedAt: string; + createdAt: string; + email: string; + resetPasswordToken?: string | null; + resetPasswordExpiration?: string | null; + salt?: string | null; + hash?: string | null; + loginAttempts?: number | null; + lockUntil?: string | null; + password?: string | null; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-locked-documents". + */ +export interface PayloadLockedDocument { + id: string; + document?: + | ({ + relationTo: 'posts'; + value: string | Post; + } | null) + | ({ + relationTo: 'simple'; + value: string | Simple; + } | null) + | ({ + relationTo: 'media'; + value: string | Media; + } | null) + | ({ + relationTo: 'users'; + value: string | User; + } | null); + editedAt?: string | null; + globalSlug?: string | null; + user: { + relationTo: 'users'; + value: string | User; + }; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-preferences". + */ +export interface PayloadPreference { + id: string; + user: { + relationTo: 'users'; + value: string | User; + }; + key?: string | null; + value?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-migrations". 
+ */ +export interface PayloadMigration { + id: string; + name?: string | null; + batch?: number | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "auth". + */ +export interface Auth { + [k: string]: unknown; +} + + +declare module 'payload' { + // @ts-ignore + export interface GeneratedTypes extends Config {} +} \ No newline at end of file diff --git a/test/lexical-mdx/schema.graphql b/test/lexical-mdx/schema.graphql new file mode 100644 index 000000000..1038d843d --- /dev/null +++ b/test/lexical-mdx/schema.graphql @@ -0,0 +1,1902 @@ +type Query { + Post(id: String!, draft: Boolean): Post + Posts(draft: Boolean, where: Post_where, limit: Int, page: Int, sort: String): Posts + countPosts(draft: Boolean, where: Post_where): countPosts + docAccessPost(id: String!): postsDocAccess + versionPost(id: String): PostVersion + versionsPosts(where: versionsPost_where, limit: Int, page: Int, sort: String): versionsPosts + User(id: String!, draft: Boolean): User + Users(draft: Boolean, where: User_where, limit: Int, page: Int, sort: String): Users + countUsers(draft: Boolean, where: User_where): countUsers + docAccessUser(id: String!): usersDocAccess + meUser: usersMe + initializedUser: Boolean + PayloadPreference(id: String!, draft: Boolean): PayloadPreference + PayloadPreferences( + draft: Boolean + where: PayloadPreference_where + limit: Int + page: Int + sort: String + ): PayloadPreferences + countPayloadPreferences(draft: Boolean, where: PayloadPreference_where): countPayloadPreferences + docAccessPayloadPreference(id: String!): payload_preferencesDocAccess + Menu(draft: Boolean): Menu + docAccessMenu: menuDocAccess + Access: Access +} + +type Post { + id: String + text: String + richText(depth: Int): JSON + richText2(depth: Int): JSON + updatedAt: DateTime + createdAt: DateTime + _status: Post__status +} + +""" +The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). +""" +scalar JSON + @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf") + +""" +A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. 
+""" +scalar DateTime + +enum Post__status { + draft + published +} + +type Posts { + docs: [Post] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input Post_where { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +input Post_text_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input Post_richText_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input Post_richText2_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input Post_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input Post_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input Post__status_operator { + equals: Post__status_Input + not_equals: Post__status_Input + in: [Post__status_Input] + not_in: [Post__status_Input] + all: [Post__status_Input] + exists: Boolean +} + +enum Post__status_Input { + draft + published +} + +input Post_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input Post_where_and { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +input Post_where_or { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +type countPosts { + totalDocs: Int +} + +type postsDocAccess { + fields: PostsDocAccessFields + create: PostsCreateDocAccess + read: PostsReadDocAccess + update: PostsUpdateDocAccess + delete: PostsDeleteDocAccess + readVersions: PostsReadVersionsDocAccess +} + +type PostsDocAccessFields { + text: PostsDocAccessFields_text + richText: PostsDocAccessFields_richText + richText2: PostsDocAccessFields_richText2 + updatedAt: PostsDocAccessFields_updatedAt + createdAt: PostsDocAccessFields_createdAt + _status: PostsDocAccessFields__status +} + +type PostsDocAccessFields_text { + create: PostsDocAccessFields_text_Create + read: PostsDocAccessFields_text_Read + update: PostsDocAccessFields_text_Update + delete: PostsDocAccessFields_text_Delete +} + +type PostsDocAccessFields_text_Create { + permission: Boolean! +} + +type PostsDocAccessFields_text_Read { + permission: Boolean! +} + +type PostsDocAccessFields_text_Update { + permission: Boolean! +} + +type PostsDocAccessFields_text_Delete { + permission: Boolean! 
+} + +type PostsDocAccessFields_richText { + create: PostsDocAccessFields_richText_Create + read: PostsDocAccessFields_richText_Read + update: PostsDocAccessFields_richText_Update + delete: PostsDocAccessFields_richText_Delete +} + +type PostsDocAccessFields_richText_Create { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Read { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Update { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_richText2 { + create: PostsDocAccessFields_richText2_Create + read: PostsDocAccessFields_richText2_Read + update: PostsDocAccessFields_richText2_Update + delete: PostsDocAccessFields_richText2_Delete +} + +type PostsDocAccessFields_richText2_Create { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Read { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Update { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt { + create: PostsDocAccessFields_updatedAt_Create + read: PostsDocAccessFields_updatedAt_Read + update: PostsDocAccessFields_updatedAt_Update + delete: PostsDocAccessFields_updatedAt_Delete +} + +type PostsDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt { + create: PostsDocAccessFields_createdAt_Create + read: PostsDocAccessFields_createdAt_Read + update: PostsDocAccessFields_createdAt_Update + delete: PostsDocAccessFields_createdAt_Delete +} + +type PostsDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type PostsDocAccessFields__status { + create: PostsDocAccessFields__status_Create + read: PostsDocAccessFields__status_Read + update: PostsDocAccessFields__status_Update + delete: PostsDocAccessFields__status_Delete +} + +type PostsDocAccessFields__status_Create { + permission: Boolean! +} + +type PostsDocAccessFields__status_Read { + permission: Boolean! +} + +type PostsDocAccessFields__status_Update { + permission: Boolean! +} + +type PostsDocAccessFields__status_Delete { + permission: Boolean! +} + +type PostsCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +""" +The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). +""" +scalar JSONObject + @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf") + +type PostsReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsDeleteDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadVersionsDocAccess { + permission: Boolean! 
+ where: JSONObject +} + +type PostVersion { + parent(draft: Boolean): Post + version: PostVersion_Version + createdAt: DateTime + updatedAt: DateTime + latest: Boolean + id: String +} + +type PostVersion_Version { + text: String + richText(depth: Int): JSON + richText2(depth: Int): JSON + updatedAt: DateTime + createdAt: DateTime + _status: PostVersion_Version__status +} + +enum PostVersion_Version__status { + draft + published +} + +type versionsPosts { + docs: [PostVersion] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input versionsPost_where { + parent: versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +input versionsPost_parent_operator { + equals: JSON + not_equals: JSON + in: [JSON] + not_in: [JSON] + all: [JSON] + exists: Boolean +} + +input versionsPost_version__text_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input versionsPost_version__richText_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input versionsPost_version__richText2_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input versionsPost_version__updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_version__createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_version___status_operator { + equals: versionsPost_version___status_Input + not_equals: versionsPost_version___status_Input + in: [versionsPost_version___status_Input] + not_in: [versionsPost_version___status_Input] + all: [versionsPost_version___status_Input] + exists: Boolean +} + +enum versionsPost_version___status_Input { + draft + published +} + +input versionsPost_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_latest_operator { + equals: Boolean + not_equals: Boolean + exists: Boolean +} + +input versionsPost_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input versionsPost_where_and { + parent: 
versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +input versionsPost_where_or { + parent: versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +type User { + id: String + updatedAt: DateTime + createdAt: DateTime + email: EmailAddress! + resetPasswordToken: String + resetPasswordExpiration: DateTime + salt: String + hash: String + loginAttempts: Float + lockUntil: DateTime + password: String! +} + +""" +A field whose value conforms to the standard internet email address format as specified in HTML Spec: https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address. +""" +scalar EmailAddress + @specifiedBy(url: "https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address") + +type Users { + docs: [User] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input User_where { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +input User_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input User_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input User_email_operator { + equals: EmailAddress + not_equals: EmailAddress + like: EmailAddress + contains: EmailAddress + in: [EmailAddress] + not_in: [EmailAddress] + all: [EmailAddress] +} + +input User_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input User_where_and { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +input User_where_or { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +type countUsers { + totalDocs: Int +} + +type usersDocAccess { + fields: UsersDocAccessFields + 
create: UsersCreateDocAccess + read: UsersReadDocAccess + update: UsersUpdateDocAccess + delete: UsersDeleteDocAccess + unlock: UsersUnlockDocAccess +} + +type UsersDocAccessFields { + updatedAt: UsersDocAccessFields_updatedAt + createdAt: UsersDocAccessFields_createdAt + email: UsersDocAccessFields_email + password: UsersDocAccessFields_password +} + +type UsersDocAccessFields_updatedAt { + create: UsersDocAccessFields_updatedAt_Create + read: UsersDocAccessFields_updatedAt_Read + update: UsersDocAccessFields_updatedAt_Update + delete: UsersDocAccessFields_updatedAt_Delete +} + +type UsersDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt { + create: UsersDocAccessFields_createdAt_Create + read: UsersDocAccessFields_createdAt_Read + update: UsersDocAccessFields_createdAt_Update + delete: UsersDocAccessFields_createdAt_Delete +} + +type UsersDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_email { + create: UsersDocAccessFields_email_Create + read: UsersDocAccessFields_email_Read + update: UsersDocAccessFields_email_Update + delete: UsersDocAccessFields_email_Delete +} + +type UsersDocAccessFields_email_Create { + permission: Boolean! +} + +type UsersDocAccessFields_email_Read { + permission: Boolean! +} + +type UsersDocAccessFields_email_Update { + permission: Boolean! +} + +type UsersDocAccessFields_email_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_password { + create: UsersDocAccessFields_password_Create + read: UsersDocAccessFields_password_Read + update: UsersDocAccessFields_password_Update + delete: UsersDocAccessFields_password_Delete +} + +type UsersDocAccessFields_password_Create { + permission: Boolean! +} + +type UsersDocAccessFields_password_Read { + permission: Boolean! +} + +type UsersDocAccessFields_password_Update { + permission: Boolean! +} + +type UsersDocAccessFields_password_Delete { + permission: Boolean! +} + +type UsersCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersDeleteDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUnlockDocAccess { + permission: Boolean! + where: JSONObject +} + +type usersMe { + collection: String + exp: Int + token: String + user: User +} + +type PayloadPreference { + id: String + user: PayloadPreference_User_Relationship! 
+ key: String + value: JSON + updatedAt: DateTime + createdAt: DateTime +} + +type PayloadPreference_User_Relationship { + relationTo: PayloadPreference_User_RelationTo + value: PayloadPreference_User +} + +enum PayloadPreference_User_RelationTo { + users +} + +union PayloadPreference_User = User + +type PayloadPreferences { + docs: [PayloadPreference] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input PayloadPreference_where { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +input PayloadPreference_user_Relation { + relationTo: PayloadPreference_user_Relation_RelationTo + value: JSON +} + +enum PayloadPreference_user_Relation_RelationTo { + users +} + +input PayloadPreference_key_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input PayloadPreference_value_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + within: JSON + intersects: JSON + exists: Boolean +} + +input PayloadPreference_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input PayloadPreference_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input PayloadPreference_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input PayloadPreference_where_and { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +input PayloadPreference_where_or { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +type countPayloadPreferences { + totalDocs: Int +} + +type payload_preferencesDocAccess { + fields: PayloadPreferencesDocAccessFields + create: PayloadPreferencesCreateDocAccess + read: PayloadPreferencesReadDocAccess + update: PayloadPreferencesUpdateDocAccess + delete: PayloadPreferencesDeleteDocAccess +} + +type PayloadPreferencesDocAccessFields { + user: PayloadPreferencesDocAccessFields_user + key: PayloadPreferencesDocAccessFields_key + value: PayloadPreferencesDocAccessFields_value + updatedAt: PayloadPreferencesDocAccessFields_updatedAt + createdAt: PayloadPreferencesDocAccessFields_createdAt +} + +type PayloadPreferencesDocAccessFields_user { + create: PayloadPreferencesDocAccessFields_user_Create + read: 
PayloadPreferencesDocAccessFields_user_Read + update: PayloadPreferencesDocAccessFields_user_Update + delete: PayloadPreferencesDocAccessFields_user_Delete +} + +type PayloadPreferencesDocAccessFields_user_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key { + create: PayloadPreferencesDocAccessFields_key_Create + read: PayloadPreferencesDocAccessFields_key_Read + update: PayloadPreferencesDocAccessFields_key_Update + delete: PayloadPreferencesDocAccessFields_key_Delete +} + +type PayloadPreferencesDocAccessFields_key_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value { + create: PayloadPreferencesDocAccessFields_value_Create + read: PayloadPreferencesDocAccessFields_value_Read + update: PayloadPreferencesDocAccessFields_value_Update + delete: PayloadPreferencesDocAccessFields_value_Delete +} + +type PayloadPreferencesDocAccessFields_value_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt { + create: PayloadPreferencesDocAccessFields_updatedAt_Create + read: PayloadPreferencesDocAccessFields_updatedAt_Read + update: PayloadPreferencesDocAccessFields_updatedAt_Update + delete: PayloadPreferencesDocAccessFields_updatedAt_Delete +} + +type PayloadPreferencesDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt { + create: PayloadPreferencesDocAccessFields_createdAt_Create + read: PayloadPreferencesDocAccessFields_createdAt_Read + update: PayloadPreferencesDocAccessFields_createdAt_Update + delete: PayloadPreferencesDocAccessFields_createdAt_Delete +} + +type PayloadPreferencesDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesDeleteDocAccess { + permission: Boolean! 
+ where: JSONObject +} + +type Menu { + globalText: String + updatedAt: DateTime + createdAt: DateTime +} + +type menuDocAccess { + fields: MenuDocAccessFields + read: MenuReadDocAccess + update: MenuUpdateDocAccess +} + +type MenuDocAccessFields { + globalText: MenuDocAccessFields_globalText + updatedAt: MenuDocAccessFields_updatedAt + createdAt: MenuDocAccessFields_createdAt +} + +type MenuDocAccessFields_globalText { + create: MenuDocAccessFields_globalText_Create + read: MenuDocAccessFields_globalText_Read + update: MenuDocAccessFields_globalText_Update + delete: MenuDocAccessFields_globalText_Delete +} + +type MenuDocAccessFields_globalText_Create { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Read { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Update { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Delete { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt { + create: MenuDocAccessFields_updatedAt_Create + read: MenuDocAccessFields_updatedAt_Read + update: MenuDocAccessFields_updatedAt_Update + delete: MenuDocAccessFields_updatedAt_Delete +} + +type MenuDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt { + create: MenuDocAccessFields_createdAt_Create + read: MenuDocAccessFields_createdAt_Read + update: MenuDocAccessFields_createdAt_Update + delete: MenuDocAccessFields_createdAt_Delete +} + +type MenuDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type MenuReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type MenuUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type Access { + canAccessAdmin: Boolean! + posts: postsAccess + users: usersAccess + payload_preferences: payload_preferencesAccess + menu: menuAccess +} + +type postsAccess { + fields: PostsFields + create: PostsCreateAccess + read: PostsReadAccess + update: PostsUpdateAccess + delete: PostsDeleteAccess + readVersions: PostsReadVersionsAccess +} + +type PostsFields { + text: PostsFields_text + richText: PostsFields_richText + richText2: PostsFields_richText2 + updatedAt: PostsFields_updatedAt + createdAt: PostsFields_createdAt + _status: PostsFields__status +} + +type PostsFields_text { + create: PostsFields_text_Create + read: PostsFields_text_Read + update: PostsFields_text_Update + delete: PostsFields_text_Delete +} + +type PostsFields_text_Create { + permission: Boolean! +} + +type PostsFields_text_Read { + permission: Boolean! +} + +type PostsFields_text_Update { + permission: Boolean! +} + +type PostsFields_text_Delete { + permission: Boolean! +} + +type PostsFields_richText { + create: PostsFields_richText_Create + read: PostsFields_richText_Read + update: PostsFields_richText_Update + delete: PostsFields_richText_Delete +} + +type PostsFields_richText_Create { + permission: Boolean! +} + +type PostsFields_richText_Read { + permission: Boolean! +} + +type PostsFields_richText_Update { + permission: Boolean! +} + +type PostsFields_richText_Delete { + permission: Boolean! 
+} + +type PostsFields_richText2 { + create: PostsFields_richText2_Create + read: PostsFields_richText2_Read + update: PostsFields_richText2_Update + delete: PostsFields_richText2_Delete +} + +type PostsFields_richText2_Create { + permission: Boolean! +} + +type PostsFields_richText2_Read { + permission: Boolean! +} + +type PostsFields_richText2_Update { + permission: Boolean! +} + +type PostsFields_richText2_Delete { + permission: Boolean! +} + +type PostsFields_updatedAt { + create: PostsFields_updatedAt_Create + read: PostsFields_updatedAt_Read + update: PostsFields_updatedAt_Update + delete: PostsFields_updatedAt_Delete +} + +type PostsFields_updatedAt_Create { + permission: Boolean! +} + +type PostsFields_updatedAt_Read { + permission: Boolean! +} + +type PostsFields_updatedAt_Update { + permission: Boolean! +} + +type PostsFields_updatedAt_Delete { + permission: Boolean! +} + +type PostsFields_createdAt { + create: PostsFields_createdAt_Create + read: PostsFields_createdAt_Read + update: PostsFields_createdAt_Update + delete: PostsFields_createdAt_Delete +} + +type PostsFields_createdAt_Create { + permission: Boolean! +} + +type PostsFields_createdAt_Read { + permission: Boolean! +} + +type PostsFields_createdAt_Update { + permission: Boolean! +} + +type PostsFields_createdAt_Delete { + permission: Boolean! +} + +type PostsFields__status { + create: PostsFields__status_Create + read: PostsFields__status_Read + update: PostsFields__status_Update + delete: PostsFields__status_Delete +} + +type PostsFields__status_Create { + permission: Boolean! +} + +type PostsFields__status_Read { + permission: Boolean! +} + +type PostsFields__status_Update { + permission: Boolean! +} + +type PostsFields__status_Delete { + permission: Boolean! +} + +type PostsCreateAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadAccess { + permission: Boolean! + where: JSONObject +} + +type PostsUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type PostsDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadVersionsAccess { + permission: Boolean! + where: JSONObject +} + +type usersAccess { + fields: UsersFields + create: UsersCreateAccess + read: UsersReadAccess + update: UsersUpdateAccess + delete: UsersDeleteAccess + unlock: UsersUnlockAccess +} + +type UsersFields { + updatedAt: UsersFields_updatedAt + createdAt: UsersFields_createdAt + email: UsersFields_email + password: UsersFields_password +} + +type UsersFields_updatedAt { + create: UsersFields_updatedAt_Create + read: UsersFields_updatedAt_Read + update: UsersFields_updatedAt_Update + delete: UsersFields_updatedAt_Delete +} + +type UsersFields_updatedAt_Create { + permission: Boolean! +} + +type UsersFields_updatedAt_Read { + permission: Boolean! +} + +type UsersFields_updatedAt_Update { + permission: Boolean! +} + +type UsersFields_updatedAt_Delete { + permission: Boolean! +} + +type UsersFields_createdAt { + create: UsersFields_createdAt_Create + read: UsersFields_createdAt_Read + update: UsersFields_createdAt_Update + delete: UsersFields_createdAt_Delete +} + +type UsersFields_createdAt_Create { + permission: Boolean! +} + +type UsersFields_createdAt_Read { + permission: Boolean! +} + +type UsersFields_createdAt_Update { + permission: Boolean! +} + +type UsersFields_createdAt_Delete { + permission: Boolean! 
+} + +type UsersFields_email { + create: UsersFields_email_Create + read: UsersFields_email_Read + update: UsersFields_email_Update + delete: UsersFields_email_Delete +} + +type UsersFields_email_Create { + permission: Boolean! +} + +type UsersFields_email_Read { + permission: Boolean! +} + +type UsersFields_email_Update { + permission: Boolean! +} + +type UsersFields_email_Delete { + permission: Boolean! +} + +type UsersFields_password { + create: UsersFields_password_Create + read: UsersFields_password_Read + update: UsersFields_password_Update + delete: UsersFields_password_Delete +} + +type UsersFields_password_Create { + permission: Boolean! +} + +type UsersFields_password_Read { + permission: Boolean! +} + +type UsersFields_password_Update { + permission: Boolean! +} + +type UsersFields_password_Delete { + permission: Boolean! +} + +type UsersCreateAccess { + permission: Boolean! + where: JSONObject +} + +type UsersReadAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type UsersDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUnlockAccess { + permission: Boolean! + where: JSONObject +} + +type payload_preferencesAccess { + fields: PayloadPreferencesFields + create: PayloadPreferencesCreateAccess + read: PayloadPreferencesReadAccess + update: PayloadPreferencesUpdateAccess + delete: PayloadPreferencesDeleteAccess +} + +type PayloadPreferencesFields { + user: PayloadPreferencesFields_user + key: PayloadPreferencesFields_key + value: PayloadPreferencesFields_value + updatedAt: PayloadPreferencesFields_updatedAt + createdAt: PayloadPreferencesFields_createdAt +} + +type PayloadPreferencesFields_user { + create: PayloadPreferencesFields_user_Create + read: PayloadPreferencesFields_user_Read + update: PayloadPreferencesFields_user_Update + delete: PayloadPreferencesFields_user_Delete +} + +type PayloadPreferencesFields_user_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_key { + create: PayloadPreferencesFields_key_Create + read: PayloadPreferencesFields_key_Read + update: PayloadPreferencesFields_key_Update + delete: PayloadPreferencesFields_key_Delete +} + +type PayloadPreferencesFields_key_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_value { + create: PayloadPreferencesFields_value_Create + read: PayloadPreferencesFields_value_Read + update: PayloadPreferencesFields_value_Update + delete: PayloadPreferencesFields_value_Delete +} + +type PayloadPreferencesFields_value_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Delete { + permission: Boolean! 
+} + +type PayloadPreferencesFields_updatedAt { + create: PayloadPreferencesFields_updatedAt_Create + read: PayloadPreferencesFields_updatedAt_Read + update: PayloadPreferencesFields_updatedAt_Update + delete: PayloadPreferencesFields_updatedAt_Delete +} + +type PayloadPreferencesFields_updatedAt_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt { + create: PayloadPreferencesFields_createdAt_Create + read: PayloadPreferencesFields_createdAt_Read + update: PayloadPreferencesFields_createdAt_Update + delete: PayloadPreferencesFields_createdAt_Delete +} + +type PayloadPreferencesFields_createdAt_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesCreateAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesReadAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type menuAccess { + fields: MenuFields + read: MenuReadAccess + update: MenuUpdateAccess +} + +type MenuFields { + globalText: MenuFields_globalText + updatedAt: MenuFields_updatedAt + createdAt: MenuFields_createdAt +} + +type MenuFields_globalText { + create: MenuFields_globalText_Create + read: MenuFields_globalText_Read + update: MenuFields_globalText_Update + delete: MenuFields_globalText_Delete +} + +type MenuFields_globalText_Create { + permission: Boolean! +} + +type MenuFields_globalText_Read { + permission: Boolean! +} + +type MenuFields_globalText_Update { + permission: Boolean! +} + +type MenuFields_globalText_Delete { + permission: Boolean! +} + +type MenuFields_updatedAt { + create: MenuFields_updatedAt_Create + read: MenuFields_updatedAt_Read + update: MenuFields_updatedAt_Update + delete: MenuFields_updatedAt_Delete +} + +type MenuFields_updatedAt_Create { + permission: Boolean! +} + +type MenuFields_updatedAt_Read { + permission: Boolean! +} + +type MenuFields_updatedAt_Update { + permission: Boolean! +} + +type MenuFields_updatedAt_Delete { + permission: Boolean! +} + +type MenuFields_createdAt { + create: MenuFields_createdAt_Create + read: MenuFields_createdAt_Read + update: MenuFields_createdAt_Update + delete: MenuFields_createdAt_Delete +} + +type MenuFields_createdAt_Create { + permission: Boolean! +} + +type MenuFields_createdAt_Read { + permission: Boolean! +} + +type MenuFields_createdAt_Update { + permission: Boolean! +} + +type MenuFields_createdAt_Delete { + permission: Boolean! +} + +type MenuReadAccess { + permission: Boolean! + where: JSONObject +} + +type MenuUpdateAccess { + permission: Boolean! 
+ where: JSONObject +} + +type Mutation { + createPost(data: mutationPostInput!, draft: Boolean): Post + updatePost(id: String!, autosave: Boolean, data: mutationPostUpdateInput!, draft: Boolean): Post + deletePost(id: String!): Post + duplicatePost(id: String!): Post + restoreVersionPost(id: String): Post + createUser(data: mutationUserInput!, draft: Boolean): User + updateUser(id: String!, autosave: Boolean, data: mutationUserUpdateInput!, draft: Boolean): User + deleteUser(id: String!): User + refreshTokenUser(token: String): usersRefreshedUser + logoutUser: String + unlockUser(email: String!): Boolean! + loginUser(email: String, password: String): usersLoginResult + forgotPasswordUser(disableEmail: Boolean, email: String!, expiration: Int): Boolean! + resetPasswordUser(password: String, token: String): usersResetPassword + verifyEmailUser(token: String): Boolean + createPayloadPreference(data: mutationPayloadPreferenceInput!, draft: Boolean): PayloadPreference + updatePayloadPreference( + id: String! + autosave: Boolean + data: mutationPayloadPreferenceUpdateInput! + draft: Boolean + ): PayloadPreference + deletePayloadPreference(id: String!): PayloadPreference + duplicatePayloadPreference(id: String!): PayloadPreference + updateMenu(data: mutationMenuInput!, draft: Boolean): Menu +} + +input mutationPostInput { + text: String + richText: JSON + richText2: JSON + updatedAt: String + createdAt: String + _status: Post__status_MutationInput +} + +enum Post__status_MutationInput { + draft + published +} + +input mutationPostUpdateInput { + text: String + richText: JSON + richText2: JSON + updatedAt: String + createdAt: String + _status: PostUpdate__status_MutationInput +} + +enum PostUpdate__status_MutationInput { + draft + published +} + +input mutationUserInput { + updatedAt: String + createdAt: String + email: String! + resetPasswordToken: String + resetPasswordExpiration: String + salt: String + hash: String + loginAttempts: Float + lockUntil: String + password: String! +} + +input mutationUserUpdateInput { + updatedAt: String + createdAt: String + email: String + resetPasswordToken: String + resetPasswordExpiration: String + salt: String + hash: String + loginAttempts: Float + lockUntil: String + password: String +} + +type usersRefreshedUser { + exp: Int + refreshedToken: String + user: usersJWT +} + +type usersJWT { + email: EmailAddress! + collection: String! 
+} + +type usersLoginResult { + exp: Int + token: String + user: User +} + +type usersResetPassword { + token: String + user: User +} + +input mutationPayloadPreferenceInput { + user: PayloadPreference_UserRelationshipInput + key: String + value: JSON + updatedAt: String + createdAt: String +} + +input PayloadPreference_UserRelationshipInput { + relationTo: PayloadPreference_UserRelationshipInputRelationTo + value: JSON +} + +enum PayloadPreference_UserRelationshipInputRelationTo { + users +} + +input mutationPayloadPreferenceUpdateInput { + user: PayloadPreferenceUpdate_UserRelationshipInput + key: String + value: JSON + updatedAt: String + createdAt: String +} + +input PayloadPreferenceUpdate_UserRelationshipInput { + relationTo: PayloadPreferenceUpdate_UserRelationshipInputRelationTo + value: JSON +} + +enum PayloadPreferenceUpdate_UserRelationshipInputRelationTo { + users +} + +input mutationMenuInput { + globalText: String + updatedAt: String + createdAt: String +} diff --git a/test/lexical-mdx/tableJson.ts b/test/lexical-mdx/tableJson.ts new file mode 100644 index 000000000..2eac7eccb --- /dev/null +++ b/test/lexical-mdx/tableJson.ts @@ -0,0 +1,387 @@ +export const tableJson = { + children: [ + { + children: [ + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' Option ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 1, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' Default route ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 1, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' Description ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 1, + rowSpan: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablerow', + version: 1, + }, + { + children: [ + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 16, + mode: 'normal', + style: '', + text: 'account', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + 
version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: " The user's account page. ", + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablerow', + version: 1, + }, + { + children: [ + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 16, + mode: 'normal', + style: '', + text: 'createFirstUser', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 16, + mode: 'normal', + style: '', + text: '/create-first-user', + type: 'text', + version: 1, + }, + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' ', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text: ' The page to create the first user. 
', + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'paragraph', + version: 1, + textFormat: 0, + textStyle: '', + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablecell', + version: 1, + backgroundColor: null, + colSpan: 1, + headerState: 0, + rowSpan: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'tablerow', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'table', + version: 1, +} diff --git a/test/lexical-mdx/textToRichText.ts b/test/lexical-mdx/textToRichText.ts new file mode 100644 index 000000000..707882461 --- /dev/null +++ b/test/lexical-mdx/textToRichText.ts @@ -0,0 +1,33 @@ +export function textToRichText(text: string) { + return { + root: { + children: [ + { + children: [ + { + detail: 0, + format: 0, + mode: 'normal', + style: '', + text, + type: 'text', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + textFormat: 0, + textStyle: '', + type: 'paragraph', + version: 1, + }, + ], + direction: null, + format: '', + indent: 0, + type: 'root', + version: 1, + }, + } +} diff --git a/test/lexical-mdx/tsconfig.eslint.json b/test/lexical-mdx/tsconfig.eslint.json new file mode 100644 index 000000000..b34cc7afb --- /dev/null +++ b/test/lexical-mdx/tsconfig.eslint.json @@ -0,0 +1,13 @@ +{ + // extend your base config to share compilerOptions, etc + //"extends": "./tsconfig.json", + "compilerOptions": { + // ensure that nobody can accidentally use this config for a build + "noEmit": true + }, + "include": [ + // whatever paths you intend to lint + "./**/*.ts", + "./**/*.tsx" + ] +} diff --git a/test/lexical-mdx/tsconfig.json b/test/lexical-mdx/tsconfig.json new file mode 100644 index 000000000..3c43903cf --- /dev/null +++ b/test/lexical-mdx/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../tsconfig.json" +}