From 1d6ffcb80e4c3fa4220f8a40798a98cea8b95842 Mon Sep 17 00:00:00 2001
From: Said Akhrarov <36972061+akhrarovsaid@users.noreply.github.com>
Date: Wed, 9 Jul 2025 09:59:22 -0400
Subject: [PATCH 001/143] feat(ui): adds support for copy pasting complex fields (#11513)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### What?

This PR introduces support for copy + pasting complex fields such as Arrays and Blocks. It adds a new `ClipboardAction` component that houses the logic for copying supported fields to the clipboard and pasting from it.

I've scoped this PR to Blocks & Arrays only; however, the structure of the components introduced lends itself to being easily extended to other field types. I've limited the scope because there may be design & functional blockers that make it unclear how to add actions to particular fields.

Supported fields:

- Arrays ([Demo](https://github.com/user-attachments/assets/523916f6-77d0-43e2-9a11-a6a9d8c1b71c))
- Array Rows ([Demo](https://github.com/user-attachments/assets/0cd01a1f-3e5e-4fea-ac83-8c0bba8d1aac))
- Blocks ([Demo](https://github.com/user-attachments/assets/4c55ac2b-55f4-4793-9b53-309b2e090dd9))
- Block Rows ([Demo](https://github.com/user-attachments/assets/1b4d2bea-981a-485b-a6c4-c59a77a50567))

Fields that may be supported in the future with minimal effort by adopting the changes introduced here:

- Tabs
- Groups
- Collapsible
- Relationships

This PR also includes e2e tests that cover both field- and row-level copy/pasting.

### Why?

To make it simpler and faster to copy complex fields between documents, and between rows within those documents.

### How?

Introduces a new `ClipboardAction` component with helper utilities to aid in copy/pasting and validating field data (see the sketch after the notes below).

Addresses #2977 & #10703

Notes:

- There seems to be an issue with Blocks & Arrays that contain RichText fields, where the RichText field disappears from the DOM upon replacing form state. These fields resurface after either saving the data or dragging/dropping the row containing them.
- Copying a Row and then pasting it at the field level will overwrite the field to include only that one row. This is intended, but can be changed if requested.
- Clipboard permissions are required to use this feature. [See Clipboard API caniuse](https://caniuse.com/async-clipboard).
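
For context, here is a minimal TypeScript sketch of the general shape of the copy/paste utilities. Everything in it — the `ClipboardPayload` type, the storage key, the use of `localStorage`, and the function names — is an assumption for illustration only, not the actual API added in `packages/ui/src/elements/ClipboardAction/`:

```ts
// Hypothetical sketch — not the PR's actual implementation; names and shapes are illustrative assumptions.

type ClipboardPayload = {
  data: unknown // serialized field or row data
  fieldType: 'array' | 'blocks'
  schemaPath: string // where the data was copied from
  type: 'field' | 'row'
}

const STORAGE_KEY = 'payload-clipboard' // assumed key

// Copy: persist the serialized data plus enough metadata to validate a later paste.
export function copyToClipboard(payload: ClipboardPayload): boolean {
  try {
    localStorage.setItem(STORAGE_KEY, JSON.stringify(payload))
    return true
  } catch {
    // e.g. storage unavailable — the UI would surface t('error:unableToCopy')
    return false
  }
}

// Paste: read the stored payload back and reject data copied from an incompatible field type,
// which would surface t('error:invalidClipboardData').
export function readFromClipboard(
  expectedFieldType: ClipboardPayload['fieldType'],
): ClipboardPayload | null {
  const raw = localStorage.getItem(STORAGE_KEY)
  if (!raw) {
    return null
  }
  try {
    const parsed = JSON.parse(raw) as ClipboardPayload
    if (parsed.fieldType !== expectedFieldType) {
      return null
    }
    return parsed
  } catch {
    return null
  }
}
```

The actual changes also cover merging pasted data back into form state and validating clipboard data, per `mergeFormStateFromClipboard.ts` and `isClipboardDataValid.ts` in the diff below.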
#### TODO - [x] ~~I forgot BlockReferences~~ - [x] ~~Fix tests failing due to new buttons causing locator conflicts~~ - [x] ~~Ensure deeply nested structures work~~ - [x] ~~Add missing translations~~ - [x] ~~Implement local storage instead of clipboard api~~ - [x] ~~Improve tests~~ --------- Co-authored-by: Germán Jabloñski <43938777+GermanJablo@users.noreply.github.com> --- packages/translations/src/clientKeys.ts | 7 + packages/translations/src/languages/ar.ts | 7 + packages/translations/src/languages/az.ts | 8 + packages/translations/src/languages/bg.ts | 8 + packages/translations/src/languages/bnBd.ts | 8 + packages/translations/src/languages/bnIn.ts | 8 + packages/translations/src/languages/ca.ts | 8 + packages/translations/src/languages/cs.ts | 8 + packages/translations/src/languages/da.ts | 8 + packages/translations/src/languages/de.ts | 12 +- packages/translations/src/languages/en.ts | 8 + packages/translations/src/languages/es.ts | 8 + packages/translations/src/languages/et.ts | 8 + packages/translations/src/languages/fa.ts | 8 + packages/translations/src/languages/fr.ts | 8 + packages/translations/src/languages/he.ts | 7 + packages/translations/src/languages/hr.ts | 8 + packages/translations/src/languages/hu.ts | 8 + packages/translations/src/languages/hy.ts | 8 + packages/translations/src/languages/it.ts | 8 + packages/translations/src/languages/ja.ts | 8 + packages/translations/src/languages/ko.ts | 8 + packages/translations/src/languages/lt.ts | 8 + packages/translations/src/languages/lv.ts | 8 + packages/translations/src/languages/my.ts | 8 + packages/translations/src/languages/nb.ts | 8 + packages/translations/src/languages/nl.ts | 8 + packages/translations/src/languages/pl.ts | 7 + packages/translations/src/languages/pt.ts | 8 + packages/translations/src/languages/ro.ts | 8 + packages/translations/src/languages/rs.ts | 8 + .../translations/src/languages/rsLatin.ts | 8 + packages/translations/src/languages/ru.ts | 8 + packages/translations/src/languages/sk.ts | 8 + packages/translations/src/languages/sl.ts | 8 + packages/translations/src/languages/sv.ts | 8 + packages/translations/src/languages/th.ts | 8 + packages/translations/src/languages/tr.ts | 8 + packages/translations/src/languages/uk.ts | 8 + packages/translations/src/languages/vi.ts | 8 + packages/translations/src/languages/zh.ts | 7 + packages/translations/src/languages/zhTw.ts | 7 + .../ui/src/elements/ArrayAction/index.tsx | 25 +- .../ClipboardAction/ClipboardActionLabel.tsx | 32 +++ .../ClipboardAction/clipboardUtilities.ts | 67 +++++ .../ui/src/elements/ClipboardAction/index.tsx | 117 +++++++++ .../ClipboardAction/isClipboardDataValid.ts | 109 ++++++++ .../mergeFormStateFromClipboard.ts | 131 ++++++++++ .../ui/src/elements/ClipboardAction/types.ts | 58 +++++ packages/ui/src/fields/Array/ArrayRow.tsx | 6 + packages/ui/src/fields/Array/index.tsx | 161 ++++++++++-- packages/ui/src/fields/Blocks/BlockRow.tsx | 6 + packages/ui/src/fields/Blocks/RowActions.tsx | 6 + packages/ui/src/fields/Blocks/index.tsx | 173 +++++++++++-- test/access-control/e2e.spec.ts | 2 +- test/fields/collections/Array/e2e.spec.ts | 233 ++++++++++++++++++ test/fields/collections/Blocks/e2e.spec.ts | 220 +++++++++++++++++ test/fields/collections/Blocks/index.ts | 25 ++ test/helpers/e2e/copyPasteField.ts | 44 ++++ test/joins/e2e.spec.ts | 2 +- test/localization/e2e.spec.ts | 4 +- 61 files changed, 1699 insertions(+), 56 deletions(-) create mode 100644 packages/ui/src/elements/ClipboardAction/ClipboardActionLabel.tsx create mode 100644 
packages/ui/src/elements/ClipboardAction/clipboardUtilities.ts create mode 100644 packages/ui/src/elements/ClipboardAction/index.tsx create mode 100644 packages/ui/src/elements/ClipboardAction/isClipboardDataValid.ts create mode 100644 packages/ui/src/elements/ClipboardAction/mergeFormStateFromClipboard.ts create mode 100644 packages/ui/src/elements/ClipboardAction/types.ts create mode 100644 test/helpers/e2e/copyPasteField.ts diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index ddfa284015..c586ea55bb 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -68,12 +68,15 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'error:emailOrPasswordIncorrect', 'error:usernameOrPasswordIncorrect', 'error:loadingDocument', + 'error:insufficientClipboardPermissions', + 'error:invalidClipboardData', 'error:invalidRequestArgs', 'error:invalidFileType', 'error:logoutFailed', 'error:noMatchedField', 'error:notAllowedToAccessPage', 'error:previewing', + 'error:unableToCopy', 'error:unableToDeleteCount', 'error:unableToReindexCollection', 'error:unableToUpdateCount', @@ -182,6 +185,8 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:copied', 'general:clearAll', 'general:copy', + 'general:copyField', + 'general:copyRow', 'general:copyWarning', 'general:copying', 'general:create', @@ -267,6 +272,8 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:overwriteExistingData', 'general:pageNotFound', 'general:password', + 'general:pasteField', + 'general:pasteRow', 'general:payloadSettings', 'general:perPage', 'general:previous', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index 790fbbef2d..e8382ab45a 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -90,6 +90,8 @@ export const arTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'الحقل التالي غير صالح:', followingFieldsInvalid_other: 'الحقول التالية غير صالحة:', incorrectCollection: 'مجموعة غير صحيحة', + insufficientClipboardPermissions: 'تم رفض الوصول إلى الحافظة. يرجى التحقق من أذونات الحافظة.', + invalidClipboardData: 'بيانات الحافظة غير صالحة.', invalidFileType: 'نوع ملف غير صالح', invalidFileTypeValue: 'نوع ملف غير صالح: {{value}}', invalidRequestArgs: 'تم تمرير وسيطات غير صالحة في الطلب: {{args}}', @@ -111,6 +113,7 @@ export const arTranslations: DefaultTranslationsObject = { problemUploadingFile: 'حدث خطأ اثناء رفع الملفّ.', tokenInvalidOrExpired: 'الرّمز إمّا غير صالح أو منتهي الصّلاحيّة.', tokenNotProvided: 'لم يتم تقديم الرمز.', + unableToCopy: 'تعذر النسخ.', unableToDeleteCount: 'يتعذّر حذف {{count}} من {{total}} {{label}}.', unableToReindexCollection: 'خطأ في إعادة فهرسة المجموعة {{collection}}. تم إيقاف العملية.', unableToUpdateCount: 'يتعذّر تحديث {{count}} من {{total}} {{label}}.', @@ -237,7 +240,9 @@ export const arTranslations: DefaultTranslationsObject = { 'سيؤدي هذا إلى إزالة الفهارس الحالية وإعادة فهرسة المستندات في جميع المجموعات.', copied: 'تمّ النّسخ', copy: 'نسخ', + copyField: 'نسخ الحقل', copying: 'نسخ', + copyRow: 'نسخ الصف', copyWarning: 'أنت على وشك الكتابة فوق {{to}} بـ {{from}} لـ {{label}} {{title}}. 
هل أنت متأكد؟', create: 'إنشاء', created: 'تمّ الإنشاء', @@ -330,6 +335,8 @@ export const arTranslations: DefaultTranslationsObject = { overwriteExistingData: 'استبدل بيانات الحقل الموجودة', pageNotFound: 'الصّفحة غير موجودة', password: 'كلمة المرور', + pasteField: 'لصق الحقل', + pasteRow: 'لصق الصف', payloadSettings: 'الإعدادات', perPage: 'لكلّ صفحة: {{limit}}', previous: 'سابق', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index 95807d643c..ff7f352eed 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -90,6 +90,9 @@ export const azTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Aşağıdakı sahə yanlışdır:', followingFieldsInvalid_other: 'Aşağıdaki sahələr yanlışdır:', incorrectCollection: 'Yanlış Kolleksiya', + insufficientClipboardPermissions: + 'Mübadilə buferinə giriş rədd edildi. Zəhmət olmasa, icazələri yoxlayın.', + invalidClipboardData: 'Yanlış mübadilə buferi məlumatı.', invalidFileType: 'Yanlış fayl növü', invalidFileTypeValue: 'Yanlış fayl növü: {{value}}', invalidRequestArgs: 'Sorguda etibarsız arqumentlər təqdim edildi: {{args}}', @@ -111,6 +114,7 @@ export const azTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Faylın yüklənməsi zamanı problem yarandı.', tokenInvalidOrExpired: 'Token ya yanlışdır və ya müddəti bitib.', tokenNotProvided: 'Token təqdim edilməyib.', + unableToCopy: 'Kopyalama mümkün deyil.', unableToDeleteCount: '{{count}} dən {{total}} {{label}} silinə bilmir.', unableToReindexCollection: '{{collection}} kolleksiyasının yenidən indekslənməsi zamanı səhv baş verdi. Əməliyyat dayandırıldı.', @@ -241,7 +245,9 @@ export const azTranslations: DefaultTranslationsObject = { 'Bu, mövcud indeksləri siləcək və bütün kolleksiyalardakı sənədləri yenidən indeksləyəcək.', copied: 'Kopyalandı', copy: 'Kopyala', + copyField: 'Sahəni kopyala', copying: 'Kopyalama', + copyRow: 'Sətiri kopyala', copyWarning: 'Siz {{label}} {{title}} üçün {{from}} ilə {{to}} -nu üzərindən yazmaq ətrafındasınız. Eminsiniz?', create: 'Yarat', @@ -336,6 +342,8 @@ export const azTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Mövcud sahə məlumatlarını yenidən yazın', pageNotFound: 'Səhifə tapılmadı', password: 'Şifrə', + pasteField: 'Sahəni yapışdır', + pasteRow: 'Sətiri yapışdır', payloadSettings: 'Payload Parametrləri', perPage: 'Hər səhifədə: {{limit}}', previous: 'Əvvəlki', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index b1327aa19d..4ac64a7b60 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -90,6 +90,9 @@ export const bgTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Следното поле е некоректно:', followingFieldsInvalid_other: 'Следните полета са некоректни:', incorrectCollection: 'Грешна колекция', + insufficientClipboardPermissions: + 'Достъпът до клипборда е отказан. 
Моля, проверете вашите разрешения за клипборда.', + invalidClipboardData: 'Невалидни данни в клипборда.', invalidFileType: 'Невалиден тип на файл', invalidFileTypeValue: 'Невалиден тип на файл: {{value}}', invalidRequestArgs: 'Невалидни аргументи в заявката: {{args}}', @@ -111,6 +114,7 @@ export const bgTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Имаше проблем при качването на файла.', tokenInvalidOrExpired: 'Ключът е невалиден или изтекъл.', tokenNotProvided: 'Токенът не е предоставен.', + unableToCopy: 'Неуспешно копиране.', unableToDeleteCount: 'Не беше възможно да се изтрият {{count}} от {{total}} {{label}}.', unableToReindexCollection: 'Грешка при преиндексиране на колекцията {{collection}}. Операцията е прекратена.', @@ -240,7 +244,9 @@ export const bgTranslations: DefaultTranslationsObject = { 'Това ще премахне съществуващите индекси и ще преиндексира документите във всички колекции.', copied: 'Копирано', copy: 'Копирай', + copyField: 'Копирай поле', copying: 'Копиране', + copyRow: 'Копирай ред', copyWarning: 'Предстои да презапишете {{to}} с {{from}} за {{label}} {{title}}. Сигурни ли сте?', create: 'Създай', @@ -335,6 +341,8 @@ export const bgTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Презапишете съществуващите данни в полето', pageNotFound: 'Страницата не беше открита', password: 'Парола', + pasteField: 'Постави поле', + pasteRow: 'Постави ред', payloadSettings: 'Настройки на Payload', perPage: 'На страница: {{limit}}', previous: 'Предишен', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 2afab6ad17..3119dc1c1b 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -90,6 +90,9 @@ export const bnBdTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'নিম্নলিখিত ক্ষেত্রটি অবৈধ:', followingFieldsInvalid_other: 'নিম্নলিখিত ক্ষেত্রগুলি অবৈধ:', incorrectCollection: 'ভুল সংগ্রহ', + insufficientClipboardPermissions: + 'ক্লিপবোর্ড অ্যাক্সেস প্রত্যাখ্যান করা হয়েছে। দয়া করে আপনার ক্লিপবোর্ড অনুমতিগুলি পরীক্ষা করুন।', + invalidClipboardData: 'অবৈধ ক্লিপবোর্ড ডেটা।', invalidFileType: 'অবৈধ ফাইল প্রকার', invalidFileTypeValue: 'অবৈধ ফাইল প্রকার: {{value}}', invalidRequestArgs: 'অনুরোধে অবৈধ আর্গুমেন্ট পাস করা হয়েছে: {{args}}', @@ -111,6 +114,7 @@ export const bnBdTranslations: DefaultTranslationsObject = { problemUploadingFile: 'ফাইল আপলোড করতে একটি সমস্যা হয়েছে।', tokenInvalidOrExpired: 'টোকেন অবৈধ বা মেয়াদ শেষ হয়ে গেছে।', tokenNotProvided: 'টোকেন প্রদান করা হয়নি।', + unableToCopy: 'কপি করা সম্ভব নয়।', unableToDeleteCount: '{{total}} {{label}} এর মধ্যে {{count}} টি মুছতে অক্ষম।', unableToReindexCollection: '{{collection}} সংগ্রহ পুনরায় সূচিবদ্ধ করতে ত্রুটি হয়েছে। অপারেশন বাতিল করা হয়েছে।', @@ -242,7 +246,9 @@ export const bnBdTranslations: DefaultTranslationsObject = { 'এটি বিদ্যমান সূচিগুলি সরিয়ে দেবে এবং সমস্ত সংগ্রহগুলির ডকুমেন্টগুলি পুনরায় সূচিবদ্ধ করবে।', copied: 'কপি করা হয়েছে', copy: 'কপি করুন', + copyField: 'ফিল্ড কপি করুন', copying: 'কপি করা হচ্ছে', + copyRow: 'সারি কপি করুন', copyWarning: 'আপনি {{label}} {{title}} এর জন্য {{to}} কে {{from}} দ্বারা ওভাররাইট করতে চলেছেন। আপনি কি নিশ্চিত?', create: 'তৈরি করুন', @@ -337,6 +343,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { overwriteExistingData: 'বিদ্যমান ফিল্ড ডেটা ওভাররাইট করুন', pageNotFound: 'পৃষ্ঠা পাওয়া যায়নি', password: 'পাসওয়ার্ড', + pasteField: 'ফিল্ড পেস্ট করুন', + pasteRow: 'সারি পেস্ট করুন', payloadSettings: 
'পেলোড সেটিংস', perPage: 'প্রতি পৃষ্ঠায়: {{limit}}', previous: 'পূর্ববর্তী', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 6ee79296d7..58e4d8e566 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -90,6 +90,9 @@ export const bnInTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'নিম্নলিখিত ক্ষেত্রটি অবৈধ:', followingFieldsInvalid_other: 'নিম্নলিখিত ক্ষেত্রগুলি অবৈধ:', incorrectCollection: 'ভুল সংগ্রহ', + insufficientClipboardPermissions: + 'ক্লিপবোর্ড অ্যাক্সেস অস্বীকৃত হয়েছে। অনুগ্রহ করে আপনার ক্লিপবোর্ড অনুমতিগুলি পরীক্ষা করুন।', + invalidClipboardData: 'অবৈধ ক্লিপবোর্ড ডেটা।', invalidFileType: 'অবৈধ ফাইল প্রকার', invalidFileTypeValue: 'অবৈধ ফাইল প্রকার: {{value}}', invalidRequestArgs: 'অনুরোধে অবৈধ আর্গুমেন্ট পাস করা হয়েছে: {{args}}', @@ -111,6 +114,7 @@ export const bnInTranslations: DefaultTranslationsObject = { problemUploadingFile: 'ফাইল আপলোড করতে একটি সমস্যা হয়েছে।', tokenInvalidOrExpired: 'টোকেন অবৈধ বা মেয়াদ শেষ হয়ে গেছে।', tokenNotProvided: 'টোকেন প্রদান করা হয়নি।', + unableToCopy: 'কপি করতে অক্ষম।', unableToDeleteCount: '{{total}} {{label}} এর মধ্যে {{count}} টি মুছতে অক্ষম।', unableToReindexCollection: '{{collection}} সংগ্রহ পুনরায় সূচিবদ্ধ করতে ত্রুটি হয়েছে। অপারেশন বাতিল করা হয়েছে।', @@ -242,7 +246,9 @@ export const bnInTranslations: DefaultTranslationsObject = { 'এটি বিদ্যমান সূচিগুলি সরিয়ে দেবে এবং সমস্ত সংগ্রহগুলির ডকুমেন্টগুলি পুনরায় সূচিবদ্ধ করবে।', copied: 'কপি করা হয়েছে', copy: 'কপি করুন', + copyField: 'ফিল্ড কপি করুন', copying: 'কপি করা হচ্ছে', + copyRow: 'সারি কপি করুন', copyWarning: 'আপনি {{label}} {{title}} এর জন্য {{to}} কে {{from}} দ্বারা ওভাররাইট করতে চলেছেন। আপনি কি নিশ্চিত?', create: 'তৈরি করুন', @@ -337,6 +343,8 @@ export const bnInTranslations: DefaultTranslationsObject = { overwriteExistingData: 'বিদ্যমান ফিল্ড ডেটা ওভাররাইট করুন', pageNotFound: 'পৃষ্ঠা পাওয়া যায়নি', password: 'পাসওয়ার্ড', + pasteField: 'ফিল্ড পেস্ট করুন', + pasteRow: 'সারি পেস্ট করুন', payloadSettings: 'পেলোড সেটিংস', perPage: 'প্রতি পৃষ্ঠায়: {{limit}}', previous: 'পূর্ববর্তী', diff --git a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index f724b604dd..adb6438367 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -91,6 +91,9 @@ export const caTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'El següent camp no és vàlid:', followingFieldsInvalid_other: 'Els següents camps no són vàlids:', incorrectCollection: 'Col·lecció incorrecta', + insufficientClipboardPermissions: + 'Accés al porta-retalls denegat. Comproveu els permisos del porta-retalls.', + invalidClipboardData: 'Dades del porta-retalls no vàlides.', invalidFileType: "Tipus d'arxiu no vàlid", invalidFileTypeValue: "Tipus d'arxiu no vàlid: {{value}}", invalidRequestArgs: 'Arguments no vàlids en la sol·licitud: {{args}}', @@ -112,6 +115,7 @@ export const caTranslations: DefaultTranslationsObject = { problemUploadingFile: "Hi ha hagut un problema mentre es carregava l'arxiu.", tokenInvalidOrExpired: 'El token és invàlid o ha caducat.', tokenNotProvided: "No s'ha proporcionat cap token.", + unableToCopy: 'No es pot copiar.', unableToDeleteCount: "No s'han pogut eliminar {{count}} de {{total}} {{label}}.", unableToReindexCollection: 'Error al reindexar la col·lecció {{collection}}. 
Operació cancel·lada.', @@ -241,7 +245,9 @@ export const caTranslations: DefaultTranslationsObject = { 'Aixo eliminarà els índexs existents i reindexarà els documents de totes les col·leccions.', copied: 'Copiat', copy: 'Copiar', + copyField: 'Copiar camp', copying: 'Copiant', + copyRow: 'Copiar fila', copyWarning: 'Estas a punt de sobreescriure {{to}} amb {{from}} per {{label}} {{title}}. Estas segur?', create: 'Crear', @@ -336,6 +342,8 @@ export const caTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Sobreescriu les dades existents', pageNotFound: 'Pàgina no trobada', password: 'Contrasenya', + pasteField: 'Enganxar camp', + pasteRow: 'Enganxar fila', payloadSettings: 'configuracio Payload', perPage: 'Per pagian: {{limit}}', previous: 'Previ', diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index 5cdd9a101f..da916e1aab 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -90,6 +90,9 @@ export const csTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Následující pole je neplatné:', followingFieldsInvalid_other: 'Následující pole jsou neplatná:', incorrectCollection: 'Nesprávná kolekce', + insufficientClipboardPermissions: + 'Přístup ke schránce byl odepřen. Zkontrolujte oprávnění ke schránce.', + invalidClipboardData: 'Neplatná data ve schránce.', invalidFileType: 'Neplatný typ souboru', invalidFileTypeValue: 'Neplatný typ souboru: {{value}}', invalidRequestArgs: 'Neplatné argumenty v požadavku: {{args}}', @@ -111,6 +114,7 @@ export const csTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Při nahrávání souboru došlo k chybě.', tokenInvalidOrExpired: 'Token je neplatný nebo vypršel.', tokenNotProvided: 'Token není poskytnut.', + unableToCopy: 'Nelze zkopírovat.', unableToDeleteCount: 'Nelze smazat {{count}} z {{total}} {{label}}', unableToReindexCollection: 'Chyba při přeindexování kolekce {{collection}}. Operace byla přerušena.', @@ -240,7 +244,9 @@ export const csTranslations: DefaultTranslationsObject = { 'Tímto budou odstraněny stávající indexy a dokumenty ve všech kolekcích budou znovu zaindexovány.', copied: 'Zkopírováno', copy: 'Kopírovat', + copyField: 'Kopírovat pole', copying: 'Kopírování', + copyRow: 'Kopírovat řádek', copyWarning: 'Chystáte se přepsat {{to}} s {{from}} pro {{label}} {{title}}. Jste si jistý?', create: 'Vytvořit', created: 'Vytvořeno', @@ -334,6 +340,8 @@ export const csTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Přepsat existující data pole', pageNotFound: 'Stránka nenalezena', password: 'Heslo', + pasteField: 'Vložit pole', + pasteRow: 'Vložit řádek', payloadSettings: 'Payload nastavení', perPage: 'Na stránku: {{limit}}', previous: 'Předchozí', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index a7d4d81af0..5ed7783b17 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -89,6 +89,9 @@ export const daTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Feltet er ugyldigt:', followingFieldsInvalid_other: 'Felterne er ugyldige:', incorrectCollection: 'Forkert samling', + insufficientClipboardPermissions: + 'Adgang til udklipsholder nægtet. 
Kontroller dine udklipsholderrettigheder.', + invalidClipboardData: 'Ugyldige data i udklipsholderen.', invalidFileType: 'Ugyldig filtype', invalidFileTypeValue: 'Ugyldig filtype: {{value}}', invalidRequestArgs: 'Ugyldige argumenter i anmodningen: {{args}}', @@ -110,6 +113,7 @@ export const daTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Der opstod et problem under uploadingen af filen.', tokenInvalidOrExpired: 'Token er enten ugyldig eller udløbet.', tokenNotProvided: 'Token ikke angivet.', + unableToCopy: 'Kan ikke kopiere.', unableToDeleteCount: 'Kunne ikke slette {{count}} mangler {{total}} {{label}}.', unableToReindexCollection: 'Fejl ved genindeksering af samling {{collection}}. Operationen blev afbrudt.', @@ -239,7 +243,9 @@ export const daTranslations: DefaultTranslationsObject = { 'Dette vil fjerne eksisterende indekser og genindeksere dokumenter i alle samlinger.', copied: 'Kopieret', copy: 'Kopier', + copyField: 'Kopiér felt', copying: 'Kopiering', + copyRow: 'Kopiér række', copyWarning: 'Du er lige ved at overskrive {{to}} med {{from}} for {{label}} {{title}}. Er du sikker?', create: 'Opret', @@ -333,6 +339,8 @@ export const daTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Overskriv eksisterende feltdata', pageNotFound: 'Siden blev ikke fundet', password: 'Adgangskode', + pasteField: 'Indsæt felt', + pasteRow: 'Indsæt række', payloadSettings: 'Payload-indstillinger', perPage: 'Per side: {{limit}}', previous: 'Tidligere', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index 5ca319cc80..876b445845 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -92,6 +92,9 @@ export const deTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Das folgende Feld ist nicht korrekt:', followingFieldsInvalid_other: 'Die folgenden Felder sind nicht korrekt:', incorrectCollection: 'Falsche Sammlung', + insufficientClipboardPermissions: + 'Zugriff auf die Zwischenablage verweigert. Bitte überprüfen Sie die Berechtigungen.', + invalidClipboardData: 'Ungültige Zwischenablagedaten.', invalidFileType: 'Ungültiger Datei-Typ', invalidFileTypeValue: 'Ungültiger Datei-Typ: {{value}}', invalidRequestArgs: 'Ungültige Argumente in der Anfrage: {{args}}', @@ -112,8 +115,9 @@ export const deTranslations: DefaultTranslationsObject = { previewing: 'Bei der Vorschau dieses Dokuments ist ein Fehler aufgetreten.', problemUploadingFile: 'Beim Hochladen der Datei ist ein Fehler aufgetreten.', tokenInvalidOrExpired: 'Token ist entweder ungültig oder abgelaufen.', - tokenNotProvided: 'Kein Token vorhanden.', - unableToDeleteCount: '{{count}} von {{total}} {{label}} konnten nicht gelöscht werden.', + tokenNotProvided: 'Token nicht bereitgestellt.', + unableToCopy: 'Kopieren nicht möglich.', + unableToDeleteCount: '{{count}} von {{total}} {{label}} konnte nicht gelöscht werden.', unableToReindexCollection: 'Fehler beim Neuindizieren der Sammlung {{collection}}. Vorgang abgebrochen.', unableToUpdateCount: '{{count}} von {{total}} {{label}} konnten nicht aktualisiert werden.', @@ -246,7 +250,9 @@ export const deTranslations: DefaultTranslationsObject = { 'Dies entfernt bestehende Indizes und indiziert die Dokumente in allen Sammlungen neu.', copied: 'Kopiert', copy: 'Kopieren', + copyField: 'Feld kopieren', copying: 'Kopieren', + copyRow: 'Zeile kopieren', copyWarning: 'Du bist dabei, {{to}} mit {{from}} für {{label}} {{title}} zu überschreiben. 
Bist du dir sicher?', create: 'Erstellen', @@ -341,6 +347,8 @@ export const deTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Vorhandene Eingaben überschreiben', pageNotFound: 'Seite nicht gefunden', password: 'Passwort', + pasteField: 'Feld einfügen', + pasteRow: 'Zeile einfügen', payloadSettings: 'Payload-Einstellungen', perPage: 'Pro Seite: {{limit}}', previous: 'Vorherige', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index a00695959c..9563bfe3a3 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -91,6 +91,9 @@ export const enTranslations = { followingFieldsInvalid_one: 'The following field is invalid:', followingFieldsInvalid_other: 'The following fields are invalid:', incorrectCollection: 'Incorrect Collection', + insufficientClipboardPermissions: + 'Clipboard access denied. Please check your clipboard permissions.', + invalidClipboardData: 'Invalid clipboard data.', invalidFileType: 'Invalid file type', invalidFileTypeValue: 'Invalid file type: {{value}}', invalidRequestArgs: 'Invalid arguments passed in request: {{args}}', @@ -112,6 +115,7 @@ export const enTranslations = { problemUploadingFile: 'There was a problem while uploading the file.', tokenInvalidOrExpired: 'Token is either invalid or has expired.', tokenNotProvided: 'Token not provided.', + unableToCopy: 'Unable to copy.', unableToDeleteCount: 'Unable to delete {{count}} out of {{total}} {{label}}.', unableToReindexCollection: 'Error reindexing collection {{collection}}. Operation aborted.', unableToUpdateCount: 'Unable to update {{count}} out of {{total}} {{label}}.', @@ -241,7 +245,9 @@ export const enTranslations = { 'This will remove existing indexes and reindex documents in all collections.', copied: 'Copied', copy: 'Copy', + copyField: 'Copy Field', copying: 'Copying', + copyRow: 'Copy Row', copyWarning: 'You are about to overwrite {{to}} with {{from}} for {{label}} {{title}}. Are you sure?', create: 'Create', @@ -336,6 +342,8 @@ export const enTranslations = { overwriteExistingData: 'Overwrite existing field data', pageNotFound: 'Page not found', password: 'Password', + pasteField: 'Paste Field', + pasteRow: 'Paste Row', payloadSettings: 'Payload Settings', perPage: 'Per Page: {{limit}}', previous: 'Previous', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index 41186f09f3..362fa355a7 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -90,6 +90,9 @@ export const esTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'El siguiente campo es inválido:', followingFieldsInvalid_other: 'Los siguientes campos son inválidos:', incorrectCollection: 'Colección Incorrecta', + insufficientClipboardPermissions: + 'Acceso al portapapeles denegado. 
Verifique los permisos del portapapeles.', + invalidClipboardData: 'Datos del portapapeles no válidos.', invalidFileType: 'Tipo de archivo inválido', invalidFileTypeValue: 'Tipo de archivo inválido: {{value}}', invalidRequestArgs: 'Argumentos inválidos en la solicitud: {{args}}', @@ -111,6 +114,7 @@ export const esTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Ocurrió un problema al subir el archivo.', tokenInvalidOrExpired: 'El token es inválido o ya expiró.', tokenNotProvided: 'Token no proporcionado.', + unableToCopy: 'No se puede copiar.', unableToDeleteCount: 'No se pudo eliminar {{count}} de {{total}} {{label}}.', unableToReindexCollection: 'Error al reindexar la colección {{collection}}. Operación abortada.', @@ -245,7 +249,9 @@ export const esTranslations: DefaultTranslationsObject = { 'Esto eliminará los índices existentes y volverá a indexar los documentos en todas las colecciones.', copied: 'Copiado', copy: 'Copiar', + copyField: 'Copiar campo', copying: 'Copiando', + copyRow: 'Copiar fila', copyWarning: 'Estás a punto de sobrescribir {{to}} con {{from}} para {{label}} {{title}}. ¿Estás seguro?', create: 'Crear', @@ -340,6 +346,8 @@ export const esTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Sobrescribir los datos existentes del campo', pageNotFound: 'Página no encontrada', password: 'Contraseña', + pasteField: 'Pegar campo', + pasteRow: 'Pegar fila', payloadSettings: 'Configuración de Payload', perPage: 'Por página: {{limit}}', previous: 'Anterior', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 25004901ca..4263e0c2a4 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -89,6 +89,9 @@ export const etTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Järgmine väli on vigane:', followingFieldsInvalid_other: 'Järgmised väljad on vigased:', incorrectCollection: 'Vale kollektsioon', + insufficientClipboardPermissions: + 'Lõikelaua juurdepääs keelatud. Palun kontrollige oma lõikelaua õigusi.', + invalidClipboardData: 'Kehtetu lõikelaua andmed.', invalidFileType: 'Vale failitüüp', invalidFileTypeValue: 'Vale failitüüp: {{value}}', invalidRequestArgs: 'Päringule edastati vigased argumendid: {{args}}', @@ -110,6 +113,7 @@ export const etTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Faili üleslaadimisel tekkis probleem.', tokenInvalidOrExpired: 'Võti on kas vigane või aegunud.', tokenNotProvided: 'Võtit ei esitatud.', + unableToCopy: 'Kopeerimine ebaõnnestus.', unableToDeleteCount: 'Ei õnnestunud kustutada {{count}} {{total}}-st {{label}}.', unableToReindexCollection: 'Viga kollektsiooni {{collection}} taasindekseerimisel. Toiming katkestatud.', @@ -239,7 +243,9 @@ export const etTranslations: DefaultTranslationsObject = { 'See eemaldab olemasolevad indeksid ja indekseerib uuesti dokumendid kõigis kollektsioonides.', copied: 'Kopeeritud', copy: 'Kopeeri', + copyField: 'Kopeeri väli', copying: 'Kopeerimine', + copyRow: 'Kopeeri rida', copyWarning: 'Olete üle kirjutamas {{to}} {{from}}-ga {{label}} {{title}} jaoks. 
Olete kindel?', create: 'Loo', created: 'Loodud', @@ -332,6 +338,8 @@ export const etTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Kirjuta olemasolevad välja andmed üle', pageNotFound: 'Lehte ei leitud', password: 'Parool', + pasteField: 'Kleebi väli', + pasteRow: 'Kleebi rida', payloadSettings: 'Payload seaded', perPage: 'Lehel: {{limit}}', previous: 'Eelmine', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index 0b7aaf46c5..d2141cb6df 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -89,6 +89,9 @@ export const faTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'کادر زیر نامعتبر است:', followingFieldsInvalid_other: 'کادرهای زیر نامعتبر هستند:', incorrectCollection: 'مجموعه نادرست', + insufficientClipboardPermissions: + 'دسترسی به کلیپ‌بورد رد شد. لطفاً دسترسی‌های کلیپ‌بورد خود را بررسی کنید.', + invalidClipboardData: 'داده‌های نامعتبر در کلیپ‌بورد.', invalidFileType: 'نوع رسانه نامعتبر است', invalidFileTypeValue: 'نوع رسانه نامعتبر: {{value}}', invalidRequestArgs: 'آرگومان‌های نامعتبر در درخواست ارسال شدند: {{args}}', @@ -110,6 +113,7 @@ export const faTranslations: DefaultTranslationsObject = { problemUploadingFile: 'هنگام بارگذاری سند خطایی رخ داد.', tokenInvalidOrExpired: 'ژتون شما نامعتبر یا منقضی شده است.', tokenNotProvided: 'توکن ارائه نشده است.', + unableToCopy: 'کپی امکان‌پذیر نیست.', unableToDeleteCount: 'نمی‌توان {{count}} از {{total}} {{label}} را حذف کرد.', unableToReindexCollection: 'خطا در بازنمایه‌سازی مجموعه {{collection}}. عملیات متوقف شد.', unableToUpdateCount: 'امکان به روز رسانی {{count}} خارج از {{total}} {{label}} وجود ندارد.', @@ -239,7 +243,9 @@ export const faTranslations: DefaultTranslationsObject = { 'این کار ایندکس‌های موجود را حذف کرده و اسناد را در همه مجموعه‌ها بازایندکس می‌کند.', copied: 'رونوشت شده', copy: 'رونوشت', + copyField: 'کپی فیلد', copying: 'کپی کردن', + copyRow: 'کپی ردیف', copyWarning: 'شما در حال استفاده از {{from}} به جای {{to}} برای {{label}} {{title}} هستید. آیا مطمئن هستید؟', create: 'ساختن', @@ -334,6 +340,8 @@ export const faTranslations: DefaultTranslationsObject = { overwriteExistingData: 'بازنویسی داده‌های فیلد موجود', pageNotFound: 'برگه یافت نشد', password: 'گذرواژه', + pasteField: 'چسباندن فیلد', + pasteRow: 'چسباندن ردیف', payloadSettings: 'تنظیمات پی‌لود', perPage: 'هر برگه: {{limit}}', previous: 'قبلی', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index 9b3cd34d7d..142a3c8acb 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -92,6 +92,9 @@ export const frTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Le champ suivant n’est pas valide :', followingFieldsInvalid_other: 'Les champs suivants ne sont pas valides :', incorrectCollection: 'Collection incorrecte', + insufficientClipboardPermissions: + 'Accès au presse-papiers refusé. 
Veuillez vérifier vos autorisations pour le presse-papiers.', + invalidClipboardData: 'Données invalides dans le presse-papiers.', invalidFileType: 'Type de fichier invalide', invalidFileTypeValue: 'Type de fichier invalide : {{value}}', invalidRequestArgs: 'Arguments non valides dans la requête : {{args}}', @@ -114,6 +117,7 @@ export const frTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Il y a eu un problème lors du téléversement du fichier.', tokenInvalidOrExpired: 'Le jeton n’est soit pas valide ou a expiré.', tokenNotProvided: 'Jeton non fourni.', + unableToCopy: 'Impossible de copier.', unableToDeleteCount: 'Impossible de supprimer {{count}} sur {{total}} {{label}}.', unableToReindexCollection: 'Erreur lors de la réindexation de la collection {{collection}}. Opération annulée.', @@ -248,7 +252,9 @@ export const frTranslations: DefaultTranslationsObject = { 'Cela supprimera les index existants et réindexera les documents dans toutes les collections.', copied: 'Copié', copy: 'Copie', + copyField: 'Copier le champ', copying: 'Copie', + copyRow: 'Copier la ligne', copyWarning: "Vous êtes sur le point d'écraser {{to}} avec {{from}} pour {{label}} {{title}}. Êtes-vous sûr ?", create: 'Créer', @@ -343,6 +349,8 @@ export const frTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Écraser les données existantes du champ', pageNotFound: 'Page non trouvée', password: 'Mot de passe', + pasteField: 'Coller le champ', + pasteRow: 'Coller la ligne', payloadSettings: 'Paramètres de Payload', perPage: 'Par Page: {{limit}}', previous: 'Précédent', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index 5db6d8be3f..595c64adee 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -88,6 +88,8 @@ export const heTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'השדה הבא אינו תקין:', followingFieldsInvalid_other: 'השדות הבאים אינם תקינים:', incorrectCollection: 'אוסף שגוי', + insufficientClipboardPermissions: 'הגישה ללוח הרחב נדחתה. אנא בדוק את הרשאות הלוח הרחב שלך.', + invalidClipboardData: 'נתוני לוח רחב לא חוקיים.', invalidFileType: 'סוג קובץ לא תקין', invalidFileTypeValue: 'סוג קובץ לא תקין: {{value}}', invalidRequestArgs: 'ארגומנטים לא חוקיים הועברו בבקשה: {{args}}', @@ -109,6 +111,7 @@ export const heTranslations: DefaultTranslationsObject = { problemUploadingFile: 'אירעה בעיה בזמן העלאת הקובץ.', tokenInvalidOrExpired: 'הטוקן אינו תקין או שפג תוקפו.', tokenNotProvided: 'טוקן לא סופק.', + unableToCopy: 'לא ניתן להעתיק.', unableToDeleteCount: 'לא ניתן למחוק {{count}} מתוך {{total}} {{label}}.', unableToReindexCollection: 'שגיאה בהחזרת אינדקס של אוסף {{collection}}. הפעולה בוטלה.', unableToUpdateCount: 'לא ניתן לעדכן {{count}} מתוך {{total}} {{label}}.', @@ -234,7 +237,9 @@ export const heTranslations: DefaultTranslationsObject = { confirmReindexDescriptionAll: 'זה יסיר את האינדקסים הקיימים ויחזיר אינדקס למסמכים בכל האוספים.', copied: 'הועתק', copy: 'העתק', + copyField: 'העתק שדה', copying: 'העתקה', + copyRow: 'העתק שורה', copyWarning: 'אתה עומד לדרוס את {{to}} באמצעות {{from}} עבור {{label}} {{title}}. 
האם אתה בטוח?', create: 'יצירה', @@ -327,6 +332,8 @@ export const heTranslations: DefaultTranslationsObject = { overwriteExistingData: 'דרוס את נתוני השדה הקיימים', pageNotFound: 'הדף לא נמצא', password: 'סיסמה', + pasteField: 'הדבק שדה', + pasteRow: 'הדבק שורה', payloadSettings: 'הגדרות מערכת Payload', perPage: '{{limit}} בכל עמוד', previous: 'קודם', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index f9460d15e5..8e38eeccf3 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -91,6 +91,9 @@ export const hrTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Ovo polje je neispravno:', followingFieldsInvalid_other: 'Ova polja su neispravna:', incorrectCollection: 'Neispravna kolekcija', + insufficientClipboardPermissions: + 'Pristup međuspremniku odbijen. Provjerite svoja dopuštenja za međuspremnik.', + invalidClipboardData: 'Nevažeći podaci u međuspremniku.', invalidFileType: 'Neispravan tip datoteke', invalidFileTypeValue: 'Neispravan tip datoteke: {{value}}', invalidRequestArgs: 'Nevažeći argumenti u zahtjevu: {{args}}', @@ -112,6 +115,7 @@ export const hrTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Došlo je do problema pri učitavanju datoteke.', tokenInvalidOrExpired: 'Token je neispravan ili je istekao.', tokenNotProvided: 'Token nije pružen.', + unableToCopy: 'Nije moguće kopirati.', unableToDeleteCount: 'Nije moguće izbrisati {{count}} od {{total}} {{label}}.', unableToReindexCollection: 'Pogreška pri ponovnom indeksiranju kolekcije {{collection}}. Operacija je prekinuta.', @@ -241,7 +245,9 @@ export const hrTranslations: DefaultTranslationsObject = { 'Ovo će ukloniti postojeće indekse i ponovno indeksirati dokumente u svim kolekcijama.', copied: 'Kopirano', copy: 'Kopiraj', + copyField: 'Kopiraj polje', copying: 'Kopiranje', + copyRow: 'Kopiraj redak', copyWarning: 'Na rubu ste prepisivanja {{to}} s {{from}} za {{label}} {{title}}. Jeste li sigurni?', create: 'Izradi', @@ -336,6 +342,8 @@ export const hrTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Prepišite postojeće podatke u polju', pageNotFound: 'Stranica nije pronađena', password: 'Lozinka', + pasteField: 'Zalijepi polje', + pasteRow: 'Zalijepi redak', payloadSettings: 'Payload postavke', perPage: 'Po stranici: {{limit}}', previous: 'Prethodni', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index f398316926..1bb4386db6 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -92,6 +92,9 @@ export const huTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'A következő mező érvénytelen:', followingFieldsInvalid_other: 'A következő mezők érvénytelenek:', incorrectCollection: 'Helytelen gyűjtemény', + insufficientClipboardPermissions: + 'A vágólaphoz való hozzáférés elutasítva. 
Kérjük, ellenőrizze a vágólap engedélyeit.', + invalidClipboardData: 'Érvénytelen vágólap adat.', invalidFileType: 'Érvénytelen fájltípus', invalidFileTypeValue: 'Érvénytelen fájltípus: {{value}}', invalidRequestArgs: 'Érvénytelen argumentumok a kérésben: {{args}}', @@ -113,6 +116,7 @@ export const huTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Hiba történt a fájl feltöltése közben.', tokenInvalidOrExpired: 'A token érvénytelen vagy lejárt.', tokenNotProvided: 'Token nem biztosított.', + unableToCopy: 'Másolás nem lehetséges.', unableToDeleteCount: 'Nem sikerült törölni {{count}}/{{total}} {{label}}.', unableToReindexCollection: 'Hiba a(z) {{collection}} gyűjtemény újraindexelésekor. A művelet megszakítva.', @@ -243,7 +247,9 @@ export const huTranslations: DefaultTranslationsObject = { 'Ez eltávolítja a meglévő indexeket, és újraindexálja a dokumentumokat az összes gyűjteményben.', copied: 'Másolva', copy: 'Másolás', + copyField: 'Mező másolása', copying: 'Másolás', + copyRow: 'Sor másolása', copyWarning: 'Ön azzal készül felülírni {{to}} -t {{from}} -mal a {{label}} {{title}} számára. Biztos benne?', create: 'Létrehozás', @@ -337,6 +343,8 @@ export const huTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Írja felül a meglévő mezőadatokat', pageNotFound: 'Az oldal nem található', password: 'Jelszó', + pasteField: 'Mező beillesztése', + pasteRow: 'Sor beillesztése', payloadSettings: 'Payload beállítások', perPage: 'Oldalanként: {{limit}}', previous: 'Előző', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 149d84490c..17a97ca9d1 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -90,6 +90,9 @@ export const hyTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Հետևյալ դաշտն անվավեր է։', followingFieldsInvalid_other: 'Հետևյալ դաշտերն անվավեր են։', incorrectCollection: 'Սխալ հավաքածու', + insufficientClipboardPermissions: + 'Սեղմատախտակին հասանելիությունը մերժվել է։ Խնդրում ենք ստուգել ձեր սեղմատախտակի թույլտվությունները։', + invalidClipboardData: 'Անվավեր սեղմատախտակի տվյալներ։', invalidFileType: 'Անվավեր ֆայլի տեսակ', invalidFileTypeValue: 'Անվավեր ֆայլի տեսակ՝ {{value}}', invalidRequestArgs: 'Հայտում փոխանցված անվավեր արգումենտներ՝ {{args}}', @@ -111,6 +114,7 @@ export const hyTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Ֆայլը վերբեռնելու ժամանակ խնդիր է առաջացել։', tokenInvalidOrExpired: 'Թոքենն անվավեր է կամ ժամկետանց։', tokenNotProvided: 'Թոքենը տրամադրված չէ։', + unableToCopy: 'Չհաջողվեց պատճենել։', unableToDeleteCount: 'Հնարավոր չէ ջնջել {{count}}-ը {{total}} {{label}}-ից։', unableToReindexCollection: 'Հավաքածու {{collection}}-ը վերաինդեքսավորելու սխալ։ Գործողությունն ընդհատվել է։', @@ -241,7 +245,9 @@ export const hyTranslations: DefaultTranslationsObject = { 'Սա կհեռացնի գոյություն ունեցող ինդեքսները և կվերաինդեքսավորի փաստաթղթերը բոլոր հավաքածուներում։', copied: 'Պատճենված', copy: 'Պատճենել', + copyField: 'Պատճենել դաշտը', copying: 'Պատճենվում է', + copyRow: 'Պատճենել տողը', copyWarning: 'Դուք պատրաստվում եք վերագրել {{to}}-ը {{from}}-ով {{label}} {{title}}-ի համար։ Համոզվա՞ծ եք։', create: 'Ստեղծել', @@ -336,6 +342,8 @@ export const hyTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Վերագրել գոյություն ունեցող դաշտի տվյալները', pageNotFound: 'Էջը չի գտնվել', password: 'Գաղտնաբառ', + pasteField: 'Տեղադրել դաշտը', + pasteRow: 'Տեղադրել տողը', payloadSettings: 'Payload-ի 
կարգավորումներ', perPage: 'Էջում՝ {{limit}}', previous: 'Նախորդ', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index e8f65f1b2c..4edcbd5ff1 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -91,6 +91,9 @@ export const itTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Il seguente campo non è valido:', followingFieldsInvalid_other: 'I seguenti campi non sono validi:', incorrectCollection: 'Collezione non corretta', + insufficientClipboardPermissions: + 'Accesso alla clipboard negato. Verifica i permessi della clipboard.', + invalidClipboardData: 'Dati della clipboard non validi.', invalidFileType: 'Tipo di file non valido', invalidFileTypeValue: 'Tipo di file non valido: {{value}}', invalidRequestArgs: 'Argomenti non validi nella richiesta: {{args}}', @@ -113,6 +116,7 @@ export const itTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Si è verificato un problema durante il caricamento del file.', tokenInvalidOrExpired: 'Il token non è valido o è scaduto.', tokenNotProvided: 'Token non fornito.', + unableToCopy: 'Impossibile copiare.', unableToDeleteCount: 'Impossibile eliminare {{count}} su {{total}} {{label}}.', unableToReindexCollection: 'Errore durante la reindicizzazione della collezione {{collection}}. Operazione annullata.', @@ -245,7 +249,9 @@ export const itTranslations: DefaultTranslationsObject = { "Questo rimuoverà gli indici esistenti e rifarà l'indice dei documenti in tutte le collezioni.", copied: 'Copiato', copy: 'Copia', + copyField: 'Copia campo', copying: 'Copia', + copyRow: 'Copia riga', copyWarning: 'Stai per sovrascrivere {{to}} con {{from}} per {{label}} {{title}}. Sei sicuro?', create: 'Crea', created: 'Data di creazione', @@ -338,6 +344,8 @@ export const itTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Sovrascrivi i dati del campo esistente', pageNotFound: 'Pagina non trovata', password: 'Password', + pasteField: 'Incolla campo', + pasteRow: 'Incolla riga', payloadSettings: 'Impostazioni di Payload', perPage: 'Per Pagina: {{limit}}', previous: 'Precedente', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 9de2d07a17..30f97016fd 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -91,6 +91,9 @@ export const jaTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: '次のフィールドは無効です:', followingFieldsInvalid_other: '次のフィールドは無効です:', incorrectCollection: '不正なコレクション', + insufficientClipboardPermissions: + 'クリップボードへのアクセスが拒否されました。クリップボードの権限を確認してください。', + invalidClipboardData: '無効なクリップボードデータ。', invalidFileType: '無効なファイル形式', invalidFileTypeValue: '無効なファイル形式: {{value}}', invalidRequestArgs: 'リクエストに無効な引数が渡されました: {{args}}', @@ -112,6 +115,7 @@ export const jaTranslations: DefaultTranslationsObject = { problemUploadingFile: 'ファイルのアップロード中に問題が発生しました。', tokenInvalidOrExpired: 'トークンが無効、または、有効期限が切れています。', tokenNotProvided: 'トークンが提供されていません。', + unableToCopy: 'コピーできません。', unableToDeleteCount: '{{total}} {{label}} から {{count}} を削除できません。', unableToReindexCollection: 'コレクション {{collection}} の再インデックス中にエラーが発生しました。操作は中止されました。', @@ -241,7 +245,9 @@ export const jaTranslations: DefaultTranslationsObject = { 'これにより既存のインデックスが削除され、すべてのコレクション内のドキュメントが再インデックスされます。', copied: 'コピーしました', copy: 'コピー', + copyField: 'フィールドをコピー', copying: 'コピーする', + copyRow: '行をコピー', copyWarning: 'あなたは{{label}} 
{{title}}の{{to}}を{{from}}で上書きしようとしています。よろしいですか?', create: '作成', @@ -336,6 +342,8 @@ export const jaTranslations: DefaultTranslationsObject = { overwriteExistingData: '既存のフィールドデータを上書きする', pageNotFound: 'ページが見つかりません', password: 'パスワード', + pasteField: 'フィールドを貼り付け', + pasteRow: '行を貼り付け', payloadSettings: 'Payload 設定', perPage: '表示件数: {{limit}}', previous: '前の', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index 8d33c629fc..39f967fc01 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -90,6 +90,9 @@ export const koTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: '다음 입력란이 유효하지 않습니다:', followingFieldsInvalid_other: '다음 입력란이 유효하지 않습니다:', incorrectCollection: '잘못된 컬렉션', + insufficientClipboardPermissions: + '클립보드 접근이 거부되었습니다. 클립보드 권한을 확인하십시오.', + invalidClipboardData: '유효하지 않은 클립보드 데이터입니다.', invalidFileType: '잘못된 파일 형식', invalidFileTypeValue: '잘못된 파일 형식: {{value}}', invalidRequestArgs: '요청에 잘못된 인수가 전달되었습니다: {{args}}', @@ -111,6 +114,7 @@ export const koTranslations: DefaultTranslationsObject = { problemUploadingFile: '파일 업로드 중에 문제가 발생했습니다.', tokenInvalidOrExpired: '토큰이 유효하지 않거나 만료되었습니다.', tokenNotProvided: '토큰이 제공되지 않았습니다.', + unableToCopy: '복사할 수 없습니다.', unableToDeleteCount: '총 {{total}}개 중 {{count}}개의 {{label}}을(를) 삭제할 수 없습니다.', unableToReindexCollection: '{{collection}} 컬렉션의 재인덱싱 중 오류가 발생했습니다. 작업이 중단되었습니다.', @@ -239,7 +243,9 @@ export const koTranslations: DefaultTranslationsObject = { '이 작업은 기존 인덱스를 삭제하고 모든 컬렉션 내의 문서를 다시 인덱싱합니다.', copied: '복사됨', copy: '복사', + copyField: '필드 복사', copying: '복사하기', + copyRow: '행 복사', copyWarning: '{{label}} {{title}}에 대해 {{from}}으로 {{to}}를 덮어쓰려고 합니다. 확실합니까?', create: '생성', created: '생성됨', @@ -333,6 +339,8 @@ export const koTranslations: DefaultTranslationsObject = { overwriteExistingData: '기존 필드 데이터 덮어쓰기', pageNotFound: '페이지를 찾을 수 없음', password: '비밀번호', + pasteField: '필드 붙여넣기', + pasteRow: '행 붙여넣기', payloadSettings: 'Payload 설정', perPage: '페이지당 개수: {{limit}}', previous: '이전', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 1669dcad56..68310295f7 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -91,6 +91,9 @@ export const ltTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Šis laukas yra netinkamas:', followingFieldsInvalid_other: 'Šie laukai yra neteisingi:', incorrectCollection: 'Neteisinga kolekcija', + insufficientClipboardPermissions: + 'Prieiga prie iškarpinės atmesta. Patikrinkite savo iškarpinės teises.', + invalidClipboardData: 'Neteisingi iškarpinės duomenys.', invalidFileType: 'Netinkamas failo tipas', invalidFileTypeValue: 'Neteisingas failo tipas: {{value}}', invalidRequestArgs: 'Netinkami argumentai perduoti užklausoje: {{args}}', @@ -112,6 +115,7 @@ export const ltTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Failo įkelti nepavyko dėl problemos.', tokenInvalidOrExpired: 'Žetonas yra neteisingas arba jo galiojimas pasibaigė.', tokenNotProvided: 'Žetonas nesuteiktas.', + unableToCopy: 'Nepavyko nukopijuoti.', unableToDeleteCount: 'Negalima ištrinti {{count}} iš {{total}} {{label}}.', unableToReindexCollection: 'Klaida perindeksuojant rinkinį {{collection}}. 
Operacija nutraukta.', @@ -243,7 +247,9 @@ export const ltTranslations: DefaultTranslationsObject = { 'Tai pašalins esamas indeksus ir perindeksuos dokumentus visose kolekcijose.', copied: 'Nukopijuota', copy: 'Kopijuoti', + copyField: 'Kopijuoti lauką', copying: 'Kopijavimas', + copyRow: 'Kopijuoti eilutę', copyWarning: 'Jūs ketinate perrašyti {{to}} į {{from}} šildymui {{label}} {{title}}. Ar esate tikri?', create: 'Sukurti', @@ -338,6 +344,8 @@ export const ltTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Perrašyti esamus lauko duomenis', pageNotFound: 'Puslapis nerastas', password: 'Slaptažodis', + pasteField: 'Įklijuoti lauką', + pasteRow: 'Įklijuoti eilutę', payloadSettings: 'Payload nustatymai', perPage: 'Puslapyje: {{limit}}', previous: 'Ankstesnis', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index 4cf52aad6b..051e2d34ac 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -90,6 +90,9 @@ export const lvTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Šis lauks nav derīgs:', followingFieldsInvalid_other: 'Šie lauki nav derīgi:', incorrectCollection: 'Nepareiza kolekcija', + insufficientClipboardPermissions: + 'Piekļuve starpliktuvei liegta. Lūdzu, pārbaudiet savas starpliktuves atļaujas.', + invalidClipboardData: 'Nederīgi starpliktuves dati.', invalidFileType: 'Nederīgs faila tips', invalidFileTypeValue: 'Nederīgs faila tips: {{value}}', invalidRequestArgs: 'Pieprasījumā nodoti nederīgi argumenti: {{args}}', @@ -111,6 +114,7 @@ export const lvTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Radās problēma, augšupielādējot failu.', tokenInvalidOrExpired: 'Tokens ir nederīgs vai beidzies.', tokenNotProvided: 'Tokens nav norādīts.', + unableToCopy: 'Neizdevās kopēt.', unableToDeleteCount: 'Neizdevās izdzēst {{count}} no {{total}} {{label}}.', unableToReindexCollection: 'Radās kļūda, pārindeksējot kolekciju {{collection}}. Operācija pārtraukta.', @@ -240,7 +244,9 @@ export const lvTranslations: DefaultTranslationsObject = { 'Tas noņems esošos indeksus un pārindeksēs dokumentus visās kolekcijās.', copied: 'Nokopēts', copy: 'Kopēt', + copyField: 'Kopēt lauku', copying: 'Kopē...', + copyRow: 'Kopēt rindu', copyWarning: 'Jūs grasāties pārrakstīt {{to}} ar {{from}} priekš {{label}} {{title}}. 
Vai esat pārliecināts?', create: 'Izveidot', @@ -335,6 +341,8 @@ export const lvTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Pārrakstīt esošos datus', pageNotFound: 'Lapa nav atrasta', password: 'Parole', + pasteField: 'Ielīmēt lauku', + pasteRow: 'Ielīmēt rindu', payloadSettings: 'Payload iestatījumi', perPage: 'Lapas ieraksti: {{limit}}', previous: 'Iepriekšējais', diff --git a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index 857cd3f2cb..e4575be8c4 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -90,6 +90,9 @@ export const myTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'ထည့်သွင်းထားသော အချက်အလက်သည် မမှန်ကန်ပါ။', followingFieldsInvalid_other: 'ထည့်သွင်းထားသော အချက်အလက်များသည် မမှန်ကန်ပါ။', incorrectCollection: 'မှားယွင်းသော စုစည်းမှု', + insufficientClipboardPermissions: + 'ကလစ်ဘုတ်ဝင်ရောက်ခွင့်ပြုချက်မရှိပါ။ ကလစ်ဘုတ်ပြုချက်များကိုစစ်ဆေးပါ။', + invalidClipboardData: 'မမှန်ကန်သောကလစ်ဘုတ်ဒေတာ။', invalidFileType: 'မမှန်ကန်သော ဖိုင်အမျိုးအစား', invalidFileTypeValue: 'မမှန်ကန်သော ဖိုင်အမျိုးအစား: {{value}}', invalidRequestArgs: 'တောင်းဆိုမှုတွင် မှားယွင်းသော အကြောင်းပြချက်များ ပေးပို့ထားသည်: {{args}}', @@ -111,6 +114,7 @@ export const myTranslations: DefaultTranslationsObject = { problemUploadingFile: 'ဖိုင်ကို အပ်လုဒ်တင်ရာတွင် ပြဿနာရှိနေသည်။', tokenInvalidOrExpired: 'တိုကင်သည် မမှန်ကန်ပါ သို့မဟုတ် သက်တမ်းကုန်သွားပါပြီ။', tokenNotProvided: 'Token မပေးထားပါ။', + unableToCopy: 'ကူးရန်မဖြစ်နိုင်ပါ။', unableToDeleteCount: '{{total}} {{label}} မှ {{count}} ကို ဖျက်၍မရပါ။', unableToReindexCollection: '{{collection}} စုစည်းမှုကို ပြန်လည်အညွှန်းပြုလုပ်ခြင်း အမှားရှိနေသည်။ လုပ်ဆောင်မှုကို ဖျက်သိမ်းခဲ့သည်။', @@ -243,7 +247,9 @@ export const myTranslations: DefaultTranslationsObject = { 'ဤသည်သည် ရှိပြီးသား အညွှန်းများကို ဖျက်ပစ်ပြီး အားလုံးသော ကော်လက်ရှင်းများတွင် စာရွက်များကို ထပ်လိပ်ပါလိမ့်မည်။', copied: 'ကူးယူပြီးပြီ။', copy: 'ကူးယူမည်။', + copyField: 'ကွက်လပ်ကိုကူးပါ', copying: 'ကူးယူခြင်း', + copyRow: 'တန်းကိုကူးပါ', copyWarning: 'Anda akan menulis ganti {{to}} dengan {{from}} untuk {{label}} {{title}}. Adakah anda pasti?', create: 'ဖန်တီးမည်။', @@ -338,6 +344,8 @@ export const myTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Menulis semula data bidang yang sedia ada', pageNotFound: 'ရောက်ရှိနေသော စာမျက်နှာသည် မရှိပါ။', password: 'စကားဝှက်', + pasteField: 'ကွက်လပ်ကိုတင်ပါ', + pasteRow: 'တန်းကိုတင်ပါ', payloadSettings: 'ရွေးချယ်စရာများ', perPage: 'စာမျက်နှာ အလိုက်: {{limit}}', previous: 'ယခင်', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index 413514e28d..f2f5a2b438 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -90,6 +90,9 @@ export const nbTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Følgende felt er ugyldig:', followingFieldsInvalid_other: 'Følgende felter er ugyldige:', incorrectCollection: 'Ugyldig samling', + insufficientClipboardPermissions: + 'Tilgang til utklippstavlen ble nektet. 
Sjekk utklippstavle-tillatelsene dine.', + invalidClipboardData: 'Ugyldige utklippstavldata.', invalidFileType: 'Ugyldig filtype', invalidFileTypeValue: 'Ugyldig filtype: {{value}}', invalidRequestArgs: 'Ugyldige argumenter i forespørselen: {{args}}', @@ -111,6 +114,7 @@ export const nbTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Det oppstod et problem under opplasting av filen.', tokenInvalidOrExpired: 'Token er enten ugyldig eller har utløpt.', tokenNotProvided: 'Token ikke angitt.', + unableToCopy: 'Kan ikke kopiere.', unableToDeleteCount: 'Kan ikke slette {{count}} av {{total}} {{label}}.', unableToReindexCollection: 'Feil ved reindeksering av samlingen {{collection}}. Operasjonen ble avbrutt.', @@ -241,7 +245,9 @@ export const nbTranslations: DefaultTranslationsObject = { 'Dette vil fjerne eksisterende indekser og reindeksere dokumentene i alle samlinger.', copied: 'Kopiert', copy: 'Kopiér', + copyField: 'Kopier felt', copying: 'Kopiering', + copyRow: 'Kopier rad', copyWarning: 'Du er i ferd med å overskrive {{to}} med {{from}} for {{label}} {{title}}. Er du sikker?', create: 'Opprett', @@ -336,6 +342,8 @@ export const nbTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Overskriv eksisterende feltdata', pageNotFound: 'Siden ble ikke funnet', password: 'Passord', + pasteField: 'Lim inn felt', + pasteRow: 'Lim inn rad', payloadSettings: 'Payload-innstillinger', perPage: 'Per side: {{limit}}', previous: 'Forrige', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index f629798e0a..dee4ae3886 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -91,6 +91,9 @@ export const nlTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Het volgende veld is ongeldig:', followingFieldsInvalid_other: 'De volgende velden zijn ongeldig:', incorrectCollection: 'Ongeldige collectie', + insufficientClipboardPermissions: + 'Toegang tot het klembord geweigerd. Controleer je klembordmachtigingen.', + invalidClipboardData: 'Ongeldige klembordgegevens.', invalidFileType: 'Ongeldig bestandstype', invalidFileTypeValue: 'Ongeldig bestandstype: {{value}}', invalidRequestArgs: 'Ongeldige argumenten in verzoek: {{args}}', @@ -112,6 +115,7 @@ export const nlTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Er was een probleem bij het uploaden van het bestand.', tokenInvalidOrExpired: 'Token is ongeldig of verlopen.', tokenNotProvided: 'Token niet verstrekt.', + unableToCopy: 'Kan niet kopiëren.', unableToDeleteCount: 'Kan {{count}} van {{total}} {{label}} niet verwijderen.', unableToReindexCollection: 'Fout bij het herindexeren van de collectie {{collection}}. De operatie is afgebroken.', @@ -244,7 +248,9 @@ export const nlTranslations: DefaultTranslationsObject = { 'Dit verwijdert bestaande indexen en indexeert de documenten in alle collecties opnieuw.', copied: 'Gekopieerd', copy: 'Kopiëren', + copyField: 'Veld kopiëren', copying: 'Kopiëren', + copyRow: 'Rij kopiëren', copyWarning: 'U staat op het punt om {{to}} te overschrijven met {{from}} voor {{label}} {{title}}. 
Bent u zeker?', create: 'Aanmaken', @@ -339,6 +345,8 @@ export const nlTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Overschrijf bestaande veldgegevens', pageNotFound: 'Pagina niet gevonden', password: 'Wachtwoord', + pasteField: 'Veld plakken', + pasteRow: 'Rij plakken', payloadSettings: 'Payload Instellingen', perPage: 'Per pagina: {{limit}}', previous: 'Vorige', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index 2fe46c666a..a4ff89c11f 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -90,6 +90,8 @@ export const plTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'To pole jest nieprawidłowe:', followingFieldsInvalid_other: 'Następujące pola są nieprawidłowe:', incorrectCollection: 'Nieprawidłowa kolekcja', + insufficientClipboardPermissions: 'Odmowa dostępu do schowka. Sprawdź uprawnienia schowka.', + invalidClipboardData: 'Nieprawidłowe dane schowka.', invalidFileType: 'Nieprawidłowy typ pliku', invalidFileTypeValue: 'Nieprawidłowy typ pliku: {{value}}', invalidRequestArgs: 'Nieprawidłowe argumenty w żądaniu: {{args}}', @@ -111,6 +113,7 @@ export const plTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Wystąpił problem podczas przesyłania pliku.', tokenInvalidOrExpired: 'Token jest nieprawidłowy lub wygasł.', tokenNotProvided: 'Token nie został dostarczony.', + unableToCopy: 'Nie można skopiować.', unableToDeleteCount: 'Nie można usunąć {{count}} z {{total}} {{label}}.', unableToReindexCollection: 'Błąd podczas ponownego indeksowania kolekcji {{collection}}. Operacja została przerwana.', @@ -241,7 +244,9 @@ export const plTranslations: DefaultTranslationsObject = { 'Spowoduje to usunięcie istniejących indeksów i ponowne zaindeksowanie dokumentów we wszystkich kolekcjach.', copied: 'Skopiowano', copy: 'Skopiuj', + copyField: 'Kopiuj pole', copying: 'Kopiowanie', + copyRow: 'Kopiuj wiersz', copyWarning: 'Zamierzasz nadpisać {{to}} na {{from}} dla {{label}} {{title}}. Czy jesteś pewny?', create: 'Stwórz', @@ -336,6 +341,8 @@ export const plTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Nadpisz istniejące dane pola', pageNotFound: 'Strona nie znaleziona', password: 'Hasło', + pasteField: 'Wklej pole', + pasteRow: 'Wklej wiersz', payloadSettings: 'Ustawienia Payload', perPage: 'Na stronę: {{limit}}', previous: 'Poprzedni', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index eeb889a213..b3f1f366b7 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -91,6 +91,9 @@ export const ptTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'O campo a seguir está inválido:', followingFieldsInvalid_other: 'Os campos a seguir estão inválidos:', incorrectCollection: 'Coleção Incorreta', + insufficientClipboardPermissions: + 'Acesso à área de transferência negado. 
Verifique suas permissões da área de transferência.', + invalidClipboardData: 'Dados inválidos na área de transferência.', invalidFileType: 'Tipo de arquivo inválido', invalidFileTypeValue: 'Tipo de arquivo inválido: {{value}}', invalidRequestArgs: 'Argumentos inválidos passados na solicitação: {{args}}', @@ -112,6 +115,7 @@ export const ptTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Ocorreu um problema ao carregar o arquivo.', tokenInvalidOrExpired: 'Token expirado ou inválido.', tokenNotProvided: 'Token não fornecido.', + unableToCopy: 'Não é possível copiar.', unableToDeleteCount: 'Não é possível excluir {{count}} de {{total}} {{label}}.', unableToReindexCollection: 'Erro ao reindexar a coleção {{collection}}. Operação abortada.', unableToUpdateCount: 'Não foi possível atualizar {{count}} de {{total}} {{label}}.', @@ -241,7 +245,9 @@ export const ptTranslations: DefaultTranslationsObject = { 'Isso removerá os índices existentes e reindexará os documentos em todas as coleções.', copied: 'Copiado', copy: 'Copiar', + copyField: 'Copiar campo', copying: 'Copiando', + copyRow: 'Copiar linha', copyWarning: 'Você está prestes a sobrescrever {{to}} com {{from}} para {{label}} {{title}}. Tem certeza?', create: 'Criar', @@ -336,6 +342,8 @@ export const ptTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Sobrescrever dados de campo existentes', pageNotFound: 'Página não encontrada', password: 'Senha', + pasteField: 'Colar campo', + pasteRow: 'Colar linha', payloadSettings: 'Configurações do Payload', perPage: 'Itens por Página: {{limit}}', previous: 'Anterior', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 85fbf75393..64e557c856 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -92,6 +92,9 @@ export const roTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Următorul câmp nu este valid:', followingFieldsInvalid_other: 'Următoarele câmpuri nu sunt valabile:', incorrectCollection: 'Colecție incorectă', + insufficientClipboardPermissions: + 'Accesul la clipboard a fost refuzat. Verificați permisiunile clipboard-ului.', + invalidClipboardData: 'Date invalide în clipboard.', invalidFileType: 'Tip de fișier invalid', invalidFileTypeValue: 'Tip de fișier invalid: {{value}}', invalidRequestArgs: 'Argumente invalide transmise în cerere: {{args}}', @@ -113,6 +116,7 @@ export const roTranslations: DefaultTranslationsObject = { problemUploadingFile: 'A existat o problemă în timpul încărcării fișierului.', tokenInvalidOrExpired: 'Tokenul este invalid sau a expirat.', tokenNotProvided: 'Tokenul nu a fost furnizat.', + unableToCopy: 'Imposibil de copiat.', unableToDeleteCount: 'Nu se poate șterge {{count}} din {{total}} {{label}}.', unableToReindexCollection: 'Eroare la reindexarea colecției {{collection}}. Operațiune anulată.', @@ -245,7 +249,9 @@ export const roTranslations: DefaultTranslationsObject = { 'Aceasta va elimina indexurile existente și va reindexa documentele din toate colecțiile.', copied: 'Copiat', copy: 'Copiați', + copyField: 'Copiază câmpul', copying: 'Copiere', + copyRow: 'Copiază rândul', copyWarning: 'Sunteți pe cale să suprascrieți {{to}} cu {{from}} pentru {{label}} {{title}}. 
Sunteți sigur?', create: 'Creează', @@ -340,6 +346,8 @@ export const roTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Suprascrieți datele existente din câmp', pageNotFound: 'Pagina nu a fost găsită', password: 'Parola', + pasteField: 'Lipește câmpul', + pasteRow: 'Lipește rândul', payloadSettings: 'Setări de Payload', perPage: 'Pe pagină: {{limit}}', previous: 'Anterior', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index d90726c8f6..7fa6b100eb 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -91,6 +91,9 @@ export const rsTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Ово поље је невалидно:', followingFieldsInvalid_other: 'Ова поља су невалидна:', incorrectCollection: 'Невалидна колекција', + insufficientClipboardPermissions: + 'Приступ к клипборду је одбијен. Провјерите своја овлашћења за клипборд.', + invalidClipboardData: 'Неважећи подаци у клипборду.', invalidFileType: 'Невалидан тип датотеке', invalidFileTypeValue: 'Невалидан тип датотеке: {{value}}', invalidRequestArgs: 'Неважећи аргументи прослеђени у захтеву: {{args}}', @@ -112,6 +115,7 @@ export const rsTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Постоји проблем при учитавању датотеке.', tokenInvalidOrExpired: 'Токен је невалидан или је истекао.', tokenNotProvided: 'Token nije dostavljen.', + unableToCopy: 'Није могуће копирати.', unableToDeleteCount: 'Није могуће избрисати {{count}} од {{total}} {{label}}.', unableToReindexCollection: 'Грешка при реиндексирању колекције {{collection}}. Операција је прекинута.', @@ -241,7 +245,9 @@ export const rsTranslations: DefaultTranslationsObject = { 'Ovo će ukloniti postojeće indekse i ponovo indeksirati dokumente u svim kolekcijama.', copied: 'Копирано', copy: 'Копирај', + copyField: 'Копирај поље', copying: 'Kopiranje', + copyRow: 'Копирај ред', copyWarning: 'На путу сте да препишете {{to}} са {{from}} за {{label}} {{title}}. Да ли сте сигурни?', create: 'Креирај', @@ -336,6 +342,8 @@ export const rsTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Prepišite postojeće podatke u polju', pageNotFound: 'Страница није пронађена', password: 'Лозинка', + pasteField: 'Залепи поље', + pasteRow: 'Залепи ред', payloadSettings: 'Payload поставке', perPage: 'По страници: {{limit}}', previous: 'Prethodni', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index 096fdf31a2..beee303953 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -91,6 +91,9 @@ export const rsLatinTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Ovo polje je nevalidno:', followingFieldsInvalid_other: 'Ova polja su nevalidna:', incorrectCollection: 'Nevalidna kolekcija', + insufficientClipboardPermissions: + 'Pristup clipboard-u odbijen. 
Proverite svoja dopuštenja za clipboard.', + invalidClipboardData: 'Nevažeći podaci u clipboard-u.', invalidFileType: 'Nevalidan tip datoteke', invalidFileTypeValue: 'Nevalidan tip datoteke: {{value}}', invalidRequestArgs: 'Nevažeći argumenti prosleđeni u zahtevu: {{args}}', @@ -112,6 +115,7 @@ export const rsLatinTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Postoji problem pri učitavanju datoteke.', tokenInvalidOrExpired: 'Token je nevalidan ili je istekao.', tokenNotProvided: 'Token nije obezbeđen.', + unableToCopy: 'Kopiranje nije moguće.', unableToDeleteCount: 'Nije moguće izbrisati {{count}} od {{total}} {{label}}.', unableToReindexCollection: 'Greška pri reindeksiranju kolekcije {{collection}}. Operacija je prekinuta.', @@ -241,7 +245,9 @@ export const rsLatinTranslations: DefaultTranslationsObject = { 'Ovo će ukloniti postojeće indekse i ponovo indeksirati dokumente u svim kolekcijama.', copied: 'Kopirano', copy: 'Kopiraj', + copyField: 'Kopiraj polje', copying: 'Kopiranje', + copyRow: 'Kopiraj red', copyWarning: 'Na korak ste da prepišete {{to}} sa {{from}} za {{label}} {{title}}. Da li ste sigurni?', create: 'Kreiraj', @@ -336,6 +342,8 @@ export const rsLatinTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Prepiši postojeće podatke iz polja', pageNotFound: 'Stranica nije pronađena', password: 'Lozinka', + pasteField: 'Zalepi polje', + pasteRow: 'Zalepi red', payloadSettings: 'Payload postavke', perPage: 'Po stranici: {{limit}}', previous: 'Prethodni', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index 53aa502e18..ee29c5d406 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -91,6 +91,9 @@ export const ruTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Следующее поле недействительно:', followingFieldsInvalid_other: 'Следующие поля недействительны:', incorrectCollection: 'Неправильная Коллекция', + insufficientClipboardPermissions: + 'Доступ к буферу обмена отклонен. Проверьте разрешения буфера обмена.', + invalidClipboardData: 'Неверные данные в буфере обмена.', invalidFileType: 'Недопустимый тип файла', invalidFileTypeValue: 'Недопустимый тип файла: {{value}}', invalidRequestArgs: 'В запрос переданы недопустимые аргументы: {{args}}', @@ -112,6 +115,7 @@ export const ruTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Возникла проблема при загрузке файла.', tokenInvalidOrExpired: 'Токен либо недействителен, либо срок его действия истек.', tokenNotProvided: 'Токен не предоставлен.', + unableToCopy: 'Не удалось скопировать.', unableToDeleteCount: 'Не удалось удалить {{count}} из {{total}} {{label}}.', unableToReindexCollection: 'Ошибка при переиндексации коллекции {{collection}}. Операция прервана.', @@ -243,7 +247,9 @@ export const ruTranslations: DefaultTranslationsObject = { 'Это удалит существующие индексы и переиндексирует документы во всех коллекциях.', copied: 'Скопировано', copy: 'Скопировать', + copyField: 'Копировать поле', copying: 'Копирование', + copyRow: 'Копировать строку', copyWarning: 'Вы собираетесь перезаписать {{to}} на {{from}} для {{label}} {{title}}. 
Вы уверены?', create: 'Создать', @@ -338,6 +344,8 @@ export const ruTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Перезаписать существующие данные поля', pageNotFound: 'Страница не найдена', password: 'Пароль', + pasteField: 'Вставить поле', + pasteRow: 'Вставить строку', payloadSettings: 'Настройки Payload', perPage: 'На странице: {{limit}}', previous: 'Предыдущий', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index f0dd04d0d6..f4479b1745 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -91,6 +91,9 @@ export const skTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Nasledujúce pole je neplatné:', followingFieldsInvalid_other: 'Nasledujúce polia sú neplatné:', incorrectCollection: 'Nesprávna kolekcia', + insufficientClipboardPermissions: + 'Prístup do schránky bol zamietnutý. Skontrolujte svoje oprávnenia pre schránku.', + invalidClipboardData: 'Neplatné dáta v schránke.', invalidFileType: 'Neplatný typ súboru', invalidFileTypeValue: 'Neplatný typ súboru: {{value}}', invalidRequestArgs: 'Neplatné argumenty odoslané v požiadavke: {{args}}', @@ -112,6 +115,7 @@ export const skTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Pri nahrávaní súboru došlo k chybe.', tokenInvalidOrExpired: 'Token je neplatný alebo vypršal.', tokenNotProvided: 'Token nie je poskytnutý.', + unableToCopy: 'Kopírovanie nie je možné.', unableToDeleteCount: 'Nie je možné zmazať {{count}} z {{total}} {{label}}.', unableToReindexCollection: 'Chyba pri reindexácii kolekcie {{collection}}. Operácia bola prerušená.', @@ -243,7 +247,9 @@ export const skTranslations: DefaultTranslationsObject = { 'Týmto sa odstránia existujúce indexy a znova sa zaindexujú dokumenty vo všetkých kolekciách.', copied: 'Skopírované', copy: 'Kopírovať', + copyField: 'Kopírovať pole', copying: 'Kopírovanie', + copyRow: 'Kopírovať riadok', copyWarning: 'Chystáte sa prepísať {{to}} na {{from}} pre {{label}} {{title}}. Ste si istý?', create: 'Vytvoriť', created: 'Vytvořeno', @@ -336,6 +342,8 @@ export const skTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Prepísať existujúce pole dát', pageNotFound: 'Stránka nenájdená', password: 'Heslo', + pasteField: 'Prilepiť pole', + pasteRow: 'Prilepiť riadok', payloadSettings: 'Nastavenia dátového záznamu', perPage: 'Na stránku: {{limit}}', previous: 'Predchádzajúci', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index 5f388f0c17..f424587ff5 100644 --- a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -90,6 +90,9 @@ export const slTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Naslednje polje je neveljavno:', followingFieldsInvalid_other: 'Naslednja polja so neveljavna:', incorrectCollection: 'Napačna zbirka', + insufficientClipboardPermissions: + 'Dostop do odložišča je bil zavrnjen. 
Preverite dovoljenja za odložišče.', + invalidClipboardData: 'Neveljavni podatki v odložišču.', invalidFileType: 'Neveljaven tip datoteke', invalidFileTypeValue: 'Neveljaven tip datoteke: {{value}}', invalidRequestArgs: 'V zahtevi so bili poslani neveljavni argumenti: {{args}}', @@ -111,6 +114,7 @@ export const slTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Pri nalaganju datoteke je prišlo do težave.', tokenInvalidOrExpired: 'Žeton je neveljaven ali je potekel.', tokenNotProvided: 'Žeton ni bil posredovan.', + unableToCopy: 'Kopiranje ni mogoče.', unableToDeleteCount: 'Ni bilo mogoče izbrisati {{count}} od {{total}} {{label}}.', unableToReindexCollection: 'Napaka pri reindeksiranju zbirke {{collection}}. Operacija je bila prekinjena.', @@ -241,7 +245,9 @@ export const slTranslations: DefaultTranslationsObject = { 'To bo odstranilo obstoječe indekse in ponovno indeksiralo dokumente v vseh zbirkah.', copied: 'Kopirano', copy: 'Kopiraj', + copyField: 'Kopiraj polje', copying: 'Kopiranje', + copyRow: 'Kopiraj vrstico', copyWarning: 'Prepisali boste {{to}} z {{from}} za {{label}} {{title}}. Ste prepričani?', create: 'Ustvari', created: 'Ustvarjeno', @@ -335,6 +341,8 @@ export const slTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Prepišite obstoječe podatke polja', pageNotFound: 'Stran ni najdena', password: 'Geslo', + pasteField: 'Prilepi polje', + pasteRow: 'Prilepi vrstico', payloadSettings: 'Nastavitve Payloada', perPage: 'Na stran: {{limit}}', previous: 'Prejšnji', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index 1eec3056ab..5fcdcb7199 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -90,6 +90,9 @@ export const svTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Följande fält är ogiltigt:', followingFieldsInvalid_other: 'Följande fält är ogiltiga:', incorrectCollection: 'Felaktig samling', + insufficientClipboardPermissions: + 'Åtkomst till urklipp nekades. Kontrollera dina behörigheter för urklipp.', + invalidClipboardData: 'Ogiltiga urklippsdata.', invalidFileType: 'Ogiltig filtyp', invalidFileTypeValue: 'Ogiltig filtyp: {{value}}', invalidRequestArgs: 'Ogiltiga argument har skickats i begäran: {{args}}', @@ -111,6 +114,7 @@ export const svTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Det uppstod ett problem när filen laddades upp.', tokenInvalidOrExpired: 'Token är antingen ogiltig eller har löpt ut.', tokenNotProvided: 'Token inte tillhandahållet.', + unableToCopy: 'Kan inte kopiera.', unableToDeleteCount: 'Det gick inte att ta bort {{count}} av {{total}} {{label}}.', unableToReindexCollection: 'Fel vid omindexering av samlingen {{collection}}. Operationen avbröts.', @@ -241,7 +245,9 @@ export const svTranslations: DefaultTranslationsObject = { 'Detta kommer att ta bort befintliga index och omindexera dokumenten i alla samlingar.', copied: 'Kopierad', copy: 'Kopiera', + copyField: 'Kopiera fält', copying: 'Kopierar...', + copyRow: 'Kopiera rad', copyWarning: 'Du håller på att skriva över {{to}} med {{from}} för {{label}} {{title}}. 
Är du säker?', create: 'Skapa', @@ -336,6 +342,8 @@ export const svTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Skriv över befintlig fältdatabas', pageNotFound: 'Sidan hittas inte', password: 'Lösenord', + pasteField: 'Klistra in fält', + pasteRow: 'Klistra in rad', payloadSettings: 'Programinställningar', perPage: 'Per Sida: {{limit}}', previous: 'Föregående', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index a1855b951e..9a39e8f1ef 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -88,6 +88,9 @@ export const thTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'ช่องต่อไปนี้ไม่ถูกต้อง:', followingFieldsInvalid_other: 'ช่องต่อไปนี้ไม่ถูกต้อง:', incorrectCollection: 'Collection ไม่ถูกต้อง', + insufficientClipboardPermissions: + 'การเข้าถึงคลิปบอร์ดถูกปฏิเสธ กรุณาตรวจสอบสิทธิ์การเข้าถึงคลิปบอร์ดของคุณ', + invalidClipboardData: 'ข้อมูลคลิปบอร์ดไม่ถูกต้อง', invalidFileType: 'ประเภทของไฟล์ไม่ถูกต้อง', invalidFileTypeValue: 'ประเภทของไฟล์ไม่ถูกต้อง: {{value}}', invalidRequestArgs: 'มีการส่งอาร์กิวเมนต์ที่ไม่ถูกต้องในคำขอ: {{args}}', @@ -109,6 +112,7 @@ export const thTranslations: DefaultTranslationsObject = { problemUploadingFile: 'เกิดปัญหาระหว่างการอัปโหลดไฟล์', tokenInvalidOrExpired: 'Token ไม่ถูกต้องหรือหมดอายุ', tokenNotProvided: 'ไม่ได้รับโทเค็น', + unableToCopy: 'ไม่สามารถคัดลอกได้', unableToDeleteCount: 'ไม่สามารถลบ {{count}} จาก {{total}} {{label}}', unableToReindexCollection: 'เกิดข้อผิดพลาดในการจัดทำดัชนีใหม่ของคอลเลกชัน {{collection}}. การดำเนินการถูกยกเลิก', @@ -236,7 +240,9 @@ export const thTranslations: DefaultTranslationsObject = { 'การดำเนินการนี้จะลบดัชนีที่มีอยู่และทำการจัดทำดัชนีใหม่ในเอกสารของทุกคอลเลกชัน.', copied: 'คัดลอกแล้ว', copy: 'คัดลอก', + copyField: 'คัดลอกฟิลด์', copying: 'การคัดลอก', + copyRow: 'คัดลอกแถว', copyWarning: 'คุณกำลังจะเขียนทับ {{to}} ด้วย {{from}} สำหรับ {{label}} {{title}}. คุณแน่ใจหรือไม่?', create: 'สร้าง', @@ -330,6 +336,8 @@ export const thTranslations: DefaultTranslationsObject = { overwriteExistingData: 'เขียนทับข้อมูลในฟิลด์ที่มีอยู่แล้ว', pageNotFound: 'ไม่พบหน้าที่ต้องการ', password: 'รหัสผ่าน', + pasteField: 'วางฟิลด์', + pasteRow: 'วางแถว', payloadSettings: 'การตั้งค่า Payload', perPage: 'จำนวนต่อหน้า: {{limit}}', previous: 'ก่อนหน้านี้', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index b05c8b2129..57375f0008 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -91,6 +91,9 @@ export const trTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Lütfen geçersiz alanı düzeltin:', followingFieldsInvalid_other: 'Lütfen geçersiz alanları düzeltin:', incorrectCollection: 'Hatalı koleksiyon', + insufficientClipboardPermissions: + 'Pano erişim reddedildi. 
Lütfen pano izinlerinizi kontrol edin.', + invalidClipboardData: 'Geçersiz pano verisi.', invalidFileType: 'Geçersiz dosya türü', invalidFileTypeValue: 'Geçersiz dosya türü: {{value}}', invalidRequestArgs: 'İstek içerisinde geçersiz argümanlar iletildi: {{args}}', @@ -112,6 +115,7 @@ export const trTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Dosya yüklenirken bir sorun oluştu.', tokenInvalidOrExpired: 'Geçersiz veya süresi dolmuş token.', tokenNotProvided: 'Jeton sağlanmadı.', + unableToCopy: 'Kopyalanamıyor.', unableToDeleteCount: '{{total}} {{label}} içinden {{count}} silinemiyor.', unableToReindexCollection: '{{collection}} koleksiyonunun yeniden indekslenmesinde hata oluştu. İşlem durduruldu.', @@ -244,7 +248,9 @@ export const trTranslations: DefaultTranslationsObject = { 'Bu işlem mevcut dizinleri kaldıracak ve tüm koleksiyonlardaki belgeleri yeniden dizine alacaktır.', copied: 'Kopyalandı', copy: 'Kopyala', + copyField: 'Alanı kopyala', copying: 'Kopyalama', + copyRow: 'Satırı kopyala', copyWarning: "{{to}}'yu {{from}} ile {{label}} {{title}} için üstüne yazmak üzeresiniz. Emin misiniz?", create: 'Oluştur', @@ -339,6 +345,8 @@ export const trTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Mevcut alan verilerinin üzerine yazın', pageNotFound: 'Sayfa bulunamadı', password: 'Parola', + pasteField: 'Alanı yapıştır', + pasteRow: 'Satırı yapıştır', payloadSettings: 'Ayarlar', perPage: 'Sayfa başına: {{limit}}', previous: 'Önceki', diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index 9864931d43..c1f96e4771 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -91,6 +91,9 @@ export const ukTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Наступне поле невірне:', followingFieldsInvalid_other: 'Наступні поля невірні', incorrectCollection: 'Неправильна колекція', + insufficientClipboardPermissions: + 'Доступ до буфера обміну відхилено. Перевірте свої дозволи на буфер обміну.', + invalidClipboardData: 'Невірні дані в буфері обміну.', invalidFileType: 'Невірний тип файлу', invalidFileTypeValue: 'Невірний тип файлу: {{value}}', invalidRequestArgs: 'Неправильні аргументи передано в запиті: {{args}}', @@ -112,6 +115,7 @@ export const ukTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Виникла помилка під час завантаження файлу.', tokenInvalidOrExpired: 'Токен недійсний, або його строк дії закінчився.', tokenNotProvided: 'Токен не надано.', + unableToCopy: 'Неможливо скопіювати.', unableToDeleteCount: 'Не вдалося видалити {{count}} із {{total}} {{label}}.', unableToReindexCollection: 'Помилка при повторному індексуванні колекції {{collection}}. Операцію скасовано.', @@ -241,7 +245,9 @@ export const ukTranslations: DefaultTranslationsObject = { 'Це видалить наявні індекси та перебудує індекси документів у всіх колекціях.', copied: 'Скопійовано', copy: 'Скопіювати', + copyField: 'Копіювати поле', copying: 'Копіювання', + copyRow: 'Копіювати рядок', copyWarning: 'Ви збираєтесь замінити {{to}} на {{from}} для {{label}} {{title}}. 
Ви впевнені?', create: 'Створити', created: 'Створено', @@ -335,6 +341,8 @@ export const ukTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Перезаписати існуючі дані поля', pageNotFound: 'Сторінка не знайдена', password: 'Пароль', + pasteField: 'Вставити поле', + pasteRow: 'Вставити рядок', payloadSettings: 'Налаштування Payload', perPage: 'На сторінці: {{limit}}', previous: 'Попередній', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index 2c59447e76..d235a7de03 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -90,6 +90,9 @@ export const viTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: 'Lỗi - Field sau không hợp lệ:', followingFieldsInvalid_other: 'Lỗi - Những fields sau không hợp lệ:', incorrectCollection: 'Lỗi - Collection không hợp lệ.', + insufficientClipboardPermissions: + 'Truy cập vào bộ nhớ tạm bị từ chối. Vui lòng kiểm tra quyền truy cập bộ nhớ tạm của bạn.', + invalidClipboardData: 'Dữ liệu bộ nhớ tạm không hợp lệ.', invalidFileType: 'Lỗi - Định dạng tệp không hợp lệ.', invalidFileTypeValue: 'Lỗi - Định dạng tệp không hợp lệ: {{value}}.', invalidRequestArgs: 'Các đối số không hợp lệ đã được truyền trong yêu cầu: {{args}}', @@ -111,6 +114,7 @@ export const viTranslations: DefaultTranslationsObject = { problemUploadingFile: 'Lỗi - Đã xảy ra vấn để khi tải lên file sau.', tokenInvalidOrExpired: 'Lỗi - Token không hợp lệ hoặc đã hết hạn.', tokenNotProvided: 'Không cung cấp mã thông báo.', + unableToCopy: 'Không thể sao chép.', unableToDeleteCount: 'Không thể xóa {{count}} trong số {{total}} {{label}}.', unableToReindexCollection: 'Lỗi khi tái lập chỉ mục bộ sưu tập {{collection}}. Quá trình bị hủy.', @@ -240,7 +244,9 @@ export const viTranslations: DefaultTranslationsObject = { 'Điều này sẽ xóa các chỉ mục hiện tại và tái lập chỉ mục các tài liệu trong tất cả các bộ sưu tập.', copied: 'Đâ sao chép', copy: 'Sao chép', + copyField: 'Sao chép trường', copying: 'Sao chép', + copyRow: 'Sao chép dòng', copyWarning: 'Bạn đang chuẩn bị ghi đè {{to}} bằng {{from}} cho {{label}} {{title}}. 
Bạn có chắc chắn không?', create: 'Tạo', @@ -335,6 +341,8 @@ export const viTranslations: DefaultTranslationsObject = { overwriteExistingData: 'Ghi đè dữ liệu trường hiện tại', pageNotFound: 'Không tìm thấy trang', password: 'Mật khẩu', + pasteField: 'Dán trường', + pasteRow: 'Dán dòng', payloadSettings: 'Cài đặt', perPage: 'Hiển thị mỗi trang: {{limit}}', previous: 'Trước đó', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index a654befe06..c8a4b04e19 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -86,6 +86,8 @@ export const zhTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: '下面的字段是无效的:', followingFieldsInvalid_other: '以下字段是无效的:', incorrectCollection: '不正确的集合', + insufficientClipboardPermissions: '剪贴板访问被拒绝。请检查您的剪贴板权限。', + invalidClipboardData: '剪贴板数据无效。', invalidFileType: '无效的文件类型', invalidFileTypeValue: '无效的文件类型: {{value}}', invalidRequestArgs: '请求中传递了无效的参数:{{args}}', @@ -107,6 +109,7 @@ export const zhTranslations: DefaultTranslationsObject = { problemUploadingFile: '上传文件时出现了问题。', tokenInvalidOrExpired: '令牌无效或已过期。', tokenNotProvided: '未提供令牌。', + unableToCopy: '无法复制。', unableToDeleteCount: '无法从 {{total}} {{label}} 中删除 {{count}}。', unableToReindexCollection: '重新索引集合 {{collection}} 时出错。操作已中止。', unableToUpdateCount: '无法更新 {{count}} 个,共 {{total}} 个 {{label}}。', @@ -229,7 +232,9 @@ export const zhTranslations: DefaultTranslationsObject = { confirmReindexDescriptionAll: '此操作将删除现有索引,并重新索引所有集合中的文档。', copied: '已复制', copy: '复制', + copyField: '复制字段', copying: '复制中', + copyRow: '复制行', copyWarning: '您即将用{{from}}覆盖{{to}},用于{{label}} {{title}}。您确定吗?', create: '创建', created: '已创建', @@ -321,6 +326,8 @@ export const zhTranslations: DefaultTranslationsObject = { overwriteExistingData: '覆盖现有字段数据', pageNotFound: '未找到页面', password: '密码', + pasteField: '粘贴字段', + pasteRow: '粘贴行', payloadSettings: 'Payload设置', perPage: '每一页: {{limit}}', previous: '前一个', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index f23ff13a1d..2512bb0916 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -86,6 +86,8 @@ export const zhTwTranslations: DefaultTranslationsObject = { followingFieldsInvalid_one: '下面的字串是無效的:', followingFieldsInvalid_other: '以下字串是無效的:', incorrectCollection: '不正確的集合', + insufficientClipboardPermissions: '剪貼簿訪問被拒絕。請檢查您的剪貼簿權限。', + invalidClipboardData: '剪貼簿資料無效。', invalidFileType: '無效的文件類型', invalidFileTypeValue: '無效的文件類型: {{value}}', invalidRequestArgs: '請求中傳遞了無效的參數:{{args}}', @@ -107,6 +109,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { problemUploadingFile: '上傳文件時出現了問題。', tokenInvalidOrExpired: '令牌無效或已過期。', tokenNotProvided: '未提供令牌。', + unableToCopy: '無法複製。', unableToDeleteCount: '無法從 {{total}} 個中刪除 {{count}} 個 {{label}}。', unableToReindexCollection: '重新索引集合 {{collection}} 時出現錯誤。操作已中止。', unableToUpdateCount: '無法從 {{total}} 個中更新 {{count}} 個 {{label}}。', @@ -229,7 +232,9 @@ export const zhTwTranslations: DefaultTranslationsObject = { confirmReindexDescriptionAll: '此操作將刪除現有索引並重新索引所有集合中的文件。', copied: '已複製', copy: '複製', + copyField: '複製欄位', copying: '複製', + copyRow: '複製列', copyWarning: '您即將以{{from}}覆蓋{{to}},這將影響{{label}} {{title}}。您確定要這麼做嗎?', create: '建立', created: '已建立', @@ -321,6 +326,8 @@ export const zhTwTranslations: DefaultTranslationsObject = { overwriteExistingData: '覆蓋現有欄位資料', pageNotFound: '未找到頁面', password: '密碼', + pasteField: '貼上欄位', + 
pasteRow: '貼上列', payloadSettings: 'Payload設定', perPage: '每一頁: {{limit}} 個', previous: '先前的', diff --git a/packages/ui/src/elements/ArrayAction/index.tsx b/packages/ui/src/elements/ArrayAction/index.tsx index 60d5986eb0..81e778bb05 100644 --- a/packages/ui/src/elements/ArrayAction/index.tsx +++ b/packages/ui/src/elements/ArrayAction/index.tsx @@ -7,29 +7,34 @@ import { MoreIcon } from '../../icons/More/index.js' import { PlusIcon } from '../../icons/Plus/index.js' import { XIcon } from '../../icons/X/index.js' import { useTranslation } from '../../providers/Translation/index.js' -import { Popup, PopupList } from '../Popup/index.js' +import { ClipboardActionLabel } from '../ClipboardAction/ClipboardActionLabel.js' import './index.scss' +import { Popup, PopupList } from '../Popup/index.js' const baseClass = 'array-actions' export type Props = { addRow: (current: number, blockType?: string) => Promise | void + copyRow: (index: number) => void duplicateRow: (current: number) => void hasMaxRows: boolean index: number isSortable?: boolean moveRow: (from: number, to: number) => void + pasteRow: (index: number) => void removeRow: (index: number) => void rowCount: number } export const ArrayAction: React.FC = ({ addRow, + copyRow, duplicateRow, hasMaxRows, index, isSortable, moveRow, + pasteRow, removeRow, rowCount, }) => { @@ -96,6 +101,24 @@ export const ArrayAction: React.FC = ({ )} + { + copyRow(index) + close() + }} + > + + + { + pasteRow(index) + close() + }} + > + + { diff --git a/packages/ui/src/elements/ClipboardAction/ClipboardActionLabel.tsx b/packages/ui/src/elements/ClipboardAction/ClipboardActionLabel.tsx new file mode 100644 index 0000000000..5a01f4ca65 --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/ClipboardActionLabel.tsx @@ -0,0 +1,32 @@ +'use client' + +import { Fragment } from 'react' + +import { CopyIcon } from '../../icons/Copy/index.js' +import { EditIcon } from '../../icons/Edit/index.js' +import { useTranslation } from '../../providers/Translation/index.js' + +export const ClipboardActionLabel = ({ + isPaste, + isRow, +}: { + isPaste?: boolean + isRow?: boolean +}) => { + const { t } = useTranslation() + + let label = t('general:copyField') + if (!isRow && isPaste) { + label = t('general:pasteField') + } else if (isRow && !isPaste) { + label = t('general:copyRow') + } else if (isRow && isPaste) { + label = t('general:pasteRow') + } + + return ( + + {isPaste ? : } {label} + + ) +} diff --git a/packages/ui/src/elements/ClipboardAction/clipboardUtilities.ts b/packages/ui/src/elements/ClipboardAction/clipboardUtilities.ts new file mode 100644 index 0000000000..3d9915af3f --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/clipboardUtilities.ts @@ -0,0 +1,67 @@ +import type { + ClipboardCopyActionArgs, + ClipboardPasteActionArgs, + ClipboardPasteActionValidateArgs, + ClipboardPasteData, +} from './types.js' + +import { isClipboardDataValid } from './isClipboardDataValid.js' + +const localStorageClipboardKey = '_payloadClipboard' + +/** + * @note This function doesn't use the Clipboard API, but localStorage. 
See rationale in #11513 + */ +export function clipboardCopy(args: ClipboardCopyActionArgs): string | true { + const { getDataToCopy, t, ...rest } = args + + const dataToWrite = { + data: getDataToCopy(), + ...rest, + } + + try { + localStorage.setItem(localStorageClipboardKey, JSON.stringify(dataToWrite)) + return true + } catch (_err) { + return t('error:unableToCopy') + } +} + +/** + * @note This function doesn't use the Clipboard API, but localStorage. See rationale in #11513 + */ +export function clipboardPaste({ + onPaste, + path: fieldPath, + t, + ...args +}: ClipboardPasteActionArgs): string | true { + let dataToPaste: ClipboardPasteData + + try { + const jsonFromClipboard = localStorage.getItem(localStorageClipboardKey) + + if (!jsonFromClipboard) { + return t('error:invalidClipboardData') + } + + dataToPaste = JSON.parse(jsonFromClipboard) + } catch (_err) { + return t('error:invalidClipboardData') + } + + const dataToValidate = { + ...dataToPaste, + ...args, + fieldPath, + } as ClipboardPasteActionValidateArgs + + if (!isClipboardDataValid(dataToValidate)) { + return t('error:invalidClipboardData') + } + + onPaste(dataToPaste) + + return true +} diff --git a/packages/ui/src/elements/ClipboardAction/index.tsx b/packages/ui/src/elements/ClipboardAction/index.tsx new file mode 100644 index 0000000000..8a1d531d8d --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/index.tsx @@ -0,0 +1,117 @@ +'use client' + +import type { FormStateWithoutComponents } from 'payload' + +import { type FC, useCallback } from 'react' +import { toast } from 'sonner' + +import type { ClipboardCopyData, OnPasteFn } from './types.js' + +import { MoreIcon } from '../../icons/More/index.js' +import { useTranslation } from '../../providers/Translation/index.js' +import { Popup, PopupList } from '../Popup/index.js' +import { ClipboardActionLabel } from './ClipboardActionLabel.js' +import { clipboardCopy, clipboardPaste } from './clipboardUtilities.js' + +const baseClass = 'clipboard-action' + +type Props = { + className?: string + copyClassName?: string + disableCopy?: boolean + disablePaste?: boolean + getDataToCopy: () => FormStateWithoutComponents + isRow?: boolean + onPaste: OnPasteFn + pasteClassName?: string +} & ClipboardCopyData + +/** + * Menu actions for copying and pasting fields. Currently, this is only used in Arrays and Blocks. + * @note This component doesn't use the Clipboard API, but localStorage. See rationale in #11513 + */ +export const ClipboardAction: FC = ({ + className, + copyClassName, + disableCopy, + disablePaste, + isRow, + onPaste, + pasteClassName, + path, + ...rest +}) => { + const { t } = useTranslation() + + const classes = [`${baseClass}__popup`, className].filter(Boolean).join(' ') + + const handleCopy = useCallback(() => { + const clipboardResult = clipboardCopy({ + path, + t, + ...rest, + }) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } else { + toast.success(t('general:copied')) + } + }, [t, rest, path]) + + const handlePaste = useCallback(() => { + const clipboardResult = clipboardPaste( + rest.type === 'array' + ? 
{ + onPaste, + path, + schemaFields: rest.fields, + t, + } + : { + onPaste, + path, + schemaBlocks: rest.blocks, + t, + }, + ) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } + }, [onPaste, rest, path, t]) + + return ( + } + className={classes} + horizontalAlign="center" + render={({ close }) => ( + + { + void handleCopy() + close() + }} + > + + + { + void handlePaste() + close() + }} + > + + + + )} + size="large" + verticalAlign="bottom" + /> + ) +} diff --git a/packages/ui/src/elements/ClipboardAction/isClipboardDataValid.ts b/packages/ui/src/elements/ClipboardAction/isClipboardDataValid.ts new file mode 100644 index 0000000000..1ad9489435 --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/isClipboardDataValid.ts @@ -0,0 +1,109 @@ +import type { ClientBlock, ClientField } from 'payload' + +import { fieldAffectsData, fieldHasSubFields } from 'payload/shared' + +import type { ClipboardPasteActionValidateArgs } from './types.js' + +/** + * Validates whether clipboard data is compatible with the target schema. + * For this to be true, the copied field and the target to be pasted must + * be structurally equivalent (same schema) + * + * @returns True if the clipboard data is valid and can be pasted, false otherwise + */ +export function isClipboardDataValid({ data, path, ...args }: ClipboardPasteActionValidateArgs) { + if (typeof data === 'undefined' || !path || !args.type) { + return false + } + + if (args.type === 'blocks') { + return isClipboardBlocksValid({ + blocksFromClipboard: args.blocks, + blocksFromConfig: args.schemaBlocks, + }) + } else { + return isClipboardFieldsValid({ + fieldsFromClipboard: args.fields, + fieldsFromConfig: args.schemaFields, + }) + } +} + +function isClipboardFieldsValid({ + fieldsFromClipboard, + fieldsFromConfig, +}: { + fieldsFromClipboard: ClientField[] + fieldsFromConfig?: ClientField[] +}): boolean { + if (!fieldsFromConfig || fieldsFromClipboard.length !== fieldsFromConfig?.length) { + return false + } + + return fieldsFromClipboard.every((clipboardField, i) => { + const configField = fieldsFromConfig[i] + + if (clipboardField.type !== configField.type) { + return false + } + + const affectsData = fieldAffectsData(clipboardField) && fieldAffectsData(configField) + if (affectsData && clipboardField.name !== configField.name) { + return false + } + + const hasNestedFieldsConfig = fieldHasSubFields(configField) + const hasNestedFieldsClipboard = fieldHasSubFields(clipboardField) + if (hasNestedFieldsClipboard !== hasNestedFieldsConfig) { + return false + } + + if (hasNestedFieldsClipboard && hasNestedFieldsConfig) { + return isClipboardFieldsValid({ + fieldsFromClipboard: clipboardField.fields, + fieldsFromConfig: configField.fields, + }) + } + + return true + }) +} + +function isClipboardBlocksValid({ + blocksFromClipboard, + blocksFromConfig, +}: { + blocksFromClipboard: ClientBlock[] + blocksFromConfig?: ClientBlock[] +}) { + const configBlockMap = new Map(blocksFromConfig?.map((block) => [block.slug, block])) + + if (!configBlockMap.size) { + return false + } + + const checkedSlugs = new Set() + + for (const currBlock of blocksFromClipboard) { + const currSlug = currBlock.slug + + if (!checkedSlugs.has(currSlug)) { + const configBlock = configBlockMap.get(currSlug) + if (!configBlock) { + return false + } + + if ( + !isClipboardFieldsValid({ + fieldsFromClipboard: currBlock.fields, + fieldsFromConfig: configBlock.fields, + }) + ) { + return false + } + + checkedSlugs.add(currSlug) + } + } + return true +} 
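For reference, the structural-equivalence rule enforced by `isClipboardDataValid` can be exercised directly. The sketch below is illustrative only and is not part of the patch: the field names (`title`, `links`, `heading`) and the `meta.links` path are invented, and the casts stand in for fully typed `ClientField` configs.

```ts
import type { ClientField } from 'payload'

import { isClipboardDataValid } from './isClipboardDataValid.js'

// Schema captured alongside the copied data (the `fields` key written by clipboardCopy).
const copiedSchema = [
  { name: 'title', type: 'text' },
  { name: 'links', type: 'array', fields: [{ name: 'url', type: 'text' }] },
] as unknown as ClientField[]

// Same names, types and nesting as the target config -> paste is allowed.
isClipboardDataValid({
  type: 'array',
  data: {}, // FormStateWithoutComponents captured at copy time
  fieldPath: 'meta.links',
  fields: copiedSchema,
  path: 'meta.links',
  schemaFields: copiedSchema,
}) // => true

// A renamed sub-field ('title' vs 'heading') breaks structural equivalence -> paste is rejected.
isClipboardDataValid({
  type: 'array',
  data: {},
  fieldPath: 'meta.links',
  fields: copiedSchema,
  path: 'meta.links',
  schemaFields: [
    { name: 'heading', type: 'text' },
    { name: 'links', type: 'array', fields: [{ name: 'url', type: 'text' }] },
  ] as unknown as ClientField[],
}) // => false
```

Because validation compares only schemas (types, names, nesting), a field or row copied from one document can be pasted into the matching field of another document, while a target whose sub-fields have been renamed or re-typed is rejected and `clipboardPaste` surfaces `error:invalidClipboardData`.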
diff --git a/packages/ui/src/elements/ClipboardAction/mergeFormStateFromClipboard.ts b/packages/ui/src/elements/ClipboardAction/mergeFormStateFromClipboard.ts new file mode 100644 index 0000000000..5a8d01c1f2 --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/mergeFormStateFromClipboard.ts @@ -0,0 +1,131 @@ +import type { FieldState, FormState } from 'payload' + +import type { ClipboardPasteData } from './types.js' + +export function reduceFormStateByPath({ + formState, + path, + rowIndex, +}: { + formState: FormState + path: string + rowIndex?: number +}) { + const filteredState: Record = {} + const prefix = typeof rowIndex !== 'number' ? path : `${path}.${rowIndex}` + + for (const key in formState) { + if (!key.startsWith(prefix)) { + continue + } + + const { customComponents: _, validate: __, ...field } = formState[key] + + if (Array.isArray(field.rows)) { + field.rows = field.rows.map((row) => { + if (!row || typeof row !== 'object') { + return row + } + const { customComponents: _, ...serializableRow } = row + return serializableRow + }) + } + + filteredState[key] = field + } + + return filteredState +} + +export function mergeFormStateFromClipboard({ + dataFromClipboard: clipboardData, + formState, + path, + rowIndex, +}: { + dataFromClipboard: ClipboardPasteData + formState: FormState + path: string + rowIndex?: number +}) { + const { + type: typeFromClipboard, + data: dataFromClipboard, + path: pathFromClipboard, + rowIndex: rowIndexFromClipboard, + } = clipboardData + + const copyFromField = typeof rowIndexFromClipboard !== 'number' + const pasteIntoField = typeof rowIndex !== 'number' + const fromRowToField = !copyFromField && pasteIntoField + const isArray = typeFromClipboard === 'array' + + let pathToReplace: string + if (copyFromField && pasteIntoField) { + pathToReplace = pathFromClipboard + } else if (copyFromField) { + pathToReplace = `${pathFromClipboard}.${rowIndex}` + } else { + pathToReplace = `${pathFromClipboard}.${rowIndexFromClipboard}` + } + + let targetSegment: string + if (!pasteIntoField) { + targetSegment = `${path}.${rowIndex}` + } else if (fromRowToField) { + targetSegment = `${path}.0` + } else { + targetSegment = path + } + + if (fromRowToField) { + const lastRenderedPath = `${path}.0` + const rowIDFromClipboard = dataFromClipboard[`${pathToReplace}.id`].value as string + const hasRows = formState[path].rows?.length + + formState[path].rows = [ + { + ...(hasRows && isArray ? formState[path].rows[0] : {}), + id: rowIDFromClipboard, + isLoading: false, + lastRenderedPath, + }, + ] + formState[path].value = 1 + formState[path].initialValue = 1 + formState[path].disableFormData = true + + for (const fieldPath in formState) { + if ( + fieldPath !== path && + !fieldPath.startsWith(lastRenderedPath) && + fieldPath.startsWith(path) + ) { + delete formState[fieldPath] + } + } + } + + for (const clipboardPath in dataFromClipboard) { + // Pasting a row id, skip overwriting + if ( + (!pasteIntoField && clipboardPath.endsWith('.id')) || + !clipboardPath.startsWith(pathToReplace) + ) { + continue + } + + const newPath = clipboardPath.replace(pathToReplace, targetSegment) + + const customComponents = isArray ? formState[newPath]?.customComponents : undefined + const validate = isArray ? 
formState[newPath]?.validate : undefined + + formState[newPath] = { + customComponents, + validate, + ...dataFromClipboard[clipboardPath], + } + } + + return formState +} diff --git a/packages/ui/src/elements/ClipboardAction/types.ts b/packages/ui/src/elements/ClipboardAction/types.ts new file mode 100644 index 0000000000..e0e6a03678 --- /dev/null +++ b/packages/ui/src/elements/ClipboardAction/types.ts @@ -0,0 +1,58 @@ +import type { TFunction } from '@payloadcms/translations' +import type { ClientBlock, ClientField, FormStateWithoutComponents } from 'payload' + +export type ClipboardCopyBlocksSchema = { + schemaBlocks: ClientBlock[] +} + +export type ClipboardCopyBlocksData = { + blocks: ClientBlock[] + type: 'blocks' +} + +export type ClipboardCopyFieldsSchema = { + schemaFields: ClientField[] +} + +export type ClipboardCopyFieldsData = { + fields: ClientField[] + type: 'array' +} + +export type ClipboardCopyData = { + path: string + rowIndex?: number +} & (ClipboardCopyBlocksData | ClipboardCopyFieldsData) + +export type ClipboardCopyActionArgs = { + getDataToCopy: () => FormStateWithoutComponents + t: TFunction +} & ClipboardCopyData + +export type ClipboardPasteData = { + data: FormStateWithoutComponents + path: string + rowIndex?: number +} & (ClipboardCopyBlocksData | ClipboardCopyFieldsData) + +export type OnPasteFn = (data: ClipboardPasteData) => void + +export type ClipboardPasteActionArgs = { + onPaste: OnPasteFn + path: string + t: TFunction +} & (ClipboardCopyBlocksSchema | ClipboardCopyFieldsSchema) + +export type ClipboardPasteActionValidateArgs = { + fieldPath: string +} & ( + | { + schemaBlocks: ClientBlock[] + type: 'blocks' + } + | { + schemaFields: ClientField[] + type: 'array' + } +) & + ClipboardPasteData diff --git a/packages/ui/src/fields/Array/ArrayRow.tsx b/packages/ui/src/fields/Array/ArrayRow.tsx index d3c495d167..1cc5fe11fa 100644 --- a/packages/ui/src/fields/Array/ArrayRow.tsx +++ b/packages/ui/src/fields/Array/ArrayRow.tsx @@ -21,6 +21,7 @@ const baseClass = 'array-field' type ArrayRowProps = { readonly addRow: (rowIndex: number) => Promise | void + readonly copyRow: (rowIndex: number) => void readonly CustomRowLabel?: React.ReactNode readonly duplicateRow: (rowIndex: number) => void readonly errorCount: number @@ -32,6 +33,7 @@ type ArrayRowProps = { readonly labels: Partial readonly moveRow: (fromIndex: number, toIndex: number) => void readonly parentPath: string + readonly pasteRow: (rowIndex: number) => void readonly path: string readonly permissions: SanitizedFieldPermissions readonly readOnly?: boolean @@ -46,6 +48,7 @@ type ArrayRowProps = { export const ArrayRow: React.FC = ({ addRow, attributes, + copyRow, CustomRowLabel, duplicateRow, errorCount, @@ -59,6 +62,7 @@ export const ArrayRow: React.FC = ({ listeners, moveRow, parentPath, + pasteRow, path, permissions, readOnly, @@ -107,11 +111,13 @@ export const ArrayRow: React.FC = ({ !readOnly ? 
( diff --git a/packages/ui/src/fields/Array/index.tsx b/packages/ui/src/fields/Array/index.tsx index 954937599b..fe6cd8cfae 100644 --- a/packages/ui/src/fields/Array/index.tsx +++ b/packages/ui/src/fields/Array/index.tsx @@ -6,10 +6,19 @@ import type { } from 'payload' import { getTranslation } from '@payloadcms/translations' -import React, { useCallback } from 'react' +import React, { Fragment, useCallback } from 'react' +import { toast } from 'sonner' + +import type { ClipboardPasteData } from '../../elements/ClipboardAction/types.js' import { Banner } from '../../elements/Banner/index.js' import { Button } from '../../elements/Button/index.js' +import { clipboardCopy, clipboardPaste } from '../../elements/ClipboardAction/clipboardUtilities.js' +import { ClipboardAction } from '../../elements/ClipboardAction/index.js' +import { + mergeFormStateFromClipboard, + reduceFormStateByPath, +} from '../../elements/ClipboardAction/mergeFormStateFromClipboard.js' import { DraggableSortableItem } from '../../elements/DraggableSortable/DraggableSortableItem/index.js' import { DraggableSortable } from '../../elements/DraggableSortable/index.js' import { ErrorPill } from '../../elements/ErrorPill/index.js' @@ -21,6 +30,7 @@ import { useForm, useFormSubmitted } from '../../forms/Form/context.js' import { extractRowsAndCollapsedIDs, toggleAllRows } from '../../forms/Form/rowHelpers.js' import { NullifyLocaleField } from '../../forms/NullifyField/index.js' import { useField } from '../../forms/useField/index.js' +import './index.scss' import { withCondition } from '../../forms/withCondition/index.js' import { useConfig } from '../../providers/Config/index.js' import { useDocumentInfo } from '../../providers/DocumentInfo/index.js' @@ -29,7 +39,6 @@ import { useTranslation } from '../../providers/Translation/index.js' import { scrollToID } from '../../utilities/scrollToID.js' import { fieldBaseClass } from '../shared/index.js' import { ArrayRow } from './ArrayRow.js' -import './index.scss' const baseClass = 'array-field' @@ -37,6 +46,7 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { const { field: { name, + type, admin: { className, description, isSortable = true } = {}, fields, label, @@ -58,7 +68,15 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { const minRows = (minRowsProp ?? required) ? 
1 : 0 const { setDocFieldPreferences } = useDocumentInfo() - const { addFieldRow, dispatchFields, moveFieldRow, removeFieldRow, setModified } = useForm() + const { + addFieldRow, + dispatchFields, + getFields, + moveFieldRow, + removeFieldRow, + replaceState, + setModified, + } = useForm() const submitted = useFormSubmitted() const { code: locale } = useLocale() const { i18n, t } = useTranslation() @@ -196,6 +214,83 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { [dispatchFields, path, rows, setDocFieldPreferences], ) + const copyRow = useCallback( + (rowIndex: number) => { + const formState = { ...getFields() } + const clipboardResult = clipboardCopy({ + type, + fields, + getDataToCopy: () => + reduceFormStateByPath({ + formState, + path, + rowIndex, + }), + path, + rowIndex, + t, + }) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } else { + toast.success(t('general:copied')) + } + }, + [fields, getFields, path, t, type], + ) + + const pasteRow = useCallback( + (rowIndex: number) => { + const formState = { ...getFields() } + const pasteArgs = { + onPaste: (dataFromClipboard: ClipboardPasteData) => { + const newState = mergeFormStateFromClipboard({ + dataFromClipboard, + formState, + path, + rowIndex, + }) + replaceState(newState) + setModified(true) + }, + path, + schemaFields: fields, + t, + } + + const clipboardResult = clipboardPaste(pasteArgs) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } + }, + [fields, getFields, path, replaceState, setModified, t], + ) + + const pasteField = useCallback( + (dataFromClipboard: ClipboardPasteData) => { + const formState = { ...getFields() } + const newState = mergeFormStateFromClipboard({ + dataFromClipboard, + formState, + path, + }) + replaceState(newState) + setModified(true) + }, + [getFields, path, replaceState, setModified], + ) + + const getDataToCopy = useCallback( + () => + reduceFormStateByPath({ + formState: { ...getFields() }, + path, + }), + [getFields, path], + ) + const hasMaxRows = maxRows && rows.length >= maxRows const fieldErrorCount = errorPaths.length @@ -243,28 +338,42 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { )} - {rows?.length > 0 && ( -
    -
-          …
-        )}
+        {rows?.length > 0 && (
+          …
+        )}
+        <ClipboardAction
+          disableCopy={!(rows?.length > 0)}
+          disablePaste={readOnly}
+          fields={fields}
+          getDataToCopy={getDataToCopy}
+          onPaste={pasteField}
+          path={path}
+          type={type}
+        />
{ { labels={labels} moveRow={moveRow} parentPath={path} + pasteRow={pasteRow} path={rowPath} permissions={permissions} readOnly={readOnly || disabled} diff --git a/packages/ui/src/fields/Blocks/BlockRow.tsx b/packages/ui/src/fields/Blocks/BlockRow.tsx index 14b01a45eb..257399b44d 100644 --- a/packages/ui/src/fields/Blocks/BlockRow.tsx +++ b/packages/ui/src/fields/Blocks/BlockRow.tsx @@ -25,6 +25,7 @@ type BlocksFieldProps = { addRow: (rowIndex: number, blockType: string) => Promise | void block: ClientBlock blocks: (ClientBlock | string)[] | ClientBlock[] + copyRow: (rowIndex: number) => void duplicateRow: (rowIndex: number) => void errorCount: number fields: ClientField[] @@ -35,6 +36,7 @@ type BlocksFieldProps = { labels: Labels moveRow: (fromIndex: number, toIndex: number) => void parentPath: string + pasteRow: (rowIndex: number) => void path: string permissions: SanitizedFieldPermissions readOnly: boolean @@ -51,6 +53,7 @@ export const BlockRow: React.FC = ({ attributes, block, blocks, + copyRow, duplicateRow, errorCount, fields, @@ -62,6 +65,7 @@ export const BlockRow: React.FC = ({ listeners, moveRow, parentPath, + pasteRow, path, permissions, readOnly, @@ -119,12 +123,14 @@ export const BlockRow: React.FC = ({ addRow={addRow} blocks={blocks} blockType={row.blockType} + copyRow={copyRow} duplicateRow={duplicateRow} fields={block.fields} hasMaxRows={hasMaxRows} isSortable={isSortable} labels={labels} moveRow={moveRow} + pasteRow={pasteRow} removeRow={removeRow} rowCount={rowCount} rowIndex={rowIndex} diff --git a/packages/ui/src/fields/Blocks/RowActions.tsx b/packages/ui/src/fields/Blocks/RowActions.tsx index 9e996b7b35..5b810e3a05 100644 --- a/packages/ui/src/fields/Blocks/RowActions.tsx +++ b/packages/ui/src/fields/Blocks/RowActions.tsx @@ -12,12 +12,14 @@ export const RowActions: React.FC<{ readonly addRow: (rowIndex: number, blockType: string) => Promise | void readonly blocks: (ClientBlock | string)[] readonly blockType: string + readonly copyRow: (rowIndex: number) => void readonly duplicateRow: (rowIndex: number, blockType: string) => void readonly fields: ClientField[] readonly hasMaxRows?: boolean readonly isSortable?: boolean readonly labels: Labels readonly moveRow: (fromIndex: number, toIndex: number) => void + readonly pasteRow: (rowIndex: number) => void readonly removeRow: (rowIndex: number) => void readonly rowCount: number readonly rowIndex: number @@ -26,11 +28,13 @@ export const RowActions: React.FC<{ addRow, blocks, blockType, + copyRow, duplicateRow, hasMaxRows, isSortable, labels, moveRow, + pasteRow, removeRow, rowCount, rowIndex, @@ -60,11 +64,13 @@ export const RowActions: React.FC<{ setIndexToAdd(index) openModal(drawerSlug) }} + copyRow={copyRow} duplicateRow={() => duplicateRow(rowIndex, blockType)} hasMaxRows={hasMaxRows} index={rowIndex} isSortable={isSortable} moveRow={moveRow} + pasteRow={pasteRow} removeRow={removeRow} rowCount={rowCount} /> diff --git a/packages/ui/src/fields/Blocks/index.tsx b/packages/ui/src/fields/Blocks/index.tsx index 99a8578083..39cf410e90 100644 --- a/packages/ui/src/fields/Blocks/index.tsx +++ b/packages/ui/src/fields/Blocks/index.tsx @@ -2,10 +2,19 @@ import type { BlocksFieldClientComponent, ClientBlock } from 'payload' import { getTranslation } from '@payloadcms/translations' -import React, { Fragment, useCallback } from 'react' +import React, { Fragment, useCallback, useMemo } from 'react' +import { toast } from 'sonner' + +import type { ClipboardPasteData } from '../../elements/ClipboardAction/types.js' import { Banner } 
from '../../elements/Banner/index.js' import { Button } from '../../elements/Button/index.js' +import { clipboardCopy, clipboardPaste } from '../../elements/ClipboardAction/clipboardUtilities.js' +import { ClipboardAction } from '../../elements/ClipboardAction/index.js' +import { + mergeFormStateFromClipboard, + reduceFormStateByPath, +} from '../../elements/ClipboardAction/mergeFormStateFromClipboard.js' import { DraggableSortableItem } from '../../elements/DraggableSortable/DraggableSortableItem/index.js' import { DraggableSortable } from '../../elements/DraggableSortable/index.js' import { DrawerToggler } from '../../elements/Drawer/index.js' @@ -22,13 +31,13 @@ import { useDocumentInfo } from '../../providers/DocumentInfo/index.js' import { useLocale } from '../../providers/Locale/index.js' import { useTranslation } from '../../providers/Translation/index.js' import { scrollToID } from '../../utilities/scrollToID.js' +import './index.scss' import { FieldDescription } from '../FieldDescription/index.js' import { FieldError } from '../FieldError/index.js' import { FieldLabel } from '../FieldLabel/index.js' import { fieldBaseClass } from '../shared/index.js' import { BlockRow } from './BlockRow.js' import { BlocksDrawer } from './BlocksDrawer/index.js' -import './index.scss' const baseClass = 'blocks-field' @@ -38,6 +47,7 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { const { field: { name, + type, admin: { className, description, isSortable = true } = {}, blockReferences, blocks, @@ -60,7 +70,15 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { const minRows = (minRowsProp ?? required) ? 1 : 0 const { setDocFieldPreferences } = useDocumentInfo() - const { addFieldRow, dispatchFields, moveFieldRow, removeFieldRow, setModified } = useForm() + const { + addFieldRow, + dispatchFields, + getFields, + moveFieldRow, + removeFieldRow, + replaceState, + setModified, + } = useForm() const { code: locale } = useLocale() const { config: { localization }, @@ -84,6 +102,23 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { return true })() + const clientBlocks = useMemo(() => { + if (!blockReferences) { + return blocks + } + + const resolvedBlocks: ClientBlock[] = [] + for (const blockReference of blockReferences) { + const block = + typeof blockReference === 'string' ? 
config.blocksMap[blockReference] : blockReference + if (block) { + resolvedBlocks.push(block) + } + } + + return resolvedBlocks + }, [blockReferences, blocks, config.blocksMap]) + const memoizedValidate = useCallback( (value, options) => { // alternative locales can be null @@ -184,6 +219,73 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { [dispatchFields, path, rows, setDocFieldPreferences], ) + const copyRow = useCallback( + (rowIndex: number) => { + const clipboardResult = clipboardCopy({ + type, + blocks: clientBlocks, + getDataToCopy: () => + reduceFormStateByPath({ + formState: { ...getFields() }, + path, + rowIndex, + }), + path, + rowIndex, + t, + }) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } else { + toast.success(t('general:copied')) + } + }, + [clientBlocks, path, t, type, getFields], + ) + + const pasteRow = useCallback( + (rowIndex: number) => { + const pasteArgs = { + onPaste: (dataFromClipboard: ClipboardPasteData) => { + const formState = { ...getFields() } + const newState = mergeFormStateFromClipboard({ + dataFromClipboard, + formState, + path, + rowIndex, + }) + replaceState(newState) + setModified(true) + }, + path, + schemaBlocks: clientBlocks, + t, + } + + const clipboardResult = clipboardPaste(pasteArgs) + + if (typeof clipboardResult === 'string') { + toast.error(clipboardResult) + } + }, + [clientBlocks, getFields, path, replaceState, setModified, t], + ) + + const pasteBlocks = useCallback( + (dataFromClipboard: ClipboardPasteData) => { + const formState = { ...getFields() } + const newState = mergeFormStateFromClipboard({ + dataFromClipboard, + formState, + path, + }) + replaceState(newState) + setModified(true) + }, + [getFields, path, replaceState, setModified], + ) + const hasMaxRows = maxRows && rows.length >= maxRows const fieldErrorCount = errorPaths.length @@ -225,28 +327,47 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { )} - {rows.length > 0 && ( -
    -
  - -
  -
  -
  -
- )} +
    + {rows.length > 0 && ( +
  + +
  +
  +
  +
    + )} +
  + 0)} + disablePaste={readOnly} + getDataToCopy={() => + reduceFormStateByPath({ + formState: { ...getFields() }, + path, + }) + } + onPaste={pasteBlocks} + path={path} + type={type} + /> +
  +
{ addRow={addRow} block={blockConfig} blocks={blockReferences ?? blocks} + copyRow={copyRow} duplicateRow={duplicateRow} errorCount={rowErrorCount} fields={blockConfig.fields} @@ -298,6 +420,7 @@ const BlocksFieldComponent: BlocksFieldClientComponent = (props) => { labels={labels} moveRow={moveRow} parentPath={path} + pasteRow={pasteRow} path={rowPath} permissions={permissions} readOnly={readOnly || disabled} diff --git a/test/access-control/e2e.spec.ts b/test/access-control/e2e.spec.ts index 41a9469b2a..f9d05c195c 100644 --- a/test/access-control/e2e.spec.ts +++ b/test/access-control/e2e.spec.ts @@ -729,7 +729,7 @@ describe('Access Control', () => { await page.locator('#field-unnamedTab').fill('unnamed tab') // array field - await page.locator('#field-array button').click() + await page.locator('#field-array > button').click() await page.locator('#field-array__0__text').fill('array row 0') await saveDocAndAssert(page) diff --git a/test/fields/collections/Array/e2e.spec.ts b/test/fields/collections/Array/e2e.spec.ts index 29bf43ce80..c5f1c27b1b 100644 --- a/test/fields/collections/Array/e2e.spec.ts +++ b/test/fields/collections/Array/e2e.spec.ts @@ -3,6 +3,7 @@ import type { Page } from '@playwright/test' import { expect, test } from '@playwright/test' import { assertToastErrors } from 'helpers/assertToastErrors.js' +import { copyPasteField } from 'helpers/e2e/copyPasteField.js' import { toggleBlockOrArrayRow } from 'helpers/e2e/toggleCollapsible.js' import path from 'path' import { wait } from 'payload/shared' @@ -439,4 +440,236 @@ describe('Array', () => { expect(await field.count()).toEqual(0) }) }) + + describe('copy paste', () => { + test('should prevent copying an empty array field', async () => { + await page.goto(url.create) + const arrayFieldPopupBtn = page.locator( + '#field-collapsedArray .popup.clipboard-action__popup button.popup-button', + ) + await arrayFieldPopupBtn.click() + const disabledCopyBtn = page.locator( + '#field-collapsedArray .popup.clipboard-action__popup .popup__content div.popup-button-list__disabled:has-text("Copy Field")', + ) + await expect(disabledCopyBtn).toBeVisible() + }) + + test('should prevent pasting into readonly array field', async () => { + await page.goto(url.create) + await copyPasteField({ + fieldName: 'readOnly', + page, + }) + const popupBtn = page.locator( + '#field-readOnly .popup.clipboard-action__popup button.popup-button', + ) + await expect(popupBtn).toBeVisible() + await popupBtn.click() + const disabledPasteBtn = page.locator( + '#field-readOnly .popup.clipboard-action__popup .popup__content div.popup-button-list__disabled:has-text("Paste Field")', + ) + await expect(disabledPasteBtn).toBeVisible() + }) + + test('should prevent pasting into array field with different schema', async () => { + await page.goto(url.create) + await copyPasteField({ + fieldName: 'readOnly', + page, + }) + await copyPasteField({ + fieldName: 'items', + page, + action: 'paste', + }) + const pasteErrorToast = page + .locator('.payload-toast-item.toast-error') + .filter({ hasText: 'Invalid clipboard data.' 
}) + await expect(pasteErrorToast).toBeVisible() + }) + + test('should copy and paste array fields', async () => { + await page.goto(url.create) + const arrayField = page.locator('#field-items') + const row = arrayField.locator('#items-row-0') + const rowTextInput = row.locator('#field-items__0__text') + + const textVal = 'row one copy' + await rowTextInput.fill(textVal) + + await copyPasteField({ + page, + fieldName: 'items', + }) + + await page.reload() + + await expect(rowTextInput).toHaveValue('row one') + + await copyPasteField({ + page, + action: 'paste', + fieldName: 'items', + }) + + await expect(rowTextInput).toHaveValue(textVal) + }) + + test('should copy and paste array rows', async () => { + await page.goto(url.create) + const arrayField = page.locator('#field-items') + const row = arrayField.locator('#items-row-0') + const rowTextInput = row.locator('#field-items__0__text') + + const textVal = 'row one copy' + await rowTextInput.fill(textVal) + + await copyPasteField({ + page, + fieldName: 'items', + rowIndex: 0, + }) + + await page.reload() + + await expect(rowTextInput).toHaveValue('row one') + + await copyPasteField({ + page, + action: 'paste', + fieldName: 'items', + rowIndex: 0, + }) + + await expect(rowTextInput).toHaveValue(textVal) + }) + + test('should copy an array row and paste into a field with the same schema', async () => { + await page.goto(url.create) + + await copyPasteField({ + page, + fieldName: 'localized', + rowIndex: 0, + }) + + await copyPasteField({ + page, + fieldName: 'disableSort', + action: 'paste', + }) + + const rowsContainer = page + .locator('#field-disableSort > div.array-field__draggable-rows') + .first() + await expect(rowsContainer).toBeVisible() + const rowTextInput = rowsContainer.locator('#field-disableSort__0__text') + await expect(rowTextInput).toHaveValue('row one') + }) + + test('should copy an array field and paste into a row with the same schema', async () => { + await page.goto(url.create) + + await copyPasteField({ + page, + fieldName: 'localized', + }) + + const field = page.locator('#field-disableSort') + const addArrayBtn = field + .locator('button.array-field__add-row') + .filter({ hasText: 'Add Disable Sort' }) + await expect(addArrayBtn).toBeVisible() + await addArrayBtn.click() + + const row = field.locator('#disableSort-row-0') + await expect(row).toBeVisible() + + await copyPasteField({ page, action: 'paste', fieldName: 'disableSort' }) + + const rowsContainer = page + .locator('#field-disableSort > div.array-field__draggable-rows') + .first() + await expect(rowsContainer).toBeVisible() + const rowTextInput = rowsContainer.locator('#field-disableSort__0__text') + await expect(rowTextInput).toHaveValue('row one') + }) + + test('should correctly paste a row with nested arrays into a row with no children', async () => { + await page.goto(url.create) + + const field = page.locator('#field-items') + const addSubArrayBtn = field.locator( + '#field-items__0__subArray > button.array-field__add-row', + ) + await addSubArrayBtn.click() + + const textInputRowOne = field.locator('#field-items__0__subArray__0__text') + await expect(textInputRowOne).toBeVisible() + + const textInputRowOneValue = 'sub array row one' + await textInputRowOne.fill(textInputRowOneValue) + + await copyPasteField({ + page, + fieldName: 'items', + rowIndex: 0, + }) + + await copyPasteField({ + page, + fieldName: 'items', + rowIndex: 1, + action: 'paste', + }) + + const textInputRowTwo = field.locator('#field-items__1__subArray__0__text') + await 
expect(textInputRowTwo).toBeVisible() + await expect(textInputRowTwo).toHaveValue(textInputRowOneValue) + }) + + test('should replace the rows of a nested array field with those of its paste counterpart', async () => { + await page.goto(url.create) + + const field = page.locator('#field-items') + + const addSubArrayBtn = field.locator( + '#field-items__0__subArray > button.array-field__add-row', + ) + await expect(addSubArrayBtn).toBeVisible() + await addSubArrayBtn.click() + await addSubArrayBtn.click() + + const addSubArrayBtn2 = field.locator( + '#field-items__1__subArray > button.array-field__add-row', + ) + await expect(addSubArrayBtn2).toBeVisible() + await addSubArrayBtn2.click() + + const subArrayContainer = field.locator( + '#field-items__0__subArray > div.array-field__draggable-rows > div', + ) + const subArrayContainer2 = field.locator( + '#field-items__1__subArray > div.array-field__draggable-rows > div', + ) + await expect(subArrayContainer).toHaveCount(2) + await expect(subArrayContainer2).toHaveCount(1) + + await copyPasteField({ + page, + fieldName: 'items', + rowIndex: 1, + }) + + await copyPasteField({ + page, + fieldName: 'items', + rowIndex: 0, + action: 'paste', + }) + + await expect(subArrayContainer).toHaveCount(1) + await expect(subArrayContainer2).toHaveCount(1) + }) + }) }) diff --git a/test/fields/collections/Blocks/e2e.spec.ts b/test/fields/collections/Blocks/e2e.spec.ts index 5a2da4cd34..16fc3e14f1 100644 --- a/test/fields/collections/Blocks/e2e.spec.ts +++ b/test/fields/collections/Blocks/e2e.spec.ts @@ -2,6 +2,7 @@ import type { BrowserContext, Page } from '@playwright/test' import { expect, test } from '@playwright/test' import { addBlock } from 'helpers/e2e/addBlock.js' +import { copyPasteField } from 'helpers/e2e/copyPasteField.js' import { openBlocksDrawer } from 'helpers/e2e/openBlocksDrawer.js' import { reorderBlocks } from 'helpers/e2e/reorderBlocks.js' import { scrollEntirePage } from 'helpers/e2e/scrollEntirePage.js' @@ -502,4 +503,223 @@ describe('Block fields', () => { await expect(groupLabel).toHaveText('Group in en') }) }) + + describe('copy paste', () => { + test('should prevent copying an empty block field', async () => { + await page.goto(url.create) + const popupBtn = page.locator( + '#field-i18nBlocks .popup.clipboard-action__popup button.popup-button', + ) + await popupBtn.click() + const disabledCopyBtn = page.locator( + '#field-i18nBlocks .popup.clipboard-action__popup .popup__content div.popup-button-list__disabled:has-text("Copy Field")', + ) + await expect(disabledCopyBtn).toBeVisible() + }) + + test('should prevent pasting into readonly block field', async () => { + await page.goto(url.create) + await copyPasteField({ + fieldName: 'readOnly', + page, + }) + const popupBtn = page.locator( + '#field-readOnly .popup.clipboard-action__popup button.popup-button', + ) + await expect(popupBtn).toBeVisible() + await popupBtn.click() + const disabledPasteBtn = page.locator( + '#field-readOnly .popup.clipboard-action__popup .popup__content div.popup-button-list__disabled:has-text("Paste Field")', + ) + await expect(disabledPasteBtn).toBeVisible() + }) + + test('should prevent pasting into block field with different schema', async () => { + await page.goto(url.create) + await copyPasteField({ + fieldName: 'readOnly', + page, + }) + await copyPasteField({ + fieldName: 'groupedBlocks', + page, + action: 'paste', + }) + const pasteErrorToast = page + .locator('.payload-toast-item.toast-error') + .filter({ hasText: 'Invalid clipboard data.' 
}) + await expect(pasteErrorToast).toBeVisible() + }) + + test('should copy and paste block fields', async () => { + await page.goto(url.create) + const field = page.locator('#field-blocks') + const row = field.locator('#blocks-row-0') + const rowTextInput = row.locator('#field-blocks__0__text') + + const textVal = 'row one copy' + await rowTextInput.fill(textVal) + + await copyPasteField({ + page, + fieldName: 'blocks', + }) + + await page.reload() + + await expect(rowTextInput).toHaveValue('first block') + + await copyPasteField({ + page, + action: 'paste', + fieldName: 'blocks', + }) + + await expect(rowTextInput).toHaveValue(textVal) + }) + + test('should copy and paste block rows', async () => { + await page.goto(url.create) + const field = page.locator('#field-blocks') + const row = field.locator('#blocks-row-0') + const rowTextInput = row.locator('#field-blocks__0__text') + + const textVal = 'row one copy' + await rowTextInput.fill(textVal) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 0, + }) + + await page.reload() + + await expect(rowTextInput).toHaveValue('first block') + + await copyPasteField({ + page, + action: 'paste', + fieldName: 'blocks', + rowIndex: 0, + }) + + await expect(rowTextInput).toHaveValue(textVal) + }) + + test('should copy a block row and paste into a field with the same schema', async () => { + await page.goto(url.create) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 1, + }) + + await copyPasteField({ + page, + fieldName: 'duplicate', + action: 'paste', + }) + + const rowsContainer = page.locator('#field-duplicate > div.blocks-field__rows').first() + await expect(rowsContainer).toBeVisible() + const rowTextInput = rowsContainer.locator('#field-duplicate__0__number') + await expect(rowTextInput).toHaveValue('342') + }) + + test('should copy a block field and paste into a row with the same schema', async () => { + await page.goto(url.create) + + const originalField = page.locator('#field-blocks') + const originalRow = originalField.locator('#blocks-row-0') + const originalInput = originalRow.locator('#field-blocks__0__text') + + const textVal = 'row one copy' + await originalInput.fill(textVal) + + await copyPasteField({ + page, + fieldName: 'blocks', + }) + + const field = page.locator('#field-duplicate') + const fieldInput = field.locator('#field-duplicate__0__text') + await expect(fieldInput).toHaveValue('first block') + + await copyPasteField({ page, action: 'paste', fieldName: 'duplicate', rowIndex: 0 }) + + const rowsContainer = page.locator('#field-duplicate > div.blocks-field__rows').first() + await expect(rowsContainer).toBeVisible() + const rowTextInput = rowsContainer.locator('#field-duplicate__0__text') + await expect(rowTextInput).toHaveValue('row one copy') + }) + + test('should correctly paste a row with nested blocks into a row with no children', async () => { + await page.goto(url.create) + + const field = page.locator('#field-blocks') + await addBlock({ page, fieldName: 'blocks', blockToSelect: 'Sub Block' }) + + const textInputRowOne = field.locator('#field-blocks__2__subBlocks__1__text') + await expect(textInputRowOne).toBeVisible() + + const textInputRowOneValue = 'copied second sub block' + await textInputRowOne.fill(textInputRowOneValue) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 2, + }) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 4, + action: 'paste', + }) + + const textInputRowTwo = field.locator('#field-blocks__4__subBlocks__1__text') 
+ await expect(textInputRowTwo).toBeVisible() + await expect(textInputRowTwo).toHaveValue(textInputRowOneValue) + }) + + test('should replace the rows of a nested block field with those of its paste counterpart', async () => { + await page.goto(url.create) + + await addBlock({ + page, + fieldName: 'blocks', + blockToSelect: 'Sub Block', + }) + + const field = page.locator('#field-blocks') + + const subArrayContainer = field.locator( + '#field-blocks__2__subBlocks > div.blocks-field__rows > div', + ) + const subArrayContainer2 = field.locator( + '#field-blocks__4__subBlocks > div.blocks-field__rows > div', + ) + await expect(subArrayContainer).toHaveCount(2) + await expect(subArrayContainer2).toHaveCount(0) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 4, + }) + + await copyPasteField({ + page, + fieldName: 'blocks', + rowIndex: 2, + action: 'paste', + }) + + await expect(subArrayContainer).toHaveCount(0) + await expect(subArrayContainer2).toHaveCount(0) + }) + }) }) diff --git a/test/fields/collections/Blocks/index.ts b/test/fields/collections/Blocks/index.ts index 491d2d7bc7..f058aa75c3 100644 --- a/test/fields/collections/Blocks/index.ts +++ b/test/fields/collections/Blocks/index.ts @@ -495,6 +495,31 @@ const BlockFields: CollectionConfig = { }, ], }, + { + name: 'readOnly', + type: 'blocks', + admin: { + readOnly: true, + }, + defaultValue: [ + { + blockType: 'readOnlyBlock', + title: 'readOnly', + }, + ], + blocks: [ + { + slug: 'readOnlyBlock', + fields: [ + { + type: 'text', + name: 'title', + defaultValue: 'readOnly', + }, + ], + }, + ], + }, ], } diff --git a/test/helpers/e2e/copyPasteField.ts b/test/helpers/e2e/copyPasteField.ts new file mode 100644 index 0000000000..b633b75e9d --- /dev/null +++ b/test/helpers/e2e/copyPasteField.ts @@ -0,0 +1,44 @@ +import type { Page } from '@playwright/test' + +import { expect } from '@playwright/test' +import { wait } from 'payload/shared' + +export async function copyPasteField({ + fieldName, + rowIndex, + page, + action = 'copy', +}: { + action?: 'copy' | 'paste' + fieldName: string + page: Page + rowIndex?: number +}) { + const isCopy = action === 'copy' + const field = page.locator(`#field-${fieldName}`) + const rowAction = typeof rowIndex === 'number' + await expect(field).toBeVisible() + + if (rowAction) { + await wait(1000) + } + + const popupBtnSelector = rowAction + ? `#${fieldName}-row-${rowIndex} .collapsible__actions button.array-actions__button` + : 'header .clipboard-action__popup button.popup-button' + const popupBtn = field.locator(popupBtnSelector).first() + await expect(popupBtn).toBeVisible() + await popupBtn.click() + + const actionBtnSelector = rowAction + ? `#${fieldName}-row-${rowIndex} .popup__content .popup-button-list button.array-actions__${action}` + : `.popup.clipboard-action__popup .popup__content .popup-button-list button:has-text("${isCopy ? 
'Copy' : 'Paste'} Field")` + const actionBtn = field.locator(actionBtnSelector).first() + await expect(actionBtn).toBeVisible() + await actionBtn.click() + + if (isCopy) { + const copySuccessToast = page.locator('.payload-toast-item.toast-success') + await expect(copySuccessToast).toBeVisible() + } +} diff --git a/test/joins/e2e.spec.ts b/test/joins/e2e.spec.ts index 3928426a35..9aaf8edd6e 100644 --- a/test/joins/e2e.spec.ts +++ b/test/joins/e2e.spec.ts @@ -349,7 +349,7 @@ describe('Join Field', () => { await editButton.click() const drawer = page.locator('[id^=doc-drawer_posts_1_]') await expect(drawer).toBeVisible() - const popupButton = drawer.locator('button.popup-button') + const popupButton = drawer.locator('.doc-controls__popup button.popup-button') await expect(popupButton).toBeVisible() await popupButton.click() const deleteButton = drawer.locator('#action-delete') diff --git a/test/localization/e2e.spec.ts b/test/localization/e2e.spec.ts index fc34f0002a..ddb0dbf175 100644 --- a/test/localization/e2e.spec.ts +++ b/test/localization/e2e.spec.ts @@ -440,7 +440,9 @@ describe('Localization', () => { await addBlock.click() const selectBlock = page.locator('.blocks-drawer__block button') await selectBlock.click() - const addContentButton = page.locator('#field-content__0__content button') + const addContentButton = page + .locator('#field-content__0__content') + .getByRole('button', { name: 'Add Content' }) await addContentButton.click() await selectBlock.click() const textField = page.locator('#field-content__0__content__0__text') From a7a05012fbe09e22840159d05fd9d535af4ed8d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20Jablo=C3=B1ski?= <43938777+GermanJablo@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:39:02 +0100 Subject: [PATCH 002/143] feat(next): add redirect from `${adminRoute}/collections` to `${adminRoute}` (#13061) Occasionally, I find myself on a URL like `https://domain.com/admin/collections/myCollection/docId` and I modify the URL with the intention of going to the admin panel, but I shorten it in the wrong place: `https://domain.com/admin/collections`. The confusion arises because the admin panel basically displays the collections. I think this redirect is a subtle but nice touch, since `/collections` is a URL that doesn't exist. 
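For quick reference, the guard this commit adds boils down to the sketch below. The helper name `redirectBareCollectionsRoute` is made up for illustration only; `getCustomViewByRoute`, `redirect`, and the `{ config, currentRoute }` / `{ viewKey }` shapes are the ones used in the patch. It only falls back to the dashboard when no custom view claims the bare `/collections` route.

```ts
import type { SanitizedConfig } from 'payload'

import { redirect } from 'next/navigation'

import { getCustomViewByRoute } from './getCustomViewByRoute.js'

// Sketch only: bounce `${adminRoute}/collections` back to the dashboard unless a
// custom view has been registered for the bare `/collections` path.
export const redirectBareCollectionsRoute = ({
  adminRoute,
  config,
  segments,
}: {
  adminRoute: string
  config: SanitizedConfig
  segments: string[]
}): void => {
  if (segments.length === 1 && segments[0] === 'collections') {
    const { viewKey } = getCustomViewByRoute({ config, currentRoute: '/collections' })

    // Only redirect if there's NO custom view configured for /collections
    if (!viewKey) {
      redirect(adminRoute)
    }
  }
}
```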
EDIT: now I'm doing also the same thing for `/globals` --- packages/next/src/views/Root/index.tsx | 27 ++++++++++++++++++++++++++ test/admin-root/e2e.spec.ts | 7 +++++++ test/admin/config.ts | 4 ++++ test/admin/e2e/general/e2e.spec.ts | 13 +++++++++++++ 4 files changed, 51 insertions(+) diff --git a/packages/next/src/views/Root/index.tsx b/packages/next/src/views/Root/index.tsx index be6a4c9567..7a32e7d42a 100644 --- a/packages/next/src/views/Root/index.tsx +++ b/packages/next/src/views/Root/index.tsx @@ -17,6 +17,7 @@ import React from 'react' import { DefaultTemplate } from '../../templates/Default/index.js' import { MinimalTemplate } from '../../templates/Minimal/index.js' import { initPage } from '../../utilities/initPage/index.js' +import { getCustomViewByRoute } from './getCustomViewByRoute.js' import { getRouteData } from './getRouteData.js' export type GenerateViewMetadata = (args: { @@ -62,6 +63,32 @@ export const RootPage = async ({ const searchParams = await searchParamsPromise + // Redirect `${adminRoute}/collections` to `${adminRoute}` + if (segments.length === 1 && segments[0] === 'collections') { + const { viewKey } = getCustomViewByRoute({ + config, + currentRoute: '/collections', + }) + + // Only redirect if there's NO custom view configured for /collections + if (!viewKey) { + redirect(adminRoute) + } + } + + // Redirect `${adminRoute}/globals` to `${adminRoute}` + if (segments.length === 1 && segments[0] === 'globals') { + const { viewKey } = getCustomViewByRoute({ + config, + currentRoute: '/globals', + }) + + // Only redirect if there's NO custom view configured for /globals + if (!viewKey) { + redirect(adminRoute) + } + } + const { browseByFolderSlugs, DefaultView, diff --git a/test/admin-root/e2e.spec.ts b/test/admin-root/e2e.spec.ts index 99cf45c2ce..dd4a65a24c 100644 --- a/test/admin-root/e2e.spec.ts +++ b/test/admin-root/e2e.spec.ts @@ -64,6 +64,13 @@ test.describe('Admin Panel (Root)', () => { // }) // }) + test('should redirect `${adminRoute}/collections` to `${adminRoute}', async () => { + const collectionsURL = `${url.admin}/collections` + await page.goto(collectionsURL) + // Should redirect to dashboard + await expect.poll(() => page.url()).toBe(`${url.admin}`) + }) + test('renders admin panel at root', async () => { await page.goto(url.admin) const pageURL = page.url() diff --git a/test/admin/config.ts b/test/admin/config.ts index 44537855ef..52d1d84e4e 100644 --- a/test/admin/config.ts +++ b/test/admin/config.ts @@ -83,6 +83,10 @@ export default buildConfigWithDefaults({ views: { // Dashboard: CustomDashboardView, // Account: CustomAccountView, + collections: { + Component: '/components/views/CustomView/index.js#CustomView', + path: '/collections', + }, CustomDefaultView: { Component: '/components/views/CustomDefault/index.js#CustomDefaultView', path: '/custom-default-view', diff --git a/test/admin/e2e/general/e2e.spec.ts b/test/admin/e2e/general/e2e.spec.ts index cd61c646d4..277b0614ff 100644 --- a/test/admin/e2e/general/e2e.spec.ts +++ b/test/admin/e2e/general/e2e.spec.ts @@ -358,6 +358,19 @@ describe('General', () => { const response = await page.goto(customLogoutRouteURL) expect(response.status() !== 404).toBeTruthy() }) + + test('should not redirect `${adminRoute}/collections` to `${adminRoute} if there is a custom view', async () => { + const collectionsURL = `${serverURL}/admin/collections` + await page.goto(collectionsURL) + await expect(page.getByText('Custom View').first()).toBeVisible() + }) + + test('should redirect `${adminRoute}/globals` to 
`${adminRoute}', async () => { + const globalsURL = `${serverURL}/admin/globals` + await page.goto(globalsURL) + // Should redirect to dashboard + await expect.poll(() => page.url()).toBe(`${serverURL}/admin`) + }) }) describe('navigation', () => { From 1c6a79bb5708d652579bfb91faf70d505ad18356 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:42:06 -0400 Subject: [PATCH 003/143] fix(plugin-import-export): sync field select dropdown with form value (#13103) ### What? Fixes a sync issue between the "Fields to Export" `` dropdown and the underlying form state in the import-export plugin. ### Why? Previously, the dropdown displayed outdated selections until an extra click occurred. This was caused by an unnecessary `useState` (`displayedValue`) that fell out of sync with the `useField` form value. ### How? - Removed the separate `displayedValue` state - Derived the selected values directly from the form field value using inline mapping --- .../src/components/FieldsToExport/index.tsx | 35 +++++++------------ 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx index e3c32e1870..fda7a26896 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx @@ -11,7 +11,7 @@ import { useField, usePreferences, } from '@payloadcms/ui' -import React, { useEffect, useState } from 'react' +import React, { useEffect } from 'react' import { useImportExport } from '../ImportExportProvider/index.js' import { reduceFields } from './reduceFields.js' @@ -25,28 +25,10 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { const { getEntityConfig } = useConfig() const { collection } = useImportExport() const { getPreference } = usePreferences() - const [displayedValue, setDisplayedValue] = useState< - { id: string; label: ReactNode; value: string }[] - >([]) const collectionConfig = getEntityConfig({ collectionSlug: collectionSlug ?? collection }) const fieldOptions = reduceFields({ fields: collectionConfig?.fields }) - useEffect(() => { - if (value && value.length > 0) { - setDisplayedValue((prevDisplayedValue) => { - if (prevDisplayedValue.length > 0) { - return prevDisplayedValue - } // Prevent unnecessary updates - - return value.map((field) => { - const match = fieldOptions.find((option) => option.value === field) - return match ? { ...match, id: field } : { id: field, label: field, value: field } - }) - }) - } - }, [value, fieldOptions]) - useEffect(() => { if (id || !collectionSlug) { return @@ -70,16 +52,18 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { id, collectionConfig?.admin?.defaultColumns, ]) + const onChange = (options: { id: string; label: ReactNode; value: string }[]) => { if (!options) { setValue([]) return } - const updatedValue = options?.map((option) => + + const updatedValue = options.map((option) => typeof option === 'object' ? option.value : option, ) + setValue(updatedValue) - setDisplayedValue(options) } return ( @@ -96,7 +80,14 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { // @ts-expect-error react select option onChange={onChange} options={fieldOptions} - value={displayedValue} + value={ + Array.isArray(value) + ? 
value.map((val) => { + const match = fieldOptions.find((opt) => opt.value === val) + return match ? { ...match, id: val } : { id: val, label: val, value: val } + }) + : [] + } /> ) From e99c67f5f92643c8d670ad185f57b927fdc4f82a Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 9 Jul 2025 12:42:26 -0700 Subject: [PATCH 004/143] fix: ensure we perform ssrf check within dispatcher (#13078) Previously, we were performing this check before calling the fetch function. This changes it to perform the check within the dispatcher. It adjusts the int tests to both trigger the dispatcher lookup function (which is only triggered when not already passing a valid IP) and the check before calling fetch --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210733180484570 --- packages/payload/src/index.ts | 6 +- packages/payload/src/uploads/safeFetch.ts | 69 ++++++++++++++--------- test/uploads/int.spec.ts | 42 +++++++++++++- 3 files changed, 85 insertions(+), 32 deletions(-) diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 9d293a4234..b768b4bde0 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -156,7 +156,6 @@ export { extractAccessFromPermission } from './auth/extractAccessFromPermission. export { getAccessResults } from './auth/getAccessResults.js' export { getFieldsToSign } from './auth/getFieldsToSign.js' export { getLoginOptions } from './auth/getLoginOptions.js' - export interface GeneratedTypes { authUntyped: { [slug: string]: { @@ -1536,9 +1535,10 @@ export { importHandlerPath } from './queues/operations/runJobs/runJob/importHand export { getLocalI18n } from './translations/getLocalI18n.js' export * from './types/index.js' export { getFileByPath } from './uploads/getFileByPath.js' +export { _internal_safeFetchGlobal } from './uploads/safeFetch.js' export type * from './uploads/types.js' -export { addDataAndFileToRequest } from './utilities/addDataAndFileToRequest.js' +export { addDataAndFileToRequest } from './utilities/addDataAndFileToRequest.js' export { addLocalesToRequestFromData, sanitizeLocales } from './utilities/addLocalesToRequest.js' export { commitTransaction } from './utilities/commitTransaction.js' export { @@ -1610,8 +1610,8 @@ export { deleteCollectionVersions } from './versions/deleteCollectionVersions.js export { appendVersionToQueryKey } from './versions/drafts/appendVersionToQueryKey.js' export { getQueryDraftsSort } from './versions/drafts/getQueryDraftsSort.js' export { enforceMaxVersions } from './versions/enforceMaxVersions.js' -export { getLatestCollectionVersion } from './versions/getLatestCollectionVersion.js' +export { getLatestCollectionVersion } from './versions/getLatestCollectionVersion.js' export { getLatestGlobalVersion } from './versions/getLatestGlobalVersion.js' export { saveVersion } from './versions/saveVersion.js' export type { SchedulePublishTaskInput } from './versions/schedule/types.js' diff --git a/packages/payload/src/uploads/safeFetch.ts b/packages/payload/src/uploads/safeFetch.ts index ee54bac3bd..4d0787e1c4 100644 --- a/packages/payload/src/uploads/safeFetch.ts +++ b/packages/payload/src/uploads/safeFetch.ts @@ -1,9 +1,16 @@ -import type { Dispatcher } from 'undici' +import type { LookupFunction } from 'net' -import { lookup } from 'dns/promises' +import { lookup } from 'dns' import ipaddr from 'ipaddr.js' import { Agent, fetch as undiciFetch } from 'undici' +/** + * @internal this is used to mock the IP `lookup` 
function in integration tests + */ +export const _internal_safeFetchGlobal = { + lookup, +} + const isSafeIp = (ip: string) => { try { if (!ip) { @@ -25,32 +32,31 @@ const isSafeIp = (ip: string) => { return true } -/** - * Checks if a hostname or IP address is safe to fetch from. - * @param hostname a hostname or IP address - * @returns - */ -const isSafe = async (hostname: string) => { - try { - if (ipaddr.isValid(hostname)) { - return isSafeIp(hostname) +const ssrfFilterInterceptor: LookupFunction = (hostname, options, callback) => { + _internal_safeFetchGlobal.lookup(hostname, options, (err, address, family) => { + if (err) { + callback(err, address, family) + } else { + let ips = [] as string[] + if (Array.isArray(address)) { + ips = address.map((a) => a.address) + } else { + ips = [address] + } + + if (ips.some((ip) => !isSafeIp(ip))) { + callback(new Error(`Blocked unsafe attempt to ${hostname}`), address, family) + return + } + + callback(null, address, family) } - - const { address } = await lookup(hostname) - return isSafeIp(address) - } catch (_ignore) { - return false - } + }) } -const ssrfFilterInterceptor: Dispatcher.DispatcherComposeInterceptor = (dispatch) => { - return (opts, handler) => { - return dispatch(opts, handler) - } -} - -const safeDispatcher = new Agent().compose(ssrfFilterInterceptor) - +const safeDispatcher = new Agent({ + connect: { lookup: ssrfFilterInterceptor }, +}) /** * A "safe" version of undici's fetch that prevents SSRF attacks. * @@ -64,11 +70,18 @@ export const safeFetch = async (...args: Parameters) => { try { const url = new URL(unverifiedUrl) - const isHostnameSafe = await isSafe(url.hostname) - if (!isHostnameSafe) { - throw new Error(`Blocked unsafe attempt to ${url.toString()}`) + let hostname = url.hostname + + // Strip brackets from IPv6 addresses (e.g., "[::1]" => "::1") + if (hostname.startsWith('[') && hostname.endsWith(']')) { + hostname = hostname.slice(1, -1) } + if (ipaddr.isValid(hostname)) { + if (!isSafeIp(hostname)) { + throw new Error(`Blocked unsafe attempt to ${hostname}`) + } + } return await undiciFetch(url, { ...options, dispatcher: safeDispatcher, diff --git a/test/uploads/int.spec.ts b/test/uploads/int.spec.ts index e7e51381ef..ecebd7bfc8 100644 --- a/test/uploads/int.spec.ts +++ b/test/uploads/int.spec.ts @@ -3,7 +3,7 @@ import type { CollectionSlug, Payload } from 'payload' import { randomUUID } from 'crypto' import fs from 'fs' import path from 'path' -import { getFileByPath } from 'payload' +import { _internal_safeFetchGlobal, getFileByPath } from 'payload' import { fileURLToPath } from 'url' import { promisify } from 'util' @@ -575,6 +575,46 @@ describe('Collections - Uploads', () => { `( 'should block or filter uploading from $collection with URL: $url', async ({ url, collection, errorContains }) => { + const globalCachedFn = _internal_safeFetchGlobal.lookup + + let hostname = new URL(url).hostname + + const isIPV6 = hostname.includes('::') + + // Strip brackets from IPv6 addresses + // eslint-disable-next-line jest/no-conditional-in-test + if (isIPV6) { + hostname = hostname.slice(1, -1) + } + + // Here we're essentially mocking our own DNS provider, to get 'https://www.payloadcms.com/test.png' to resolve to the IP + // we'd like to test for + // @ts-expect-error this does not need to be mocked 100% correctly + _internal_safeFetchGlobal.lookup = (_hostname, _options, callback) => { + // eslint-disable-next-line jest/no-conditional-in-test + callback(null, hostname as any, isIPV6 ? 
6 : 4) + } + + await expect( + payload.create({ + collection, + data: { + filename: 'test.png', + // Need to pass a domain for lookup to be called. We monkey patch the IP lookup function above + // to return the IP address we want to test. + url: 'https://www.payloadcms.com/test.png', + }, + }), + ).rejects.toThrow( + expect.objectContaining({ + name: 'FileRetrievalError', + message: expect.stringContaining(errorContains), + }), + ) + + _internal_safeFetchGlobal.lookup = globalCachedFn + + // Now ensure this throws if we pass the IP address directly, without the mock await expect( payload.create({ collection, From 0806ee17620caa9f21ba652ada3cdec0c0c54ef6 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:44:22 -0400 Subject: [PATCH 005/143] fix(plugin-import-export): selectionToUse field to dynamically show valid export options (#13092) ### What? Updated the `selectionToUse` export field to properly render a radio group with dynamic options based on current selection state and applied filters. - Fixed an edge case where `currentFilters` would appear as an option even when the `where` clause was empty (e.g. `{ or: [] }`). ### Why? Previously, the `selectionToUse` field displayed all options (current selection, current filters, all documents) regardless of context. This caused confusion when only one of them was applicable. ### How? - Added a custom field component that dynamically computes available options based on: - Current filters from `useListQuery` - Selection state from `useSelection` - Injected the dynamic `field` prop into `RadioGroupField` to enable rendering. - Ensured the `where` field updates automatically in sync with the selected radio. - Added `isWhereEmpty` utility to avoid showing `currentFilters` when `query.where` contains no meaningful conditions (e.g. `{ or: [] }`). 
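To make that edge case concrete, the emptiness check boils down to the sketch below (condensed from the `isWhereEmpty` helper in the diff that follows; the `title` filter is just a placeholder). A `where` made up only of an empty `and`/`or` wrapper is treated the same as no filters at all:

```ts
import type { Where } from 'payload'

// Condensed sketch of the check: a clause is "empty" when it is missing, has no
// keys, or only wraps an empty `and` / `or` array.
const isWhereEmpty = (where?: Where): boolean => {
  if (!where || typeof where !== 'object') {
    return true
  }
  if (Array.isArray(where.and)) {
    return where.and.length === 0
  }
  if (Array.isArray(where.or)) {
    return where.or.length === 0
  }
  return Object.keys(where).length === 0
}

isWhereEmpty({ or: [] }) // true  -> "Current filters" is not offered as an option
isWhereEmpty({ or: [{ title: { equals: 'Hello' } }] }) // false -> the option is shown
```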
--- .../components/SelectionToUseField/index.tsx | 130 ++++++++++++++++++ .../src/components/SortBy/index.tsx | 2 +- .../src/components/WhereField/index.scss | 0 .../src/components/WhereField/index.tsx | 72 ---------- .../src/export/getFields.ts | 21 +-- .../plugin-import-export/src/exports/rsc.ts | 2 +- 6 files changed, 145 insertions(+), 82 deletions(-) create mode 100644 packages/plugin-import-export/src/components/SelectionToUseField/index.tsx delete mode 100644 packages/plugin-import-export/src/components/WhereField/index.scss delete mode 100644 packages/plugin-import-export/src/components/WhereField/index.tsx diff --git a/packages/plugin-import-export/src/components/SelectionToUseField/index.tsx b/packages/plugin-import-export/src/components/SelectionToUseField/index.tsx new file mode 100644 index 0000000000..9a6d359178 --- /dev/null +++ b/packages/plugin-import-export/src/components/SelectionToUseField/index.tsx @@ -0,0 +1,130 @@ +'use client' + +import type { Where } from 'payload' + +import { + RadioGroupField, + useDocumentInfo, + useField, + useListQuery, + useSelection, + useTranslation, +} from '@payloadcms/ui' +import React, { useEffect, useMemo } from 'react' + +const isWhereEmpty = (where: Where): boolean => { + if (!where || typeof where !== 'object') { + return true + } + + // Flatten one level of OR/AND wrappers + if (Array.isArray(where.and)) { + return where.and.length === 0 + } + if (Array.isArray(where.or)) { + return where.or.length === 0 + } + + return Object.keys(where).length === 0 +} + +export const SelectionToUseField: React.FC = () => { + const { id } = useDocumentInfo() + const { query } = useListQuery() + const { selectAll, selected } = useSelection() + const { t } = useTranslation() + + const { setValue: setSelectionToUseValue, value: selectionToUseValue } = useField({ + path: 'selectionToUse', + }) + + const { setValue: setWhere } = useField({ + path: 'where', + }) + + const hasMeaningfulFilters = query?.where && !isWhereEmpty(query.where) + + const availableOptions = useMemo(() => { + const options = [ + { + // @ts-expect-error - this is not correctly typed in plugins right now + label: t('plugin-import-export:selectionToUse-allDocuments'), + value: 'all', + }, + ] + + if (hasMeaningfulFilters) { + options.unshift({ + // @ts-expect-error - this is not correctly typed in plugins right now + label: t('plugin-import-export:selectionToUse-currentFilters'), + value: 'currentFilters', + }) + } + + if (['allInPage', 'some'].includes(selectAll)) { + options.unshift({ + // @ts-expect-error - this is not correctly typed in plugins right now + label: t('plugin-import-export:selectionToUse-currentSelection'), + value: 'currentSelection', + }) + } + + return options + }, [hasMeaningfulFilters, selectAll, t]) + + // Auto-set default + useEffect(() => { + if (id) { + return + } + + let defaultSelection: 'all' | 'currentFilters' | 'currentSelection' = 'all' + + if (['allInPage', 'some'].includes(selectAll)) { + defaultSelection = 'currentSelection' + } else if (query?.where) { + defaultSelection = 'currentFilters' + } + + setSelectionToUseValue(defaultSelection) + }, [id, selectAll, query?.where, setSelectionToUseValue]) + + // Sync where clause with selected option + useEffect(() => { + if (id) { + return + } + + if (selectionToUseValue === 'currentFilters' && query?.where) { + setWhere(query.where) + } else if (selectionToUseValue === 'currentSelection' && selected) { + const ids = [...selected.entries()].filter(([_, isSelected]) => isSelected).map(([id]) => id) + + 
setWhere({ id: { in: ids } }) + } else if (selectionToUseValue === 'all') { + setWhere({}) + } + }, [id, selectionToUseValue, query?.where, selected, setWhere]) + + // Hide component if no other options besides "all" are available + if (availableOptions.length <= 1) { + return null + } + + return ( + + ) +} diff --git a/packages/plugin-import-export/src/components/SortBy/index.tsx b/packages/plugin-import-export/src/components/SortBy/index.tsx index 7680ed82c1..8952ba4d95 100644 --- a/packages/plugin-import-export/src/components/SortBy/index.tsx +++ b/packages/plugin-import-export/src/components/SortBy/index.tsx @@ -46,7 +46,7 @@ export const SortBy: SelectFieldClientComponent = (props) => { if (option && (!displayedValue || displayedValue.value !== value)) { setDisplayedValue(option) } - }, [value, fieldOptions]) + }, [displayedValue, fieldOptions, value]) useEffect(() => { if (id || !query?.sort || value) { diff --git a/packages/plugin-import-export/src/components/WhereField/index.scss b/packages/plugin-import-export/src/components/WhereField/index.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/packages/plugin-import-export/src/components/WhereField/index.tsx b/packages/plugin-import-export/src/components/WhereField/index.tsx deleted file mode 100644 index 68f4daf653..0000000000 --- a/packages/plugin-import-export/src/components/WhereField/index.tsx +++ /dev/null @@ -1,72 +0,0 @@ -'use client' - -import type React from 'react' - -import { useDocumentInfo, useField, useListQuery, useSelection } from '@payloadcms/ui' -import { useEffect } from 'react' - -import './index.scss' - -export const WhereField: React.FC = () => { - const { setValue: setSelectionToUseValue, value: selectionToUseValue } = useField({ - path: 'selectionToUse', - }) - - const { setValue } = useField({ path: 'where' }) - const { selectAll, selected } = useSelection() - const { query } = useListQuery() - const { id } = useDocumentInfo() - - // setValue based on selectionToUseValue - useEffect(() => { - if (id) { - return - } - - if (selectionToUseValue === 'currentFilters' && query && query?.where) { - setValue(query.where) - } - - if (selectionToUseValue === 'currentSelection' && selected) { - const ids = [] - - for (const [key, value] of selected) { - if (value) { - ids.push(key) - } - } - - setValue({ - id: { - in: ids, - }, - }) - } - - if (selectionToUseValue === 'all' && selected) { - setValue({}) - } - - // Selected set a where query with IDs - }, [id, selectionToUseValue, query, selected, setValue]) - - // handles default value of selectionToUse - useEffect(() => { - if (id) { - return - } - let defaultSelection: 'all' | 'currentFilters' | 'currentSelection' = 'all' - - if (['allInPage', 'some'].includes(selectAll)) { - defaultSelection = 'currentSelection' - } - - if (defaultSelection === 'all' && query?.where) { - defaultSelection = 'currentFilters' - } - - setSelectionToUseValue(defaultSelection) - }, [id, query, selectAll, setSelectionToUseValue]) - - return null -} diff --git a/packages/plugin-import-export/src/export/getFields.ts b/packages/plugin-import-export/src/export/getFields.ts index c57d84b878..c76e7de9cb 100644 --- a/packages/plugin-import-export/src/export/getFields.ts +++ b/packages/plugin-import-export/src/export/getFields.ts @@ -132,12 +132,13 @@ export const getFields = (config: Config): Field[] => { ], }, { - // virtual field for the UI component to modify the hidden `where` field name: 'selectionToUse', type: 'radio', - defaultValue: 'all', - // @ts-expect-error - 
this is not correctly typed in plugins right now - label: ({ t }) => t('plugin-import-export:field-selectionToUse-label'), + admin: { + components: { + Field: '@payloadcms/plugin-import-export/rsc#SelectionToUseField', + }, + }, options: [ { // @ts-expect-error - this is not correctly typed in plugins right now @@ -155,7 +156,6 @@ export const getFields = (config: Config): Field[] => { value: 'all', }, ], - virtual: true, }, { name: 'fields', @@ -184,11 +184,16 @@ export const getFields = (config: Config): Field[] => { name: 'where', type: 'json', admin: { - components: { - Field: '@payloadcms/plugin-import-export/rsc#WhereField', - }, + hidden: true, }, defaultValue: {}, + hooks: { + beforeValidate: [ + ({ value }) => { + return value ?? {} + }, + ], + }, }, ], // @ts-expect-error - this is not correctly typed in plugins right now diff --git a/packages/plugin-import-export/src/exports/rsc.ts b/packages/plugin-import-export/src/exports/rsc.ts index 5072288925..9f2339b7bf 100644 --- a/packages/plugin-import-export/src/exports/rsc.ts +++ b/packages/plugin-import-export/src/exports/rsc.ts @@ -4,5 +4,5 @@ export { ExportSaveButton } from '../components/ExportSaveButton/index.js' export { FieldsToExport } from '../components/FieldsToExport/index.js' export { ImportExportProvider } from '../components/ImportExportProvider/index.js' export { Preview } from '../components/Preview/index.js' +export { SelectionToUseField } from '../components/SelectionToUseField/index.js' export { SortBy } from '../components/SortBy/index.js' -export { WhereField } from '../components/WhereField/index.js' From c6105f1e0d66ebbd7e82379f8f52e9b4647e43ef Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:46:48 -0400 Subject: [PATCH 006/143] fix(plugin-import-export): flattening logic for polymorphic relationships in CSV exports (#13094) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What? Improves the flattening logic used in the import-export plugin to correctly handle polymorphic relationships (both `hasOne` and `hasMany`) when generating CSV columns. ### Why? Previously, `hasMany` polymorphic relationships would flatten their full `value` object recursively, resulting in unwanted keys like `createdAt`, `title`, `email`, etc. This change ensures that only the `id` and `relationTo` fields are included, matching how `hasOne` polymorphic fields already behave. ### How? - Updated `flattenObject` to special-case `hasMany` polymorphic relationships and extract only `relationTo` and `id` per index. - Refined `getFlattenedFieldKeys` to return correct column keys for polymorphic fields: - `hasMany polymorphic → name_0_relationTo`, `name_0_id` - `hasOne polymorphic → name_relationTo`, `name_id` - `monomorphic → name` or `name_0` - **Added try/catch blocks** around `toCSVFunctions` calls in `flattenObject`, with descriptive error messages including the column path and input value. This improves debuggability if a custom `toCSV` function throws. 
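As a rough illustration of the key shapes listed above (field names follow the test fixtures touched in the diff below; the id strings are placeholders), a document with polymorphic relationship values now flattens to just `relationTo`/`id` columns:

```ts
// Illustrative input only -- the id strings are placeholders.
const doc = {
  hasManyPolymorphic: [
    { relationTo: 'users', value: { id: 'user-id-1' } },
    { relationTo: 'posts', value: { id: 'post-id-1' } },
  ],
  hasOnePolymorphic: { relationTo: 'posts', value: { id: 'post-id-2' } },
}

// Expected CSV columns after this change (no createdAt/title/email spill-over):
//
//   hasOnePolymorphic_relationTo    -> 'posts'
//   hasOnePolymorphic_id            -> 'post-id-2'
//   hasManyPolymorphic_0_relationTo -> 'users'
//   hasManyPolymorphic_0_id         -> 'user-id-1'
//   hasManyPolymorphic_1_relationTo -> 'posts'
//   hasManyPolymorphic_1_id         -> 'post-id-1'
```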
--- .../src/export/createExport.ts | 1 - .../src/export/flattenObject.ts | 90 ++++++++++++------- .../src/export/getCustomFieldFunctions.ts | 19 +--- packages/plugin-import-export/src/index.ts | 1 - .../src/utilities/getFlattenedFieldKeys.ts | 26 ++++-- test/plugin-import-export/int.spec.ts | 4 +- 6 files changed, 86 insertions(+), 55 deletions(-) diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index c06fc4bd6d..2623241209 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -106,7 +106,6 @@ export const createExport = async (args: CreateExportArgs) => { const toCSVFunctions = getCustomFieldFunctions({ fields: collectionConfig.flattenedFields, - select, }) if (download) { diff --git a/packages/plugin-import-export/src/export/flattenObject.ts b/packages/plugin-import-export/src/export/flattenObject.ts index 78902b245a..022238aacf 100644 --- a/packages/plugin-import-export/src/export/flattenObject.ts +++ b/packages/plugin-import-export/src/export/flattenObject.ts @@ -24,20 +24,38 @@ export const flattenObject = ({ if (Array.isArray(value)) { value.forEach((item, index) => { if (typeof item === 'object' && item !== null) { + // Case: hasMany polymorphic relationships + if ( + 'relationTo' in item && + 'value' in item && + typeof item.value === 'object' && + item.value !== null + ) { + row[`${`${newKey}_${index}`}_relationTo`] = item.relationTo + row[`${`${newKey}_${index}`}_id`] = item.value.id + return + } + flatten(item, `${newKey}_${index}`) } else { if (toCSVFunctions?.[newKey]) { const columnName = `${newKey}_${index}` - const result = toCSVFunctions[newKey]({ - columnName, - data: row, - doc, - row, - siblingDoc, - value: item, - }) - if (typeof result !== 'undefined') { - row[columnName] = result + try { + const result = toCSVFunctions[newKey]({ + columnName, + data: row, + doc, + row, + siblingDoc, + value: item, + }) + if (typeof result !== 'undefined') { + row[columnName] = result + } + } catch (error) { + throw new Error( + `Error in toCSVFunction for array item "${columnName}": ${JSON.stringify(item)}\n${(error as Error).message}`, + ) } } else { row[`${newKey}_${index}`] = item @@ -48,30 +66,42 @@ export const flattenObject = ({ if (!toCSVFunctions?.[newKey]) { flatten(value, newKey) } else { - const result = toCSVFunctions[newKey]({ - columnName: newKey, - data: row, - doc, - row, - siblingDoc, - value, - }) - if (typeof result !== 'undefined') { - row[newKey] = result + try { + const result = toCSVFunctions[newKey]({ + columnName: newKey, + data: row, + doc, + row, + siblingDoc, + value, + }) + if (typeof result !== 'undefined') { + row[newKey] = result + } + } catch (error) { + throw new Error( + `Error in toCSVFunction for nested object "${newKey}": ${JSON.stringify(value)}\n${(error as Error).message}`, + ) } } } else { if (toCSVFunctions?.[newKey]) { - const result = toCSVFunctions[newKey]({ - columnName: newKey, - data: row, - doc, - row, - siblingDoc, - value, - }) - if (typeof result !== 'undefined') { - row[newKey] = result + try { + const result = toCSVFunctions[newKey]({ + columnName: newKey, + data: row, + doc, + row, + siblingDoc, + value, + }) + if (typeof result !== 'undefined') { + row[newKey] = result + } + } catch (error) { + throw new Error( + `Error in toCSVFunction for field "${newKey}": ${JSON.stringify(value)}\n${(error as Error).message}`, + ) } } else { row[newKey] = value diff --git 
a/packages/plugin-import-export/src/export/getCustomFieldFunctions.ts b/packages/plugin-import-export/src/export/getCustomFieldFunctions.ts index 826bee0ab8..931c4bcf04 100644 --- a/packages/plugin-import-export/src/export/getCustomFieldFunctions.ts +++ b/packages/plugin-import-export/src/export/getCustomFieldFunctions.ts @@ -1,21 +1,12 @@ -import { - type FlattenedField, - type SelectIncludeType, - traverseFields, - type TraverseFieldsCallback, -} from 'payload' +import { type FlattenedField, traverseFields, type TraverseFieldsCallback } from 'payload' import type { ToCSVFunction } from '../types.js' type Args = { fields: FlattenedField[] - select: SelectIncludeType | undefined } -export const getCustomFieldFunctions = ({ - fields, - select, -}: Args): Record => { +export const getCustomFieldFunctions = ({ fields }: Args): Record => { const result: Record = {} const buildCustomFunctions: TraverseFieldsCallback = ({ field, parentRef, ref }) => { @@ -54,7 +45,7 @@ export const getCustomFieldFunctions = ({ data[`${ref.prefix}${field.name}_relationTo`] = relationTo } } - return undefined + return undefined // prevents further flattening } } } else { @@ -98,10 +89,6 @@ export const getCustomFieldFunctions = ({ } } } - - // TODO: do this so we only return the functions needed based on the select used - ////@ts-expect-error ref is untyped - // ref.select = typeof select !== 'undefined' || select[field.name] ? select : {} } traverseFields({ callback: buildCustomFunctions, fields }) diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index 9bf001c281..de79864bd9 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -110,7 +110,6 @@ export const importExportPlugin = const toCSVFunctions = getCustomFieldFunctions({ fields: collection.config.fields as FlattenedField[], - select, }) const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[]) diff --git a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts index 491d3a2461..5ba649c13e 100644 --- a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts +++ b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts @@ -16,7 +16,13 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix const keys: string[] = [] fields.forEach((field) => { - if (!('name' in field) || typeof field.name !== 'string') { + const fieldHasToCSVFunction = + 'custom' in field && + typeof field.custom === 'object' && + 'plugin-import-export' in field.custom && + field.custom['plugin-import-export']?.toCSV + + if (!('name' in field) || typeof field.name !== 'string' || fieldHasToCSVFunction) { return } @@ -41,11 +47,21 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix break case 'relationship': if (field.hasMany) { - // e.g. hasManyPolymorphic_0_value_id - keys.push(`${name}_0_relationTo`, `${name}_0_value_id`) + if (Array.isArray(field.relationTo)) { + // hasMany polymorphic + keys.push(`${name}_0_relationTo`, `${name}_0_id`) + } else { + // hasMany monomorphic + keys.push(`${name}_0`) + } } else { - // e.g. 
hasOnePolymorphic_id - keys.push(`${name}_id`, `${name}_relationTo`) + if (Array.isArray(field.relationTo)) { + // hasOne polymorphic + keys.push(`${name}_relationTo`, `${name}_id`) + } else { + // hasOne monomorphic + keys.push(name) + } } break case 'tabs': diff --git a/test/plugin-import-export/int.spec.ts b/test/plugin-import-export/int.spec.ts index 806b50d456..afc6ecb854 100644 --- a/test/plugin-import-export/int.spec.ts +++ b/test/plugin-import-export/int.spec.ts @@ -569,9 +569,9 @@ describe('@payloadcms/plugin-import-export', () => { expect(data[0].hasOnePolymorphic_relationTo).toBe('posts') // hasManyPolymorphic - expect(data[0].hasManyPolymorphic_0_value_id).toBeDefined() + expect(data[0].hasManyPolymorphic_0_id).toBeDefined() expect(data[0].hasManyPolymorphic_0_relationTo).toBe('users') - expect(data[0].hasManyPolymorphic_1_value_id).toBeDefined() + expect(data[0].hasManyPolymorphic_1_id).toBeDefined() expect(data[0].hasManyPolymorphic_1_relationTo).toBe('posts') }) From 5e82f9ff41521acfcf328f8e846d847a0c2fd2d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20Jablo=C3=B1ski?= <43938777+GermanJablo@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:10:37 +0100 Subject: [PATCH 007/143] feat(next): redirect non-existent documents to list view with banner (#13062) Currently, when a nonexistent document is accessed via the URL, a `NotFound` page is displayed with a button to return to the dashboard. In most cases, the next step the user will take is to navigate to the list of documents in that collection. If we automatically redirect users to the list view and display the error in a banner, we can save them a couple of redirects. This is a very common scenario when writing tests or restarting the local environment. ## Before ![image](https://github.com/user-attachments/assets/ea7af410-5567-4dd2-b44b-67177aa795e6) ## After ![image](https://github.com/user-attachments/assets/72b38d2f-63f2-4a2b-94c4-76ea90d80c24) --- packages/next/src/views/Document/index.tsx | 13 ++++++- packages/next/src/views/List/index.tsx | 4 +++ .../src/views/List/renderListViewSlots.tsx | 36 +++++++++++++++---- packages/translations/src/clientKeys.ts | 1 + packages/translations/src/languages/ar.ts | 2 ++ packages/translations/src/languages/az.ts | 2 ++ packages/translations/src/languages/bg.ts | 2 ++ packages/translations/src/languages/bnBd.ts | 2 ++ packages/translations/src/languages/bnIn.ts | 2 ++ packages/translations/src/languages/ca.ts | 2 ++ packages/translations/src/languages/cs.ts | 2 ++ packages/translations/src/languages/da.ts | 2 ++ packages/translations/src/languages/de.ts | 2 ++ packages/translations/src/languages/en.ts | 2 ++ packages/translations/src/languages/es.ts | 2 ++ packages/translations/src/languages/et.ts | 2 ++ packages/translations/src/languages/fa.ts | 2 ++ packages/translations/src/languages/fr.ts | 2 ++ packages/translations/src/languages/he.ts | 2 ++ packages/translations/src/languages/hr.ts | 2 ++ packages/translations/src/languages/hu.ts | 2 ++ packages/translations/src/languages/hy.ts | 2 ++ packages/translations/src/languages/it.ts | 2 ++ packages/translations/src/languages/ja.ts | 2 ++ packages/translations/src/languages/ko.ts | 2 ++ packages/translations/src/languages/lt.ts | 2 ++ packages/translations/src/languages/lv.ts | 2 ++ packages/translations/src/languages/my.ts | 2 ++ packages/translations/src/languages/nb.ts | 2 ++ packages/translations/src/languages/nl.ts | 2 ++ packages/translations/src/languages/pl.ts | 2 ++ packages/translations/src/languages/pt.ts | 2 ++ 
packages/translations/src/languages/ro.ts | 2 ++ packages/translations/src/languages/rs.ts | 2 ++ .../translations/src/languages/rsLatin.ts | 2 ++ packages/translations/src/languages/ru.ts | 2 ++ packages/translations/src/languages/sk.ts | 2 ++ packages/translations/src/languages/sl.ts | 2 ++ packages/translations/src/languages/sv.ts | 2 ++ packages/translations/src/languages/th.ts | 2 ++ packages/translations/src/languages/tr.ts | 2 ++ packages/translations/src/languages/uk.ts | 2 ++ packages/translations/src/languages/vi.ts | 2 ++ packages/translations/src/languages/zh.ts | 2 ++ packages/translations/src/languages/zhTw.ts | 1 + test/admin/e2e/general/e2e.spec.ts | 20 +++++++---- 46 files changed, 140 insertions(+), 15 deletions(-) diff --git a/packages/next/src/views/Document/index.tsx b/packages/next/src/views/Document/index.tsx index 88e42680c4..6995374069 100644 --- a/packages/next/src/views/Document/index.tsx +++ b/packages/next/src/views/Document/index.tsx @@ -120,7 +120,18 @@ export const renderDocument = async ({ })) if (isEditing && !doc) { - throw new Error('not-found') + // If it's a collection document that doesn't exist, redirect to collection list + if (collectionSlug) { + const redirectURL = formatAdminURL({ + adminRoute, + path: `/collections/${collectionSlug}?notFound=${encodeURIComponent(idFromArgs)}`, + serverURL, + }) + redirect(redirectURL) + } else { + // For globals or other cases, keep the 404 behavior + throw new Error('not-found') + } } const [ diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index 3df0d9de82..a2af698ea1 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -225,6 +225,9 @@ export const renderListView = async ( const hasCreatePermission = permissions?.collections?.[collectionSlug]?.create + // Check if there's a notFound query parameter (document ID that wasn't found) + const notFoundDocId = typeof searchParams?.notFound === 'string' ? 
searchParams.notFound : null + const serverProps: ListViewServerPropsOnly = { collectionConfig, data, @@ -248,6 +251,7 @@ export const renderListView = async ( }, collectionConfig, description: staticDescription, + notFoundDocId, payload, serverProps, }) diff --git a/packages/next/src/views/List/renderListViewSlots.tsx b/packages/next/src/views/List/renderListViewSlots.tsx index a0c36d016f..200fcf7f37 100644 --- a/packages/next/src/views/List/renderListViewSlots.tsx +++ b/packages/next/src/views/List/renderListViewSlots.tsx @@ -16,12 +16,15 @@ import type { ViewDescriptionServerPropsOnly, } from 'payload' +import { Banner } from '@payloadcms/ui/elements/Banner' import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerComponent' +import React from 'react' type Args = { clientProps: ListViewSlotSharedClientProps collectionConfig: SanitizedCollectionConfig description?: StaticDescription + notFoundDocId?: null | string payload: Payload serverProps: ListViewServerPropsOnly } @@ -30,6 +33,7 @@ export const renderListViewSlots = ({ clientProps, collectionConfig, description, + notFoundDocId, payload, serverProps, }: Args): ListViewSlots => { @@ -75,13 +79,31 @@ export const renderListViewSlots = ({ }) } - if (collectionConfig.admin.components?.beforeListTable) { - result.BeforeListTable = RenderServerComponent({ - clientProps: clientProps satisfies BeforeListTableClientProps, - Component: collectionConfig.admin.components.beforeListTable, - importMap: payload.importMap, - serverProps: serverProps satisfies BeforeListTableServerPropsOnly, - }) + // Handle beforeListTable with optional banner + const existingBeforeListTable = collectionConfig.admin.components?.beforeListTable + ? RenderServerComponent({ + clientProps: clientProps satisfies BeforeListTableClientProps, + Component: collectionConfig.admin.components.beforeListTable, + importMap: payload.importMap, + serverProps: serverProps satisfies BeforeListTableServerPropsOnly, + }) + : null + + // Create banner for document not found + const notFoundBanner = notFoundDocId ? ( + + {serverProps.i18n.t('error:documentNotFound', { id: notFoundDocId })} + + ) : null + + // Combine banner and existing component + if (notFoundBanner || existingBeforeListTable) { + result.BeforeListTable = ( + + {notFoundBanner} + {existingBeforeListTable} + + ) } if (collectionConfig.admin.components?.Description) { diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index c586ea55bb..0a9c986847 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -65,6 +65,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'error:autosaving', 'error:correctInvalidFields', 'error:deletingTitle', + 'error:documentNotFound', 'error:emailOrPasswordIncorrect', 'error:usernameOrPasswordIncorrect', 'error:loadingDocument', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index e8382ab45a..6a33e7ecbe 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -86,6 +86,8 @@ export const arTranslations: DefaultTranslationsObject = { deletingFile: 'حدث خطأ أثناء حذف الملف.', deletingTitle: 'حدث خطأ أثناء حذف {{title}}. يرجى التحقق من الاتصال الخاص بك والمحاولة مرة أخرى.', + documentNotFound: + 'لم يتم العثور على المستند بالمعرف {{id}}. 
قد يكون قد تم حذفه أو لم يكن موجودًا أصلاً ، أو قد لا يكون لديك الوصول إليه.', emailOrPasswordIncorrect: 'البريد الإلكتروني أو كلمة المرور المقدمة غير صحيحة.', followingFieldsInvalid_one: 'الحقل التالي غير صالح:', followingFieldsInvalid_other: 'الحقول التالية غير صالحة:', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index ff7f352eed..59726751e4 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -86,6 +86,8 @@ export const azTranslations: DefaultTranslationsObject = { deletingFile: 'Faylın silinməsində xəta baş verdi.', deletingTitle: '{{title}} silinərkən xəta baş verdi. Zəhmət olmasa, bağlantınızı yoxlayın və yenidən cəhd edin.', + documentNotFound: + '{{id}} ID-li sənəd tapılmadı. Bu, onun silinmiş və ya heç vaxt mövcud olmamış ola bilər və ya sizin ona giriş hüququnuz olmayabilir.', emailOrPasswordIncorrect: 'Təqdim olunan e-poçt və ya şifrə yanlışdır.', followingFieldsInvalid_one: 'Aşağıdakı sahə yanlışdır:', followingFieldsInvalid_other: 'Aşağıdaki sahələr yanlışdır:', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index 4ac64a7b60..b507d73d69 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -86,6 +86,8 @@ export const bgTranslations: DefaultTranslationsObject = { deletingFile: 'Имаше грешка при изтриването на файла.', deletingTitle: 'Имаше проблем при изтриването на {{title}}. Моля провери връзката си и опитай отново.', + documentNotFound: + 'Документът с ID {{id}} не можа да бъде намерен. Възможно е да е бил изтрит или никога да не е съществувал или може би нямате достъп до него.', emailOrPasswordIncorrect: 'Имейлът или паролата не са правилни.', followingFieldsInvalid_one: 'Следното поле е некоректно:', followingFieldsInvalid_other: 'Следните полета са некоректни:', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 3119dc1c1b..50a3462030 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -86,6 +86,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { deletingFile: 'ফাইল মুছতে একটি ত্রুটি হয়েছে।', deletingTitle: '{{title}} মুছতে একটি ত্রুটি হয়েছে। আপনার সংযোগ পরীক্ষা করুন এবং আবার চেষ্টা করুন।', + documentNotFound: + 'আইডি {{id}} এর সাথে সম্পর্কিত ডকুমেন্টটি পাওয়া যাচ্ছে না। এটি মোছা হয়েছে বা কখনই না থাকতে পারে, অথবা আপনার এর প্রবেশাধিকার না থ', emailOrPasswordIncorrect: 'প্রদত্ত ইমেইল বা পাসওয়ার্ড ভুল।', followingFieldsInvalid_one: 'নিম্নলিখিত ক্ষেত্রটি অবৈধ:', followingFieldsInvalid_other: 'নিম্নলিখিত ক্ষেত্রগুলি অবৈধ:', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 58e4d8e566..97e1a90f76 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -86,6 +86,8 @@ export const bnInTranslations: DefaultTranslationsObject = { deletingFile: 'ফাইল মুছতে একটি ত্রুটি হয়েছে।', deletingTitle: '{{title}} মুছতে একটি ত্রুটি হয়েছে। আপনার সংযোগ পরীক্ষা করুন এবং আবার চেষ্টা করুন।', + documentNotFound: + 'ID সহ {{id}} ডকুমেন্টটি পাওয়া যায়নি। এটি মুছে ফেলা হতে পারে বা কখনই ছিল না, অথবা আপনার এটির অ্যাক্সেস নেই।', emailOrPasswordIncorrect: 'প্রদত্ত ইমেইল বা পাসওয়ার্ড ভুল।', followingFieldsInvalid_one: 'নিম্নলিখিত ক্ষেত্রটি অবৈধ:', followingFieldsInvalid_other: 'নিম্নলিখিত ক্ষেত্রগুলি অবৈধ:', diff --git 
a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index adb6438367..36a9a5823c 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -86,6 +86,8 @@ export const caTranslations: DefaultTranslationsObject = { deletingFile: "Hi ha hagut un error en eliminar l'arxiu.", deletingTitle: "Hi ha hagut un error mentre s'eliminava {{title}}. Si us plau, comprova la teva connexió i torna-ho a intentar.", + documentNotFound: + "El document amb ID {{id}} no s'ha pogut trobar. Pot haver estat esborrat o mai haver existit, o potser no tens accés a aquest.", emailOrPasswordIncorrect: 'El correu electrònic o la contrasenya proporcionats no són correctes.', followingFieldsInvalid_one: 'El següent camp no és vàlid:', diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index da916e1aab..4808651d5e 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -86,6 +86,8 @@ export const csTranslations: DefaultTranslationsObject = { deletingFile: 'Při mazání souboru došlo k chybě.', deletingTitle: 'Při mazání {{title}} došlo k chybě. Zkontrolujte své připojení a zkuste to znovu.', + documentNotFound: + 'Dokument s ID {{id}} nebyl nalezen. Mohlo být smazáno nebo nikdy neexistovalo, nebo k němu nemáte přístup.', emailOrPasswordIncorrect: 'Zadaný email nebo heslo není správné.', followingFieldsInvalid_one: 'Následující pole je neplatné:', followingFieldsInvalid_other: 'Následující pole jsou neplatná:', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index 5ed7783b17..0f449e1c2a 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -85,6 +85,8 @@ export const daTranslations: DefaultTranslationsObject = { deletingFile: 'Der opstod en fejl under sletning af filen.', deletingTitle: 'Der opstod en fejl under sletningen {{title}}. Tjek din forbindelse eller prøv igen.', + documentNotFound: + 'Dokumentet med ID {{id}} kunne ikke findes. Det kan være slettet eller har aldrig eksisteret, eller du har muligvis ikke adgang til det.', emailOrPasswordIncorrect: 'Email eller adgangskode er forkert.', followingFieldsInvalid_one: 'Feltet er ugyldigt:', followingFieldsInvalid_other: 'Felterne er ugyldige:', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index 876b445845..9c2fd199d3 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -88,6 +88,8 @@ export const deTranslations: DefaultTranslationsObject = { deletingFile: 'Beim Löschen der Datei ist ein Fehler aufgetreten.', deletingTitle: 'Es gab ein Problem während der Löschung von {{title}}. Bitte überprüfe deine Verbindung und versuche es erneut.', + documentNotFound: + 'Das Dokument mit der ID {{id}} konnte nicht gefunden werden. 
Es könnte gelöscht oder niemals existiert haben, oder Sie haben möglicherweise keinen Zugang dazu.', emailOrPasswordIncorrect: 'Die E-Mail-Adresse oder das Passwort sind nicht korrekt.', followingFieldsInvalid_one: 'Das folgende Feld ist nicht korrekt:', followingFieldsInvalid_other: 'Die folgenden Felder sind nicht korrekt:', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index 9563bfe3a3..0b2d0f7694 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -87,6 +87,8 @@ export const enTranslations = { deletingFile: 'There was an error deleting file.', deletingTitle: 'There was an error while deleting {{title}}. Please check your connection and try again.', + documentNotFound: + 'The document with ID {{id}} could not be found. It may have been deleted or never existed, or you may not have access to it.', emailOrPasswordIncorrect: 'The email or password provided is incorrect.', followingFieldsInvalid_one: 'The following field is invalid:', followingFieldsInvalid_other: 'The following fields are invalid:', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index 362fa355a7..311848771a 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -86,6 +86,8 @@ export const esTranslations: DefaultTranslationsObject = { deletingFile: 'Ocurrió un error al eliminar el archivo.', deletingTitle: 'Ocurrió un error al eliminar {{title}}. Por favor, revisa tu conexión y vuelve a intentarlo.', + documentNotFound: + 'No se pudo encontrar el documento con ID {{id}}. Puede haber sido eliminado o nunca existió, o puede que no tenga acceso a él.', emailOrPasswordIncorrect: 'El correo o la contraseña son incorrectos.', followingFieldsInvalid_one: 'El siguiente campo es inválido:', followingFieldsInvalid_other: 'Los siguientes campos son inválidos:', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 4263e0c2a4..1a72d3e422 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -85,6 +85,8 @@ export const etTranslations: DefaultTranslationsObject = { deletingFile: 'Faili kustutamisel tekkis viga.', deletingTitle: '{{title}} kustutamisel tekkis viga. Palun kontrollige ühendust ja proovige uuesti.', + documentNotFound: + 'Dokumenti ID-ga {{id}} ei leitud. Võimalik, et see on kustutatud või pole seda kunagi olnud, või ei pruugi teil sellele juurdepääsu olla.', emailOrPasswordIncorrect: 'Sisestatud e-post või parool on vale.', followingFieldsInvalid_one: 'Järgmine väli on vigane:', followingFieldsInvalid_other: 'Järgmised väljad on vigased:', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index d2141cb6df..b0c0012eef 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -85,6 +85,8 @@ export const faTranslations: DefaultTranslationsObject = { correctInvalidFields: 'لطفا کادرهای نامعتبر را تصحیح کنید.', deletingFile: 'هنگام حذف فایل خطایی روی داد.', deletingTitle: 'هنگام حذف {{title}} خطایی رخ داد. لطفاً وضعیت اتصال اینترنت خود را بررسی کنید.', + documentNotFound: + 'سند با شناسه {{id}} پیدا نشد. 
ممکن است حذف شده باشد یا هرگز وجود نداشته باشد، یا شاید شما به آن دسترسی نداشته باشید.', emailOrPasswordIncorrect: 'رایانامه یا گذرواژه ارائه شده نادرست است.', followingFieldsInvalid_one: 'کادر زیر نامعتبر است:', followingFieldsInvalid_other: 'کادرهای زیر نامعتبر هستند:', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index 142a3c8acb..24ec5fd7b0 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -88,6 +88,8 @@ export const frTranslations: DefaultTranslationsObject = { deletingFile: 'Une erreur s’est produite lors de la suppression du fichier.', deletingTitle: 'Une erreur s’est produite lors de la suppression de {{title}}. Veuillez vérifier votre connexion puis réessayer.', + documentNotFound: + "Le document avec l'ID {{id}} n'a pas pu être trouvé. Il a peut-être été supprimé ou n'a jamais existé, ou vous n'avez peut-être pas accès à celui-ci.", emailOrPasswordIncorrect: 'L’adresse e-mail ou le mot de passe fourni est incorrect.', followingFieldsInvalid_one: 'Le champ suivant n’est pas valide :', followingFieldsInvalid_other: 'Les champs suivants ne sont pas valides :', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index 595c64adee..32d5ad7200 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -84,6 +84,8 @@ export const heTranslations: DefaultTranslationsObject = { correctInvalidFields: 'נא לתקן שדות לא תקינים.', deletingFile: 'אירעה שגיאה במחיקת הקובץ.', deletingTitle: 'אירעה שגיאה במחיקת {{title}}. נא בדוק את החיבור שלך ונסה שנית.', + documentNotFound: + 'המסמך עם המזהה {{id}} לא נמצא. ייתכן שהוא נמחק או שלעולם לא היה, או שאין לך גישה אליו.', emailOrPasswordIncorrect: 'כתובת הדוא"ל או הסיסמה שסופקו אינם נכונים.', followingFieldsInvalid_one: 'השדה הבא אינו תקין:', followingFieldsInvalid_other: 'השדות הבאים אינם תקינים:', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index 8e38eeccf3..7271e0ac06 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -87,6 +87,8 @@ export const hrTranslations: DefaultTranslationsObject = { deletingFile: 'Dogodila se pogreška pri brisanju datoteke.', deletingTitle: 'Dogodila se pogreška pri brisanju {{title}}. Molimo provjerite svoju internet vezu i pokušajte ponovno.', + documentNotFound: + 'Dokument s ID-om {{id}} nije mogao biti pronađen. Možda je izbrisan ili nikad nije postojao, ili možda nemate pristup njemu.', emailOrPasswordIncorrect: 'E-mail adresa ili lozinka netočni.', followingFieldsInvalid_one: 'Ovo polje je neispravno:', followingFieldsInvalid_other: 'Ova polja su neispravna:', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index 1bb4386db6..eac7af8c04 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -88,6 +88,8 @@ export const huTranslations: DefaultTranslationsObject = { deletingFile: 'Hiba történt a fájl törlésekor.', deletingTitle: 'Hiba történt a {{title}} törlése közben. Kérjük, ellenőrizze a kapcsolatot, és próbálja meg újra.', + documentNotFound: + 'A dokumentum azonosítóval {{id}} nem található. 
Lehet, hogy törölték, soha nem létezett, vagy Önnek nincs hozzáférése hozzá.', emailOrPasswordIncorrect: 'A megadott e-mail-cím vagy jelszó helytelen.', followingFieldsInvalid_one: 'A következő mező érvénytelen:', followingFieldsInvalid_other: 'A következő mezők érvénytelenek:', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 17a97ca9d1..925181f244 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -86,6 +86,8 @@ export const hyTranslations: DefaultTranslationsObject = { deletingFile: 'Ֆայլը ջնջելու ժամանակ սխալ է տեղի ունեցել։', deletingTitle: '{{title}}-ը ջնջելու ժամանակ սխալ է տեղի ունեցել։ Խնդրում ենք ստուգել Ձեր կապը և կրկին փորձել։', + documentNotFound: + 'Գրառումը ID-ով {{id}} չի գտնվել։ Այն կարող է ջնջվել կամ նույնիսկ էլ գոյություն չունել։ Ֆո', emailOrPasswordIncorrect: 'Տրամադրված էլ. փոստը կամ գաղտնաբառը սխալ է։', followingFieldsInvalid_one: 'Հետևյալ դաշտն անվավեր է։', followingFieldsInvalid_other: 'Հետևյալ դաշտերն անվավեր են։', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index 4edcbd5ff1..55bfdbcd8e 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -87,6 +87,8 @@ export const itTranslations: DefaultTranslationsObject = { deletingFile: "Si è verificato un errore durante l'eleminazione del file.", deletingTitle: "Si è verificato un errore durante l'eliminazione di {{title}}. Per favore controlla la tua connessione e riprova.", + documentNotFound: + 'Il documento con ID {{id}} non è stato trovato. Potrebbe essere stato eliminato o mai esistito, oppure potresti non avere accesso ad esso.', emailOrPasswordIncorrect: "L'email o la password fornita non è corretta.", followingFieldsInvalid_one: 'Il seguente campo non è valido:', followingFieldsInvalid_other: 'I seguenti campi non sono validi:', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 30f97016fd..51d9284b96 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -87,6 +87,8 @@ export const jaTranslations: DefaultTranslationsObject = { deletingFile: 'ファイルの削除中にエラーが発生しました。', deletingTitle: '{{title}} を削除する際にエラーが発生しました。接続を確認してからもう一度お試しください。', + documentNotFound: + 'ID {{id}}のドキュメントが見つかりませんでした。削除されたか、存在しなかったか、またはアクセス権限がない可能性があります。', emailOrPasswordIncorrect: 'メールアドレス、または、パスワードが正しくありません。', followingFieldsInvalid_one: '次のフィールドは無効です:', followingFieldsInvalid_other: '次のフィールドは無効です:', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index 39f967fc01..e053968388 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -86,6 +86,8 @@ export const koTranslations: DefaultTranslationsObject = { deletingFile: '파일을 삭제하는 중에 오류가 발생했습니다.', deletingTitle: '{{title}} 삭제하는 중에 오류가 발생했습니다. 인터넷 연결을 확인하고 다시 시도하세요.', + documentNotFound: + 'ID가 {{id}}인 문서를 찾을 수 없습니다. 
이 문서는 삭제되었거나 존재하지 않았거나, 당신이 접근 권한이 없을 수 있습니다.', emailOrPasswordIncorrect: '입력한 이메일 또는 비밀번호가 올바르지 않습니다.', followingFieldsInvalid_one: '다음 입력란이 유효하지 않습니다:', followingFieldsInvalid_other: '다음 입력란이 유효하지 않습니다:', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 68310295f7..0a9b605a10 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -87,6 +87,8 @@ export const ltTranslations: DefaultTranslationsObject = { deletingFile: 'Įvyko klaida trinant failą.', deletingTitle: 'Įvyko klaida bandant ištrinti {{title}}. Patikrinkite savo ryšį ir bandykite dar kartą.', + documentNotFound: + 'Dokumentas su ID {{id}} nerastas. Gali būti, kad jis buvo ištrintas arba niekada neegzistavo, arba jūs neturite prieigos prie jo.', emailOrPasswordIncorrect: 'Pateiktas el. pašto adresas arba slaptažodis yra neteisingi.', followingFieldsInvalid_one: 'Šis laukas yra netinkamas:', followingFieldsInvalid_other: 'Šie laukai yra neteisingi:', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index 051e2d34ac..e7fb84bb10 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -86,6 +86,8 @@ export const lvTranslations: DefaultTranslationsObject = { deletingFile: 'Radās kļūda, dzēšot failu.', deletingTitle: 'Radās kļūda, dzēšot {{title}}. Lūdzu, pārbaudiet savienojumu un mēģiniet vēlreiz.', + documentNotFound: + 'Dokuments ar ID {{id}} netika atrasts. Iespējams, tas ir izdzēsts vai nekad nav eksistējis, vai arī jums nav pieejas tam.', emailOrPasswordIncorrect: 'Norādītais e-pasts vai parole nav pareiza.', followingFieldsInvalid_one: 'Šis lauks nav derīgs:', followingFieldsInvalid_other: 'Šie lauki nav derīgi:', diff --git a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index e4575be8c4..c05bf18710 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -86,6 +86,8 @@ export const myTranslations: DefaultTranslationsObject = { deletingFile: 'ဖိုင်ကိုဖျက်ရာတွင် အမှားအယွင်းရှိနေသည်။', deletingTitle: '{{title}} ကို ဖျက်ရာတွင် အမှားအယွင်းရှိခဲ့သည်။ သင့် အင်တာနက်လိုင်းအား စစ်ဆေးပြီး ထပ်မံကြို့စားကြည့်ပါ။', + documentNotFound: + 'Dokumen dengan ID {{id}} tidak dapat ditemui. Ia mungkin telah dipadam atau tidak pernah wujud, atau anda mungkin tidak mempunyai akses kepadanya.', emailOrPasswordIncorrect: 'ထည့်သွင်းထားသော အီးမေးလ် သို့မဟုတ် စကားဝှက်သည် မမှန်ပါ။', followingFieldsInvalid_one: 'ထည့်သွင်းထားသော အချက်အလက်သည် မမှန်ကန်ပါ။', followingFieldsInvalid_other: 'ထည့်သွင်းထားသော အချက်အလက်များသည် မမှန်ကန်ပါ။', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index f2f5a2b438..90c796312c 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -86,6 +86,8 @@ export const nbTranslations: DefaultTranslationsObject = { deletingFile: 'Det oppstod en feil under sletting av filen.', deletingTitle: 'Det oppstod en feil under sletting av {{title}}. Sjekk tilkoblingen og prøv igjen.', + documentNotFound: + 'Dokumentet med ID {{id}} kunne ikke bli funnet. 
Det kan ha blitt slettet eller aldri eksistert, eller du har kanskje ikke tilgang til det.', emailOrPasswordIncorrect: 'E-postadressen eller passordet er feil.', followingFieldsInvalid_one: 'Følgende felt er ugyldig:', followingFieldsInvalid_other: 'Følgende felter er ugyldige:', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index dee4ae3886..316cc69508 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -87,6 +87,8 @@ export const nlTranslations: DefaultTranslationsObject = { deletingFile: 'Er is een fout opgetreden bij het verwijderen van dit bestand.', deletingTitle: 'Er is een fout opgetreden tijdens het verwijderen van {{title}}. Controleer uw verbinding en probeer het opnieuw.', + documentNotFound: + 'Het document met ID {{id}} kon niet worden gevonden. Het kan zijn verwijderd of heeft nooit bestaan, of u heeft mogelijk geen toegang tot het.', emailOrPasswordIncorrect: 'Het opgegeven e-mailadres of wachtwoord is onjuist.', followingFieldsInvalid_one: 'Het volgende veld is ongeldig:', followingFieldsInvalid_other: 'De volgende velden zijn ongeldig:', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index a4ff89c11f..2c12ba691b 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -86,6 +86,8 @@ export const plTranslations: DefaultTranslationsObject = { deletingFile: '', deletingTitle: 'Wystąpił błąd podczas usuwania {{title}}. Proszę, sprawdź swoje połączenie i spróbuj ponownie.', + documentNotFound: + 'Dokument o ID {{id}} nie mógł zostać znaleziony. Mogło zostać usunięte lub nigdy nie istniało, lub może nie masz do niego dostępu.', emailOrPasswordIncorrect: 'Podany adres e-mail lub hasło jest nieprawidłowe.', followingFieldsInvalid_one: 'To pole jest nieprawidłowe:', followingFieldsInvalid_other: 'Następujące pola są nieprawidłowe:', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index b3f1f366b7..8b8f95fa9a 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -87,6 +87,8 @@ export const ptTranslations: DefaultTranslationsObject = { deletingFile: 'Ocorreu um erro ao excluir o arquivo.', deletingTitle: 'Ocorreu um erro ao excluir {{title}}. Por favor, verifique sua conexão e tente novamente.', + documentNotFound: + 'O documento com o ID {{id}} não pôde ser encontrado. Ele pode ter sido deletado ou nunca ter existido, ou você pode não ter acesso a ele.', emailOrPasswordIncorrect: 'O email ou senha fornecido está incorreto.', followingFieldsInvalid_one: 'O campo a seguir está inválido:', followingFieldsInvalid_other: 'Os campos a seguir estão inválidos:', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 64e557c856..38825ec119 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -88,6 +88,8 @@ export const roTranslations: DefaultTranslationsObject = { deletingFile: 'S-a produs o eroare la ștergerea fișierului.', deletingTitle: 'S-a produs o eroare în timpul ștergerii {{title}}. Vă rugăm să verificați conexiunea și să încercați din nou.', + documentNotFound: + 'Documentul cu ID-ul {{id}} nu a putut fi găsit. 
S-ar putea să fi fost șters sau să nu fi existat niciodată, sau s-ar putea să nu aveți acces la acesta.', emailOrPasswordIncorrect: 'Adresa de e-mail sau parola este incorectă.', followingFieldsInvalid_one: 'Următorul câmp nu este valid:', followingFieldsInvalid_other: 'Următoarele câmpuri nu sunt valabile:', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index 7fa6b100eb..78803c8e58 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -87,6 +87,8 @@ export const rsTranslations: DefaultTranslationsObject = { deletingFile: 'Догодила се грешка при брисању датотеке.', deletingTitle: 'Догодила се грешка при брисању {{title}}. Проверите интернет конекцију и покушајте поново.', + documentNotFound: + 'Dokument sa ID-om {{id}} nije mogao biti pronađen. Moguće je da je obrisan ili nikada nije postojao, ili možda nemate pristup njemu.', emailOrPasswordIncorrect: 'Емаил или лозинка су неисправни.', followingFieldsInvalid_one: 'Ово поље је невалидно:', followingFieldsInvalid_other: 'Ова поља су невалидна:', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index beee303953..31c321059b 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -87,6 +87,8 @@ export const rsLatinTranslations: DefaultTranslationsObject = { deletingFile: 'Dogodila se greška pri brisanju datoteke.', deletingTitle: 'Dogodila se greška pri brisanju {{title}}. Proverite internet konekciju i pokušajte ponovo.', + documentNotFound: + 'Dokument sa ID {{id}} nije mogao biti pronađen. Moguće je da je obrisan ili nikad nije postojao, ili možda nemate pristup njemu.', emailOrPasswordIncorrect: 'Adresa e-pošte ili lozinka su neispravni.', followingFieldsInvalid_one: 'Ovo polje je nevalidno:', followingFieldsInvalid_other: 'Ova polja su nevalidna:', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index ee29c5d406..e881a1c1d4 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -87,6 +87,8 @@ export const ruTranslations: DefaultTranslationsObject = { deletingFile: 'Произошла ошибка при удалении файла.', deletingTitle: 'При удалении {{title}} произошла ошибка. Пожалуйста, проверьте соединение и повторите попытку.', + documentNotFound: + 'Документ с ID {{id}} не удалось найти. Возможно, он был удален или никогда не существовал, или у вас нет доступа к нему.', emailOrPasswordIncorrect: 'Указанный email или пароль неверен.', followingFieldsInvalid_one: 'Следующее поле недействительно:', followingFieldsInvalid_other: 'Следующие поля недействительны:', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index f4479b1745..44713ae7fe 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -87,6 +87,8 @@ export const skTranslations: DefaultTranslationsObject = { deletingFile: 'Pri mazaní súboru došlo k chybe.', deletingTitle: 'Pri mazaní {{title}} došlo k chybe. Skontrolujte svoje pripojenie a skúste to znova.', + documentNotFound: + 'Dokument s ID {{id}} sa nepodarilo nájsť. 
Možno bol vymazaný, nikdy neexistoval, alebo k nemu nemáte prístup.', emailOrPasswordIncorrect: 'Zadaný email alebo heslo nie je správne.', followingFieldsInvalid_one: 'Nasledujúce pole je neplatné:', followingFieldsInvalid_other: 'Nasledujúce polia sú neplatné:', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index f424587ff5..45954b2d29 100644 --- a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -86,6 +86,8 @@ export const slTranslations: DefaultTranslationsObject = { deletingFile: 'Pri brisanju datoteke je prišlo do napake.', deletingTitle: 'Pri brisanju {{title}} je prišlo do napake. Prosimo, preverite povezavo in poskusite znova.', + documentNotFound: + 'Dokumenta z ID {{id}} ni bilo mogoče najti. Morda je bil izbrisan ali nikoli ni obstajal, ali pa do njega nimate dostopa.', emailOrPasswordIncorrect: 'Vnesena e-pošta ali geslo je napačno.', followingFieldsInvalid_one: 'Naslednje polje je neveljavno:', followingFieldsInvalid_other: 'Naslednja polja so neveljavna:', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index 5fcdcb7199..3a9b1e12d2 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -86,6 +86,8 @@ export const svTranslations: DefaultTranslationsObject = { deletingFile: 'Det gick inte att ta bort filen', deletingTitle: 'Det uppstod ett fel vid borttagningen av {{title}}. Vänligen kontrollera din anslutning och försök igen.', + documentNotFound: + 'Dokumentet med ID {{id}} kunde inte hittas. Det kan ha raderats eller aldrig existerat, eller så kanske du inte har tillgång till det.', emailOrPasswordIncorrect: 'E-postadressen eller lösenordet som angivits är felaktigt.', followingFieldsInvalid_one: 'Följande fält är ogiltigt:', followingFieldsInvalid_other: 'Följande fält är ogiltiga:', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index 9a39e8f1ef..8d53deae75 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -84,6 +84,8 @@ export const thTranslations: DefaultTranslationsObject = { correctInvalidFields: 'โปรดแก้ไขช่องที่ไม่ถูกต้อง', deletingFile: 'เกิดปัญหาระหว่างการลบไฟล์', deletingTitle: 'เกิดปัญหาระหว่างการลบ {{title}} โปรดตรวจสอบการเชื่อมต่อของคุณแล้วลองอีกครั้ง', + documentNotFound: + 'ไม่พบเอกสารที่มี ID {{id}} อาจจะถูกลบหรือไม่เคยมีอยู่ หรือคุณอาจไม่มีสิทธิ์เข้าถึง', emailOrPasswordIncorrect: 'อีเมลหรือรหัสผ่านไม่ถูกต้อง', followingFieldsInvalid_one: 'ช่องต่อไปนี้ไม่ถูกต้อง:', followingFieldsInvalid_other: 'ช่องต่อไปนี้ไม่ถูกต้อง:', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index 57375f0008..1630721cf9 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -87,6 +87,8 @@ export const trTranslations: DefaultTranslationsObject = { deletingFile: 'Dosya silinirken bir hatayla karşılaşıldı.', deletingTitle: '{{title}} silinirken bir sorun yaşandı. Lütfen internet bağlantınızı kontrol edip tekrar deneyin.', + documentNotFound: + "ID'si {{id}} olan belge bulunamadı. 
Silinmiş olabilir, hiç var olmamış olabilir veya belgeye erişiminiz olmayabilir.", emailOrPasswordIncorrect: 'Girilen e-posta veya parola hatalı', followingFieldsInvalid_one: 'Lütfen geçersiz alanı düzeltin:', followingFieldsInvalid_other: 'Lütfen geçersiz alanları düzeltin:', diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index c1f96e4771..e76e29e6be 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -87,6 +87,8 @@ export const ukTranslations: DefaultTranslationsObject = { deletingFile: 'Виникла помилка під час видалення файлу', deletingTitle: "Виникла помилка під час видалення {{title}}. Будь ласка, перевірте ваше з'єднання та спробуйте ще раз.", + documentNotFound: + 'Документ з ID {{id}} не вдалося знайти. Можливо, він був видалений або ніколи не існував, або у вас немає доступу до нього.', emailOrPasswordIncorrect: 'Вказана адреса електронної пошти або пароль є невірними', followingFieldsInvalid_one: 'Наступне поле невірне:', followingFieldsInvalid_other: 'Наступні поля невірні', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index d235a7de03..ae280b4fc0 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -86,6 +86,8 @@ export const viTranslations: DefaultTranslationsObject = { deletingFile: 'Lỗi - Đã xảy ra vấn đề khi xóa tệp này.', deletingTitle: 'Lỗi - Đã xảy ra vấn đề khi xóa {{title}}. Hãy kiểm tra kết nối mạng và thử lại.', + documentNotFound: + 'Tài liệu có ID {{id}} không thể tìm thấy. Nó có thể đã bị xóa hoặc chưa từng tồn tại, hoặc bạn có thể không có quyền truy cập vào nó.', emailOrPasswordIncorrect: 'Lỗi - Email hoặc mật khẩu không chính xác.', followingFieldsInvalid_one: 'Lỗi - Field sau không hợp lệ:', followingFieldsInvalid_other: 'Lỗi - Những fields sau không hợp lệ:', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index c8a4b04e19..296612d0e1 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -82,6 +82,8 @@ export const zhTranslations: DefaultTranslationsObject = { correctInvalidFields: '请更正无效字段。', deletingFile: '删除文件时出现了错误。', deletingTitle: '删除{{title}}时出现了错误。请检查您的连接并重试。', + documentNotFound: + '无法找到ID为{{id}}的文档。可能是已经被删除,或者从未存在,或者您可能无法访问它。', emailOrPasswordIncorrect: '提供的电子邮件或密码不正确。', followingFieldsInvalid_one: '下面的字段是无效的:', followingFieldsInvalid_other: '以下字段是无效的:', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index 2512bb0916..6cf18d9773 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -82,6 +82,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { correctInvalidFields: '請更正無效區塊。', deletingFile: '刪除文件時出現了錯誤。', deletingTitle: '刪除{{title}}時出現了錯誤。請檢查您的網路連線並重試。', + documentNotFound: '找不到具有ID {{id}}的文件。可能已被刪除或從未存在,或者您可能無法訪問它。', emailOrPasswordIncorrect: '提供的電子郵件或密碼不正確。', followingFieldsInvalid_one: '下面的字串是無效的:', followingFieldsInvalid_other: '以下字串是無效的:', diff --git a/test/admin/e2e/general/e2e.spec.ts b/test/admin/e2e/general/e2e.spec.ts index 277b0614ff..6710f65c07 100644 --- a/test/admin/e2e/general/e2e.spec.ts +++ b/test/admin/e2e/general/e2e.spec.ts @@ -346,19 +346,25 @@ describe('General', () => { await expect(page.locator('.not-found')).toContainText('Nothing found') }) - test('should 404 not found documents', async () 
=> { - const unknownDocumentURL = `${postsUrl.collection(postsCollectionSlug)}/1234` - const response = await page.goto(unknownDocumentURL) - expect(response.status() === 404).toBeTruthy() - await expect(page.locator('.not-found')).toContainText('Nothing found') - }) - test('should use custom logout route', async () => { const customLogoutRouteURL = `${serverURL}${adminRoutes.routes.admin}${adminRoutes.admin.routes.logout}` const response = await page.goto(customLogoutRouteURL) expect(response.status() !== 404).toBeTruthy() }) + test('should redirect from non-existent document ID to collection list', async () => { + const nonExistentDocURL = `${serverURL}/admin/collections/${postsCollectionSlug}/999999` + await page.goto(nonExistentDocURL) + // Should redirect to collection list with notFound query parameter + await expect + .poll(() => page.url(), { timeout: POLL_TOPASS_TIMEOUT }) + .toMatch(`${serverURL}/admin/collections/${postsCollectionSlug}?notFound=999999`) + + // Should show warning banner about document not found + await expect(page.locator('.banner--type-error')).toBeVisible() + await expect(page.locator('.banner--type-error')).toContainText('999999') + }) + test('should not redirect `${adminRoute}/collections` to `${adminRoute} if there is a custom view', async () => { const collectionsURL = `${serverURL}/admin/collections` await page.goto(collectionsURL) From c77b39c3b4b76073da550e02bcbadfb19c156568 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Thu, 10 Jul 2025 06:15:09 -0400 Subject: [PATCH 008/143] fix(ui): hidden input should wait for form initialization (#13114) --- packages/ui/src/fields/Hidden/index.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/ui/src/fields/Hidden/index.tsx b/packages/ui/src/fields/Hidden/index.tsx index 3938e354d3..dd0d3182c6 100644 --- a/packages/ui/src/fields/Hidden/index.tsx +++ b/packages/ui/src/fields/Hidden/index.tsx @@ -15,15 +15,15 @@ import { withCondition } from '../../forms/withCondition/index.js' const HiddenFieldComponent: React.FC = (props) => { const { disableModifyingForm = true, path: pathFromProps, value: valueFromProps } = props - const { path, setValue, value } = useField({ + const { formInitializing, path, setValue, value } = useField({ potentiallyStalePath: pathFromProps, }) useEffect(() => { - if (valueFromProps !== undefined) { + if (valueFromProps !== undefined && !formInitializing) { setValue(valueFromProps, disableModifyingForm) } - }, [valueFromProps, setValue, disableModifyingForm]) + }, [valueFromProps, setValue, disableModifyingForm, formInitializing]) return ( Date: Thu, 10 Jul 2025 16:49:12 +0300 Subject: [PATCH 009/143] perf(db-postgres): simplify `db.updateOne` to a single DB call if the passed data doesn't include nested fields (#13060) If `payload.db.updateOne` receives simple data, meaning none of the following: * Arrays / Blocks * Localized Fields * `hasMany: true` text / select / number / relationship fields * relationship fields with `relationTo` as an array then this PR simplifies the logic to a single SQL `set` call, with no extra (and useless) steps rewriting all of the array / block / localized tables when nothing in them has changed.
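For illustration, here is a minimal sketch against the `posts` collection from this PR's test config (a plain `title` text field plus an `arrayWithIDs` array field; `post` is a previously created document). The first call now maps to a single `UPDATE ... SET`, while the second still goes through `upsertRow` because it writes to an array table:

```ts
// Simple data only touches columns on the main table -> single SQL `SET` call
await payload.db.updateOne({
  collection: 'posts',
  data: { title: 'hello updated' },
  where: { id: { equals: post.id } },
})

// Data containing an array field writes to a separate table -> falls back to upsertRow
await payload.db.updateOne({
  collection: 'posts',
  data: { arrayWithIDs: [{ text: 'some text' }] },
  where: { id: { equals: post.id } },
})
```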
Note, however, that `payload.update` (as opposed to `payload.db.updateOne`) currently still passes all of the previous data as well, so this change only takes effect when `payload.db.updateOne` is used directly (or by our internal logic that uses it). A follow-up PR may bring the same optimization to `payload.update`. --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210710489889576 --- packages/drizzle/src/updateOne.ts | 118 ++++++++++++++++-- test/database/config.ts | 14 +++ test/database/int.spec.ts | 31 ++++- test/database/payload-types.ts | 16 +++ ...50624_214621.json => 20250707_123508.json} | 86 ++++++++++++- ...{20250624_214621.ts => 20250707_123508.ts} | 18 ++- .../up-down-migration/migrations/index.ts | 8 +- 7 files changed, 272 insertions(+), 19 deletions(-) rename test/database/up-down-migration/migrations/{20250624_214621.json => 20250707_123508.json} (88%) rename test/database/up-down-migration/migrations/{20250624_214621.ts => 20250707_123508.ts} (86%) diff --git a/packages/drizzle/src/updateOne.ts b/packages/drizzle/src/updateOne.ts index 8fddd9378f..ef451c9436 100644 --- a/packages/drizzle/src/updateOne.ts +++ b/packages/drizzle/src/updateOne.ts @@ -1,15 +1,67 @@ import type { LibSQLDatabase } from 'drizzle-orm/libsql' -import type { UpdateOne } from 'payload' +import type { FlattenedField, UpdateOne } from 'payload' +import { eq } from 'drizzle-orm' import toSnakeCase from 'to-snake-case' import type { DrizzleAdapter } from './types.js' +import { buildFindManyArgs } from './find/buildFindManyArgs.js' import { buildQuery } from './queries/buildQuery.js' import { selectDistinct } from './queries/selectDistinct.js' +import { transform } from './transform/read/index.js' +import { transformForWrite } from './transform/write/index.js' import { upsertRow } from './upsertRow/index.js' import { getTransaction } from './utilities/getTransaction.js' +/** + * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call. + * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships.
+ */ +const shouldUseUpsertRow = ({ + data, + fields, +}: { + data: Record + fields: FlattenedField[] +}) => { + for (const key in data) { + const value = data[key] + const field = fields.find((each) => each.name === key) + + if (!field) { + continue + } + + if ( + field.type === 'array' || + field.type === 'blocks' || + ((field.type === 'text' || + field.type === 'relationship' || + field.type === 'upload' || + field.type === 'select' || + field.type === 'number') && + field.hasMany) || + ((field.type === 'relationship' || field.type === 'upload') && + Array.isArray(field.relationTo)) || + field.localized + ) { + return true + } + + if ( + (field.type === 'group' || field.type === 'tab') && + value && + typeof value === 'object' && + shouldUseUpsertRow({ data: value as Record, fields: field.flattenedFields }) + ) { + return true + } + } + + return false +} + export const updateOne: UpdateOne = async function updateOne( this: DrizzleAdapter, { @@ -74,23 +126,71 @@ export const updateOne: UpdateOne = async function updateOne( return null } - const result = await upsertRow({ - id: idToUpdate, + if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) { + const result = await upsertRow({ + id: idToUpdate, + adapter: this, + data, + db, + fields: collection.flattenedFields, + ignoreResult: returning === false, + joinQuery, + operation: 'update', + req, + select, + tableName, + }) + + if (returning === false) { + return null + } + + return result + } + + const { row } = transformForWrite({ adapter: this, data, - db, fields: collection.flattenedFields, - ignoreResult: returning === false, - joinQuery, - operation: 'update', - req, - select, tableName, }) + const drizzle = db as LibSQLDatabase + await drizzle + .update(this.tables[tableName]) + .set(row) + // TODO: we can skip fetching idToUpdate here with using the incoming where + .where(eq(this.tables[tableName].id, idToUpdate)) + if (returning === false) { return null } + const findManyArgs = buildFindManyArgs({ + adapter: this, + depth: 0, + fields: collection.flattenedFields, + joinQuery: false, + select, + tableName, + }) + + findManyArgs.where = eq(this.tables[tableName].id, idToUpdate) + + const doc = await db.query[tableName].findFirst(findManyArgs) + + // ////////////////////////////////// + // TRANSFORM DATA + // ////////////////////////////////// + + const result = transform({ + adapter: this, + config: this.payload.config, + data: doc, + fields: collection.flattenedFields, + joinQuery: false, + tableName, + }) + return result } diff --git a/test/database/config.ts b/test/database/config.ts index 2e6c269729..1027491eae 100644 --- a/test/database/config.ts +++ b/test/database/config.ts @@ -223,6 +223,20 @@ export default buildConfigWithDefaults({ }, ], }, + { + type: 'group', + name: 'group', + fields: [{ name: 'text', type: 'text' }], + }, + { + type: 'tabs', + tabs: [ + { + name: 'tab', + fields: [{ name: 'text', type: 'text' }], + }, + ], + }, ], hooks: { beforeOperation: [ diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 52c1969c58..34bcc13da9 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -7,9 +7,7 @@ import { migrateRelationshipsV2_V3, migrateVersionsV1_V2, } from '@payloadcms/db-mongodb/migration-utils' -import { objectToFrontmatter } from '@payloadcms/richtext-lexical' import { randomUUID } from 'crypto' -import { type Table } from 'drizzle-orm' import * as drizzlePg from 'drizzle-orm/pg-core' import * as drizzleSqlite from 'drizzle-orm/sqlite-core' 
import fs from 'fs' @@ -2809,6 +2807,35 @@ describe('database', () => { } }) + it('should update simple', async () => { + const post = await payload.create({ + collection: 'posts', + data: { + text: 'other text (should not be nuked)', + title: 'hello', + group: { text: 'in group' }, + tab: { text: 'in tab' }, + arrayWithIDs: [{ text: 'some text' }], + }, + }) + const res = await payload.db.updateOne({ + where: { id: { equals: post.id } }, + data: { + title: 'hello updated', + group: { text: 'in group updated' }, + tab: { text: 'in tab updated' }, + }, + collection: 'posts', + }) + + expect(res.title).toBe('hello updated') + expect(res.text).toBe('other text (should not be nuked)') + expect(res.group.text).toBe('in group updated') + expect(res.tab.text).toBe('in tab updated') + expect(res.arrayWithIDs).toHaveLength(1) + expect(res.arrayWithIDs[0].text).toBe('some text') + }) + it('should support x3 nesting blocks', async () => { const res = await payload.create({ collection: 'posts', diff --git a/test/database/payload-types.ts b/test/database/payload-types.ts index 0975fc1b54..d1f52cb4a1 100644 --- a/test/database/payload-types.ts +++ b/test/database/payload-types.ts @@ -232,6 +232,12 @@ export interface Post { blockType: 'block-first'; }[] | null; + group?: { + text?: string | null; + }; + tab?: { + text?: string | null; + }; updatedAt: string; createdAt: string; } @@ -804,6 +810,16 @@ export interface PostsSelect { blockName?: T; }; }; + group?: + | T + | { + text?: T; + }; + tab?: + | T + | { + text?: T; + }; updatedAt?: T; createdAt?: T; } diff --git a/test/database/up-down-migration/migrations/20250624_214621.json b/test/database/up-down-migration/migrations/20250707_123508.json similarity index 88% rename from test/database/up-down-migration/migrations/20250624_214621.json rename to test/database/up-down-migration/migrations/20250707_123508.json index 3e1e61171e..f541345218 100644 --- a/test/database/up-down-migration/migrations/20250624_214621.json +++ b/test/database/up-down-migration/migrations/20250707_123508.json @@ -1,9 +1,93 @@ { - "id": "a3dd8ca0-5e09-407b-9178-e0ff7f15da59", + "id": "bf183b76-944c-4e83-bd58-4aa993885106", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", "tables": { + "public.users_sessions": { + "name": "users_sessions", + "schema": "", + "columns": { + "_order": { + "name": "_order", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "_parent_id": { + "name": "_parent_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "id": { + "name": "id", + "type": "varchar", + "primaryKey": true, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp(3) with time zone", + "primaryKey": false, + "notNull": false + }, + "expires_at": { + "name": "expires_at", + "type": "timestamp(3) with time zone", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "users_sessions_order_idx": { + "name": "users_sessions_order_idx", + "columns": [ + { + "expression": "_order", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + }, + "users_sessions_parent_id_idx": { + "name": "users_sessions_parent_id_idx", + "columns": [ + { + "expression": "_parent_id", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": { + "users_sessions_parent_id_fk": { + 
"name": "users_sessions_parent_id_fk", + "tableFrom": "users_sessions", + "tableTo": "users", + "columnsFrom": ["_parent_id"], + "columnsTo": ["id"], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, "public.users": { "name": "users", "schema": "", diff --git a/test/database/up-down-migration/migrations/20250624_214621.ts b/test/database/up-down-migration/migrations/20250707_123508.ts similarity index 86% rename from test/database/up-down-migration/migrations/20250624_214621.ts rename to test/database/up-down-migration/migrations/20250707_123508.ts index 0f65d8f599..098ecd2a0d 100644 --- a/test/database/up-down-migration/migrations/20250624_214621.ts +++ b/test/database/up-down-migration/migrations/20250707_123508.ts @@ -1,10 +1,18 @@ -import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-postgres' +import type { MigrateDownArgs, MigrateUpArgs} from '@payloadcms/db-postgres'; import { sql } from '@payloadcms/db-postgres' export async function up({ db, payload, req }: MigrateUpArgs): Promise { await db.execute(sql` - CREATE TABLE "users" ( + CREATE TABLE "users_sessions" ( + "_order" integer NOT NULL, + "_parent_id" integer NOT NULL, + "id" varchar PRIMARY KEY NOT NULL, + "created_at" timestamp(3) with time zone, + "expires_at" timestamp(3) with time zone NOT NULL + ); + + CREATE TABLE "users" ( "id" serial PRIMARY KEY NOT NULL, "updated_at" timestamp(3) with time zone DEFAULT now() NOT NULL, "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL, @@ -56,10 +64,13 @@ export async function up({ db, payload, req }: MigrateUpArgs): Promise { "created_at" timestamp(3) with time zone DEFAULT now() NOT NULL ); + ALTER TABLE "users_sessions" ADD CONSTRAINT "users_sessions_parent_id_fk" FOREIGN KEY ("_parent_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action; ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_parent_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."payload_locked_documents"("id") ON DELETE cascade ON UPDATE no action; ALTER TABLE "payload_locked_documents_rels" ADD CONSTRAINT "payload_locked_documents_rels_users_fk" FOREIGN KEY ("users_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action; ALTER TABLE "payload_preferences_rels" ADD CONSTRAINT "payload_preferences_rels_parent_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."payload_preferences"("id") ON DELETE cascade ON UPDATE no action; ALTER TABLE "payload_preferences_rels" ADD CONSTRAINT "payload_preferences_rels_users_fk" FOREIGN KEY ("users_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action; + CREATE INDEX "users_sessions_order_idx" ON "users_sessions" USING btree ("_order"); + CREATE INDEX "users_sessions_parent_id_idx" ON "users_sessions" USING btree ("_parent_id"); CREATE INDEX "users_updated_at_idx" ON "users" USING btree ("updated_at"); CREATE INDEX "users_created_at_idx" ON "users" USING btree ("created_at"); CREATE UNIQUE INDEX "users_email_idx" ON "users" USING btree ("email"); @@ -83,7 +94,8 @@ export async function up({ db, payload, req }: MigrateUpArgs): Promise { export async function down({ db, payload, req }: MigrateDownArgs): Promise { await db.execute(sql` - DROP TABLE "users" CASCADE; + DROP TABLE "users_sessions" CASCADE; + DROP TABLE "users" CASCADE; DROP TABLE "payload_locked_documents" CASCADE; DROP TABLE 
"payload_locked_documents_rels" CASCADE; DROP TABLE "payload_preferences" CASCADE; diff --git a/test/database/up-down-migration/migrations/index.ts b/test/database/up-down-migration/migrations/index.ts index 91d190b455..0c0f710443 100644 --- a/test/database/up-down-migration/migrations/index.ts +++ b/test/database/up-down-migration/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250624_214621 from './20250624_214621.js' +import * as migration_20250707_123508 from './20250707_123508.js' export const migrations = [ { - up: migration_20250624_214621.up, - down: migration_20250624_214621.down, - name: '20250624_214621', + up: migration_20250707_123508.up, + down: migration_20250707_123508.down, + name: '20250707_123508', }, ] From cb6a73e1b44d448e96ac6a23016aa00dac89c710 Mon Sep 17 00:00:00 2001 From: Paul Date: Thu, 10 Jul 2025 16:00:26 +0100 Subject: [PATCH 010/143] feat(storage-*): include modified headers into the response headers of files when using adapters (#12096) This PR makes it so that `modifyResponseHeaders` is supported in our adapters when set on the collection config. Previously it would be ignored. This means that users can now modify or append new headers to what's returned by each service. ```ts import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { modifyResponseHeaders: ({ headers }) => { const newHeaders = new Headers(headers) // Copy existing headers newHeaders.set('X-Frame-Options', 'DENY') // Set new header return newHeaders }, }, } ``` Also adds support for `void` return on the `modifyResponseHeaders` function in the case where the user just wants to use existing headers and doesn't need more control. eg: ```ts import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { modifyResponseHeaders: ({ headers }) => { headers.set('X-Frame-Options', 'DENY') // You can directly set headers without returning }, }, } ``` Manual testing checklist (no CI e2es setup for these envs yet): - [x] GCS - [x] S3 - [x] Azure - [x] UploadThing - [x] Vercel Blob --------- Co-authored-by: James --- docs/upload/overview.mdx | 46 ++++++++++++++++++- .../payload/src/uploads/endpoints/getFile.ts | 5 +- packages/payload/src/uploads/types.ts | 3 +- packages/plugin-cloud-storage/src/types.ts | 1 + packages/storage-azure/src/staticHandler.ts | 30 ++++++++++-- packages/storage-gcs/src/staticHandler.ts | 28 ++++++----- packages/storage-s3/src/staticHandler.ts | 35 ++++++++------ .../storage-uploadthing/src/staticHandler.ts | 36 ++++++++++----- .../storage-vercel-blob/src/staticHandler.ts | 40 ++++++++-------- test/storage-azure/collections/Media.ts | 3 ++ test/storage-azure/payload-types.ts | 38 ++++++++++----- test/storage-gcs/collections/Media.ts | 3 ++ test/storage-s3/collections/Media.ts | 3 ++ test/storage-uploadthing/collections/Media.ts | 3 ++ test/storage-uploadthing/payload-types.ts | 38 ++++++++++----- test/storage-vercel-blob/collections/Media.ts | 3 ++ test/storage-vercel-blob/payload-types.ts | 40 +++++++++++----- 17 files changed, 256 insertions(+), 99 deletions(-) diff --git a/docs/upload/overview.mdx b/docs/upload/overview.mdx index 07adf66bb0..2c8218eba6 100644 --- a/docs/upload/overview.mdx +++ b/docs/upload/overview.mdx @@ -116,6 +116,7 @@ _An asterisk denotes that an option is required._ | **`withMetadata`** | If specified, appends metadata to the output image file. Accepts a boolean or a function that receives `metadata` and `req`, returning a boolean. 
| | **`hideFileInputOnCreate`** | Set to `true` to prevent the admin UI from showing file inputs during document creation, useful for programmatic file generation. | | **`hideRemoveFile`** | Set to `true` to prevent the admin UI having a way to remove an existing file while editing. | +| **`modifyResponseHeaders`** | Accepts an object with existing `headers` and allows you to manipulate the response headers for media files. [More](#modifying-response-headers) | ### Payload-wide Upload Options @@ -453,7 +454,7 @@ To fetch files from **restricted URLs** that would otherwise be blocked by CORS, Here’s how to configure the pasteURL option to control remote URL fetching: -``` +```ts import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { @@ -466,7 +467,7 @@ export const Media: CollectionConfig = { pathname: '', port: '', protocol: 'https', - search: '' + search: '', }, { hostname: 'example.com', @@ -519,3 +520,44 @@ _An asterisk denotes that an option is required._ ## Access Control All files that are uploaded to each Collection automatically support the `read` [Access Control](/docs/access-control/overview) function from the Collection itself. You can use this to control who should be allowed to see your uploads, and who should not. + +## Modifying response headers + +You can modify the response headers for files by specifying the `modifyResponseHeaders` option in your upload config. This option accepts an object with existing headers and allows you to manipulate the response headers for media files. + +### Modifying existing headers + +With this method you can directly interface with the `Headers` object and modify the existing headers to append or remove headers. + +```ts +import type { CollectionConfig } from 'payload' + +export const Media: CollectionConfig = { + slug: 'media', + upload: { + modifyResponseHeaders: ({ headers }) => { + headers.set('X-Frame-Options', 'DENY') // You can directly set headers without returning + }, + }, +} +``` + +### Return new headers + +You can also return a new `Headers` object with the modified headers. This is useful if you want to set new headers or remove existing ones. + +```ts +import type { CollectionConfig } from 'payload' + +export const Media: CollectionConfig = { + slug: 'media', + upload: { + modifyResponseHeaders: ({ headers }) => { + const newHeaders = new Headers(headers) // Copy existing headers + newHeaders.set('X-Frame-Options', 'DENY') // Set new header + + return newHeaders + }, + }, +} +``` diff --git a/packages/payload/src/uploads/endpoints/getFile.ts b/packages/payload/src/uploads/endpoints/getFile.ts index 69a3bfea13..e7b39c0796 100644 --- a/packages/payload/src/uploads/endpoints/getFile.ts +++ b/packages/payload/src/uploads/endpoints/getFile.ts @@ -38,9 +38,12 @@ export const getFileHandler: PayloadHandler = async (req) => { if (collection.config.upload.handlers?.length) { let customResponse: null | Response | void = null + const headers = new Headers() + for (const handler of collection.config.upload.handlers) { customResponse = await handler(req, { doc: accessResult, + headers, params: { collection: collection.config.slug, filename, @@ -95,7 +98,7 @@ export const getFileHandler: PayloadHandler = async (req) => { headers.set('Content-Type', fileTypeResult.mime) headers.set('Content-Length', stats.size + '') headers = collection.config.upload?.modifyResponseHeaders - ? collection.config.upload.modifyResponseHeaders({ headers }) + ? 
collection.config.upload.modifyResponseHeaders({ headers }) || headers : headers return new Response(data, { diff --git a/packages/payload/src/uploads/types.ts b/packages/payload/src/uploads/types.ts index 5674f55889..ff4963833a 100644 --- a/packages/payload/src/uploads/types.ts +++ b/packages/payload/src/uploads/types.ts @@ -211,6 +211,7 @@ export type UploadConfig = { req: PayloadRequest, args: { doc: TypeWithID + headers?: Headers params: { clientUploadContext?: unknown; collection: string; filename: string } }, ) => Promise | Promise | Response | void)[] @@ -233,7 +234,7 @@ export type UploadConfig = { * Ability to modify the response headers fetching a file. * @default undefined */ - modifyResponseHeaders?: ({ headers }: { headers: Headers }) => Headers + modifyResponseHeaders?: ({ headers }: { headers: Headers }) => Headers | void /** * Controls the behavior of pasting/uploading files from URLs. * If set to `false`, fetching from remote URLs is disabled. diff --git a/packages/plugin-cloud-storage/src/types.ts b/packages/plugin-cloud-storage/src/types.ts index 8558ccf728..8b88231130 100644 --- a/packages/plugin-cloud-storage/src/types.ts +++ b/packages/plugin-cloud-storage/src/types.ts @@ -58,6 +58,7 @@ export type StaticHandler = ( req: PayloadRequest, args: { doc?: TypeWithID + headers?: Headers params: { clientUploadContext?: unknown; collection: string; filename: string } }, ) => Promise | Response diff --git a/packages/storage-azure/src/staticHandler.ts b/packages/storage-azure/src/staticHandler.ts index 915c7de94d..625b4640dc 100644 --- a/packages/storage-azure/src/staticHandler.ts +++ b/packages/storage-azure/src/staticHandler.ts @@ -14,7 +14,7 @@ interface Args { } export const getHandler = ({ collection, getStorageClient }: Args): StaticHandler => { - return async (req, { params: { clientUploadContext, filename } }) => { + return async (req, { headers: incomingHeaders, params: { clientUploadContext, filename } }) => { try { const prefix = await getFilePrefix({ clientUploadContext, collection, filename, req }) const blockBlobClient = getStorageClient().getBlockBlobClient( @@ -30,14 +30,34 @@ export const getHandler = ({ collection, getStorageClient }: Args): StaticHandle const response = blob._response + let initHeaders: Headers = { + ...(response.headers.rawHeaders() as unknown as Headers), + } + + // Typescript is difficult here with merging these types from Azure + if (incomingHeaders) { + initHeaders = { + ...initHeaders, + ...incomingHeaders, + } + } + + let headers = new Headers(initHeaders) + const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match') const objectEtag = response.headers.get('etag') + if ( + collection.upload && + typeof collection.upload === 'object' && + typeof collection.upload.modifyResponseHeaders === 'function' + ) { + headers = collection.upload.modifyResponseHeaders({ headers }) || headers + } + if (etagFromHeaders && etagFromHeaders === objectEtag) { return new Response(null, { - headers: new Headers({ - ...response.headers.rawHeaders(), - }), + headers, status: 304, }) } @@ -63,7 +83,7 @@ export const getHandler = ({ collection, getStorageClient }: Args): StaticHandle }) return new Response(readableStream, { - headers: response.headers.rawHeaders(), + headers, status: response.status, }) } catch (err: unknown) { diff --git a/packages/storage-gcs/src/staticHandler.ts b/packages/storage-gcs/src/staticHandler.ts index 258fee971b..bceb1b730e 100644 --- a/packages/storage-gcs/src/staticHandler.ts +++ 
b/packages/storage-gcs/src/staticHandler.ts @@ -12,7 +12,7 @@ interface Args { } export const getHandler = ({ bucket, collection, getStorageClient }: Args): StaticHandler => { - return async (req, { params: { clientUploadContext, filename } }) => { + return async (req, { headers: incomingHeaders, params: { clientUploadContext, filename } }) => { try { const prefix = await getFilePrefix({ clientUploadContext, collection, filename, req }) const file = getStorageClient().bucket(bucket).file(path.posix.join(prefix, filename)) @@ -22,13 +22,23 @@ export const getHandler = ({ bucket, collection, getStorageClient }: Args): Stat const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match') const objectEtag = metadata.etag + let headers = new Headers(incomingHeaders) + + headers.append('Content-Length', String(metadata.size)) + headers.append('Content-Type', String(metadata.contentType)) + headers.append('ETag', String(metadata.etag)) + + if ( + collection.upload && + typeof collection.upload === 'object' && + typeof collection.upload.modifyResponseHeaders === 'function' + ) { + headers = collection.upload.modifyResponseHeaders({ headers }) || headers + } + if (etagFromHeaders && etagFromHeaders === objectEtag) { return new Response(null, { - headers: new Headers({ - 'Content-Length': String(metadata.size), - 'Content-Type': String(metadata.contentType), - ETag: String(metadata.etag), - }), + headers, status: 304, }) } @@ -50,11 +60,7 @@ export const getHandler = ({ bucket, collection, getStorageClient }: Args): Stat }) return new Response(readableStream, { - headers: new Headers({ - 'Content-Length': String(metadata.size), - 'Content-Type': String(metadata.contentType), - ETag: String(metadata.etag), - }), + headers, status: 200, }) } catch (err: unknown) { diff --git a/packages/storage-s3/src/staticHandler.ts b/packages/storage-s3/src/staticHandler.ts index 2f068fd654..08d528ecad 100644 --- a/packages/storage-s3/src/staticHandler.ts +++ b/packages/storage-s3/src/staticHandler.ts @@ -61,7 +61,7 @@ export const getHandler = ({ getStorageClient, signedDownloads, }: Args): StaticHandler => { - return async (req, { params: { clientUploadContext, filename } }) => { + return async (req, { headers: incomingHeaders, params: { clientUploadContext, filename } }) => { let object: AWS.GetObjectOutput | undefined = undefined try { const prefix = await getFilePrefix({ clientUploadContext, collection, filename, req }) @@ -94,17 +94,31 @@ export const getHandler = ({ Key: key, }) + if (!object.Body) { + return new Response(null, { status: 404, statusText: 'Not Found' }) + } + + let headers = new Headers(incomingHeaders) + + headers.append('Content-Length', String(object.ContentLength)) + headers.append('Content-Type', String(object.ContentType)) + headers.append('Accept-Ranges', String(object.AcceptRanges)) + headers.append('ETag', String(object.ETag)) + const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match') const objectEtag = object.ETag + if ( + collection.upload && + typeof collection.upload === 'object' && + typeof collection.upload.modifyResponseHeaders === 'function' + ) { + headers = collection.upload.modifyResponseHeaders({ headers }) || headers + } + if (etagFromHeaders && etagFromHeaders === objectEtag) { return new Response(null, { - headers: new Headers({ - 'Accept-Ranges': String(object.AcceptRanges), - 'Content-Length': String(object.ContentLength), - 'Content-Type': String(object.ContentType), - ETag: String(object.ETag), - }), + headers, status: 
304, }) } @@ -125,12 +139,7 @@ export const getHandler = ({ const bodyBuffer = await streamToBuffer(object.Body) return new Response(bodyBuffer, { - headers: new Headers({ - 'Accept-Ranges': String(object.AcceptRanges), - 'Content-Length': String(object.ContentLength), - 'Content-Type': String(object.ContentType), - ETag: String(object.ETag), - }), + headers, status: 200, }) } catch (err) { diff --git a/packages/storage-uploadthing/src/staticHandler.ts b/packages/storage-uploadthing/src/staticHandler.ts index 3321184f14..a3475e7d1b 100644 --- a/packages/storage-uploadthing/src/staticHandler.ts +++ b/packages/storage-uploadthing/src/staticHandler.ts @@ -9,9 +9,13 @@ type Args = { } export const getHandler = ({ utApi }: Args): StaticHandler => { - return async (req, { doc, params: { clientUploadContext, collection, filename } }) => { + return async ( + req, + { doc, headers: incomingHeaders, params: { clientUploadContext, collection, filename } }, + ) => { try { let key: string + const collectionConfig = req.payload.collections[collection]?.config if ( clientUploadContext && @@ -21,7 +25,6 @@ export const getHandler = ({ utApi }: Args): StaticHandler => { ) { key = clientUploadContext.key } else { - const collectionConfig = req.payload.collections[collection]?.config let retrievedDoc = doc if (!retrievedDoc) { @@ -82,23 +85,32 @@ export const getHandler = ({ utApi }: Args): StaticHandler => { const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match') const objectEtag = response.headers.get('etag') + let headers = new Headers(incomingHeaders) + + headers.append('Content-Length', String(blob.size)) + headers.append('Content-Type', blob.type) + + if (objectEtag) { + headers.append('ETag', objectEtag) + } + + if ( + collectionConfig?.upload && + typeof collectionConfig.upload === 'object' && + typeof collectionConfig.upload.modifyResponseHeaders === 'function' + ) { + headers = collectionConfig.upload.modifyResponseHeaders({ headers }) || headers + } + if (etagFromHeaders && etagFromHeaders === objectEtag) { return new Response(null, { - headers: new Headers({ - 'Content-Length': String(blob.size), - 'Content-Type': blob.type, - ETag: objectEtag, - }), + headers, status: 304, }) } return new Response(blob, { - headers: new Headers({ - 'Content-Length': String(blob.size), - 'Content-Type': blob.type, - ETag: objectEtag!, - }), + headers, status: 200, }) } catch (err) { diff --git a/packages/storage-vercel-blob/src/staticHandler.ts b/packages/storage-vercel-blob/src/staticHandler.ts index 153bbb220d..0744434929 100644 --- a/packages/storage-vercel-blob/src/staticHandler.ts +++ b/packages/storage-vercel-blob/src/staticHandler.ts @@ -15,27 +15,36 @@ export const getStaticHandler = ( { baseUrl, cacheControlMaxAge = 0, token }: StaticHandlerArgs, collection: CollectionConfig, ): StaticHandler => { - return async (req, { params: { clientUploadContext, filename } }) => { + return async (req, { headers: incomingHeaders, params: { clientUploadContext, filename } }) => { try { const prefix = await getFilePrefix({ clientUploadContext, collection, filename, req }) const fileKey = path.posix.join(prefix, encodeURIComponent(filename)) const fileUrl = `${baseUrl}/${fileKey}` const etagFromHeaders = req.headers.get('etag') || req.headers.get('if-none-match') const blobMetadata = await head(fileUrl, { token }) - const uploadedAtString = blobMetadata.uploadedAt.toISOString() + const { contentDisposition, contentType, size, uploadedAt } = blobMetadata + const uploadedAtString = 
uploadedAt.toISOString() const ETag = `"${fileKey}-${uploadedAtString}"` - const { contentDisposition, contentType, size } = blobMetadata + let headers = new Headers(incomingHeaders) + + headers.append('Cache-Control', `public, max-age=${cacheControlMaxAge}`) + headers.append('Content-Disposition', contentDisposition) + headers.append('Content-Length', String(size)) + headers.append('Content-Type', contentType) + headers.append('ETag', ETag) + + if ( + collection.upload && + typeof collection.upload === 'object' && + typeof collection.upload.modifyResponseHeaders === 'function' + ) { + headers = collection.upload.modifyResponseHeaders({ headers }) || headers + } if (etagFromHeaders && etagFromHeaders === ETag) { return new Response(null, { - headers: new Headers({ - 'Cache-Control': `public, max-age=${cacheControlMaxAge}`, - 'Content-Disposition': contentDisposition, - 'Content-Length': String(size), - 'Content-Type': contentType, - ETag, - }), + headers, status: 304, }) } @@ -55,15 +64,10 @@ export const getStaticHandler = ( const bodyBuffer = await blob.arrayBuffer() + headers.append('Last-Modified', uploadedAtString) + return new Response(bodyBuffer, { - headers: new Headers({ - 'Cache-Control': `public, max-age=${cacheControlMaxAge}`, - 'Content-Disposition': contentDisposition, - 'Content-Length': String(size), - 'Content-Type': contentType, - ETag, - 'Last-Modified': blobMetadata.uploadedAt.toUTCString(), - }), + headers, status: 200, }) } catch (err: unknown) { diff --git a/test/storage-azure/collections/Media.ts b/test/storage-azure/collections/Media.ts index c5997222ca..fa2bcf69a2 100644 --- a/test/storage-azure/collections/Media.ts +++ b/test/storage-azure/collections/Media.ts @@ -3,6 +3,9 @@ import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { + modifyResponseHeaders({ headers }) { + headers.set('X-Universal-Truth', 'Set') + }, disableLocalStorage: true, resizeOptions: { position: 'center', diff --git a/test/storage-azure/payload-types.ts b/test/storage-azure/payload-types.ts index 7311e4ebb9..463543e457 100644 --- a/test/storage-azure/payload-types.ts +++ b/test/storage-azure/payload-types.ts @@ -84,7 +84,7 @@ export interface Config { 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; }; db: { - defaultIDType: string; + defaultIDType: number; }; globals: {}; globalsSelect: {}; @@ -120,7 +120,7 @@ export interface UserAuthOperations { * via the `definition` "media". */ export interface Media { - id: string; + id: number; alt?: string | null; updatedAt: string; createdAt: string; @@ -157,7 +157,7 @@ export interface Media { * via the `definition` "media-with-prefix". */ export interface MediaWithPrefix { - id: string; + id: number; prefix?: string | null; updatedAt: string; createdAt: string; @@ -176,7 +176,7 @@ export interface MediaWithPrefix { * via the `definition` "users". */ export interface User { - id: string; + id: number; updatedAt: string; createdAt: string; email: string; @@ -186,6 +186,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -193,24 +200,24 @@ export interface User { * via the `definition` "payload-locked-documents". 
*/ export interface PayloadLockedDocument { - id: string; + id: number; document?: | ({ relationTo: 'media'; - value: string | Media; + value: number | Media; } | null) | ({ relationTo: 'media-with-prefix'; - value: string | MediaWithPrefix; + value: number | MediaWithPrefix; } | null) | ({ relationTo: 'users'; - value: string | User; + value: number | User; } | null); globalSlug?: string | null; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; updatedAt: string; createdAt: string; @@ -220,10 +227,10 @@ export interface PayloadLockedDocument { * via the `definition` "payload-preferences". */ export interface PayloadPreference { - id: string; + id: number; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; key?: string | null; value?: @@ -243,7 +250,7 @@ export interface PayloadPreference { * via the `definition` "payload-migrations". */ export interface PayloadMigration { - id: string; + id: number; name?: string | null; batch?: number | null; updatedAt: string; @@ -323,6 +330,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema diff --git a/test/storage-gcs/collections/Media.ts b/test/storage-gcs/collections/Media.ts index c5997222ca..fa2bcf69a2 100644 --- a/test/storage-gcs/collections/Media.ts +++ b/test/storage-gcs/collections/Media.ts @@ -3,6 +3,9 @@ import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { + modifyResponseHeaders({ headers }) { + headers.set('X-Universal-Truth', 'Set') + }, disableLocalStorage: true, resizeOptions: { position: 'center', diff --git a/test/storage-s3/collections/Media.ts b/test/storage-s3/collections/Media.ts index c5997222ca..fa2bcf69a2 100644 --- a/test/storage-s3/collections/Media.ts +++ b/test/storage-s3/collections/Media.ts @@ -3,6 +3,9 @@ import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { + modifyResponseHeaders({ headers }) { + headers.set('X-Universal-Truth', 'Set') + }, disableLocalStorage: true, resizeOptions: { position: 'center', diff --git a/test/storage-uploadthing/collections/Media.ts b/test/storage-uploadthing/collections/Media.ts index c5997222ca..fa2bcf69a2 100644 --- a/test/storage-uploadthing/collections/Media.ts +++ b/test/storage-uploadthing/collections/Media.ts @@ -3,6 +3,9 @@ import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { + modifyResponseHeaders({ headers }) { + headers.set('X-Universal-Truth', 'Set') + }, disableLocalStorage: true, resizeOptions: { position: 'center', diff --git a/test/storage-uploadthing/payload-types.ts b/test/storage-uploadthing/payload-types.ts index e797a2f874..f98b3f62dd 100644 --- a/test/storage-uploadthing/payload-types.ts +++ b/test/storage-uploadthing/payload-types.ts @@ -84,7 +84,7 @@ export interface Config { 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; }; db: { - defaultIDType: string; + defaultIDType: number; }; globals: {}; globalsSelect: {}; @@ -120,7 +120,7 @@ export interface UserAuthOperations { * via the `definition` "media". */ export interface Media { - id: string; + id: number; alt?: string | null; _key?: string | null; updatedAt: string; @@ -160,7 +160,7 @@ export interface Media { * via the `definition` "media-with-prefix". 
*/ export interface MediaWithPrefix { - id: string; + id: number; updatedAt: string; createdAt: string; url?: string | null; @@ -178,7 +178,7 @@ export interface MediaWithPrefix { * via the `definition` "users". */ export interface User { - id: string; + id: number; updatedAt: string; createdAt: string; email: string; @@ -188,6 +188,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -195,24 +202,24 @@ export interface User { * via the `definition` "payload-locked-documents". */ export interface PayloadLockedDocument { - id: string; + id: number; document?: | ({ relationTo: 'media'; - value: string | Media; + value: number | Media; } | null) | ({ relationTo: 'media-with-prefix'; - value: string | MediaWithPrefix; + value: number | MediaWithPrefix; } | null) | ({ relationTo: 'users'; - value: string | User; + value: number | User; } | null); globalSlug?: string | null; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; updatedAt: string; createdAt: string; @@ -222,10 +229,10 @@ export interface PayloadLockedDocument { * via the `definition` "payload-preferences". */ export interface PayloadPreference { - id: string; + id: number; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; key?: string | null; value?: @@ -245,7 +252,7 @@ export interface PayloadPreference { * via the `definition` "payload-migrations". */ export interface PayloadMigration { - id: string; + id: number; name?: string | null; batch?: number | null; updatedAt: string; @@ -327,6 +334,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema diff --git a/test/storage-vercel-blob/collections/Media.ts b/test/storage-vercel-blob/collections/Media.ts index 93ece25a97..1d2076fd62 100644 --- a/test/storage-vercel-blob/collections/Media.ts +++ b/test/storage-vercel-blob/collections/Media.ts @@ -3,6 +3,9 @@ import type { CollectionConfig } from 'payload' export const Media: CollectionConfig = { slug: 'media', upload: { + modifyResponseHeaders({ headers }) { + headers.set('X-Universal-Truth', 'Set') + }, resizeOptions: { position: 'center', width: 200, diff --git a/test/storage-vercel-blob/payload-types.ts b/test/storage-vercel-blob/payload-types.ts index 83f98d11fe..463543e457 100644 --- a/test/storage-vercel-blob/payload-types.ts +++ b/test/storage-vercel-blob/payload-types.ts @@ -84,7 +84,7 @@ export interface Config { 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; }; db: { - defaultIDType: string; + defaultIDType: number; }; globals: {}; globalsSelect: {}; @@ -120,7 +120,7 @@ export interface UserAuthOperations { * via the `definition` "media". */ export interface Media { - id: string; + id: number; alt?: string | null; updatedAt: string; createdAt: string; @@ -157,7 +157,8 @@ export interface Media { * via the `definition` "media-with-prefix". */ export interface MediaWithPrefix { - id: string; + id: number; + prefix?: string | null; updatedAt: string; createdAt: string; url?: string | null; @@ -175,7 +176,7 @@ export interface MediaWithPrefix { * via the `definition` "users". 
*/ export interface User { - id: string; + id: number; updatedAt: string; createdAt: string; email: string; @@ -185,6 +186,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -192,24 +200,24 @@ export interface User { * via the `definition` "payload-locked-documents". */ export interface PayloadLockedDocument { - id: string; + id: number; document?: | ({ relationTo: 'media'; - value: string | Media; + value: number | Media; } | null) | ({ relationTo: 'media-with-prefix'; - value: string | MediaWithPrefix; + value: number | MediaWithPrefix; } | null) | ({ relationTo: 'users'; - value: string | User; + value: number | User; } | null); globalSlug?: string | null; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; updatedAt: string; createdAt: string; @@ -219,10 +227,10 @@ export interface PayloadLockedDocument { * via the `definition` "payload-preferences". */ export interface PayloadPreference { - id: string; + id: number; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; key?: string | null; value?: @@ -242,7 +250,7 @@ export interface PayloadPreference { * via the `definition` "payload-migrations". */ export interface PayloadMigration { - id: string; + id: number; name?: string | null; batch?: number | null; updatedAt: string; @@ -295,6 +303,7 @@ export interface MediaSelect { * via the `definition` "media-with-prefix_select". */ export interface MediaWithPrefixSelect { + prefix?: T; updatedAt?: T; createdAt?: T; url?: T; @@ -321,6 +330,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema From 0c2b1054e288893601289f4b4143f3b3b2d7702e Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:13:01 -0400 Subject: [PATCH 011/143] fix: login operation not returning collection and _strategy (#13119) The login operation with sessions enabled calls updateOne, in mongodb, data that does not match the schema is removed. `collection` and `_strategy` are not part of the schema so they need to be reassigned after the user is updated. Adds int test. 
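For clarity, the fix follows this pattern (a minimal sketch rather than the exact operation code; the `SessionUser` type and `reattachAuthFields` helper below are illustrative names, not part of Payload's API):

```ts
// MongoDB's updateOne strips data that does not match the schema, so the
// virtual `collection` and `_strategy` fields vanish after the session write
// and have to be reassigned before the JWT fields are built.
type SessionUser = {
  _strategy?: string
  collection?: string
  [key: string]: unknown
}

const reattachAuthFields = (user: SessionUser, collectionSlug: string): SessionUser => {
  user.collection = collectionSlug // mirrors collectionConfig.slug in the diff below
  user._strategy = 'local-jwt'
  return user
}
```
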
--- packages/payload/src/auth/operations/login.ts | 4 +++- test/auth/int.spec.ts | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/payload/src/auth/operations/login.ts b/packages/payload/src/auth/operations/login.ts index 1e2d35fba1..65e185efbf 100644 --- a/packages/payload/src/auth/operations/login.ts +++ b/packages/payload/src/auth/operations/login.ts @@ -214,7 +214,6 @@ export const loginOperation = async ( user._strategy = 'local-jwt' const authResult = await authenticateLocalStrategy({ doc: user, password }) - user = sanitizeInternalFields(user) const maxLoginAttemptsEnabled = args.collection.config.auth.maxLoginAttempts > 0 @@ -266,6 +265,9 @@ export const loginOperation = async ( returning: false, }) + user.collection = collectionConfig.slug + user._strategy = 'local-jwt' + fieldsToSignArgs.sid = newSessionID } diff --git a/test/auth/int.spec.ts b/test/auth/int.spec.ts index f354782092..29a7d92581 100644 --- a/test/auth/int.spec.ts +++ b/test/auth/int.spec.ts @@ -112,6 +112,9 @@ describe('Auth', () => { const data = await response.json() expect(response.status).toBe(200) + expect(data.user).toBeDefined() + expect(data.user.collection).toBe(slug) + expect(data.user._strategy).toBeDefined() expect(data.token).toBeDefined() }) From 2d91cb613c55cd455ab046f6ee4ac6878f977ad6 Mon Sep 17 00:00:00 2001 From: Paul Date: Thu, 10 Jul 2025 18:44:05 +0100 Subject: [PATCH 012/143] feat: allow joins, select, populate, depth and draft to /me REST API operation (#13116) While we can use `joins`, `select`, `populate`, `depth` or `draft` on auth collections when finding or finding by ID, these arguments weren't supported for `/me` which meant that in some situations like in our ecommerce template we couldn't optimise these calls. A workaround would be to make a call to `/me` and then get the user ID to then use for a `findByID` operation. 
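With this change the extra round trip goes away and a client can pass the same arguments directly to `/me`. A rough usage sketch (assuming `serverURL` and `token` are already in scope, and using the same `qs`-style query encoding the other REST endpoints accept):

```ts
import * as qs from 'qs-esm'

// Builds a query string like `?depth=0&select[name]=true`, mirroring the
// encoding Payload's REST find endpoints already parse.
const queryString = qs.stringify(
  {
    depth: 0,
    select: { name: true },
  },
  { addQueryPrefix: true },
)

const res = await fetch(`${serverURL}/api/users/me${queryString}`, {
  headers: { Authorization: `JWT ${token}` },
})

const { user } = await res.json()
```
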
--- packages/payload/src/auth/endpoints/me.ts | 30 +++++++ packages/payload/src/auth/operations/me.ts | 15 +++- test/select/collections/Users/index.ts | 21 +++++ test/select/config.ts | 2 + test/select/int.spec.ts | 64 ++++++++++++++- test/select/payload-types.ts | 94 +++++++++++++--------- 6 files changed, 184 insertions(+), 42 deletions(-) create mode 100644 test/select/collections/Users/index.ts diff --git a/packages/payload/src/auth/endpoints/me.ts b/packages/payload/src/auth/endpoints/me.ts index d224ea97a7..c022ba7b02 100644 --- a/packages/payload/src/auth/endpoints/me.ts +++ b/packages/payload/src/auth/endpoints/me.ts @@ -1,20 +1,50 @@ import { status as httpStatus } from 'http-status' import type { PayloadHandler } from '../../config/types.js' +import type { JoinParams } from '../../utilities/sanitizeJoinParams.js' import { getRequestCollection } from '../../utilities/getRequestEntity.js' import { headersWithCors } from '../../utilities/headersWithCors.js' +import { isNumber } from '../../utilities/isNumber.js' +import { sanitizeJoinParams } from '../../utilities/sanitizeJoinParams.js' +import { sanitizePopulateParam } from '../../utilities/sanitizePopulateParam.js' +import { sanitizeSelectParam } from '../../utilities/sanitizeSelectParam.js' import { extractJWT } from '../extractJWT.js' import { meOperation } from '../operations/me.js' export const meHandler: PayloadHandler = async (req) => { + const { searchParams } = req const collection = getRequestCollection(req) const currentToken = extractJWT(req) + const depthFromSearchParams = searchParams.get('depth') + const draftFromSearchParams = searchParams.get('depth') + + const { + depth: depthFromQuery, + draft: draftFromQuery, + joins, + populate, + select, + } = req.query as { + depth?: string + draft?: string + joins?: JoinParams + populate?: Record + select?: Record + } + + const depth = depthFromQuery || depthFromSearchParams + const draft = draftFromQuery || draftFromSearchParams const result = await meOperation({ collection, currentToken: currentToken!, + depth: isNumber(depth) ? 
Number(depth) : undefined, + draft: draft === 'true', + joins: sanitizeJoinParams(joins), + populate: sanitizePopulateParam(populate), req, + select: sanitizeSelectParam(select), }) if (collection.config.auth.removeTokenFromResponses) { diff --git a/packages/payload/src/auth/operations/me.ts b/packages/payload/src/auth/operations/me.ts index a5977dd1f5..bc6f96468e 100644 --- a/packages/payload/src/auth/operations/me.ts +++ b/packages/payload/src/auth/operations/me.ts @@ -2,7 +2,7 @@ import { decodeJwt } from 'jose' import type { Collection } from '../../collections/config/types.js' import type { TypedUser } from '../../index.js' -import type { PayloadRequest } from '../../types/index.js' +import type { JoinQuery, PayloadRequest, PopulateType, SelectType } from '../../types/index.js' import type { ClientUser } from '../types.js' export type MeOperationResult = { @@ -22,11 +22,16 @@ export type MeOperationResult = { export type Arguments = { collection: Collection currentToken?: string + depth?: number + draft?: boolean + joins?: JoinQuery + populate?: PopulateType req: PayloadRequest + select?: SelectType } export const meOperation = async (args: Arguments): Promise => { - const { collection, currentToken, req } = args + const { collection, currentToken, depth, draft, joins, populate, req, select } = args let result: MeOperationResult = { user: null!, @@ -39,9 +44,13 @@ export const meOperation = async (args: Arguments): Promise = const user = (await req.payload.findByID({ id: req.user.id, collection: collection.config.slug, - depth: isGraphQL ? 0 : collection.config.auth.depth, + depth: isGraphQL ? 0 : (depth ?? collection.config.auth.depth), + draft, + joins, overrideAccess: false, + populate, req, + select, showHiddenFields: false, })) as TypedUser diff --git a/test/select/collections/Users/index.ts b/test/select/collections/Users/index.ts new file mode 100644 index 0000000000..ef2f2a95bb --- /dev/null +++ b/test/select/collections/Users/index.ts @@ -0,0 +1,21 @@ +import type { CollectionConfig } from 'payload' + +export const UsersCollection: CollectionConfig = { + slug: 'users', + admin: { + useAsTitle: 'email', + }, + auth: true, + fields: [ + { + name: 'name', + type: 'text', + defaultValue: 'Payload dev', + }, + { + name: 'number', + type: 'number', + defaultValue: 42, + }, + ], +} diff --git a/test/select/config.ts b/test/select/config.ts index e1de30954b..280946aa51 100644 --- a/test/select/config.ts +++ b/test/select/config.ts @@ -15,6 +15,7 @@ import { LocalizedPostsCollection } from './collections/LocalizedPosts/index.js' import { Pages } from './collections/Pages/index.js' import { Points } from './collections/Points/index.js' import { PostsCollection } from './collections/Posts/index.js' +import { UsersCollection } from './collections/Users/index.js' import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' const filename = fileURLToPath(import.meta.url) @@ -42,6 +43,7 @@ export default buildConfigWithDefaults({ fields: [], }, CustomID, + UsersCollection, ], globals: [ { diff --git a/test/select/int.spec.ts b/test/select/int.spec.ts index 851c5d228e..6d591ea89d 100644 --- a/test/select/int.spec.ts +++ b/test/select/int.spec.ts @@ -1,6 +1,8 @@ +import type { Payload } from 'payload' + import { randomUUID } from 'crypto' import path from 'path' -import { deepCopyObject, type Payload } from 'payload' +import { deepCopyObject } from 'payload' import { assert } from 'ts-essentials' import { fileURLToPath } from 'url' @@ -13,9 +15,11 @@ import type { Page, 
Point, Post, + User, VersionedPost, } from './payload-types.js' +import { devUser } from '../credentials.js' import { initPayloadInt } from '../helpers/initPayloadInt.js' let payload: Payload @@ -1970,6 +1974,64 @@ describe('Select', () => { }) }) + describe('REST API - Logged in', () => { + let token: string | undefined + let loggedInUser: undefined | User + + beforeAll(async () => { + const response = await restClient.POST(`/users/login`, { + body: JSON.stringify({ + email: devUser.email, + password: devUser.password, + }), + }) + + const data = await response.json() + + token = data.token + loggedInUser = data.user + }) + + it('should return only select fields in user from /me', async () => { + const response = await restClient.GET(`/users/me`, { + headers: { + Authorization: `JWT ${token}`, + }, + query: { + depth: 0, + select: { + name: true, + } satisfies Config['collectionsSelect']['users'], + }, + }) + + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.user.name).toBeDefined() + expect(data.user.email).not.toBeDefined() + expect(data.user.number).not.toBeDefined() + }) + + it('should return all fields by default in user from /me', async () => { + const response = await restClient.GET(`/users/me`, { + headers: { + Authorization: `JWT ${token}`, + }, + query: { + depth: 0, + }, + }) + + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.user.email).toBeDefined() + expect(data.user.name).toBeDefined() + expect(data.user.number).toBeDefined() + }) + }) + describe('populate / defaultPopulate', () => { let homePage: Page let aboutPage: Page diff --git a/test/select/payload-types.ts b/test/select/payload-types.ts index 399834aee8..7aa1b9136e 100644 --- a/test/select/payload-types.ts +++ b/test/select/payload-types.ts @@ -100,7 +100,7 @@ export interface Config { 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; }; db: { - defaultIDType: string; + defaultIDType: number; }; globals: { 'global-post': GlobalPost; @@ -142,7 +142,7 @@ export interface UserAuthOperations { * via the `definition` "posts". */ export interface Post { - id: string; + id: number; text?: string | null; number?: number | null; select?: ('a' | 'b') | null; @@ -182,17 +182,17 @@ export interface Post { }; unnamedTabText?: string | null; unnamedTabNumber?: number | null; - hasOne?: (string | null) | Rel; - hasMany?: (string | Rel)[] | null; - hasManyUpload?: (string | Upload)[] | null; + hasOne?: (number | null) | Rel; + hasMany?: (number | Rel)[] | null; + hasManyUpload?: (number | Upload)[] | null; hasOnePoly?: { relationTo: 'rels'; - value: string | Rel; + value: number | Rel; } | null; hasManyPoly?: | { relationTo: 'rels'; - value: string | Rel; + value: number | Rel; }[] | null; updatedAt: string; @@ -203,7 +203,7 @@ export interface Post { * via the `definition` "rels". */ export interface Rel { - id: string; + id: number; updatedAt: string; createdAt: string; } @@ -212,7 +212,7 @@ export interface Rel { * via the `definition` "upload". */ export interface Upload { - id: string; + id: number; updatedAt: string; createdAt: string; url?: string | null; @@ -230,7 +230,7 @@ export interface Upload { * via the `definition` "localized-posts". */ export interface LocalizedPost { - id: string; + id: number; text?: string | null; number?: number | null; select?: ('a' | 'b') | null; @@ -301,7 +301,7 @@ export interface LocalizedPost { * via the `definition` "versioned-posts". 
*/ export interface VersionedPost { - id: string; + id: number; text?: string | null; number?: number | null; array?: @@ -327,7 +327,7 @@ export interface VersionedPost { * via the `definition` "deep-posts". */ export interface DeepPost { - id: string; + id: number; group?: { array?: | { @@ -369,22 +369,22 @@ export interface DeepPost { * via the `definition` "pages". */ export interface Page { - id: string; - relatedPage?: (string | null) | Page; + id: number; + relatedPage?: (number | null) | Page; content?: | { title: string; link: { docPoly?: { relationTo: 'pages'; - value: string | Page; + value: number | Page; } | null; - doc?: (string | null) | Page; - docMany?: (string | Page)[] | null; + doc?: (number | null) | Page; + docMany?: (number | Page)[] | null; docHasManyPoly?: | { relationTo: 'pages'; - value: string | Page; + value: number | Page; }[] | null; label: string; @@ -440,7 +440,7 @@ export interface Page { * via the `definition` "points". */ export interface Point { - id: string; + id: number; text?: string | null; /** * @minItems 2 @@ -455,7 +455,7 @@ export interface Point { * via the `definition` "force-select". */ export interface ForceSelect { - id: string; + id: number; text?: string | null; forceSelected?: string | null; array?: @@ -482,7 +482,9 @@ export interface CustomId { * via the `definition` "users". */ export interface User { - id: string; + id: number; + name?: string | null; + number?: number | null; updatedAt: string; createdAt: string; email: string; @@ -492,6 +494,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -499,43 +508,43 @@ export interface User { * via the `definition` "payload-locked-documents". */ export interface PayloadLockedDocument { - id: string; + id: number; document?: | ({ relationTo: 'posts'; - value: string | Post; + value: number | Post; } | null) | ({ relationTo: 'localized-posts'; - value: string | LocalizedPost; + value: number | LocalizedPost; } | null) | ({ relationTo: 'versioned-posts'; - value: string | VersionedPost; + value: number | VersionedPost; } | null) | ({ relationTo: 'deep-posts'; - value: string | DeepPost; + value: number | DeepPost; } | null) | ({ relationTo: 'pages'; - value: string | Page; + value: number | Page; } | null) | ({ relationTo: 'points'; - value: string | Point; + value: number | Point; } | null) | ({ relationTo: 'force-select'; - value: string | ForceSelect; + value: number | ForceSelect; } | null) | ({ relationTo: 'upload'; - value: string | Upload; + value: number | Upload; } | null) | ({ relationTo: 'rels'; - value: string | Rel; + value: number | Rel; } | null) | ({ relationTo: 'custom-ids'; @@ -543,12 +552,12 @@ export interface PayloadLockedDocument { } | null) | ({ relationTo: 'users'; - value: string | User; + value: number | User; } | null); globalSlug?: string | null; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; updatedAt: string; createdAt: string; @@ -558,10 +567,10 @@ export interface PayloadLockedDocument { * via the `definition` "payload-preferences". */ export interface PayloadPreference { - id: string; + id: number; user: { relationTo: 'users'; - value: string | User; + value: number | User; }; key?: string | null; value?: @@ -581,7 +590,7 @@ export interface PayloadPreference { * via the `definition` "payload-migrations". 
*/ export interface PayloadMigration { - id: string; + id: number; name?: string | null; batch?: number | null; updatedAt: string; @@ -917,6 +926,8 @@ export interface CustomIdsSelect { * via the `definition` "users_select". */ export interface UsersSelect { + name?: T; + number?: T; updatedAt?: T; createdAt?: T; email?: T; @@ -926,6 +937,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema @@ -964,7 +982,7 @@ export interface PayloadMigrationsSelect { * via the `definition` "global-post". */ export interface GlobalPost { - id: string; + id: number; text?: string | null; number?: number | null; updatedAt?: string | null; @@ -975,7 +993,7 @@ export interface GlobalPost { * via the `definition` "force-select-global". */ export interface ForceSelectGlobal { - id: string; + id: number; text?: string | null; forceSelected?: string | null; array?: From f63dfad565db14827ea203851b746f64a00eab9a Mon Sep 17 00:00:00 2001 From: Paul Date: Thu, 10 Jul 2025 19:01:55 +0100 Subject: [PATCH 013/143] fix(ui): ensure that schedule publishing time picker can only be in the future (#13128) Previously you could've selected a date and time in the past to schedule publish. Now we ensure that there is a minimum time and date for scheduled publishing date picker. Additionally updated the disabled items to be more visually obvious that they are disabled: image --- packages/ui/src/elements/DatePicker/index.scss | 17 +++++++++++++++++ .../PublishButton/ScheduleDrawer/index.tsx | 13 ++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/packages/ui/src/elements/DatePicker/index.scss b/packages/ui/src/elements/DatePicker/index.scss index c8ce001343..1f7e0b2a1e 100644 --- a/packages/ui/src/elements/DatePicker/index.scss +++ b/packages/ui/src/elements/DatePicker/index.scss @@ -330,6 +330,16 @@ $cal-icon-width: 18px; border-radius: 0; } + .react-datepicker__month .react-datepicker__day { + &.react-datepicker__day--disabled { + color: var(--theme-elevation-200); + + &:hover { + background: none; + } + } + } + .react-datepicker__navigation--next--with-time:not( .react-datepicker__navigation--next--with-today-button ) { @@ -343,6 +353,13 @@ $cal-icon-width: 18px; li.react-datepicker__time-list-item { line-height: 20px; font-size: base(0.5); + + &.react-datepicker__time-list-item--disabled { + color: var(--theme-elevation-200); + &:hover { + background: none; + } + } } &__appearance--dayOnly, diff --git a/packages/ui/src/elements/PublishButton/ScheduleDrawer/index.tsx b/packages/ui/src/elements/PublishButton/ScheduleDrawer/index.tsx index 9fae1591a9..7f140b0b9c 100644 --- a/packages/ui/src/elements/PublishButton/ScheduleDrawer/index.tsx +++ b/packages/ui/src/elements/PublishButton/ScheduleDrawer/index.tsx @@ -6,6 +6,7 @@ import type { Column, SchedulePublish, Where } from 'payload' import { TZDateMini as TZDate } from '@date-fns/tz/date/mini' import { useModal } from '@faceless-ui/modal' import { getTranslation } from '@payloadcms/translations' +import { endOfToday, isToday, startOfDay } from 'date-fns' import { transpose } from 'date-fns/transpose' import * as qs from 'qs-esm' import React, { useCallback, useMemo } from 'react' @@ -28,8 +29,8 @@ import { DatePickerField } from '../../DatePicker/index.js' import { Drawer } from '../../Drawer/index.js' import { Gutter } from '../../Gutter/index.js' import { ReactSelect } from 
'../../ReactSelect/index.js' -import { ShimmerEffect } from '../../ShimmerEffect/index.js' import './index.scss' +import { ShimmerEffect } from '../../ShimmerEffect/index.js' import { Table } from '../../Table/index.js' import { TimezonePicker } from '../../TimezonePicker/index.js' import { buildUpcomingColumns } from './buildUpcomingColumns.js' @@ -290,6 +291,14 @@ export const ScheduleDrawer: React.FC = ({ slug, defaultType, schedulePub } }, [upcoming, fetchUpcoming]) + const minTime = useMemo(() => { + if (date && isToday(date)) { + return new Date() + } + + return startOfDay(new Date()) + }, [date]) + return ( = ({ slug, defaultType, schedulePub onChangeDate(e)} pickerAppearance="dayAndTime" readOnly={processing} From b3a994ed6f74dbf9d37e9a61c62886a08fea1419 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:55:46 -0400 Subject: [PATCH 014/143] feat(plugin-import-export): show delayed toast when export download takes time (#13126) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What? Added a delayed toast message to indicate when an export is being processed, and disabled the download button unless the export form has been modified. ### Why? Previously, there was no feedback during longer export operations, which could confuse users if the request took time to complete. Also, the download button was always enabled, even when the form had not been modified — which could lead to unnecessary exports. ### How? - Introduced a 200ms delay before showing a "Your export is being processed..." toast - Automatically dismisses the toast once the download completes or fails - Hooked into `useFormModified` to: - Track whether the export form has been changed - Disable the download button when the form is unmodified - Reset the modified state after triggering a download --- .../src/components/ExportSaveButton/index.tsx | 37 +++++++++++++++++-- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/packages/plugin-import-export/src/components/ExportSaveButton/index.tsx b/packages/plugin-import-export/src/components/ExportSaveButton/index.tsx index e5e0e71846..4d1dbbc2fd 100644 --- a/packages/plugin-import-export/src/components/ExportSaveButton/index.tsx +++ b/packages/plugin-import-export/src/components/ExportSaveButton/index.tsx @@ -1,6 +1,15 @@ 'use client' -import { Button, SaveButton, Translation, useConfig, useForm, useTranslation } from '@payloadcms/ui' +import { + Button, + SaveButton, + toast, + Translation, + useConfig, + useForm, + useFormModified, + useTranslation, +} from '@payloadcms/ui' import React from 'react' import type { @@ -17,13 +26,24 @@ export const ExportSaveButton: React.FC = () => { }, } = useConfig() - const { getData } = useForm() + const { getData, setModified } = useForm() + const modified = useFormModified() const label = t('general:save') const handleDownload = async () => { + let timeoutID: null | ReturnType = null + let toastID: null | number | string = null + try { + setModified(false) // Reset modified state const data = getData() + + // Set a timeout to show toast if the request takes longer than 200ms + timeoutID = setTimeout(() => { + toastID = toast.success('Your export is being processed...') + }, 200) + const response = await fetch(`${serverURL}${api}/exports/download`, { body: JSON.stringify({ data, @@ -35,6 +55,16 @@ export const ExportSaveButton: React.FC = () => { method: 'POST', }) + // Clear the timeout if fetch completes quickly + if 
(timeoutID) { + clearTimeout(timeoutID) + } + + // Dismiss the toast if it was shown + if (toastID) { + toast.dismiss(toastID) + } + if (!response.ok) { throw new Error('Failed to download file') } @@ -63,13 +93,14 @@ export const ExportSaveButton: React.FC = () => { URL.revokeObjectURL(url) } catch (error) { console.error('Error downloading file:', error) + toast.error('Error downloading file') } } return ( - From c1bad0115af35236487f37231b885c50540e360a Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 10 Jul 2025 15:02:05 -0400 Subject: [PATCH 015/143] fix(plugin-import-export): sync export field selection with list view columns from query columns (#13131) ### What? Updated the `FieldsToExport` component to use the current list view query (`query.columns`) instead of saved preferences to determine which fields to export. ### Why? Previously, the export field selection was based on collection preferences, which are only updated on page reload. This caused stale or incorrect field sets to be exported if the user changed visible columns without refreshing. ### How? - Replaced `getPreference` usage with `useListQuery` to access `query.columns` - Filtered out excluded fields (those prefixed with `-`) to get only the visible columns - Fallbacks to `defaultColumns` if `query.columns` is not available --- .../src/components/FieldsToExport/index.tsx | 33 ++++++++----------- 1 file changed, 14 insertions(+), 19 deletions(-) diff --git a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx index fda7a26896..45fd172455 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx @@ -1,6 +1,6 @@ 'use client' -import type { CollectionPreferences, SelectFieldClientComponent } from 'payload' +import type { SelectFieldClientComponent } from 'payload' import type { ReactNode } from 'react' import { @@ -9,7 +9,7 @@ import { useConfig, useDocumentInfo, useField, - usePreferences, + useListQuery, } from '@payloadcms/ui' import React, { useEffect } from 'react' @@ -24,7 +24,7 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { const { value: collectionSlug } = useField({ path: 'collectionSlug' }) const { getEntityConfig } = useConfig() const { collection } = useImportExport() - const { getPreference } = usePreferences() + const { query } = useListQuery() const collectionConfig = getEntityConfig({ collectionSlug: collectionSlug ?? collection }) const fieldOptions = reduceFields({ fields: collectionConfig?.fields }) @@ -34,24 +34,19 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { return } - const doAsync = async () => { - const currentPreferences = await getPreference<{ - columns: CollectionPreferences['columns'] - }>(`collection-${collectionSlug}`) + const queryColumns = query?.columns - const columns = currentPreferences?.columns?.filter((a) => a.active).map((b) => b.accessor) - setValue(columns ?? collectionConfig?.admin?.defaultColumns ?? []) + if (Array.isArray(queryColumns)) { + const cleanColumns = queryColumns.filter( + (col): col is string => typeof col === 'string' && !col.startsWith('-'), + ) + // If columns are specified in the query, use them + setValue(cleanColumns) + } else { + // Fallback if no columns in query + setValue(collectionConfig?.admin?.defaultColumns ?? 
[]) } - - void doAsync() - }, [ - getPreference, - collection, - setValue, - collectionSlug, - id, - collectionConfig?.admin?.defaultColumns, - ]) + }, [id, collectionSlug, query?.columns, collectionConfig?.admin?.defaultColumns, setValue]) const onChange = (options: { id: string; label: ReactNode; value: string }[]) => { if (!options) { From 19a336797253cb4b16263452f1315223bcf7ee33 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Fri, 11 Jul 2025 10:26:48 -0400 Subject: [PATCH 016/143] fix(ui): monomorphic joins tables not fetching draft documents (#13139) Monomorphic join fields were not using the `draft` argument when fetching documents to display in the table. This change makes the join field treatment of drafts consistent with the `relationship` type fields. Added e2e test to cover. --- packages/ui/src/utilities/buildTableState.ts | 1 + test/joins/collections/CategoriesVersions.ts | 8 +-- test/joins/collections/Versions.ts | 2 +- test/joins/e2e.spec.ts | 51 +++++++++++++++++++- test/joins/payload-types.ts | 2 + test/joins/shared.ts | 2 + 6 files changed, 61 insertions(+), 5 deletions(-) diff --git a/packages/ui/src/utilities/buildTableState.ts b/packages/ui/src/utilities/buildTableState.ts index 3852fda418..e2b436f9c6 100644 --- a/packages/ui/src/utilities/buildTableState.ts +++ b/packages/ui/src/utilities/buildTableState.ts @@ -214,6 +214,7 @@ const buildTableState = async ( data = await payload.find({ collection: collectionSlug, depth: 0, + draft: true, limit: query?.limit ? parseInt(query.limit, 10) : undefined, locale: req.locale, overrideAccess: false, diff --git a/test/joins/collections/CategoriesVersions.ts b/test/joins/collections/CategoriesVersions.ts index 74a150aee7..5c86a862ef 100644 --- a/test/joins/collections/CategoriesVersions.ts +++ b/test/joins/collections/CategoriesVersions.ts @@ -1,12 +1,14 @@ import type { CollectionConfig } from 'payload' -import { versionsSlug } from './Versions.js' - -export const categoriesVersionsSlug = 'categories-versions' +import { categoriesVersionsSlug, versionsSlug } from '../shared.js' export const CategoriesVersions: CollectionConfig = { slug: categoriesVersionsSlug, fields: [ + { + name: 'title', + type: 'text', + }, { name: 'relatedVersions', type: 'join', diff --git a/test/joins/collections/Versions.ts b/test/joins/collections/Versions.ts index b30ba583b2..f824f1a87c 100644 --- a/test/joins/collections/Versions.ts +++ b/test/joins/collections/Versions.ts @@ -1,6 +1,6 @@ import type { CollectionConfig } from 'payload' -export const versionsSlug = 'versions' +import { versionsSlug } from '../shared.js' export const Versions: CollectionConfig = { slug: versionsSlug, diff --git a/test/joins/e2e.spec.ts b/test/joins/e2e.spec.ts index 9aaf8edd6e..19931b5464 100644 --- a/test/joins/e2e.spec.ts +++ b/test/joins/e2e.spec.ts @@ -22,7 +22,14 @@ import { initPayloadE2ENoConfig } from '../helpers/initPayloadE2ENoConfig.js' import { reInitializeDB } from '../helpers/reInitializeDB.js' import { RESTClient } from '../helpers/rest.js' import { EXPECT_TIMEOUT, TEST_TIMEOUT_LONG } from '../playwright.config.js' -import { categoriesJoinRestrictedSlug, categoriesSlug, postsSlug, uploadsSlug } from './shared.js' +import { + categoriesJoinRestrictedSlug, + categoriesSlug, + categoriesVersionsSlug, + postsSlug, + uploadsSlug, + versionsSlug, +} from './shared.js' const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) @@ -39,6 +46,8 @@ describe('Join Field', () => { let 
foldersURL: AdminUrlUtil let uploadsURL: AdminUrlUtil let categoriesJoinRestrictedURL: AdminUrlUtil + let categoriesVersionsURL: AdminUrlUtil + let versionsURL: AdminUrlUtil let categoryID: number | string let rootFolderID: number | string @@ -53,6 +62,8 @@ describe('Join Field', () => { uploadsURL = new AdminUrlUtil(serverURL, uploadsSlug) categoriesJoinRestrictedURL = new AdminUrlUtil(serverURL, categoriesJoinRestrictedSlug) foldersURL = new AdminUrlUtil(serverURL, 'folders') + categoriesVersionsURL = new AdminUrlUtil(serverURL, categoriesVersionsSlug) + versionsURL = new AdminUrlUtil(serverURL, versionsSlug) const context = await browser.newContext() page = await context.newPage() @@ -543,4 +554,42 @@ describe('Join Field', () => { await changeLocale(page, 'es') await expect(localizedTextCell).toHaveText('Text in es') }) + + test('should fetch draft documents in joins', async () => { + // create category-versions document + const categoryVersionsDoc = await payload.create({ + collection: categoriesVersionsSlug, + data: { + title: 'Category Versions', + }, + }) + + // create versions document + const versionDoc = await payload.create({ + collection: versionsSlug, + data: { + title: 'Version 1', + categoryVersion: categoryVersionsDoc.id, + }, + }) + + // update versions document with draft data + await payload.update({ + id: versionDoc.id, + collection: versionsSlug, + data: { + title: 'Version 1 - Draft', + }, + draft: true, + }) + + await page.goto(categoriesVersionsURL.edit(categoryVersionsDoc.id)) + const joinField = page.locator('#field-relatedVersions.field-type.join') + await expect(joinField).toBeVisible() + await expect(joinField.locator('.relationship-table table')).toBeVisible() + const row = joinField.locator('.relationship-table tbody tr.row-1') + await expect(row).toBeVisible() + const versionsRowTitle = row.locator('.cell-title span') + await expect(versionsRowTitle).toHaveText('Version 1 - Draft') + }) }) diff --git a/test/joins/payload-types.ts b/test/joins/payload-types.ts index 4fca14e717..18b97da0e2 100644 --- a/test/joins/payload-types.ts +++ b/test/joins/payload-types.ts @@ -519,6 +519,7 @@ export interface Version { */ export interface CategoriesVersion { id: string; + title?: string | null; relatedVersions?: { docs?: (string | Version)[]; hasNextPage?: boolean; @@ -1142,6 +1143,7 @@ export interface VersionsSelect { * via the `definition` "categories-versions_select". 
*/ export interface CategoriesVersionsSelect { + title?: T; relatedVersions?: T; relatedVersionsMany?: T; updatedAt?: T; diff --git a/test/joins/shared.ts b/test/joins/shared.ts index bd936b5ce4..d08c9e8f7f 100644 --- a/test/joins/shared.ts +++ b/test/joins/shared.ts @@ -19,6 +19,8 @@ export const categoriesJoinRestrictedSlug = 'categories-join-restricted' export const collectionRestrictedSlug = 'collection-restricted' export const restrictedCategoriesSlug = 'restricted-categories' +export const categoriesVersionsSlug = 'categories-versions' +export const versionsSlug = 'versions' export const collectionSlugs = [ categoriesSlug, From 5695d22a46f78fceb3fda6090730118e40f678b4 Mon Sep 17 00:00:00 2001 From: Jessica Rynkar <67977755+jessrynkar@users.noreply.github.com> Date: Fri, 11 Jul 2025 16:56:55 +0100 Subject: [PATCH 017/143] fix: execute mimetype validation on the file buffer data (#13117) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What Introduces an additional `mimeType` validation based on the actual file data to ensure the uploaded file matches the allowed `mimeTypes` defined in the upload config. ### Why? The current validation relies on the file extension, which can be easily manipulated. For example, if only PDFs are allowed, a JPEG renamed to `image.pdf` would bypass the check and be accepted. This change prevents such cases by verifying the true MIME type. ### How? Performs a secondary validation using the file’s binary data (buffer), providing a more reliable MIME type check. Fixes #12905 --- .../src/uploads/checkFileRestrictions.ts | 48 +++++++++++++----- .../payload/src/uploads/generateFileData.ts | 2 +- .../uploads/collections/FileMimeType/index.ts | 19 +++++++ test/uploads/config.ts | 2 + test/uploads/e2e.spec.ts | 13 +++++ test/uploads/image-as-pdf.pdf | Bin 0 -> 19572 bytes test/uploads/int.spec.ts | 4 +- test/uploads/shared.ts | 2 + 8 files changed, 73 insertions(+), 17 deletions(-) create mode 100644 test/uploads/collections/FileMimeType/index.ts create mode 100644 test/uploads/image-as-pdf.pdf diff --git a/packages/payload/src/uploads/checkFileRestrictions.ts b/packages/payload/src/uploads/checkFileRestrictions.ts index e4d19bd91c..c3088e7b10 100644 --- a/packages/payload/src/uploads/checkFileRestrictions.ts +++ b/packages/payload/src/uploads/checkFileRestrictions.ts @@ -1,6 +1,9 @@ +import { fileTypeFromBuffer } from 'file-type' + import type { checkFileRestrictionsParams, FileAllowList } from './types.js' -import { APIError } from '../errors/index.js' +import { ValidationError } from '../errors/index.js' +import { validateMimeType } from '../utilities/validateMimeType.js' /** * Restricted file types and their extensions. @@ -39,11 +42,12 @@ export const RESTRICTED_FILE_EXT_AND_TYPES: FileAllowList = [ { extensions: ['command'], mimeType: 'application/x-command' }, ] -export const checkFileRestrictions = ({ +export const checkFileRestrictions = async ({ collection, file, req, -}: checkFileRestrictionsParams): void => { +}: checkFileRestrictionsParams): Promise => { + const errors: string[] = [] const { upload: uploadConfig } = collection const configMimeTypes = uploadConfig && @@ -58,20 +62,36 @@ export const checkFileRestrictions = ({ ? 
(uploadConfig as { allowRestrictedFileTypes?: boolean }).allowRestrictedFileTypes : false - // Skip validation if `mimeTypes` are defined in the upload config, or `allowRestrictedFileTypes` are allowed - if (allowRestrictedFileTypes || configMimeTypes.length) { + // Skip validation if `allowRestrictedFileTypes` is true + if (allowRestrictedFileTypes) { return } - const isRestricted = RESTRICTED_FILE_EXT_AND_TYPES.some((type) => { - const hasRestrictedExt = type.extensions.some((ext) => file.name.toLowerCase().endsWith(ext)) - const hasRestrictedMime = type.mimeType === file.mimetype - return hasRestrictedExt || hasRestrictedMime - }) + // Secondary mimetype check to assess file type from buffer + if (configMimeTypes.length > 0) { + const detected = await fileTypeFromBuffer(file.data) + const passesMimeTypeCheck = detected?.mime && validateMimeType(detected.mime, configMimeTypes) - if (isRestricted) { - const errorMessage = `File type '${file.mimetype}' not allowed ${file.name}: Restricted file type detected -- set 'allowRestrictedFileTypes' to true to skip this check for this Collection.` - req.payload.logger.error(errorMessage) - throw new APIError(errorMessage) + if (detected && !passesMimeTypeCheck) { + errors.push(`Invalid MIME type: ${detected.mime}.`) + } + } else { + const isRestricted = RESTRICTED_FILE_EXT_AND_TYPES.some((type) => { + const hasRestrictedExt = type.extensions.some((ext) => file.name.toLowerCase().endsWith(ext)) + const hasRestrictedMime = type.mimeType === file.mimetype + return hasRestrictedExt || hasRestrictedMime + }) + if (isRestricted) { + errors.push( + `File type '${file.mimetype}' not allowed ${file.name}: Restricted file type detected -- set 'allowRestrictedFileTypes' to true to skip this check for this Collection.`, + ) + } + } + + if (errors.length > 0) { + req.payload.logger.error(errors.join(', ')) + throw new ValidationError({ + errors: [{ message: errors.join(', '), path: 'file' }], + }) } } diff --git a/packages/payload/src/uploads/generateFileData.ts b/packages/payload/src/uploads/generateFileData.ts index 92e3cf82c0..2a40765b74 100644 --- a/packages/payload/src/uploads/generateFileData.ts +++ b/packages/payload/src/uploads/generateFileData.ts @@ -123,7 +123,7 @@ export const generateFileData = async ({ } } - checkFileRestrictions({ + await checkFileRestrictions({ collection: collectionConfig, file, req, diff --git a/test/uploads/collections/FileMimeType/index.ts b/test/uploads/collections/FileMimeType/index.ts new file mode 100644 index 0000000000..9caf2e1f26 --- /dev/null +++ b/test/uploads/collections/FileMimeType/index.ts @@ -0,0 +1,19 @@ +import type { CollectionConfig } from 'payload' + +import { fileMimeTypeSlug } from '../../shared.js' + +export const FileMimeType: CollectionConfig = { + slug: fileMimeTypeSlug, + admin: { + useAsTitle: 'title', + }, + upload: { + mimeTypes: ['application/pdf'], + }, + fields: [ + { + type: 'text', + name: 'title', + }, + ], +} diff --git a/test/uploads/config.ts b/test/uploads/config.ts index 5fc11af481..3349d785a9 100644 --- a/test/uploads/config.ts +++ b/test/uploads/config.ts @@ -13,6 +13,7 @@ import { AdminThumbnailWithSearchQueries } from './collections/AdminThumbnailWit import { AdminUploadControl } from './collections/AdminUploadControl/index.js' import { BulkUploadsCollection } from './collections/BulkUploads/index.js' import { CustomUploadFieldCollection } from './collections/CustomUploadField/index.js' +import { FileMimeType } from './collections/FileMimeType/index.js' import { 
SimpleRelationshipCollection } from './collections/SimpleRelationship/index.js' import { Uploads1 } from './collections/Upload1/index.js' import { Uploads2 } from './collections/Upload2/index.js' @@ -908,6 +909,7 @@ export default buildConfigWithDefaults({ }, BulkUploadsCollection, SimpleRelationshipCollection, + FileMimeType, ], onInit: async (payload) => { const uploadsDir = path.resolve(dirname, './media') diff --git a/test/uploads/e2e.spec.ts b/test/uploads/e2e.spec.ts index 697a61ec8e..22f3a1d2c4 100644 --- a/test/uploads/e2e.spec.ts +++ b/test/uploads/e2e.spec.ts @@ -32,6 +32,7 @@ import { constructorOptionsSlug, customFileNameMediaSlug, customUploadFieldSlug, + fileMimeTypeSlug, focalOnlySlug, hideFileInputOnCreateSlug, imageSizesOnlySlug, @@ -84,6 +85,7 @@ let consoleErrorsFromPage: string[] = [] let collectErrorsFromPage: () => boolean let stopCollectingErrorsFromPage: () => boolean let bulkUploadsURL: AdminUrlUtil +let fileMimeTypeURL: AdminUrlUtil describe('Uploads', () => { let page: Page @@ -122,6 +124,7 @@ describe('Uploads', () => { threeDimensionalURL = new AdminUrlUtil(serverURL, threeDimensionalSlug) constructorOptionsURL = new AdminUrlUtil(serverURL, constructorOptionsSlug) bulkUploadsURL = new AdminUrlUtil(serverURL, bulkUploadsSlug) + fileMimeTypeURL = new AdminUrlUtil(serverURL, fileMimeTypeSlug) const context = await browser.newContext() page = await context.newPage() @@ -1578,4 +1581,14 @@ describe('Uploads', () => { await expect(filename).toHaveValue('animated.webp') await saveDocAndAssert(page, '#action-save', 'error') }) + + test('should prevent invalid mimetype disguised as valid mimetype', async () => { + await page.goto(fileMimeTypeURL.create) + await page.setInputFiles('input[type="file"]', path.resolve(dirname, './image-as-pdf.pdf')) + + const filename = page.locator('.file-field__filename') + await expect(filename).toHaveValue('image-as-pdf.pdf') + + await saveDocAndAssert(page, '#action-save', 'error') + }) }) diff --git a/test/uploads/image-as-pdf.pdf b/test/uploads/image-as-pdf.pdf new file mode 100644 index 0000000000000000000000000000000000000000..3a0303dcf8bf9cdfe2d715bcd2775f6de4527467 GIT binary patch literal 19572 zcmeI2Sx6O89LCR?nd`mY0v)%OmvXCXLEzH*a`9@WLWs6mPieb8X_*xSYI`qYw$p>Q zSVkaLw)oKI*|IK%P=R1E!Yt~*f z&-j?Q7{(>eV)cV*uUHi09O$usqXt4vb4ta7c6&0vfhhR0?jKQ76N z!OduLH&4=UsPJSGmt{$j4F*Ng@7}9lXNqd@@(+!4^G-}L24whzLm(x7XSxKpH8>8aY>E^BOglu->(*kGx5UFYq(O7l;Gr z3&pVjI|3jAI|B0nI|6Zl9iceD&PcEs^91Y&%meHQ!~u4M;s85i@&=nMv1=Ti1Yk#~ z5!wr)b`v&P*kn1^Gz4~l9p{{*w~>JzV8=Q006V~rbIt*FL_v@hp|>NLS-_4!9Q1aC z;vnsyw<9(P31(nNC>(k_q;vlJv|~vhR1-RiCg!7^j*_a{!olS>!7fb1Eb%pKZkooj z@(1cC=6k8ZfpmL_wY|yd6fE>izh7Bjvc|4K_`kdX;i<_8$CJyH2T((3BLg+$Is()H zHKaMHg&(M)0vAw2O(!!0YA9`xYKWCv?-e7}5ZVAW)N}%BsD)eqh6^nNN*tgDsfJ*! 
zppk`CLtz-GA>9Pj05zmJ$n7{#1Jsb_K&qk40n`9Bf&fQN9Ac&+)sSvNssU<9a}d)B Ty&9T7Kn>|8poT&X(^`H34rQyQ literal 0 HcmV?d00001 diff --git a/test/uploads/int.spec.ts b/test/uploads/int.spec.ts index ecebd7bfc8..ff9ec56811 100644 --- a/test/uploads/int.spec.ts +++ b/test/uploads/int.spec.ts @@ -684,8 +684,8 @@ describe('Collections - Uploads', () => { }), ).rejects.toThrow( expect.objectContaining({ - name: 'APIError', - message: `File type 'text/html' not allowed ${file.name}: Restricted file type detected -- set 'allowRestrictedFileTypes' to true to skip this check for this Collection.`, + name: 'ValidationError', + message: `The following field is invalid: file`, }), ) }) diff --git a/test/uploads/shared.ts b/test/uploads/shared.ts index 5cb8c66a77..aa0e5a9f2b 100644 --- a/test/uploads/shared.ts +++ b/test/uploads/shared.ts @@ -35,3 +35,5 @@ export const listViewPreviewSlug = 'list-view-preview' export const threeDimensionalSlug = 'three-dimensional' export const constructorOptionsSlug = 'constructor-options' export const bulkUploadsSlug = 'bulk-uploads' + +export const fileMimeTypeSlug = 'file-mime-type' From 06ef798653fc266af3f6da4b7ee14b92844963a3 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Fri, 11 Jul 2025 12:19:11 -0400 Subject: [PATCH 018/143] fix(ui): ensure buildFormStateHandler throws error instead of returning null for unauthorized requests (#13123) ### What? Prevents `buildFormStateHandler` from returning `null` in unauthorized scenarios by throwing an explicit `Error` instead. ### Why? The `BuildFormStateResult` type does not include `null`, but previously the handler returned `null` when access was unauthorized. This caused runtime type mismatches and forced client-side workarounds (e.g. guarding destructures). By always throwing instead of returning `null`, the client code can safely assume a valid result or catch errors. Screenshot_2025-07-10_185618 ### How? - Replaced the `return null` with `throw new Error('Unauthorized')` in `buildFormStateHandler`. - Client code no longer needs to handle `null` responses from `getFormState`. 
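
A rough sketch of the consuming pattern this enables (the `args` and destructured shape below are illustrative, not the exact client code in this repo):

```ts
// Before: the result could be null, so callers had to guard before destructuring
// const result = await getFormState(args)
// if (!result) return

// After: a valid result can be assumed, and unauthorized access surfaces as a thrown error
try {
  const { state } = await getFormState(args)
  // ...use state
} catch (err) {
  if (err instanceof Error && err.message === 'Unauthorized') {
    // handle the unauthorized case explicitly (e.g. redirect to login)
  }
}
```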
--- packages/ui/src/utilities/buildFormState.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/ui/src/utilities/buildFormState.ts b/packages/ui/src/utilities/buildFormState.ts index 5da3b1ac10..2ab2fffbfb 100644 --- a/packages/ui/src/utilities/buildFormState.ts +++ b/packages/ui/src/utilities/buildFormState.ts @@ -94,7 +94,7 @@ export const buildFormStateHandler: ServerFunction< } if (err.message === 'Unauthorized') { - return null + throw new Error('Unauthorized') } return formatErrors(err) From 8a3b97c6430e38c56ef29b5f868f57fcbaf213f8 Mon Sep 17 00:00:00 2001 From: Aaron Claes <52576914+AaronClaes@users.noreply.github.com> Date: Fri, 11 Jul 2025 18:50:38 +0200 Subject: [PATCH 019/143] feat(ui): add API key visibility toggle (#13110) --- packages/ui/src/views/Edit/Auth/APIKey.tsx | 35 ++++++++++++++------- packages/ui/src/views/Edit/Auth/index.scss | 36 +++++++++++++++++++--- 2 files changed, 55 insertions(+), 16 deletions(-) diff --git a/packages/ui/src/views/Edit/Auth/APIKey.tsx b/packages/ui/src/views/Edit/Auth/APIKey.tsx index 0f31877cb9..1d0498411c 100644 --- a/packages/ui/src/views/Edit/Auth/APIKey.tsx +++ b/packages/ui/src/views/Edit/Auth/APIKey.tsx @@ -6,10 +6,12 @@ import { text } from 'payload/shared' import React, { useEffect, useMemo, useState } from 'react' import { v4 as uuidv4 } from 'uuid' +import { Button } from '../../../elements/Button/index.js' import { CopyToClipboard } from '../../../elements/CopyToClipboard/index.js' import { GenerateConfirmation } from '../../../elements/GenerateConfirmation/index.js' import { useFormFields } from '../../../forms/Form/context.js' import { useField } from '../../../forms/useField/index.js' +import { EyeIcon } from '../../../icons/Eye/index.js' import { useConfig } from '../../../providers/Config/index.js' import { useDocumentInfo } from '../../../providers/DocumentInfo/index.js' import { useTranslation } from '../../../providers/Translation/index.js' @@ -24,6 +26,7 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b }) => { const [initialAPIKey] = useState(uuidv4()) const [highlightedField, setHighlightedField] = useState(false) + const [showKey, setShowKey] = useState(false) const { i18n, t } = useTranslation() const { config, getEntityConfig } = useConfig() const { collectionSlug } = useDocumentInfo() @@ -68,10 +71,10 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b const APIKeyLabel = useMemo( () => ( -
+
+ ), [apiKeyLabel, apiKeyValue], ) @@ -117,15 +120,25 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b
{APIKeyLabel} - +
+ +
+
+
{!readOnly && ( setValue(uuidv4())} /> diff --git a/packages/ui/src/views/Edit/Auth/index.scss b/packages/ui/src/views/Edit/Auth/index.scss index 0dd1b83caa..af3cbc3a14 100644 --- a/packages/ui/src/views/Edit/Auth/index.scss +++ b/packages/ui/src/views/Edit/Auth/index.scss @@ -37,13 +37,39 @@ gap: calc(var(--base) / 2); } } - } - .field-type.api-key { - margin-bottom: var(--base); + .field-type.api-key { + margin-bottom: var(--base); - input { - @include formInput; + input { + @include formInput; + width: 100%; + border-top-right-radius: 0; + border-bottom-right-radius: 0; + } + } + + .api-key { + &__input-wrap { + display: flex; + align-items: center; + } + + &__toggle-button-wrap { + display: flex; + align-self: stretch; + } + + &__toggle-button { + @include formInput; + background: var(--theme-elevation-100); + border-top-left-radius: 0; + border-bottom-left-radius: 0; + margin: 0 0 0 -1px; + padding: 0 calc(var(--base) / 2); + box-shadow: none; + --btn-icon-size: var(--base); + } } } From 576644d0b5ecf6076af95ac8856c07c1a4fd359a Mon Sep 17 00:00:00 2001 From: German Jablonski <43938777+GermanJablo@users.noreply.github.com> Date: Fri, 11 Jul 2025 18:02:06 +0100 Subject: [PATCH 020/143] docs(richtext-lexical): add documentation page about official features (#13132) It was evident from the number of users asking questions about how to use the features that a dedicated page was needed. Preview: https://payloadcms.com/docs/dynamic/rich-text/official-features?branch=features-docs --- docs/rich-text/official-features.mdx | 485 +++++++++++++++++++++++++++ docs/rich-text/overview.mdx | 34 +- 2 files changed, 487 insertions(+), 32 deletions(-) create mode 100644 docs/rich-text/official-features.mdx diff --git a/docs/rich-text/official-features.mdx b/docs/rich-text/official-features.mdx new file mode 100644 index 0000000000..03de1743a7 --- /dev/null +++ b/docs/rich-text/official-features.mdx @@ -0,0 +1,485 @@ +--- +description: Features officially maintained by Payload. +keywords: lexical, rich text, editor, headless cms, official, features +label: Official Features +order: 35 +title: Official Features +--- + +Below are all the Rich Text Features Payload offers. Everything is customizable; you can [create your own features](/docs/rich-text/custom-features), modify ours and share them with the community. + +## Features Overview + +| Feature Name | Included by default | Description | +| ------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **`BoldFeature`** | Yes | Adds support for bold text formatting. | +| **`ItalicFeature`** | Yes | Adds support for italic text formatting. | +| **`UnderlineFeature`** | Yes | Adds support for underlined text formatting. | +| **`StrikethroughFeature`** | Yes | Adds support for strikethrough text formatting. | +| **`SubscriptFeature`** | Yes | Adds support for subscript text formatting. | +| **`SuperscriptFeature`** | Yes | Adds support for superscript text formatting. | +| **`InlineCodeFeature`** | Yes | Adds support for inline code formatting. | +| **`ParagraphFeature`** | Yes | Provides entries in both the slash menu and toolbar dropdown for explicit paragraph creation or conversion. 
| +| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) | +| **`AlignFeature`** | Yes | Adds support for text alignment (left, center, right, justify) | +| **`IndentFeature`** | Yes | Adds support for text indentation with toolbar buttons | +| **`UnorderedListFeature`** | Yes | Adds support for unordered lists (ul) | +| **`OrderedListFeature`** | Yes | Adds support for ordered lists (ol) | +| **`ChecklistFeature`** | Yes | Adds support for interactive checklists | +| **`LinkFeature`** | Yes | Allows you to create internal and external links | +| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents | +| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes | +| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images | +| **`HorizontalRuleFeature`** | Yes | Adds support for horizontal rules / separators. Basically displays an `
` element | +| **`InlineToolbarFeature`** | Yes | Provides a floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text | +| **`FixedToolbarFeature`** | No | Provides a persistent toolbar pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. | +| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. | +| **`TreeViewFeature`** | No | Provides a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging | +| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. | +| **`TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. | + +## In depth + +### BoldFeature + +- Description: Adds support for bold text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Markdown Support: `**bold**` or `__bold__` +- Keyboard Shortcut: Ctrl/Cmd + B + +### ItalicFeature + +- Description: Adds support for italic text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Markdown Support: `*italic*` or `_italic_` +- Keyboard Shortcut: Ctrl/Cmd + I + +### UnderlineFeature + +- Description: Adds support for underlined text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Keyboard Shortcut: Ctrl/Cmd + U + +### StrikethroughFeature + +- Description: Adds support for strikethrough text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Markdown Support: `~~strikethrough~~` + +### SubscriptFeature + +- Description: Adds support for subscript text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes + +### SuperscriptFeature + +- Description: Adds support for superscript text formatting, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes + +### InlineCodeFeature + +- Description: Adds support for inline code formatting with distinct styling, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Markdown Support: \`code\` + +### ParagraphFeature + +- Description: Provides entries in both the slash menu and toolbar dropdown for explicit paragraph creation or conversion. +- Included by default: Yes + +### HeadingFeature + +- Description: Adds support for heading nodes (H1-H6) with toolbar dropdown and slash menu entries for each enabled heading size. +- Included by default: Yes +- Markdown Support: `#`, `##`, `###`, ..., at start of line. +- Types: + +```typescript +type HeadingFeatureProps = { + enabledHeadingSizes?: HeadingTagType[] // ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] +} +``` + +- Usage example: + +```typescript +HeadingFeature({ + enabledHeadingSizes: ['h1', 'h2', 'h3'], // Default: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] +}) +``` + +### AlignFeature + +- Description: Allows text alignment (left, center, right, justify), along with buttons to apply it in both fixed and inline toolbars. 
+- Included by default: Yes +- Keyboard Shortcut: Ctrl/Cmd + Shift + L/E/R/J (left/center/right/justify) + +### IndentFeature + +- Description: Adds support for text indentation, along with buttons to apply it in both fixed and inline toolbars. +- Included by default: Yes +- Keyboard Shortcut: Tab (increase), Shift + Tab (decrease) +- Types: + +```typescript +type IndentFeatureProps = { + /** + * The nodes that should not be indented. "type" property of the nodes you don't want to be indented. + * These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist. + */ + disabledNodes?: string[] + /** + * If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode. + * Instead, Tab will only be used for block-level indentation. + * @default false + */ + disableTabNode?: boolean +} +``` + +- Usage example: + +```typescript +// Allow block-level indentation only +IndentFeature({ + disableTabNode: true, +}) +``` + +### UnorderedListFeature + +- Description: Adds support for unordered lists (bullet points) with toolbar dropdown and slash menu entries. +- Included by default: Yes +- Markdown Support: `-`, `*`, or `+` at start of line + +### OrderedListFeature + +- Description: Adds support for ordered lists (numbered lists) with toolbar dropdown and slash menu entries. +- Included by default: Yes +- Markdown Support: `1.` at start of line + +### ChecklistFeature + +- Description: Adds support for interactive checklists with toolbar dropdown and slash menu entries. +- Included by default: Yes +- Markdown Support: `- [ ]` (unchecked) or `- [x]` (checked) + +### LinkFeature + +- Description: Allows creation of internal and external links with toolbar buttons and automatic URL conversion. +- Included by default: Yes +- Markdown Support: `[anchor](url)` +- Types: + +```typescript +type LinkFeatureServerProps = { + /** + * Disables the automatic creation of links + * from URLs typed or pasted into the editor, + * @default false + */ + disableAutoLinks?: 'creationOnly' | true + /** + * A function or array defining additional fields for the link feature. + * These will be displayed in the link editor drawer. + */ + fields?: + | ((args: { + config: SanitizedConfig + defaultFields: FieldAffectingData[] + }) => (Field | FieldAffectingData)[]) + | Field[] + /** + * Sets a maximum population depth for the internal + * doc default field of link, regardless of the + * remaining depth when the field is reached. + */ + maxDepth?: number +} & ExclusiveLinkCollectionsProps + +type ExclusiveLinkCollectionsProps = + | { + disabledCollections?: CollectionSlug[] + enabledCollections?: never + } + | { + disabledCollections?: never + enabledCollections?: CollectionSlug[] + } +``` + +- Usage example: + +```typescript +LinkFeature({ + fields: ({ defaultFields }) => [ + ...defaultFields, + { + name: 'rel', + type: 'select', + options: ['noopener', 'noreferrer', 'nofollow'], + }, + ], + enabledCollections: ['pages', 'posts'], // Collections for internal links + maxDepth: 2, // Population depth for internal links + disableAutoLinks: false, // Allow auto-conversion of URLs +}) +``` + +### RelationshipFeature + +- Description: Allows creation of block-level relationships to other documents with toolbar button and slash menu entry. 
+- Included by default: Yes +- Types: + +```typescript +type RelationshipFeatureProps = { + /** + * Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached. + */ + maxDepth?: number +} & ExclusiveRelationshipFeatureProps + +type ExclusiveRelationshipFeatureProps = + | { + disabledCollections?: CollectionSlug[] + enabledCollections?: never + } + | { + disabledCollections?: never + enabledCollections?: CollectionSlug[] + } +``` + +- Usage example: + +```typescript +RelationshipFeature({ + disabledCollections: ['users'], // Collections to exclude + maxDepth: 2, // Population depth for relationships +}) +``` + +### UploadFeature + +- Description: Allows creation of upload/media nodes with toolbar button and slash menu entry, supports all file types. +- Included by default: Yes +- Types: + +```typescript +type UploadFeatureProps = { + collections?: { + [collection: CollectionSlug]: { + fields: Field[] + } + } + /** + * Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached. + */ + maxDepth?: number +} +``` + +- Usage example: + +```typescript +UploadFeature({ + collections: { + uploads: { + fields: [ + { + name: 'caption', + type: 'text', + label: 'Caption', + }, + { + name: 'alt', + type: 'text', + label: 'Alt Text', + }, + ], + }, + }, + maxDepth: 1, // Population depth for uploads +}) +``` + +### BlockquoteFeature + +- Description: Allows creation of blockquotes with toolbar button and slash menu entry. +- Included by default: Yes +- Markdown Support: `> quote text` + +### HorizontalRuleFeature + +- Description: Adds support for horizontal rules/separators with toolbar button and slash menu entry. +- Included by default: Yes +- Markdown Support: `---` + +### InlineToolbarFeature + +- Description: Provides a floating toolbar that appears when text is selected, containing formatting options relevant to selected text. +- Included by default: Yes + +### FixedToolbarFeature + +- Description: Provides a persistent toolbar pinned to the top of the editor that's always visible. +- Included by default: No +- Types: + +```typescript +type FixedToolbarFeatureProps = { + /** + * @default false + * If this is enabled, the toolbar will apply + * to the focused editor, not the editor with + * the FixedToolbarFeature. + */ + applyToFocusedEditor?: boolean + /** + * Custom configurations for toolbar groups + * Key is the group key (e.g. 'format', 'indent', 'align') + * Value is a partial ToolbarGroup object that will + * be merged with the default configuration + */ + customGroups?: CustomGroups + /** + * @default false + * If there is a parent editor with a fixed toolbar, + * this will disable the toolbar for this editor. + */ + disableIfParentHasFixedToolbar?: boolean +} +``` + +- Usage example: + +```typescript +FixedToolbarFeature({ + applyToFocusedEditor: false, // Apply to focused editor + customGroups: { + format: { + // Custom configuration for format group + }, + }, +}) +``` + +### BlocksFeature + +- Description: Allows use of Payload's Blocks Field directly in the editor with toolbar buttons and slash menu entries for each block type. 
+- Included by default: No +- Types: + +```typescript +type BlocksFeatureProps = { + blocks?: (Block | BlockSlug)[] | Block[] + inlineBlocks?: (Block | BlockSlug)[] | Block[] +} +``` + +- Usage example: + +```typescript +BlocksFeature({ + blocks: [ + { + slug: 'callout', + fields: [ + { + name: 'text', + type: 'text', + required: true, + }, + ], + }, + ], + inlineBlocks: [ + { + slug: 'mention', + fields: [ + { + name: 'name', + type: 'text', + required: true, + }, + ], + }, + ], +}) +``` + +### TreeViewFeature + +- Description: Provides a debug panel below the editor showing the editor's internal state, DOM tree, and time travel debugging. +- Included by default: No + +### EXPERIMENTAL_TableFeature + +- Description: Adds support for tables with toolbar button and slash menu entry for creation and editing. +- Included by default: No + +### TextStateFeature + +- Description: Allows storing key-value attributes in text nodes with inline styles and toolbar dropdown for style selection. +- Included by default: No +- Types: + +```typescript +type TextStateFeatureProps = { + /** + * The keys of the top-level object (stateKeys) represent the attributes that the textNode can have (e.g., color). + * The values of the top-level object (stateValues) represent the values that the attribute can have (e.g., red, blue, etc.). + * Within the stateValue, you can define inline styles and labels. + */ + state: { [stateKey: string]: StateValues } +} + +type StateValues = { + [stateValue: string]: { + css: StyleObject + label: string + } +} + +type StyleObject = { + [K in keyof PropertiesHyphenFallback]?: + | Extract + | undefined +} +``` + +- Usage example: + +```typescript +// We offer default colors that have good contrast and look good in dark and light mode. +import { defaultColors, TextStateFeature } from '@payloadcms/richtext-lexical' + +TextStateFeature({ + // prettier-ignore + state: { + color: { + ...defaultColors, + // fancy gradients! + galaxy: { label: 'Galaxy', css: { background: 'linear-gradient(to right, #0000ff, #ff0000)', color: 'white' } }, + sunset: { label: 'Sunset', css: { background: 'linear-gradient(to top, #ff5f6d, #6a3093)' } }, + }, + // You can have both colored and underlined text at the same time. + // If you don't want that, you should group them within the same key. + // (just like I did with defaultColors and my fancy gradients) + underline: { + 'solid': { label: 'Solid', css: { 'text-decoration': 'underline', 'text-underline-offset': '4px' } }, + // You'll probably want to use the CSS light-dark() utility. + 'yellow-dashed': { label: 'Yellow Dashed', css: { 'text-decoration': 'underline dashed', 'text-decoration-color': 'light-dark(#EAB308,yellow)', 'text-underline-offset': '4px' } }, + }, + }, +}), +``` + +This is what the example above will look like: + + diff --git a/docs/rich-text/overview.mdx b/docs/rich-text/overview.mdx index fda0cb0007..cd6a55d8a7 100644 --- a/docs/rich-text/overview.mdx +++ b/docs/rich-text/overview.mdx @@ -138,39 +138,9 @@ import { CallToAction } from '../blocks/CallToAction' | **`defaultFeatures`** | This opinionated array contains all "recommended" default features. You can see which features are included in the default features in the table below. | | **`rootFeatures`** | This array contains all features that are enabled in the root richText editor (the one defined in the payload.config.ts). If this field is the root richText editor, or if the root richText editor is not a lexical editor, this array will be empty. 
| -## Features overview +## Official Features -Here's an overview of all the included features: - -| Feature Name | Included by default | Description | -| ----------------------------------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`BoldFeature`** | Yes | Handles the bold text format | -| **`ItalicFeature`** | Yes | Handles the italic text format | -| **`UnderlineFeature`** | Yes | Handles the underline text format | -| **`StrikethroughFeature`** | Yes | Handles the strikethrough text format | -| **`SubscriptFeature`** | Yes | Handles the subscript text format | -| **`SuperscriptFeature`** | Yes | Handles the superscript text format | -| **`InlineCodeFeature`** | Yes | Handles the inline-code text format | -| **`ParagraphFeature`** | Yes | Handles paragraphs. Since they are already a key feature of lexical itself, this Feature mainly handles the Slash and Add-Block menu entries for paragraphs | -| **`HeadingFeature`** | Yes | Adds Heading Nodes (by default, H1 - H6, but that can be customized) | -| **`AlignFeature`** | Yes | Allows you to align text left, centered and right | -| **`IndentFeature`** | Yes | Allows you to indent text with the tab key | -| **`UnorderedListFeature`** | Yes | Adds unordered lists (ul) | -| **`OrderedListFeature`** | Yes | Adds ordered lists (ol) | -| **`ChecklistFeature`** | Yes | Adds checklists | -| **`LinkFeature`** | Yes | Allows you to create internal and external links | -| **`RelationshipFeature`** | Yes | Allows you to create block-level (not inline) relationships to other documents | -| **`BlockquoteFeature`** | Yes | Allows you to create block-level quotes | -| **`UploadFeature`** | Yes | Allows you to create block-level upload nodes - this supports all kinds of uploads, not just images | -| **`HorizontalRuleFeature`** | Yes | Horizontal rules / separators. Basically displays an `
` element | -| **`InlineToolbarFeature`** | Yes | The inline toolbar is the floating toolbar which appears when you select text. This toolbar only contains actions relevant for selected text | -| **`FixedToolbarFeature`** | No | This classic toolbar is pinned to the top and always visible. Both inline and fixed toolbars can be enabled at the same time. | -| **`BlocksFeature`** | No | Allows you to use Payload's [Blocks Field](../fields/blocks) directly inside your editor. In the feature props, you can specify the allowed blocks - just like in the Blocks field. | -| **`TreeViewFeature`** | No | Adds a debug box under the editor, which allows you to see the current editor state live, the dom, as well as time travel. Very useful for debugging | -| **`EXPERIMENTAL_TableFeature`** | No | Adds support for tables. This feature may be removed or receive breaking changes in the future - even within a stable lexical release, without needing a major release. | -| **`EXPERIMENTAL_TextStateFeature`** | No | Allows you to store key-value attributes within TextNodes and assign them inline styles. | - -Notice how even the toolbars are features? That's how extensible our lexical editor is - you could theoretically create your own toolbar if you wanted to! +You can find more information about the official features in our [official features docs](../rich-text/official-features). ## Creating your own, custom Feature From 2cafe494cc882ab10dac885055a8afec6aea3b66 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Fri, 11 Jul 2025 14:07:51 -0400 Subject: [PATCH 021/143] fix(ui): disabled and styles add row button correctly (#13140) Disables add row button using disabled prop from useField - i.e. when the form is processing or initializing. This fixes a flaky array test that clicks the button before the form has finished initializing/processing. Also corrects the add row button color styles with specificity. --- .../ui/src/elements/ClipboardAction/index.tsx | 19 ++++++++++++------ packages/ui/src/fields/Array/index.scss | 20 ++++++++++++++++--- packages/ui/src/fields/Array/index.tsx | 8 +++++--- packages/ui/src/fields/Blocks/index.tsx | 5 +++-- packages/ui/src/forms/useField/index.tsx | 2 -- 5 files changed, 38 insertions(+), 16 deletions(-) diff --git a/packages/ui/src/elements/ClipboardAction/index.tsx b/packages/ui/src/elements/ClipboardAction/index.tsx index 8a1d531d8d..0f0fff0f88 100644 --- a/packages/ui/src/elements/ClipboardAction/index.tsx +++ b/packages/ui/src/elements/ClipboardAction/index.tsx @@ -16,10 +16,11 @@ import { clipboardCopy, clipboardPaste } from './clipboardUtilities.js' const baseClass = 'clipboard-action' type Props = { + allowCopy?: boolean + allowPaste?: boolean className?: string copyClassName?: string - disableCopy?: boolean - disablePaste?: boolean + disabled?: boolean getDataToCopy: () => FormStateWithoutComponents isRow?: boolean onPaste: OnPasteFn @@ -31,10 +32,11 @@ type Props = { * @note This component doesn't use the Clipboard API, but localStorage. 
See rationale in #11513 */ export const ClipboardAction: FC = ({ + allowCopy, + allowPaste, className, copyClassName, - disableCopy, - disablePaste, + disabled, isRow, onPaste, pasteClassName, @@ -81,16 +83,21 @@ export const ClipboardAction: FC = ({ } }, [onPaste, rest, path, t]) + if (!allowPaste && !allowCopy) { + return null + } + return ( } className={classes} + disabled={disabled} horizontalAlign="center" render={({ close }) => ( { void handleCopy() close() @@ -100,7 +107,7 @@ export const ClipboardAction: FC = ({ { void handlePaste() close() diff --git a/packages/ui/src/fields/Array/index.scss b/packages/ui/src/fields/Array/index.scss index 139e0585ba..59232ac1e4 100644 --- a/packages/ui/src/fields/Array/index.scss +++ b/packages/ui/src/fields/Array/index.scss @@ -76,11 +76,25 @@ &__add-row { align-self: flex-start; - color: var(--theme-elevation-400); margin: 2px 0; + --btn-color: var(--theme-elevation-400); - &:hover { - color: var(--theme-elevation-800); + &:hover:not(:disabled) { + --btn-color: var(--theme-elevation-800); + } + + &:disabled { + --btn-color: var(--theme-elevation-300); + } + + .btn__label { + color: var(--btn-color); + } + .btn__icon { + border-color: var(--btn-color); + path { + stroke: var(--btn-color); + } } } } diff --git a/packages/ui/src/fields/Array/index.tsx b/packages/ui/src/fields/Array/index.tsx index fe6cd8cfae..79e871d6f1 100644 --- a/packages/ui/src/fields/Array/index.tsx +++ b/packages/ui/src/fields/Array/index.tsx @@ -30,7 +30,6 @@ import { useForm, useFormSubmitted } from '../../forms/Form/context.js' import { extractRowsAndCollapsedIDs, toggleAllRows } from '../../forms/Form/rowHelpers.js' import { NullifyLocaleField } from '../../forms/NullifyField/index.js' import { useField } from '../../forms/useField/index.js' -import './index.scss' import { withCondition } from '../../forms/withCondition/index.js' import { useConfig } from '../../providers/Config/index.js' import { useDocumentInfo } from '../../providers/DocumentInfo/index.js' @@ -39,6 +38,7 @@ import { useTranslation } from '../../providers/Translation/index.js' import { scrollToID } from '../../utilities/scrollToID.js' import { fieldBaseClass } from '../shared/index.js' import { ArrayRow } from './ArrayRow.js' +import './index.scss' const baseClass = 'array-field' @@ -363,9 +363,10 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { )}
  • 0} + allowPaste={!readOnly} className={`${baseClass}__header-action`} - disableCopy={!(rows?.length > 0)} - disablePaste={readOnly} + disabled={disabled} fields={fields} getDataToCopy={getDataToCopy} onPaste={pasteField} @@ -459,6 +460,7 @@ export const ArrayFieldComponent: ArrayFieldClientComponent = (props) => { + {!disableSave && } + {!disableDownload && ( + + )} ) } diff --git a/packages/plugin-import-export/src/getExportCollection.ts b/packages/plugin-import-export/src/getExportCollection.ts index 070a86902a..8f184750ae 100644 --- a/packages/plugin-import-export/src/getExportCollection.ts +++ b/packages/plugin-import-export/src/getExportCollection.ts @@ -34,6 +34,10 @@ export const getExportCollection = ({ SaveButton: '@payloadcms/plugin-import-export/rsc#ExportSaveButton', }, }, + custom: { + disableDownload: pluginConfig.disableDownload ?? false, + disableSave: pluginConfig.disableSave ?? false, + }, group: false, useAsTitle: 'name', }, diff --git a/packages/plugin-import-export/src/types.ts b/packages/plugin-import-export/src/types.ts index f0d9a5cb77..80955a0b7b 100644 --- a/packages/plugin-import-export/src/types.ts +++ b/packages/plugin-import-export/src/types.ts @@ -15,10 +15,20 @@ export type ImportExportPluginConfig = { * If true, enables debug logging */ debug?: boolean + /** + * If true, disables the download button in the export preview UI + * @default false + */ + disableDownload?: boolean /** * Enable to force the export to run synchronously */ disableJobsQueue?: boolean + /** + * If true, disables the save button in the export preview UI + * @default false + */ + disableSave?: boolean /** * This function takes the default export collection configured in the plugin and allows you to override it by modifying and returning it * @param collection From 4c69f8e2051a3083ec92b4cbdd54b7c5c45d622f Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Mon, 14 Jul 2025 12:30:23 -0700 Subject: [PATCH 029/143] docs: add section on browser environment variables when using experimental-build-mode (#13164) Just spent an entire hour trying to figure out why my environment variables are `undefined` on the client. Turns out, when running `pnpm next build --experimental-build-mode compile`, it skips the environment variable inlining step. This adds a new section to the docs mentioning that you can use `pnpm next build --experimental-build-mode generate-env` to manually inline them. --- docs/production/building-without-a-db-connection.mdx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/production/building-without-a-db-connection.mdx b/docs/production/building-without-a-db-connection.mdx index fe5ae02994..7d7ce68014 100644 --- a/docs/production/building-without-a-db-connection.mdx +++ b/docs/production/building-without-a-db-connection.mdx @@ -14,7 +14,9 @@ Solutions: ## Using the experimental-build-mode Next.js build flag -You can run Next.js build using the `pnpx next build --experimental-build-mode compile` command to only compile the code without static generation, which does not require a DB connection. In that case, your pages will be rendered dynamically, but after that, you can still generate static pages using the `pnpx next build --experimental-build-mode generate` command when you have a DB connection. +You can run Next.js build using the `pnpm next build --experimental-build-mode compile` command to only compile the code without static generation, which does not require a DB connection. 
In that case, your pages will be rendered dynamically, but after that, you can still generate static pages using the `pnpm next build --experimental-build-mode generate` command when you have a DB connection. + +When running `pnpm next build --experimental-build-mode compile`, environment variables prefixed with `NEXT_PUBLIC` will not be inlined and will be `undefined` on the client. To make these variables available, either run `pnpm next build --experimental-build-mode generate` if a DB connection is available, or use `pnpm next build --experimental-build-mode generate-env` if you do not have a DB connection. [Next.js documentation](https://nextjs.org/docs/pages/api-reference/cli/next#next-build-options) From 4831bae6b5ce068ce5b799d4c6a5d700d35890b2 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Mon, 14 Jul 2025 12:39:56 -0700 Subject: [PATCH 030/143] docs: fix invalid syntax failing the docs import (#13165) ts is recognized, typescript is not --- docs/rich-text/official-features.mdx | 32 ++++++++++++++-------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/rich-text/official-features.mdx b/docs/rich-text/official-features.mdx index 03de1743a7..cdf1efcb18 100644 --- a/docs/rich-text/official-features.mdx +++ b/docs/rich-text/official-features.mdx @@ -94,7 +94,7 @@ Below are all the Rich Text Features Payload offers. Everything is customizable; - Markdown Support: `#`, `##`, `###`, ..., at start of line. - Types: -```typescript +```ts type HeadingFeatureProps = { enabledHeadingSizes?: HeadingTagType[] // ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] } @@ -102,7 +102,7 @@ type HeadingFeatureProps = { - Usage example: -```typescript +```ts HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3'], // Default: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] }) @@ -121,7 +121,7 @@ HeadingFeature({ - Keyboard Shortcut: Tab (increase), Shift + Tab (decrease) - Types: -```typescript +```ts type IndentFeatureProps = { /** * The nodes that should not be indented. "type" property of the nodes you don't want to be indented. @@ -139,7 +139,7 @@ type IndentFeatureProps = { - Usage example: -```typescript +```ts // Allow block-level indentation only IndentFeature({ disableTabNode: true, @@ -171,7 +171,7 @@ IndentFeature({ - Markdown Support: `[anchor](url)` - Types: -```typescript +```ts type LinkFeatureServerProps = { /** * Disables the automatic creation of links @@ -210,7 +210,7 @@ type ExclusiveLinkCollectionsProps = - Usage example: -```typescript +```ts LinkFeature({ fields: ({ defaultFields }) => [ ...defaultFields, @@ -232,7 +232,7 @@ LinkFeature({ - Included by default: Yes - Types: -```typescript +```ts type RelationshipFeatureProps = { /** * Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached. 
@@ -253,7 +253,7 @@ type ExclusiveRelationshipFeatureProps = - Usage example: -```typescript +```ts RelationshipFeature({ disabledCollections: ['users'], // Collections to exclude maxDepth: 2, // Population depth for relationships @@ -266,7 +266,7 @@ RelationshipFeature({ - Included by default: Yes - Types: -```typescript +```ts type UploadFeatureProps = { collections?: { [collection: CollectionSlug]: { @@ -282,7 +282,7 @@ type UploadFeatureProps = { - Usage example: -```typescript +```ts UploadFeature({ collections: { uploads: { @@ -327,7 +327,7 @@ UploadFeature({ - Included by default: No - Types: -```typescript +```ts type FixedToolbarFeatureProps = { /** * @default false @@ -354,7 +354,7 @@ type FixedToolbarFeatureProps = { - Usage example: -```typescript +```ts FixedToolbarFeature({ applyToFocusedEditor: false, // Apply to focused editor customGroups: { @@ -371,7 +371,7 @@ FixedToolbarFeature({ - Included by default: No - Types: -```typescript +```ts type BlocksFeatureProps = { blocks?: (Block | BlockSlug)[] | Block[] inlineBlocks?: (Block | BlockSlug)[] | Block[] @@ -380,7 +380,7 @@ type BlocksFeatureProps = { - Usage example: -```typescript +```ts BlocksFeature({ blocks: [ { @@ -425,7 +425,7 @@ BlocksFeature({ - Included by default: No - Types: -```typescript +```ts type TextStateFeatureProps = { /** * The keys of the top-level object (stateKeys) represent the attributes that the textNode can have (e.g., color). @@ -451,7 +451,7 @@ type StyleObject = { - Usage example: -```typescript +```ts // We offer default colors that have good contrast and look good in dark and light mode. import { defaultColors, TextStateFeature } from '@payloadcms/richtext-lexical' From 7294cf561d2a79199c02a36856c2151bb0e2ba5f Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:19:52 -0400 Subject: [PATCH 031/143] feat(plugin-import-export): adds support for forcing export format via plugin config (#13160) ### What? Adds a new `format` option to the `plugin-import-export` config that allows users to force the export format (`csv` or `json`) and hide the format dropdown from the export UI. ### Why? In some use cases, allowing the user to select between CSV and JSON is unnecessary or undesirable. This new option allows plugin consumers to lock the format and simplify the export interface. ### How? - Added a `format?: 'csv' | 'json'` field to `ImportExportPluginConfig`. - When defined, the `format` field in the export UI is: - Hidden via `admin.condition` - Pre-filled via `defaultValue` - Updated `getFields` to accept the plugin config and apply logic accordingly. 
### Example ```ts importExportPlugin({ format: 'json', }) --- .../src/export/getCreateExportCollectionTask.ts | 4 +++- packages/plugin-import-export/src/export/getFields.ts | 11 +++++++++-- .../plugin-import-export/src/getExportCollection.ts | 2 +- packages/plugin-import-export/src/index.ts | 2 +- packages/plugin-import-export/src/types.ts | 10 ++++++++++ test/plugin-import-export/payload-types.ts | 6 +++--- 6 files changed, 27 insertions(+), 8 deletions(-) diff --git a/packages/plugin-import-export/src/export/getCreateExportCollectionTask.ts b/packages/plugin-import-export/src/export/getCreateExportCollectionTask.ts index 21fadf3450..893caf8cd1 100644 --- a/packages/plugin-import-export/src/export/getCreateExportCollectionTask.ts +++ b/packages/plugin-import-export/src/export/getCreateExportCollectionTask.ts @@ -1,5 +1,6 @@ import type { Config, TaskConfig, TypedUser } from 'payload' +import type { ImportExportPluginConfig } from '../types.js' import type { CreateExportArgs, Export } from './createExport.js' import { createExport } from './createExport.js' @@ -7,11 +8,12 @@ import { getFields } from './getFields.js' export const getCreateCollectionExportTask = ( config: Config, + pluginConfig?: ImportExportPluginConfig, ): TaskConfig<{ input: Export output: object }> => { - const inputSchema = getFields(config).concat( + const inputSchema = getFields(config, pluginConfig).concat( { name: 'user', type: 'text', diff --git a/packages/plugin-import-export/src/export/getFields.ts b/packages/plugin-import-export/src/export/getFields.ts index c76e7de9cb..f095c5b798 100644 --- a/packages/plugin-import-export/src/export/getFields.ts +++ b/packages/plugin-import-export/src/export/getFields.ts @@ -1,8 +1,10 @@ import type { Config, Field, SelectField } from 'payload' +import type { ImportExportPluginConfig } from '../types.js' + import { getFilename } from './getFilename.js' -export const getFields = (config: Config): Field[] => { +export const getFields = (config: Config, pluginConfig?: ImportExportPluginConfig): Field[] => { let localeField: SelectField | undefined if (config.localization) { localeField = { @@ -45,9 +47,14 @@ export const getFields = (config: Config): Field[] => { name: 'format', type: 'select', admin: { + // Hide if a forced format is set via plugin config + condition: () => !pluginConfig?.format, width: '33%', }, - defaultValue: 'csv', + defaultValue: (() => { + // Default to plugin-defined format, otherwise 'csv' + return pluginConfig?.format ?? 
'csv' + })(), // @ts-expect-error - this is not correctly typed in plugins right now label: ({ t }) => t('plugin-import-export:field-format-label'), options: [ diff --git a/packages/plugin-import-export/src/getExportCollection.ts b/packages/plugin-import-export/src/getExportCollection.ts index 8f184750ae..c25b64824b 100644 --- a/packages/plugin-import-export/src/getExportCollection.ts +++ b/packages/plugin-import-export/src/getExportCollection.ts @@ -51,7 +51,7 @@ export const getExportCollection = ({ path: '/download', }, ], - fields: getFields(config), + fields: getFields(config, pluginConfig), hooks: { afterChange, beforeOperation, diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index de79864bd9..b17f7402ad 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -32,7 +32,7 @@ export const importExportPlugin = ) // inject the createExport job into the config - ;((config.jobs ??= {}).tasks ??= []).push(getCreateCollectionExportTask(config)) + ;((config.jobs ??= {}).tasks ??= []).push(getCreateCollectionExportTask(config, pluginConfig)) let collectionsToUpdate = config.collections diff --git a/packages/plugin-import-export/src/types.ts b/packages/plugin-import-export/src/types.ts index 80955a0b7b..9b48e2ba12 100644 --- a/packages/plugin-import-export/src/types.ts +++ b/packages/plugin-import-export/src/types.ts @@ -29,6 +29,16 @@ export type ImportExportPluginConfig = { * @default false */ disableSave?: boolean + /** + * Forces a specific export format (`csv` or `json`) and hides the format dropdown from the UI. + * + * When defined, this overrides the user's ability to choose a format manually. The export will + * always use the specified format, and the format selection field will be hidden. + * + * If not set, the user can choose between CSV and JSON in the export UI. 
+ * @default undefined + */ + format?: 'csv' | 'json' /** * This function takes the default export collection configured in the plugin and allows you to override it by modifying and returning it * @param collection diff --git a/test/plugin-import-export/payload-types.ts b/test/plugin-import-export/payload-types.ts index 1f377e7fa6..83ef2a640d 100644 --- a/test/plugin-import-export/payload-types.ts +++ b/test/plugin-import-export/payload-types.ts @@ -264,7 +264,7 @@ export interface Post { export interface Export { id: string; name?: string | null; - format: 'csv' | 'json'; + format?: ('csv' | 'json') | null; limit?: number | null; sort?: string | null; locale?: ('all' | 'en' | 'es' | 'de') | null; @@ -300,7 +300,7 @@ export interface Export { export interface ExportsTask { id: string; name?: string | null; - format: 'csv' | 'json'; + format?: ('csv' | 'json') | null; limit?: number | null; sort?: string | null; locale?: ('all' | 'en' | 'es' | 'de') | null; @@ -717,7 +717,7 @@ export interface PayloadMigrationsSelect { export interface TaskCreateCollectionExport { input: { name?: string | null; - format: 'csv' | 'json'; + format?: ('csv' | 'json') | null; limit?: number | null; sort?: string | null; locale?: ('all' | 'en' | 'es' | 'de') | null; From f4d951dd04d889e271f1cdc245105d7b67705c5c Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Mon, 14 Jul 2025 16:42:11 -0400 Subject: [PATCH 032/143] templates: bump for v3.47.0 (#13161) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🤖 Automated bump of templates for v3.47.0 Triggered by user: @AlessioGr Co-authored-by: github-actions[bot] --- templates/with-postgres/package.json | 12 ++++---- ...tial.json => 20250714_175444_initial.json} | 2 +- ..._initial.ts => 20250714_175444_initial.ts} | 0 .../with-postgres/src/migrations/index.ts | 8 ++--- templates/with-vercel-mongodb/package.json | 20 ++++++------- templates/with-vercel-postgres/package.json | 24 +++++++-------- ...tial.json => 20250714_175405_initial.json} | 2 +- ..._initial.ts => 20250714_175405_initial.ts} | 0 .../src/migrations/index.ts | 8 ++--- templates/with-vercel-website/package.json | 30 +++++++++---------- ...tial.json => 20250714_175425_initial.json} | 4 +-- ..._initial.ts => 20250714_175425_initial.ts} | 2 +- .../src/migrations/index.ts | 8 ++--- 13 files changed, 60 insertions(+), 60 deletions(-) rename templates/with-postgres/src/migrations/{20250629_202651_initial.json => 20250714_175444_initial.json} (99%) rename templates/with-postgres/src/migrations/{20250629_202651_initial.ts => 20250714_175444_initial.ts} (100%) rename templates/with-vercel-postgres/src/migrations/{20250629_202637_initial.json => 20250714_175405_initial.json} (99%) rename templates/with-vercel-postgres/src/migrations/{20250629_202637_initial.ts => 20250714_175405_initial.ts} (100%) rename templates/with-vercel-website/src/migrations/{20250629_202644_initial.json => 20250714_175425_initial.json} (99%) rename templates/with-vercel-website/src/migrations/{20250629_202644_initial.ts => 20250714_175425_initial.ts} (99%) diff --git a/templates/with-postgres/package.json b/templates/with-postgres/package.json index bfb553fe76..4dd82982b0 100644 --- a/templates/with-postgres/package.json +++ b/templates/with-postgres/package.json @@ -19,15 +19,15 @@ "test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts" }, "dependencies": { - "@payloadcms/db-postgres": "3.44.0", - "@payloadcms/next": "3.44.0", - "@payloadcms/payload-cloud": 
"3.44.0", - "@payloadcms/richtext-lexical": "3.44.0", - "@payloadcms/ui": "3.44.0", + "@payloadcms/db-postgres": "3.47.0", + "@payloadcms/next": "3.47.0", + "@payloadcms/payload-cloud": "3.47.0", + "@payloadcms/richtext-lexical": "3.47.0", + "@payloadcms/ui": "3.47.0", "cross-env": "^7.0.3", "graphql": "^16.8.1", "next": "15.3.0", - "payload": "3.44.0", + "payload": "3.47.0", "react": "19.1.0", "react-dom": "19.1.0", "sharp": "0.34.2" diff --git a/templates/with-postgres/src/migrations/20250629_202651_initial.json b/templates/with-postgres/src/migrations/20250714_175444_initial.json similarity index 99% rename from templates/with-postgres/src/migrations/20250629_202651_initial.json rename to templates/with-postgres/src/migrations/20250714_175444_initial.json index b350cc63a0..1a334812e3 100644 --- a/templates/with-postgres/src/migrations/20250629_202651_initial.json +++ b/templates/with-postgres/src/migrations/20250714_175444_initial.json @@ -1,5 +1,5 @@ { - "id": "77ca1be4-16d8-4f65-bad8-0b86d2692050", + "id": "99e77143-a464-4a6a-b0ed-d11158d1a942", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", diff --git a/templates/with-postgres/src/migrations/20250629_202651_initial.ts b/templates/with-postgres/src/migrations/20250714_175444_initial.ts similarity index 100% rename from templates/with-postgres/src/migrations/20250629_202651_initial.ts rename to templates/with-postgres/src/migrations/20250714_175444_initial.ts diff --git a/templates/with-postgres/src/migrations/index.ts b/templates/with-postgres/src/migrations/index.ts index df35ff0f6d..d552ee42cf 100644 --- a/templates/with-postgres/src/migrations/index.ts +++ b/templates/with-postgres/src/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250629_202651_initial from './20250629_202651_initial' +import * as migration_20250714_175444_initial from './20250714_175444_initial' export const migrations = [ { - up: migration_20250629_202651_initial.up, - down: migration_20250629_202651_initial.down, - name: '20250629_202651_initial', + up: migration_20250714_175444_initial.up, + down: migration_20250714_175444_initial.down, + name: '20250714_175444_initial', }, ] diff --git a/templates/with-vercel-mongodb/package.json b/templates/with-vercel-mongodb/package.json index eea86a292f..0a1b3c6ee0 100644 --- a/templates/with-vercel-mongodb/package.json +++ b/templates/with-vercel-mongodb/package.json @@ -18,18 +18,18 @@ "test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts" }, "dependencies": { - "@payloadcms/db-mongodb": "3.44.0", - "@payloadcms/next": "3.44.0", - "@payloadcms/payload-cloud": "3.44.0", - "@payloadcms/richtext-lexical": "3.44.0", - "@payloadcms/ui": "3.44.0", + "@payloadcms/db-mongodb": "3.47.0", + "@payloadcms/next": "3.47.0", + "@payloadcms/payload-cloud": "3.47.0", + "@payloadcms/richtext-lexical": "3.47.0", + "@payloadcms/storage-vercel-blob": "3.47.0", + "@payloadcms/ui": "3.47.0", "cross-env": "^7.0.3", "graphql": "^16.8.1", "next": "15.3.0", - "payload": "3.44.0", + "payload": "3.47.0", "react": "19.1.0", - "react-dom": "19.1.0", - "@payloadcms/storage-vercel-blob": "3.44.0" + "react-dom": "19.1.0" }, "devDependencies": { "@eslint/eslintrc": "^3.2.0", @@ -49,6 +49,7 @@ "vite-tsconfig-paths": "5.1.4", "vitest": "3.2.3" }, + "packageManager": "pnpm@10.13.1", "engines": { "node": "^18.20.2 || >=20.9.0" }, @@ -58,6 +59,5 @@ "esbuild", "unrs-resolver" ] - }, - "packageManager": "pnpm@10.12.4" + } } diff --git 
a/templates/with-vercel-postgres/package.json b/templates/with-vercel-postgres/package.json index ef61a4dd91..fa36ccdf4a 100644 --- a/templates/with-vercel-postgres/package.json +++ b/templates/with-vercel-postgres/package.json @@ -6,6 +6,7 @@ "type": "module", "scripts": { "build": "cross-env NODE_OPTIONS=--no-deprecation next build", + "ci": "payload migrate && pnpm build", "dev": "cross-env NODE_OPTIONS=--no-deprecation next dev", "devsafe": "rm -rf .next && cross-env NODE_OPTIONS=--no-deprecation next dev", "generate:importmap": "cross-env NODE_OPTIONS=--no-deprecation payload generate:importmap", @@ -15,22 +16,21 @@ "start": "cross-env NODE_OPTIONS=--no-deprecation next start", "test": "pnpm run test:int && pnpm run test:e2e", "test:e2e": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" pnpm exec playwright test", - "test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts", - "ci": "payload migrate && pnpm build" + "test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts" }, "dependencies": { - "@payloadcms/next": "3.44.0", - "@payloadcms/payload-cloud": "3.44.0", - "@payloadcms/richtext-lexical": "3.44.0", - "@payloadcms/ui": "3.44.0", + "@payloadcms/db-vercel-postgres": "3.47.0", + "@payloadcms/next": "3.47.0", + "@payloadcms/payload-cloud": "3.47.0", + "@payloadcms/richtext-lexical": "3.47.0", + "@payloadcms/storage-vercel-blob": "3.47.0", + "@payloadcms/ui": "3.47.0", "cross-env": "^7.0.3", "graphql": "^16.8.1", "next": "15.3.0", - "payload": "3.44.0", + "payload": "3.47.0", "react": "19.1.0", - "react-dom": "19.1.0", - "@payloadcms/db-vercel-postgres": "3.44.0", - "@payloadcms/storage-vercel-blob": "3.44.0" + "react-dom": "19.1.0" }, "devDependencies": { "@eslint/eslintrc": "^3.2.0", @@ -50,6 +50,7 @@ "vite-tsconfig-paths": "5.1.4", "vitest": "3.2.3" }, + "packageManager": "pnpm@10.13.1", "engines": { "node": "^18.20.2 || >=20.9.0" }, @@ -59,6 +60,5 @@ "esbuild", "unrs-resolver" ] - }, - "packageManager": "pnpm@10.12.4" + } } diff --git a/templates/with-vercel-postgres/src/migrations/20250629_202637_initial.json b/templates/with-vercel-postgres/src/migrations/20250714_175405_initial.json similarity index 99% rename from templates/with-vercel-postgres/src/migrations/20250629_202637_initial.json rename to templates/with-vercel-postgres/src/migrations/20250714_175405_initial.json index 7d705c2482..0767d022f9 100644 --- a/templates/with-vercel-postgres/src/migrations/20250629_202637_initial.json +++ b/templates/with-vercel-postgres/src/migrations/20250714_175405_initial.json @@ -1,5 +1,5 @@ { - "id": "f6e3ff1c-ee75-4b76-9e36-3e30ac7f87a1", + "id": "c530fbb1-3663-46c0-aa05-9f2e25b30f8c", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", diff --git a/templates/with-vercel-postgres/src/migrations/20250629_202637_initial.ts b/templates/with-vercel-postgres/src/migrations/20250714_175405_initial.ts similarity index 100% rename from templates/with-vercel-postgres/src/migrations/20250629_202637_initial.ts rename to templates/with-vercel-postgres/src/migrations/20250714_175405_initial.ts diff --git a/templates/with-vercel-postgres/src/migrations/index.ts b/templates/with-vercel-postgres/src/migrations/index.ts index 064143082c..1ffc16cbf1 100644 --- a/templates/with-vercel-postgres/src/migrations/index.ts +++ b/templates/with-vercel-postgres/src/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250629_202637_initial from 
'./20250629_202637_initial' +import * as migration_20250714_175405_initial from './20250714_175405_initial' export const migrations = [ { - up: migration_20250629_202637_initial.up, - down: migration_20250629_202637_initial.down, - name: '20250629_202637_initial', + up: migration_20250714_175405_initial.up, + down: migration_20250714_175405_initial.down, + name: '20250714_175405_initial', }, ] diff --git a/templates/with-vercel-website/package.json b/templates/with-vercel-website/package.json index e0b9e8c6db..344396192c 100644 --- a/templates/with-vercel-website/package.json +++ b/templates/with-vercel-website/package.json @@ -23,19 +23,19 @@ "test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts" }, "dependencies": { - "@payloadcms/admin-bar": "3.44.0", - "@payloadcms/db-vercel-postgres": "3.44.0", - "@payloadcms/live-preview-react": "3.44.0", - "@payloadcms/next": "3.44.0", - "@payloadcms/payload-cloud": "3.44.0", - "@payloadcms/plugin-form-builder": "3.44.0", - "@payloadcms/plugin-nested-docs": "3.44.0", - "@payloadcms/plugin-redirects": "3.44.0", - "@payloadcms/plugin-search": "3.44.0", - "@payloadcms/plugin-seo": "3.44.0", - "@payloadcms/richtext-lexical": "3.44.0", - "@payloadcms/storage-vercel-blob": "3.44.0", - "@payloadcms/ui": "3.44.0", + "@payloadcms/admin-bar": "3.47.0", + "@payloadcms/db-vercel-postgres": "3.47.0", + "@payloadcms/live-preview-react": "3.47.0", + "@payloadcms/next": "3.47.0", + "@payloadcms/payload-cloud": "3.47.0", + "@payloadcms/plugin-form-builder": "3.47.0", + "@payloadcms/plugin-nested-docs": "3.47.0", + "@payloadcms/plugin-redirects": "3.47.0", + "@payloadcms/plugin-search": "3.47.0", + "@payloadcms/plugin-seo": "3.47.0", + "@payloadcms/richtext-lexical": "3.47.0", + "@payloadcms/storage-vercel-blob": "3.47.0", + "@payloadcms/ui": "3.47.0", "@radix-ui/react-checkbox": "^1.0.4", "@radix-ui/react-label": "^2.0.2", "@radix-ui/react-select": "^2.0.0", @@ -49,7 +49,7 @@ "lucide-react": "^0.378.0", "next": "15.3.3", "next-sitemap": "^4.2.3", - "payload": "3.44.0", + "payload": "3.47.0", "prism-react-renderer": "^2.3.1", "react": "19.1.0", "react-dom": "19.1.0", @@ -82,7 +82,7 @@ "vite-tsconfig-paths": "5.1.4", "vitest": "3.2.3" }, - "packageManager": "pnpm@10.12.4", + "packageManager": "pnpm@10.13.1", "engines": { "node": "^18.20.2 || >=20.9.0" }, diff --git a/templates/with-vercel-website/src/migrations/20250629_202644_initial.json b/templates/with-vercel-website/src/migrations/20250714_175425_initial.json similarity index 99% rename from templates/with-vercel-website/src/migrations/20250629_202644_initial.json rename to templates/with-vercel-website/src/migrations/20250714_175425_initial.json index cb61946d11..2692aae9dc 100644 --- a/templates/with-vercel-website/src/migrations/20250629_202644_initial.json +++ b/templates/with-vercel-website/src/migrations/20250714_175425_initial.json @@ -1,5 +1,5 @@ { - "id": "a31dfac2-cdc9-41d5-97e2-2aa6f567d3f8", + "id": "194ab06f-3083-410e-b08c-aa19a8b82410", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", @@ -4907,7 +4907,7 @@ "nulls": "last" } ], - "isUnique": false, + "isUnique": true, "concurrently": false, "method": "btree", "with": {} diff --git a/templates/with-vercel-website/src/migrations/20250629_202644_initial.ts b/templates/with-vercel-website/src/migrations/20250714_175425_initial.ts similarity index 99% rename from templates/with-vercel-website/src/migrations/20250629_202644_initial.ts rename to 
templates/with-vercel-website/src/migrations/20250714_175425_initial.ts index 0aea12f95a..915e50e51d 100644 --- a/templates/with-vercel-website/src/migrations/20250629_202644_initial.ts +++ b/templates/with-vercel-website/src/migrations/20250714_175425_initial.ts @@ -998,7 +998,7 @@ export async function up({ db, payload, req }: MigrateUpArgs): Promise { CREATE INDEX "users_updated_at_idx" ON "users" USING btree ("updated_at"); CREATE INDEX "users_created_at_idx" ON "users" USING btree ("created_at"); CREATE UNIQUE INDEX "users_email_idx" ON "users" USING btree ("email"); - CREATE INDEX "redirects_from_idx" ON "redirects" USING btree ("from"); + CREATE UNIQUE INDEX "redirects_from_idx" ON "redirects" USING btree ("from"); CREATE INDEX "redirects_updated_at_idx" ON "redirects" USING btree ("updated_at"); CREATE INDEX "redirects_created_at_idx" ON "redirects" USING btree ("created_at"); CREATE INDEX "redirects_rels_order_idx" ON "redirects_rels" USING btree ("order"); diff --git a/templates/with-vercel-website/src/migrations/index.ts b/templates/with-vercel-website/src/migrations/index.ts index 2bf4a5d3af..667aace0af 100644 --- a/templates/with-vercel-website/src/migrations/index.ts +++ b/templates/with-vercel-website/src/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250629_202644_initial from './20250629_202644_initial' +import * as migration_20250714_175425_initial from './20250714_175425_initial' export const migrations = [ { - up: migration_20250629_202644_initial.up, - down: migration_20250629_202644_initial.down, - name: '20250629_202644_initial', + up: migration_20250714_175425_initial.up, + down: migration_20250714_175425_initial.down, + name: '20250714_175425_initial', }, ] From 5839cb61fa9c01b156be400c7c48d665d41ff27e Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Mon, 14 Jul 2025 17:10:36 -0400 Subject: [PATCH 033/143] feat(plugin-import-export): adds support for disabling fields (#13166) ### What? Adds support for excluding specific fields from the import-export plugin using a custom field config. ### Why? Some fields should not be included in exports or previews. This feature allows users to flag those fields directly in the field config. ### How? - Introduced a `plugin-import-export.disabled: true` custom field property. - Automatically collects and stores disabled field accessors in `collection.admin.custom['plugin-import-export'].disabledFields`. - Excludes these fields from the export field selector, preview table, and final export output (CSV/JSON). --- .../src/components/FieldsToExport/index.tsx | 9 +++++- .../FieldsToExport/reduceFields.tsx | 9 ++++++ .../src/components/Preview/index.tsx | 26 ++++++++++++++-- .../src/export/createExport.ts | 21 +++++++++++-- packages/plugin-import-export/src/index.ts | 30 ++++++++++++++++++- 5 files changed, 88 insertions(+), 7 deletions(-) diff --git a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx index 45fd172455..085346fe4c 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/index.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/index.tsx @@ -27,7 +27,14 @@ export const FieldsToExport: SelectFieldClientComponent = (props) => { const { query } = useListQuery() const collectionConfig = getEntityConfig({ collectionSlug: collectionSlug ?? 
collection }) - const fieldOptions = reduceFields({ fields: collectionConfig?.fields }) + + const disabledFields = + collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] + + const fieldOptions = reduceFields({ + disabledFields, + fields: collectionConfig?.fields, + }) useEffect(() => { if (id || !collectionSlug) { diff --git a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx index 9d71530511..4a678a0503 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx @@ -43,10 +43,12 @@ const combineLabel = ({ } export const reduceFields = ({ + disabledFields = [], fields, labelPrefix = null, path = '', }: { + disabledFields?: string[] fields: ClientField[] labelPrefix?: React.ReactNode path?: string @@ -66,6 +68,7 @@ export const reduceFields = ({ return [ ...fieldsToUse, ...reduceFields({ + disabledFields, fields: field.fields, labelPrefix: combineLabel({ field, prefix: labelPrefix }), path: createNestedClientFieldPath(path, field), @@ -83,6 +86,7 @@ export const reduceFields = ({ return [ ...tabFields, ...reduceFields({ + disabledFields, fields: tab.fields, labelPrefix, path: isNamedTab ? createNestedClientFieldPath(path, field) : path, @@ -98,6 +102,11 @@ export const reduceFields = ({ const val = createNestedClientFieldPath(path, field) + // If the field is disabled, skip it + if (disabledFields.includes(val)) { + return fieldsToUse + } + const formattedField = { id: val, label: combineLabel({ field, prefix: labelPrefix }), diff --git a/packages/plugin-import-export/src/components/Preview/index.tsx b/packages/plugin-import-export/src/components/Preview/index.tsx index af99f817ff..046b04c4a7 100644 --- a/packages/plugin-import-export/src/components/Preview/index.tsx +++ b/packages/plugin-import-export/src/components/Preview/index.tsx @@ -46,6 +46,14 @@ export const Preview = () => { (collection) => collection.slug === collectionSlug, ) + const disabledFieldsUnderscored = React.useMemo(() => { + return ( + collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields?.map((f: string) => + f.replace(/\./g, '_'), + ) ?? [] + ) + }, [collectionConfig]) + const isCSV = format === 'csv' React.useEffect(() => { @@ -95,7 +103,10 @@ export const Preview = () => { const regex = fieldToRegex(field) return allKeys.filter((key) => regex.test(key)) }) - : allKeys.filter((key) => !defaultMetaFields.includes(key)) + : allKeys.filter( + (key) => + !defaultMetaFields.includes(key) && !disabledFieldsUnderscored.includes(key), + ) const fieldKeys = Array.isArray(fields) && fields.length > 0 @@ -136,7 +147,18 @@ export const Preview = () => { } void fetchData() - }, [collectionConfig, collectionSlug, draft, fields, i18n, limit, locale, sort, where]) + }, [ + collectionConfig, + collectionSlug, + disabledFieldsUnderscored, + draft, + fields, + i18n, + limit, + locale, + sort, + where, + ]) return (
    diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 2623241209..9868e0a965 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -108,6 +108,17 @@ export const createExport = async (args: CreateExportArgs) => { fields: collectionConfig.flattenedFields, }) + const disabledFieldsDot = + collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] + const disabledFields = disabledFieldsDot.map((f: string) => f.replace(/\./g, '_')) + + const filterDisabled = (row: Record): Record => { + for (const key of disabledFields) { + delete row[key] + } + return row + } + if (download) { if (debug) { req.payload.logger.info('Pre-scanning all columns before streaming') @@ -122,7 +133,7 @@ export const createExport = async (args: CreateExportArgs) => { const result = await payload.find({ ...findArgs, page: scanPage }) result.docs.forEach((doc) => { - const flat = flattenObject({ doc, fields, toCSVFunctions }) + const flat = filterDisabled(flattenObject({ doc, fields, toCSVFunctions })) Object.keys(flat).forEach((key) => { if (!allColumnsSet.has(key)) { allColumnsSet.add(key) @@ -156,7 +167,9 @@ export const createExport = async (args: CreateExportArgs) => { return } - const batchRows = result.docs.map((doc) => flattenObject({ doc, fields, toCSVFunctions })) + const batchRows = result.docs.map((doc) => + filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), + ) const paddedRows = batchRows.map((row) => { const fullRow: Record = {} @@ -217,7 +230,9 @@ export const createExport = async (args: CreateExportArgs) => { } if (isCSV) { - const batchRows = result.docs.map((doc) => flattenObject({ doc, fields, toCSVFunctions })) + const batchRows = result.docs.map((doc) => + filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), + ) // Track discovered column keys batchRows.forEach((row) => { diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index b17f7402ad..27dd52bd83 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -1,6 +1,6 @@ import type { Config, FlattenedField } from 'payload' -import { addDataAndFileToRequest, deepMergeSimple } from 'payload' +import { addDataAndFileToRequest, deepMergeSimple, flattenTopLevelFields } from 'payload' import type { PluginDefaultTranslationsObject } from './translations/types.js' import type { ImportExportPluginConfig, ToCSVFunction } from './types.js' @@ -58,6 +58,26 @@ export const importExportPlugin = }, path: '@payloadcms/plugin-import-export/rsc#ExportListMenuItem', }) + + // Flatten top-level fields to expose nested fields for export config + const flattenedFields = flattenTopLevelFields(collection.fields, { + moveSubFieldsToTop: true, + }) + + // Find fields explicitly marked as disabled for import/export + const disabledFieldAccessors = flattenedFields + .filter((field) => field.custom?.['plugin-import-export']?.disabled) + .map((field) => field.accessor || field.name) + + // Store disabled field accessors in the admin config for use in the UI + collection.admin.custom = { + ...(collection.admin.custom || {}), + 'plugin-import-export': { + ...(collection.admin.custom?.['plugin-import-export'] || {}), + disabledFields: disabledFieldAccessors, + }, + } + collection.admin.components = components }) @@ -161,6 +181,14 @@ export const importExportPlugin = declare 
module 'payload' { export interface FieldCustom { 'plugin-import-export'?: { + /** + * When `true` the field is **completely excluded** from the import-export plugin: + * - It will not appear in the “Fields to export” selector. + * - It is hidden from the preview list when no specific fields are chosen. + * - Its data is omitted from the final CSV / JSON export. + * @default false + */ + disabled?: boolean toCSV?: ToCSVFunction } } From 277448d9c08cd138f7253929e3882e14e6d6ca71 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Mon, 14 Jul 2025 23:55:16 -0400 Subject: [PATCH 034/143] docs: performance (#13068) Payload is designed with performance in mind, but its customizability means that there are many ways to configure your app that can impact performance. While Payload provides several features and best practices to help you optimize your app's specific performance needs, these are not currently well surfaced and can be obscure. Now: - A high-level performance doc now exists at `/docs/performance` - There's a new section on performance within the `/docs/queries` doc - There's a new section on performance within the `/docs/hooks` doc - There's a new section on performance within the `/docs/custom-components` doc This PR also: - Restructures and elaborates on the `/docs/queries/pagination` docs - Adds a new `/docs/database/indexing` doc - More --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210743577153856 --- docs/admin/react-hooks.mdx | 7 +- docs/configuration/collections.mdx | 52 ++--- docs/configuration/environment-vars.mdx | 4 +- docs/configuration/overview.mdx | 9 +- docs/custom-components/overview.mdx | 48 +++++ docs/database/indexes.mdx | 65 +++++++ docs/database/mongodb.mdx | 2 +- docs/database/postgres.mdx | 2 +- docs/database/sqlite.mdx | 2 +- docs/fields/array.mdx | 6 +- docs/fields/blocks.mdx | 6 +- docs/fields/checkbox.mdx | 8 +- docs/fields/code.mdx | 8 +- docs/fields/date.mdx | 8 +- docs/fields/email.mdx | 8 +- docs/fields/group.mdx | 6 +- docs/fields/join.mdx | 2 +- docs/fields/json.mdx | 8 +- docs/fields/number.mdx | 8 +- docs/fields/overview.mdx | 13 +- docs/fields/point.mdx | 8 +- docs/fields/radio.mdx | 8 +- docs/fields/relationship.mdx | 12 +- docs/fields/rich-text.mdx | 6 +- docs/fields/select.mdx | 8 +- docs/fields/tabs.mdx | 2 +- docs/fields/text.mdx | 8 +- docs/fields/textarea.mdx | 8 +- docs/fields/ui.mdx | 4 +- docs/fields/upload.mdx | 12 +- docs/hooks/overview.mdx | 100 +++++++++- docs/performance/overview.mdx | 244 ++++++++++++++++++++++++ docs/queries/depth.mdx | 32 +++- docs/queries/overview.mdx | 129 ++++++++++++- docs/queries/pagination.mdx | 115 ++++++++++- docs/queries/select.mdx | 18 +- docs/queries/sort.mdx | 6 +- 37 files changed, 856 insertions(+), 136 deletions(-) create mode 100644 docs/database/indexes.mdx create mode 100644 docs/performance/overview.mdx diff --git a/docs/admin/react-hooks.mdx b/docs/admin/react-hooks.mdx index 0fc05b4933..5640f185ca 100644 --- a/docs/admin/react-hooks.mdx +++ b/docs/admin/react-hooks.mdx @@ -114,7 +114,12 @@ const MyComponent: React.FC = () => { ## useAllFormFields -**To retrieve more than one field**, you can use the `useAllFormFields` hook. Your component will re-render when _any_ field changes, so use this hook only if you absolutely need to. Unlike the `useFormFields` hook, this hook does not accept a "selector", and it always returns an array with type of `[fields: Fields, dispatch: React.Dispatch]]`. 
+**To retrieve more than one field**, you can use the `useAllFormFields` hook. Unlike the `useFormFields` hook, this hook does not accept a "selector", and it always returns an array with type of `[fields: Fields, dispatch: React.Dispatch]]`. + + + **Warning:** Your component will re-render when _any_ field changes, so use + this hook only if you absolutely need to. + You can do lots of powerful stuff by retrieving the full form state, like using built-in helper functions to reduce field state to values only, or to retrieve sibling data by path. diff --git a/docs/configuration/collections.mdx b/docs/configuration/collections.mdx index 66b11f06f0..f431c925f7 100644 --- a/docs/configuration/collections.mdx +++ b/docs/configuration/collections.mdx @@ -60,32 +60,32 @@ export const Posts: CollectionConfig = { The following options are available: -| Option | Description | -| -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `admin` | The configuration options for the Admin Panel. [More details](#admin-options). | -| `access` | Provide Access Control functions to define exactly who should be able to do what with Documents in this Collection. [More details](../access-control/collections). | -| `auth` | Specify options if you would like this Collection to feature authentication. [More details](../authentication/overview). | -| `custom` | Extension point for adding custom data (e.g. for plugins) | -| `disableDuplicate` | When true, do not show the "Duplicate" button while editing documents within this Collection and prevent `duplicate` from all APIs. | -| `defaultSort` | Pass a top-level field to sort by default in the Collection List View. Prefix the name of the field with a minus symbol ("-") to sort in descending order. Multiple fields can be specified by using a string array. | -| `dbName` | Custom table or Collection name depending on the Database Adapter. Auto-generated from slug if not defined. | -| `endpoints` | Add custom routes to the REST API. Set to `false` to disable routes. [More details](../rest-api/overview#custom-endpoints). | -| `fields` \* | Array of field types that will determine the structure and functionality of the data stored within this Collection. [More details](../fields/overview). | -| `graphQL` | Manage GraphQL-related properties for this collection. [More](#graphql) | -| `hooks` | Entry point for Hooks. [More details](../hooks/overview#collection-hooks). | -| `orderable` | If true, enables custom ordering for the collection, and documents can be reordered via drag and drop. Uses [fractional indexing](https://observablehq.com/@dgreensp/implementing-fractional-indexing) for efficient reordering. | -| `labels` | Singular and plural labels for use in identifying this Collection throughout Payload. Auto-generated from slug if not defined. | -| `enableQueryPresets` | Enable query presets for this Collection. [More details](../query-presets/overview). | -| `lockDocuments` | Enables or disables document locking. By default, document locking is enabled. Set to an object to configure, or set to `false` to disable locking. [More details](../admin/locked-documents). | -| `slug` \* | Unique, URL-friendly string that will act as an identifier for this Collection. | -| `timestamps` | Set to false to disable documents' automatically generated `createdAt` and `updatedAt` timestamps. 
| -| `typescript` | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. | -| `upload` | Specify options if you would like this Collection to support file uploads. For more, consult the [Uploads](../upload/overview) documentation. | -| `versions` | Set to true to enable default options, or configure with object properties. [More details](../versions/overview#collection-config). | -| `defaultPopulate` | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). | -| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. | -| `forceSelect` | Specify which fields should be selected always, regardless of the `select` query which can be useful that the field exists for access control / hooks | -| `disableBulkEdit` | Disable the bulk edit operation for the collection in the admin panel and the REST API | +| Option | Description | +| -------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `admin` | The configuration options for the Admin Panel. [More details](#admin-options). | +| `access` | Provide Access Control functions to define exactly who should be able to do what with Documents in this Collection. [More details](../access-control/collections). | +| `auth` | Specify options if you would like this Collection to feature authentication. [More details](../authentication/overview). | +| `custom` | Extension point for adding custom data (e.g. for plugins) | +| `disableDuplicate` | When true, do not show the "Duplicate" button while editing documents within this Collection and prevent `duplicate` from all APIs. | +| `defaultSort` | Pass a top-level field to sort by default in the Collection List View. Prefix the name of the field with a minus symbol ("-") to sort in descending order. Multiple fields can be specified by using a string array. | +| `dbName` | Custom table or Collection name depending on the Database Adapter. Auto-generated from slug if not defined. | +| `endpoints` | Add custom routes to the REST API. Set to `false` to disable routes. [More details](../rest-api/overview#custom-endpoints). | +| `fields` \* | Array of field types that will determine the structure and functionality of the data stored within this Collection. [More details](../fields/overview). | +| `graphQL` | Manage GraphQL-related properties for this collection. [More](#graphql) | +| `hooks` | Entry point for Hooks. [More details](../hooks/overview#collection-hooks). | +| `orderable` | If true, enables custom ordering for the collection, and documents can be reordered via drag and drop. Uses [fractional indexing](https://observablehq.com/@dgreensp/implementing-fractional-indexing) for efficient reordering. | +| `labels` | Singular and plural labels for use in identifying this Collection throughout Payload. Auto-generated from slug if not defined. | +| `enableQueryPresets` | Enable query presets for this Collection. [More details](../query-presets/overview). | +| `lockDocuments` | Enables or disables document locking. By default, document locking is enabled. 
Set to an object to configure, or set to `false` to disable locking. [More details](../admin/locked-documents). | +| `slug` \* | Unique, URL-friendly string that will act as an identifier for this Collection. | +| `timestamps` | Set to false to disable documents' automatically generated `createdAt` and `updatedAt` timestamps. | +| `typescript` | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. | +| `upload` | Specify options if you would like this Collection to support file uploads. For more, consult the [Uploads](../upload/overview) documentation. | +| `versions` | Set to true to enable default options, or configure with object properties. [More details](../versions/overview#collection-config). | +| `defaultPopulate` | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). | +| `indexes` | Define compound indexes for this collection. This can be used to either speed up querying/sorting by 2 or more fields at the same time or to ensure uniqueness between several fields. [More details](../database/indexes#compound-indexes). | +| `forceSelect` | Specify which fields should be selected always, regardless of the `select` query which can be useful that the field exists for access control / hooks | +| `disableBulkEdit` | Disable the bulk edit operation for the collection in the admin panel and the REST API | _\* An asterisk denotes that a property is required._ diff --git a/docs/configuration/environment-vars.mdx b/docs/configuration/environment-vars.mdx index 6ef525066d..c73d1ae523 100644 --- a/docs/configuration/environment-vars.mdx +++ b/docs/configuration/environment-vars.mdx @@ -1,7 +1,7 @@ --- title: Environment Variables label: Environment Variables -order: 100 +order: 60 desc: Learn how to use Environment Variables in your Payload project --- @@ -72,7 +72,7 @@ const MyClientComponent = () => { } ``` -For more information, check out the [Next.js Documentation](https://nextjs.org/docs/app/building-your-application/configuring/environment-variables). +For more information, check out the [Next.js documentation](https://nextjs.org/docs/app/building-your-application/configuring/environment-variables). ## Outside of Next.js diff --git a/docs/configuration/overview.mdx b/docs/configuration/overview.mdx index 417849ce9d..cbbd35d068 100644 --- a/docs/configuration/overview.mdx +++ b/docs/configuration/overview.mdx @@ -110,7 +110,7 @@ _\* An asterisk denotes that a property is required._ details](../custom-components/overview#accessing-the-payload-config). -### Typescript Config +### TypeScript Config Payload exposes a variety of TypeScript settings that you can leverage. These settings are used to auto-generate TypeScript interfaces for your [Collections](./collections) and [Globals](./globals), and to ensure that Payload uses your [Generated Types](../typescript/overview) for all [Local API](../local-api/overview) methods. @@ -121,10 +121,11 @@ import { buildConfig } from 'payload' export default buildConfig({ // ... + // highlight-start typescript: { - // highlight-line // ... }, + // highlight-end }) ``` @@ -227,7 +228,9 @@ import { buildConfig } from 'payload' export default buildConfig({ // ... 
- cors: '*', // highlight-line + // highlight-start + cors: '*', + // highlight-end }) ``` diff --git a/docs/custom-components/overview.mdx b/docs/custom-components/overview.mdx index 3ea36e3149..86b8198d81 100644 --- a/docs/custom-components/overview.mdx +++ b/docs/custom-components/overview.mdx @@ -505,3 +505,51 @@ Payload also exports its [SCSS](https://sass-lang.com) library for reuse which i **Note:** You can also drill into Payload's own component styles, or easily apply global, app-wide CSS. More on that [here](../admin/customizing-css). + +## Performance + +An often overlooked aspect of Custom Components is performance. If unchecked, Custom Components can lead to slow load times of the Admin Panel and ultimately a poor user experience. + +This is different from front-end performance of your public-facing site. + + + For more performance tips, see the [Performance + documentation](../performance/overview). + + +### Follow React and Next.js best practices + +All Custom Components are built using [React](https://react.dev). For this reason, it is important to follow React best practices. This includes using memoization, streaming, caching, optimizing renders, using hooks appropriately, and more. + +To learn more, see the [React documentation](https://react.dev/learn). + +The Admin Panel itself is a [Next.js](https://nextjs.org) application. For this reason, it is _also_ important to follow Next.js best practices. This includes bundling, when to use layouts vs pages, where to place the server/client boundary, and more. + +To learn more, see the [Next.js documentation](https://nextjs.org/docs). + +### Reducing initial HTML size + +With Server Components, be aware of what is being sent through the server/client boundary. All props are serialized and sent through the network. This can lead to large HTML sizes and slow initial load times if too much data is being sent to the client. + +To minimize this, you must be explicit about what props are sent to the client. Prefer server components and only send the necessary props to the client. This will also offset some of the JS execution to the server. + + + **Tip:** Use [React Suspense](https://react.dev/reference/react/Suspense) to + progressively load components and improve perceived performance. + + +### Prevent unnecessary re-renders + +If subscribing your component to form state, it may be re-rendering more often than necessary. + +To prevent this, use the [`useFormFields`](../admin/react-hooks) hook instead of `useAllFormFields` when you only need to access specific fields. + +```ts +'use client' +import { useFormFields } from '@payloadcms/ui' + +const MyComponent: TextFieldClientComponent = ({ path }) => { + const value = useFormFields(([fields, dispatch]) => fields[path]) + // ... +} +``` diff --git a/docs/database/indexes.mdx b/docs/database/indexes.mdx new file mode 100644 index 0000000000..e399320e11 --- /dev/null +++ b/docs/database/indexes.mdx @@ -0,0 +1,65 @@ +--- +title: Indexes +label: Indexes +order: 40 +keywords: database, indexes +desc: Index fields to produce faster queries. +--- + +Database indexes are a way to optimize the performance of your database by allowing it to quickly locate and retrieve data. If you have a field that you frequently query or sort by, adding an index to that field can significantly improve the speed of those operations. + +When your query runs, the database will not scan the entire document to find that one field, but will instead use the index to quickly locate the data. 
+ +To index a field, set the `index` option to `true` in your field's config: + +```ts +import type { CollectionConfig } from 'payload' + +export const MyCollection: CollectionConfig = { + // ... + fields: [ + // ... + { + name: 'title', + type: 'text', + // highlight-start + index: true, + // highlight-end + }, + ] +} +``` + + + **Note:** The `id`, `createdAt`, and `updatedAt` fields are indexed by + default. + + + + **Tip:** If you're using MongoDB, you can use [MongoDB + Compass](https://www.mongodb.com/products/compass) to visualize and manage + your indexes. + + +## Compound Indexes + +In addition to indexing single fields, you can also create compound indexes that index multiple fields together. This can be useful for optimizing queries that filter or sort by multiple fields. + +To create a compound index, use the `indexes` option in your [Collection Config](../configuration/collections): + +```ts +import type { CollectionConfig } from 'payload' + +export const MyCollection: CollectionConfig = { + // ... + fields: [ + // ... + ], + indexes: [ + { + fields: ['title', 'createdAt'], + unique: true, // Optional, if you want the combination of fields to be unique + }, + ], +} +``` diff --git a/docs/database/mongodb.mdx b/docs/database/mongodb.mdx index 0be3d87cbe..16958cd1c6 100644 --- a/docs/database/mongodb.mdx +++ b/docs/database/mongodb.mdx @@ -1,7 +1,7 @@ --- title: MongoDB label: MongoDB -order: 40 +order: 50 desc: Payload has supported MongoDB natively since we started. The flexible nature of MongoDB lends itself well to Payload's powerful fields. keywords: MongoDB, documentation, typescript, Content Management System, cms, headless, javascript, node, react, nextjs --- diff --git a/docs/database/postgres.mdx b/docs/database/postgres.mdx index 9f9c0ed046..99e39aca07 100644 --- a/docs/database/postgres.mdx +++ b/docs/database/postgres.mdx @@ -1,7 +1,7 @@ --- title: Postgres label: Postgres -order: 50 +order: 60 desc: Payload supports Postgres through an officially supported Drizzle Database Adapter. keywords: Postgres, documentation, typescript, Content Management System, cms, headless, javascript, node, react, nextjs --- diff --git a/docs/database/sqlite.mdx b/docs/database/sqlite.mdx index 64082da7a3..5c1e9d3753 100644 --- a/docs/database/sqlite.mdx +++ b/docs/database/sqlite.mdx @@ -1,7 +1,7 @@ --- title: SQLite label: SQLite -order: 60 +order: 70 desc: Payload supports SQLite through an officially supported Drizzle Database Adapter. keywords: SQLite, documentation, typescript, Content Management System, cms, headless, javascript, node, react, nextjs --- diff --git a/docs/fields/array.mdx b/docs/fields/array.mdx index 77396b9164..a4c597bd87 100644 --- a/docs/fields/array.mdx +++ b/docs/fields/array.mdx @@ -41,17 +41,17 @@ export const MyArrayField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). 
| | **`label`** | Text used as the heading in the [Admin Panel](../admin/overview) or an object with keys for each language. Auto-generated from name if not defined. | | **`fields`** \* | Array of field types to correspond to each row of the Array. | -| **`validate`** | Provide a custom validation function that will be executed on both the [Admin Panel](../admin/overview) and the backend. [More](/docs/fields/overview#validation) | +| **`validate`** | Provide a custom validation function that will be executed on both the [Admin Panel](../admin/overview) and the backend. [More details](/docs/fields/overview#validation). | | **`minRows`** | A number for the fewest allowed items during validation when a value is present. | | **`maxRows`** | A number for the most allowed items during validation when a value is present. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide an array of row data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide an array of row data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. If enabled, a separate, localized set of all data within this Array will be kept, so there is no need to specify each nested field as `localized`. | | **`required`** | Require this field to have a value. | | **`labels`** | Customize the row labels appearing in the Admin dashboard. | diff --git a/docs/fields/blocks.mdx b/docs/fields/blocks.mdx index 9fbe6c9755..3a840ae71c 100644 --- a/docs/fields/blocks.mdx +++ b/docs/fields/blocks.mdx @@ -41,17 +41,17 @@ export const MyBlocksField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as the heading in the Admin Panel or an object with keys for each language. Auto-generated from name if not defined. | | **`blocks`** \* | Array of [block configs](/docs/fields/blocks#block-configs) to be made available to this field. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. 
[More details](/docs/fields/overview#validation). | | **`minRows`** | A number for the fewest allowed items during validation when a value is present. | | **`maxRows`** | A number for the most allowed items during validation when a value is present. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API response or the Admin Panel. | -| **`defaultValue`** | Provide an array of block data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide an array of block data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. If enabled, a separate, localized set of all data within this field will be kept, so there is no need to specify each nested field as `localized`. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | | **`labels`** | Customize the block row labels appearing in the Admin dashboard. | diff --git a/docs/fields/checkbox.mdx b/docs/fields/checkbox.mdx index c135b54b47..95c7a04d92 100644 --- a/docs/fields/checkbox.mdx +++ b/docs/fields/checkbox.mdx @@ -30,15 +30,15 @@ export const MyCheckboxField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. 
[More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value, will default to false if field is also `required`. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value, will default to false if field is also `required`. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](./overview#admin-options). | diff --git a/docs/fields/code.mdx b/docs/fields/code.mdx index 0b16ee4838..4cf7f405bc 100644 --- a/docs/fields/code.mdx +++ b/docs/fields/code.mdx @@ -31,18 +31,18 @@ export const MyBlocksField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`minLength`** | Used by the default validation function to ensure values are of a minimum character length. | | **`maxLength`** | Used by the default validation function to ensure values are of a maximum character length. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. 
Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. See below for [more detail](#admin-options). | diff --git a/docs/fields/date.mdx b/docs/fields/date.mdx index aec8650a2b..3f575a52d8 100644 --- a/docs/fields/date.mdx +++ b/docs/fields/date.mdx @@ -30,15 +30,15 @@ export const MyDateField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). 
| diff --git a/docs/fields/email.mdx b/docs/fields/email.mdx index 55812c4a00..206c74beb1 100644 --- a/docs/fields/email.mdx +++ b/docs/fields/email.mdx @@ -30,16 +30,16 @@ export const MyEmailField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | diff --git a/docs/fields/group.mdx b/docs/fields/group.mdx index 29da25afd0..e3803afea2 100644 --- a/docs/fields/group.mdx +++ b/docs/fields/group.mdx @@ -35,15 +35,15 @@ export const MyGroupField: Field = { | Option | Description | | ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** | To be used as the property name when stored and retrieved from the database. 
[More](/docs/fields/overview#field-names) | +| **`name`** | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`fields`** \* | Array of field types to nest within this Group. | | **`label`** | Used as a heading in the Admin Panel and to name the generated GraphQL type. Defaults to the field name, if defined. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide an object of data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide an object of data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. If enabled, a separate, localized set of all data within this Group will be kept, so there is no need to specify each nested field as `localized`. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | | **`custom`** | Extension point for adding custom data (e.g. for plugins) | diff --git a/docs/fields/join.mdx b/docs/fields/join.mdx index f0e03befb1..d88dff2685 100644 --- a/docs/fields/join.mdx +++ b/docs/fields/join.mdx @@ -135,7 +135,7 @@ powerful Admin UI. | Option | Description | | ---------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when retrieved from the database. [More](./overview#field-names) | +| **`name`** \* | To be used as the property name when retrieved from the database. [More details](./overview#field-names). | | **`collection`** \* | The `slug`s having the relationship field or an array of collection slugs. | | **`on`** \* | The name of the relationship or upload field that relates to the collection document. Use dot notation for nested paths, like 'myGroup.relationName'. If `collection` is an array, this field must exist for all specified collections | | **`orderable`** | If true, enables custom ordering and joined documents can be reordered via drag and drop. Uses [fractional indexing](https://observablehq.com/@dgreensp/implementing-fractional-indexing) for efficient reordering. 
| diff --git a/docs/fields/json.mdx b/docs/fields/json.mdx index db9db40c8f..4b22f75aff 100644 --- a/docs/fields/json.mdx +++ b/docs/fields/json.mdx @@ -31,17 +31,17 @@ export const MyJSONField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`jsonSchema`** | Provide a JSON schema that will be used for validation. [JSON schemas](https://json-schema.org/learn/getting-started-step-by-step) | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). 
| diff --git a/docs/fields/number.mdx b/docs/fields/number.mdx index 8b8813f4a7..3cee5a4bde 100644 --- a/docs/fields/number.mdx +++ b/docs/fields/number.mdx @@ -30,7 +30,7 @@ export const MyNumberField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`min`** | Minimum value accepted. Used in the default `validation` function. | | **`max`** | Maximum value accepted. Used in the default `validation` function. | @@ -38,13 +38,13 @@ export const MyNumberField: Field = { | **`minRows`** | Minimum number of numbers in the numbers array, if `hasMany` is set to true. | | **`maxRows`** | Maximum number of numbers in the numbers array, if `hasMany` is set to true. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). 
| diff --git a/docs/fields/overview.mdx b/docs/fields/overview.mdx index 49fbb64aaa..7ecc88d5f2 100644 --- a/docs/fields/overview.mdx +++ b/docs/fields/overview.mdx @@ -303,7 +303,7 @@ The following additional properties are provided in the `ctx` object: | `path` | The full path to the field in the schema, represented as an array of string segments, including array indexes. I.e `['group', 'myArray', '1', 'textField']`. | | `id` | The `id` of the current document being edited. `id` is `undefined` during the `create` operation. | | `req` | The current HTTP request object. Contains `payload`, `user`, etc. | -| `event` | Either `onChange` or `submit` depending on the current action. Used as a performance opt-in. [More details](#async-field-validations). | +| `event` | Either `onChange` or `submit` depending on the current action. Used as a performance opt-in. [More details](#validation-performance). | #### Localized and Built-in Error Messages @@ -365,11 +365,11 @@ import { } from 'payload/shared' ``` -#### Async Field Validations +#### Validation Performance -Custom validation functions can also be asynchronous depending on your needs. This makes it possible to make requests to external services or perform other miscellaneous asynchronous logic. +When writing async or computationally heavy validation functions, it is important to consider the performance implications. Within the Admin Panel, validations are executed on every change to the field, so they should be as lightweight as possible and only run when necessary. -When writing async validation functions, it is important to consider the performance implications. Validations are executed on every change to the field, so they should be as lightweight as possible. If you need to perform expensive validations, such as querying the database, consider using the `event` property in the `ctx` object to only run the validation on form submission. +If you need to perform expensive validations, such as querying the database, consider using the `event` property in the `ctx` object to only run that particular validation on form submission. To write asynchronous validation functions, use the `async` keyword to define your function: @@ -403,6 +403,11 @@ export const Orders: CollectionConfig = { } ``` + + For more performance tips, see the [Performance + documentation](../performance/overview). + + ## Custom ID Fields All [Collections](../configuration/collections) automatically generate their own ID field. If needed, you can override this behavior by providing an explicit ID field to your config. This field should either be required or have a hook to generate the ID dynamically. diff --git a/docs/fields/point.mdx b/docs/fields/point.mdx index 5a2c2e575b..88cabbae74 100644 --- a/docs/fields/point.mdx +++ b/docs/fields/point.mdx @@ -34,16 +34,16 @@ export const MyPointField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). 
| | **`label`** | Used as a field label in the Admin Panel and to name the generated GraphQL type. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. To support location queries, point index defaults to `2dsphere`, to disable the index set to `false`. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. To support location queries, point index defaults to `2dsphere`, to disable the index set to `false`. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](./overview#admin-options). | diff --git a/docs/fields/radio.mdx b/docs/fields/radio.mdx index 3d02e9d1ed..2e11f3715a 100644 --- a/docs/fields/radio.mdx +++ b/docs/fields/radio.mdx @@ -35,16 +35,16 @@ export const MyRadioField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`options`** \* | Array of options to allow the field to store. Can either be an array of strings, or an array of objects containing a `label` string and a `value` string. | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. 
Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. The default value must exist within provided values in `options`. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. The default value must exist within provided values in `options`. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | diff --git a/docs/fields/relationship.mdx b/docs/fields/relationship.mdx index 2b277d6776..4bc7e80834 100644 --- a/docs/fields/relationship.mdx +++ b/docs/fields/relationship.mdx @@ -39,22 +39,22 @@ export const MyRelationshipField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`relationTo`** \* | Provide one or many collection `slug`s to be able to assign relationships to. | -| **`filterOptions`** | A query to filter which options appear in the UI and validate against. [More](#filtering-relationship-options). | +| **`filterOptions`** | A query to filter which options appear in the UI and validate against. [More details](#filtering-relationship-options). | | **`hasMany`** | Boolean when, if set to `true`, allows this field to have many relations instead of only one. | | **`minRows`** | A number for the fewest allowed items during validation when a value is present. Used with `hasMany`. | | **`maxRows`** | A number for the most allowed items during validation when a value is present. Used with `hasMany`. | | **`maxDepth`** | Sets a maximum population depth for this field, regardless of the remaining depth when this field is reached. 
[Max Depth](/docs/queries/depth#max-depth) | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | @@ -93,7 +93,7 @@ The Relationship Field inherits all of the default admin options from the base [ | **`isSortable`** | Set to `true` if you'd like this field to be sortable within the Admin UI using drag and drop (only works when `hasMany` is set to `true`). | | **`allowCreate`** | Set to `false` if you'd like to disable the ability to create new documents from within the relationship field. | | **`allowEdit`** | Set to `false` if you'd like to disable the ability to edit documents from within the relationship field. | -| **`sortOptions`** | Define a default sorting order for the options within a Relationship field's dropdown. [More](#sort-options) | +| **`sortOptions`** | Define a default sorting order for the options within a Relationship field's dropdown. [More details](#sort-options) | | **`placeholder`** | Define a custom text or function to replace the generic default placeholder | | **`appearance`** | Set to `drawer` or `select` to change the behavior of the field. Defaults to `select`. 
| diff --git a/docs/fields/rich-text.mdx b/docs/fields/rich-text.mdx index 78c81917eb..9a864238d2 100644 --- a/docs/fields/rich-text.mdx +++ b/docs/fields/rich-text.mdx @@ -23,14 +23,14 @@ Instead, you can invest your time and effort into learning the underlying open-s | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](./overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](./overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](./overview#validation) | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](./overview#validation). | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](../authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](./overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](./overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](../configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | diff --git a/docs/fields/select.mdx b/docs/fields/select.mdx index 07cd04e568..b2fbfca3e7 100644 --- a/docs/fields/select.mdx +++ b/docs/fields/select.mdx @@ -35,18 +35,18 @@ export const MySelectField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`options`** \* | Array of options to allow the field to store. Can either be an array of strings, or an array of objects containing a `label` string and a `value` string. | | **`hasMany`** | Boolean when, if set to `true`, allows this field to have many selections instead of only one. 
| | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. See the [default field admin config](/docs/fields/overview#admin-options) for more details. | diff --git a/docs/fields/tabs.mdx b/docs/fields/tabs.mdx index 5e603dd8cd..723a6d9f13 100644 --- a/docs/fields/tabs.mdx +++ b/docs/fields/tabs.mdx @@ -45,7 +45,7 @@ Each tab must have either a `name` or `label` and the required `fields` array. Y | Option | Description | | ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** | Groups field data into an object when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** | Groups field data into an object when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | The label to render on the tab itself. Required when name is undefined, defaults to name converted to words. | | **`fields`** \* | The fields to render within this tab. | | **`description`** | Optionally render a description within this tab to describe the contents of the tab itself. 
| diff --git a/docs/fields/text.mdx b/docs/fields/text.mdx index 3d2042822b..c1cd4e77e1 100644 --- a/docs/fields/text.mdx +++ b/docs/fields/text.mdx @@ -30,18 +30,18 @@ export const MyTextField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | | **`minLength`** | Used by the default validation function to ensure values are of a minimum character length. | | **`maxLength`** | Used by the default validation function to ensure values are of a maximum character length. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). 
| diff --git a/docs/fields/textarea.mdx b/docs/fields/textarea.mdx index c8913c800d..6c4aa78f52 100644 --- a/docs/fields/textarea.mdx +++ b/docs/fields/textarea.mdx @@ -30,18 +30,18 @@ export const MyTextareaField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | | **`minLength`** | Used by the default validation function to ensure values are of a minimum character length. | | **`maxLength`** | Used by the default validation function to ensure values are of a maximum character length. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [More details](#admin-options). | diff --git a/docs/fields/ui.mdx b/docs/fields/ui.mdx index 98133a224e..e7d787b88b 100644 --- a/docs/fields/ui.mdx +++ b/docs/fields/ui.mdx @@ -32,8 +32,8 @@ export const MyUIField: Field = { | ------------------------------- | ---------------------------------------------------------------------------------------------------------- | | **`name`** \* | A unique identifier for this field. 
| | **`label`** | Human-readable label for this UI field. | -| **`admin.components.Field`** \* | React component to be rendered for this field within the Edit View. [More](./overview#field) | -| **`admin.components.Cell`** | React component to be rendered as a Cell within collection List views. [More](./overview#cell) | +| **`admin.components.Field`** \* | React component to be rendered for this field within the Edit View. [More details](./overview#field). | +| **`admin.components.Cell`** | React component to be rendered as a Cell within collection List views. [More details](./overview#cell). | | **`admin.disableListColumn`** | Set `disableListColumn` to `true` to prevent the UI field from appearing in the list view column selector. | | **`custom`** | Extension point for adding custom data (e.g. for plugins) | diff --git a/docs/fields/upload.mdx b/docs/fields/upload.mdx index 2370c55492..177bcf3dc8 100644 --- a/docs/fields/upload.mdx +++ b/docs/fields/upload.mdx @@ -46,23 +46,23 @@ export const MyUploadField: Field = { | Option | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`name`** \* | To be used as the property name when stored and retrieved from the database. [More details](/docs/fields/overview#field-names). | | **`relationTo`** \* | Provide a single collection `slug` to allow this field to accept a relation to. **Note: the related collection must be configured to support Uploads.** | -| **`filterOptions`** | A query to filter which options appear in the UI and validate against. [More](#filtering-upload-options). | +| **`filterOptions`** | A query to filter which options appear in the UI and validate against. [More details](#filtering-upload-options). | | **`hasMany`** | Boolean which, if set to true, allows this field to have many relations instead of only one. | | **`minRows`** | A number for the fewest allowed items during validation when a value is present. Used with hasMany. | | **`maxRows`** | A number for the most allowed items during validation when a value is present. Used with hasMany. | | **`maxDepth`** | Sets a number limit on iterations of related documents to populate when queried. [Depth](../queries/depth) | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | | **`unique`** | Enforce that each entry in the Collection has a unique value for this field. | -| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More](/docs/fields/overview#validation) | -| **`index`** | Build an [index](/docs/database/overview) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. | +| **`validate`** | Provide a custom validation function that will be executed on both the Admin Panel and the backend. [More details](/docs/fields/overview#validation). | +| **`index`** | Build an [index](../database/indexes) for this field to produce faster queries. Set this field to `true` if your users will perform queries on this field's data often. 
| | **`saveToJWT`** | If this field is top-level and nested in a config supporting [Authentication](/docs/authentication/overview), include its data in the user JWT. | | **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | | **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | | **`hidden`** | Restrict this field's visibility from all APIs entirely. Will still be saved to the database, but will not appear in any API or the Admin Panel. | -| **`defaultValue`** | Provide data to be used for this field's default value. [More](/docs/fields/overview#default-values) | -| **`displayPreview`** | Enable displaying preview of the uploaded file. Overrides related Collection's `displayPreview` option. [More](/docs/upload/overview#collection-upload-options). | +| **`defaultValue`** | Provide data to be used for this field's default value. [More details](/docs/fields/overview#default-values). | +| **`displayPreview`** | Enable displaying preview of the uploaded file. Overrides related Collection's `displayPreview` option. [More details](/docs/upload/overview#collection-upload-options). | | **`localized`** | Enable localization for this field. Requires [localization to be enabled](/docs/configuration/localization) in the Base config. | | **`required`** | Require this field to have a value. | | **`admin`** | Admin-specific configuration. [Admin Options](./overview#admin-options). | diff --git a/docs/hooks/overview.mdx b/docs/hooks/overview.mdx index c1601e8578..1d1afb4923 100644 --- a/docs/hooks/overview.mdx +++ b/docs/hooks/overview.mdx @@ -93,12 +93,108 @@ All Hooks can be written as either synchronous or asynchronous functions. Choosi #### Asynchronous -If the Hook should modify data before a Document is updated or created, and it relies on asynchronous actions such as fetching data from a third party, it might make sense to define your Hook as an asynchronous function. This way you can be sure that your Hook completes before the operation's lifecycle continues. Async hooks are run in series - so if you have two async hooks defined, the second hook will wait for the first to complete before it starts. +If the Hook should modify data before a Document is updated or created, and it relies on asynchronous actions such as fetching data from a third party, it might make sense to define your Hook as an asynchronous function. This way you can be sure that your Hook completes before the operation's lifecycle continues. + +Async hooks are run in series - so if you have two async hooks defined, the second hook will wait for the first to complete before it starts. + + + **Tip:** If your hook executes a long-running task that doesn't affect the + response in any way, consider [offloading it to the job + queue](#offloading-to-the-jobs-queue). That will free up the request to + continue processing without waiting for the task to complete. + #### Synchronous -If your Hook simply performs a side-effect, such as updating a CRM, it might be okay to define it synchronously, so the Payload operation does not have to wait for your hook to complete. +If your Hook simply performs a side-effect, such as mutating document data, it might be okay to define it synchronously, so the Payload operation does not have to wait for your hook to complete. ## Server-only Execution Hooks are only triggered on the server and are automatically excluded from the client-side bundle.
This means that you can safely use sensitive business logic in your Hooks without worrying about exposing it to the client. + +## Performance + +Hooks are a powerful way to customize the behavior of your APIs, but some hooks are run very often and can add significant overhead to your requests if not optimized. + +When building hooks, combine as many of these strategies as possible to ensure your hooks are as performant as they can be. + + + For more performance tips, see the [Performance + documentation](../performance/overview). + + +### Writing efficient hooks + +Consider when hooks are run. One common pitfall is putting expensive logic in hooks that run very often. + +For example, the `read` operation runs on every read request, so avoid putting expensive logic in a `beforeRead` or `afterRead` hook. + +```ts +{ + hooks: { + beforeRead: [ + async ({ doc }) => { + // This runs on every read request - avoid expensive logic here + await doSomethingExpensive() + return doc + }, + ], + }, +} +``` + +Instead, you might want to use a `beforeChange` or `afterChange` hook, which only runs when a document is created or updated. + +```ts +{ + hooks: { + beforeChange: [ + async ({ context }) => { + // This is more acceptable here, although you should still be mindful of performance + await doSomethingExpensive() + // ... + }, + ] + }, +} +``` + +### Using Hook Context + +Use [Hook Context](./context) to prevent infinite loops or avoid repeating expensive operations across multiple hooks in the same request. + +```ts +{ + hooks: { + beforeChange: [ + async ({ context }) => { + const somethingExpensive = await doSomethingExpensive() + context.somethingExpensive = somethingExpensive + // ... + }, + ], + }, +} +``` + +To learn more, see the [Hook Context documentation](./context). + +### Offloading to the jobs queue + +If your hooks perform any long-running tasks that don't directly affect the request lifecycle, consider offloading them to the [jobs queue](../jobs-queue/overview). This will free up the request to continue processing without waiting for the task to complete. + +```ts +{ + hooks: { + afterChange: [ + async ({ doc, req }) => { + // Offload to job queue + await req.payload.jobs.queue(...) + // ... + }, + ], + }, +} +``` + +To learn more, see the [Job Queue documentation](../jobs-queue/overview). diff --git a/docs/performance/overview.mdx b/docs/performance/overview.mdx new file mode 100644 index 0000000000..457d74d033 --- /dev/null +++ b/docs/performance/overview.mdx @@ -0,0 +1,244 @@ +--- +title: Performance +label: Overview +order: 10 +desc: Ensure your Payload app runs as quickly and efficiently as possible. +keywords: performance, optimization, indexes, depth, select, block references, documentation, Content Management System, cms, headless, javascript, node, react, nextjs +--- + +Payload is designed with performance in mind, but its customizability means that there are many ways to configure your app that can impact performance. + +With this in mind, Payload provides several options and best practices to help you optimize your app's specific performance needs. This includes the database, APIs, and Admin Panel. + +Whether you're building an app or troubleshooting an existing one, follow these guidelines to ensure that it runs as quickly and efficiently as possible. + +## Building your application + +### Database proximity + +The proximity of your database to your server can significantly impact performance.
Ensure that your database is hosted in the same region as your server to minimize latency and improve response times. + +### Indexing your fields + +If a particular field is queried often, build an [Index](../database/indexes) for that field to produce faster queries. + +When your query runs, the database will not search the entire document to find that one field, but will instead use the index to quickly locate the data. + +To learn more, see the [Indexes](../database/indexes) docs. + +### Querying your data + +There are several ways to optimize your [Queries](../queries/overview). Many of these options directly impact overall database overhead, response sizes, and/or computational load and can significantly improve performance. + +When building queries, combine as many of these options as possible. This will ensure your queries are as efficient as they can be. + +To learn more, see the [Query Performance](../queries/overview#performance) docs. + +### Optimizing your APIs + +When querying data through Payload APIs, the request lifecycle includes running hooks, access control, validations, and other operations that can add significant overhead to the request. + +To optimize your APIs, ensure that any custom logic is as efficient as possible. This includes writing lightweight hooks, preventing memory leaks, offloading long-running tasks, and optimizing custom validations. + +To learn more, see the [Hooks Performance](../hooks/overview#performance) docs. + +### Writing efficient validations + +If your validation functions are asynchronous or computationally heavy, ensure they only run when necessary. + +To learn more, see the [Validation Performance](../fields/overview#validation-performance) docs. + +### Optimizing custom components + +When building custom components in the Admin Panel, ensure that they are as efficient as possible. This includes using React best practices such as memoization, lazy loading, and avoiding unnecessary re-renders. + +To learn more, see the [Custom Components Performance](../admin/custom-components#performance) docs. + +## Other Best Practices + +### Block references + +Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel. + +For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field. + +To do this, use the `blockReferences` option in your blocks field: + +```ts +import { buildConfig } from 'payload' + +const config = buildConfig({ + // ... + blocks: [ + { + slug: 'TextBlock', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + ], + collections: [ + { + slug: 'posts', + fields: [ + { + name: 'content', + type: 'blocks', + // highlight-start + blockReferences: ['TextBlock'], + blocks: [], // Required to be empty, for compatibility reasons + // highlight-end + }, + ], + }, + { + slug: 'pages', + fields: [ + { + name: 'content', + type: 'blocks', + // highlight-start + blockReferences: ['TextBlock'], + blocks: [], // Required to be empty, for compatibility reasons + // highlight-end + }, + ], + }, + ], +}) +``` + +### Using the cached Payload instance + +Ensure that you do not instantiate Payload unnecessarily. Instead, Payload provides a caching mechanism to reuse the same instance across your app.
+ +To do this, use the `getPayload` function to get the cached instance of Payload: + +```ts +import { getPayload } from 'payload' +import config from '@payload-config' + +const myFunction = async () => { + const payload = await getPayload({ config }) + + // use payload here +} +``` + +### When to make direct-to-db calls + + + **Warning:** Direct database calls bypass all hooks and validations. Only use + this method when you are certain that the operation is safe and does not + require any of these features. + + +Making direct database calls can significantly improve performance by bypassing much of the request lifecycle such as hooks, validations, and other overhead associated with Payload APIs. + +For example, this can be especially useful for the `update` operation, where Payload would otherwise need to make multiple API calls to fetch, update, and fetch again. Making a direct database call can reduce this to a single operation. + +To do this, use the `payload.db` methods: + +```ts +await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { + title: 'New Title', + }, +}) +``` + + + **Note:** Direct database methods do not start a + [transaction](../database/transactions). You have to start one yourself. + + +#### Returning + +To prevent unnecessary database computation and reduce the size of the response, you can also set `returning: false` in your direct database calls if you don't need the updated document returned to you. + +```ts +await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { title: 'New Title' }, + // highlight-start + returning: false, + // highlight-end +}) +``` + + + **Note:** The `returning` option is only available on direct-to-db methods, + i.e. those on the `payload.db` object. It is not exposed to the Local API. + + +### Avoid bundling the entire UI library in your front-end + +If your front-end imports from `@payloadcms/ui`, ensure that you do not bundle the entire package as this can significantly increase your bundle size. + +To do this, import using the full path to the specific component you need: + +```ts +import { Button } from '@payloadcms/ui/elements/Button' +``` + +Custom components within the Admin Panel, however, do not have this same restriction and can import directly from `@payloadcms/ui`: + +```ts +import { Button } from '@payloadcms/ui' +``` + + + **Tip:** Use + [`@next/bundle-analyzer`](https://nextjs.org/docs/app/guides/package-bundling) + to analyze your bundle and identify large dependencies or + components that could be optimized. + + +## Optimizing local development + +Everything mentioned above applies to local development as well, but there are a few additional steps you can take to optimize your local development experience. + +### Enable Turbopack + + + **Note:** In the future this will be the default. Use at your own risk. + + +Add `--turbo` to your dev script to significantly speed up your local development server start time. + +```json +{ + "scripts": { + "dev": "next dev --turbo" + } +} +``` + +### Only bundle server packages in production + + + **Note:** This is enabled by default in `create-payload-app` since v3.28.0. If + you created your app after this version, you don't need to do anything. + + +By default, Next.js bundles both server and client code. However, during development, bundling certain server packages isn't necessary. + +Payload has thousands of modules, slowing down compilation.
+ +Setting this option skips bundling Payload server modules during development. Fewer files to compile means faster compilation speeds. + +To do this, add the `devBundleServerPackages` option to `withPayload` in your `next.config.js` file: + +```ts +const nextConfig = { + // your existing next config +} + +export default withPayload(nextConfig, { devBundleServerPackages: false }) +``` diff --git a/docs/queries/depth.mdx b/docs/queries/depth.mdx index 0fe9a128e6..241f2e78e0 100644 --- a/docs/queries/depth.mdx +++ b/docs/queries/depth.mdx @@ -8,7 +8,7 @@ keywords: query, documents, pagination, documentation, Content Management System Documents in Payload can have relationships to other Documents. This is true for both [Collections](../configuration/collections) as well as [Globals](../configuration/globals). When you query a Document, you can specify the depth at which to populate any of its related Documents either as full objects, or only their IDs. -Depth will optimize the performance of your application by limiting the amount of processing made in the database and significantly reducing the amount of data returned. Since Documents can be infinitely nested or recursively related, it's important to be able to control how deep your API populates. +Since Documents can be infinitely nested or recursively related, it's important to be able to control how deep your API populates. Depth can impact the performance of your queries by affecting the load on the database and the size of the response. For example, when you specify a `depth` of `0`, the API response might look like this: @@ -48,7 +48,9 @@ import type { Payload } from 'payload' const getPosts = async (payload: Payload) => { const posts = await payload.find({ collection: 'posts', - depth: 2, // highlight-line + // highlight-start + depth: 2, + // highlight-end }) return posts @@ -65,7 +67,9 @@ const getPosts = async (payload: Payload) => { To specify depth in the [REST API](../rest-api/overview), you can use the `depth` parameter in your query: ```ts -fetch('https://localhost:3000/api/posts?depth=2') // highlight-line +// highlight-start +fetch('https://localhost:3000/api/posts?depth=2') + // highlight-end .then((res) => res.json()) .then((data) => console.log(data)) ``` @@ -75,6 +79,24 @@ fetch('https://localhost:3000/api/posts?depth=2') // highlight-line the `/api/globals` endpoint. +## Default Depth + +If no depth is specified in the request, Payload will use its default depth. By default, this is set to `2`. + +To change the default depth on the application level, you can use the `defaultDepth` option in your root Payload config: + +```ts +import { buildConfig } from 'payload' + +export default buildConfig({ + // ... + // highlight-start + defaultDepth: 1, + // highlight-end + // ... +}) +``` + ## Max Depth Fields like the [Relationship Field](../fields/relationship) or the [Upload Field](../fields/upload) can also set a maximum depth. If exceeded, this will limit the population depth regardless of what the depth might be on the request.
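+For example (an illustrative sketch reusing the `posts` collection and the `author` relationship field from the examples above), a request can ask for any depth, but population of that field still stops at its `maxDepth`:
+
+```ts
+// Illustrative sketch: the request asks for depth 10, but if the `author`
+// relationship field sets `maxDepth: 2`, that field only populates two
+// levels deep regardless of the request-level depth.
+const posts = await payload.find({
+  collection: 'posts',
+  depth: 10,
+})
+```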
@@ -89,7 +111,9 @@ To set a max depth for a field, use the `maxDepth` property in your field config name: 'author', type: 'relationship', relationTo: 'users', - maxDepth: 2, // highlight-line + // highlight-start + maxDepth: 2, + // highlight-end } ] } diff --git a/docs/queries/overview.mdx b/docs/queries/overview.mdx index b3354d65af..cd39f6d174 100644 --- a/docs/queries/overview.mdx +++ b/docs/queries/overview.mdx @@ -60,7 +60,7 @@ The following operators are available for use in queries: **Tip:** If you know your users will be querying on certain fields a lot, add `index: true` to the Field Config. This will speed up searches using that - field immensely. + field immensely. [More details](../database/indexes). ### And / Or Logic @@ -192,3 +192,130 @@ const getPosts = async () => { // Continue to handle the response below... } ``` + +## Performance + +There are several ways to optimize your queries. Many of these options directly impact overall database overhead, response sizes, and/or computational load and can significantly improve performance. + +When building queries, combine as many of these strategies as possible to ensure your queries are as performant as they can be. + + + For more performance tips, see the [Performance + documentation](../performance/overview). + + +### Indexes + +Build [Indexes](../database/indexes) for fields that are often queried or sorted by. + +When your query runs, the database will not search the entire document to find that one field, but will instead use the index to quickly locate the data. + +This is done by adding `index: true` to the Field Config for that field: + +```ts +// In your collection configuration +{ + slug: 'posts', + fields: [ + { + name: 'title', + type: 'text', + // highlight-start + index: true, // Add an index to the title field + // highlight-end + }, + // Other fields... + ], +} +``` + +To learn more, see the [Indexes documentation](../database/indexes). + +### Depth + +Set the [Depth](./depth) to only the level that you need to avoid populating unnecessary related documents. + +Relationships will only populate down to the specified depth, and any relationships beyond that depth will only return the ID of the related document. + +```ts +const posts = await payload.find({ + collection: 'posts', + where: { ... }, + // highlight-start + depth: 0, // Only return the IDs of related documents + // highlight-end +}) +``` + +To learn more, see the [Depth documentation](./depth). + +### Limit + +Set the [Limit](./pagination#limit) if you can reliably predict the number of matched documents, such as when querying on a unique field. + +```ts +const posts = await payload.find({ + collection: 'posts', + where: { + slug: { + equals: 'unique-post-slug', + }, + }, + // highlight-start + limit: 1, // Only expect one document to be returned + // highlight-end +}) +``` + + + **Tip:** Use in combination with `pagination: false` for best performance when + querying by unique fields. + + +To learn more, see the [Limit documentation](./pagination#limit). + +### Select + +Use the [Select API](./select) to only process and return the fields you need. + +This will reduce the amount of data returned from the request, and also skip processing of any fields that are not selected, such as running their field hooks. + +```ts +const posts = await payload.find({ + collection: 'posts', + where: { ...
}, + // highlight-start + select: [{ + title: true, + }], + // highlight-end +``` + +This is a basic example, but there are many ways to use the Select API, including selecting specific fields, excluding fields, etc. + +To learn more, see the [Select documentation](./select). + +### Pagination + +[Disable Pagination](./pagination#disabling-pagination) if you can reliably predict the number of matched documents, such as when querying on a unique field. + +```ts +const posts = await payload.find({ + collection: 'posts', + where: { + slug: { + equals: 'unique-post-slug', + }, + }, + // highlight-start + pagination: false, // Return all matched documents without pagination + // highlight-end +}) +``` + + + **Tip:** Use in combination with `limit: 1` for best performance when querying + by unique fields. + + +To learn more, see the [Pagination documentation](./pagination). diff --git a/docs/queries/pagination.mdx b/docs/queries/pagination.mdx index a9f39137db..7ec1ae953a 100644 --- a/docs/queries/pagination.mdx +++ b/docs/queries/pagination.mdx @@ -6,9 +6,61 @@ desc: Payload queries are equipped with automatic pagination so you create pagin keywords: query, documents, pagination, documentation, Content Management System, cms, headless, javascript, node, react, nextjs --- -All collection `find` queries are paginated automatically. Responses are returned with top-level meta data related to pagination, and returned documents are nested within a `docs` array. +With Pagination you can limit the number of documents returned per page, and get a specific page of results. This is useful for creating paginated lists of documents within your application. -**`Find` response properties:** +All paginated responses include documents nested within a `docs` array, and return top-level meta data related to pagination such as `totalDocs`, `limit`, `totalPages`, `page`, and more. + + + **Note:** Collection `find` queries are paginated automatically. + + +## Options + +All Payload APIs support the pagination controls below. With them, you can create paginated lists of documents within your application: + +| Control | Default | Description | +| ------------ | ------- | ------------------------------------------------------------------------- | +| `limit` | `10` | Limits the number of documents returned per page. [More details](#limit). | +| `pagination` | `true` | Set to `false` to disable pagination and return all documents. | +| `page` | `1` | Get a specific page number. | + +## Local API + +To specify pagination controls in the [Local API](../local-api/overview), you can use the `limit`, `page`, and `pagination` options in your query: + +```ts +import type { Payload } from 'payload' + +const getPosts = async (payload: Payload) => { + const posts = await payload.find({ + collection: 'posts', + // highlight-start + limit: 10, + page: 2, + // highlight-end + }) + + return posts +} +``` + +## REST API + +With the [REST API](../rest-api/overview), you can use the pagination controls below as query strings: + +```ts +// highlight-start +fetch('https://localhost:3000/api/posts?limit=10&page=2') + // highlight-end + .then((res) => res.json()) + .then((data) => console.log(data)) +``` + +## Response + +All paginated responses include documents nested within a `docs` array, and return top-level meta data related to pagination. 
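As a rough sketch of how that metadata can be used (the `posts` collection and the `limit` of `10` below are purely illustrative), you can walk every page by combining the returned `page` and `totalPages` values. The full list of response properties follows below:

```ts
import type { Payload } from 'payload'

// Illustrative helper: collect every document from the 'posts' collection,
// one page of 10 docs at a time, using the pagination metadata on each response.
const getAllPosts = async (payload: Payload) => {
  const allDocs: unknown[] = []
  let page = 1
  let totalPages = 1

  while (page <= totalPages) {
    const result = await payload.find({
      collection: 'posts',
      limit: 10,
      page,
    })

    allDocs.push(...result.docs)
    totalPages = result.totalPages
    page += 1
  }

  return allDocs
}
```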
+ +The `find` operation includes the following properties in its response: | Property | Description | | --------------- | --------------------------------------------------------- | @@ -51,16 +103,59 @@ All collection `find` queries are paginated automatically. Responses are returne } ``` -## Pagination controls +## Limit -All Payload APIs support the pagination controls below. With them, you can create paginated lists of documents within your application: +You can specify a `limit` to restrict the number of documents returned per page. -| Control | Default | Description | -| ------------ | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `limit` | `10` | Limits the number of documents returned per page - set to `0` to show all documents, we automatically disabled pagination for you when `limit` is `0` for optimisation | -| `pagination` | `true` | Set to `false` to disable pagination and return all documents | -| `page` | `1` | Get a specific page number | + + **Reminder:** By default, any query with `limit: 0` will automatically + [disable pagination](#disabling-pagination). + -### Disabling pagination within Local API +#### Performance benefits + +If you are querying for a specific document and can reliably expect only one document to match, you can set a limit of `1` (or another low number) to reduce the number of database lookups and improve performance. + +For example, when querying a document by a unique field such as `slug`, you can set the limit to `1` since you know there will only be one document with that slug. + +To do this, set the `limit` option in your query: + +```ts +await payload.find({ + collection: 'posts', + where: { + slug: { + equals: 'post-1', + }, + }, + // highlight-start + limit: 1, + // highlight-end +}) +``` + +## Disabling pagination + +Disabling pagination can improve performance by reducing the overhead of pagination calculations and improve query speed. For `find` operations within the Local API, you can disable pagination to retrieve all documents from a collection by passing `pagination: false` to the `find` local operation. + +To do this, set `pagination: false` in your query: + +```ts +import type { Payload } from 'payload' + +const getPost = async (payload: Payload) => { + const posts = await payload.find({ + collection: 'posts', + where: { + title: { equals: 'My Post' }, + }, + // highlight-start + pagination: false, + // highlight-end + }) + + return posts +} +``` diff --git a/docs/queries/select.mdx b/docs/queries/select.mdx index abc8a516bb..fa5e760167 100644 --- a/docs/queries/select.mdx +++ b/docs/queries/select.mdx @@ -6,9 +6,9 @@ desc: Payload select determines which fields are selected to the result. keywords: query, documents, pagination, documentation, Content Management System, cms, headless, javascript, node, react, nextjs --- -By default, Payload's APIs will return _all fields_ for a given collection or global. But, you may not need all of that data for all of your queries. Sometimes, you might want just a few fields from the response, which can speed up the Payload API and reduce the amount of JSON that is sent to you from the API. +By default, Payload's APIs will return _all fields_ for a given collection or global. But, you may not need all of that data for all of your queries. Sometimes, you might want just a few fields from the response. -This is where Payload's `select` feature comes in. 
Here, you can define exactly which fields you'd like to retrieve from the API. +With the Select API, you can define exactly which fields you'd like to retrieve. This can impact the performance of your queries by affecting the load on the database and the size of the response. ## Local API @@ -21,6 +21,7 @@ import type { Payload } from 'payload' const getPosts = async (payload: Payload) => { const posts = await payload.find({ collection: 'posts', + // highlight-start select: { text: true, // select a specific field from group @@ -29,7 +30,8 @@ const getPosts = async (payload: Payload) => { }, // select all fields from array array: true, - }, // highlight-line + }, + // highlight-end }) return posts @@ -40,12 +42,14 @@ const getPosts = async (payload: Payload) => { const posts = await payload.find({ collection: 'posts', // Select everything except for array and group.number + // highlight-start select: { array: false, group: { number: false, }, - }, // highlight-line + }, + // highlight-end }) return posts @@ -67,8 +71,10 @@ To specify select in the [REST API](../rest-api/overview), you can use the `sele ```ts fetch( + // highlight-start 'https://localhost:3000/api/posts?select[color]=true&select[group][number]=true', -) // highlight-line + // highlight-end +) .then((res) => res.json()) .then((data) => console.log(data)) ``` @@ -149,7 +155,7 @@ export const Pages: CollectionConfig<'pages'> = { not be able to construct the correct file URL, instead returning `url: null`. -## populate +## Populate Setting `defaultPopulate` will enforce that each time Payload performs a "population" of a related document, only the fields specified will be queried and returned. However, you can override `defaultPopulate` with the `populate` property in the Local and REST API: diff --git a/docs/queries/sort.mdx b/docs/queries/sort.mdx index d9f46b2f91..5ff67f5821 100644 --- a/docs/queries/sort.mdx +++ b/docs/queries/sort.mdx @@ -6,13 +6,15 @@ desc: Payload sort allows you to order your documents by a field in ascending or keywords: query, documents, pagination, documentation, Content Management System, cms, headless, javascript, node, react, nextjs --- -Documents in Payload can be easily sorted by a specific [Field](../fields/overview). When querying Documents, you can pass the name of any top-level field, and the response will sort the Documents by that field in _ascending_ order. If prefixed with a minus symbol ("-"), they will be sorted in _descending_ order. In Local API multiple fields can be specified by using an array of strings. In REST API multiple fields can be specified by separating fields with comma. The minus symbol can be in front of individual fields. +Documents in Payload can be easily sorted by a specific [Field](../fields/overview). When querying Documents, you can pass the name of any top-level field, and the response will sort the Documents by that field in _ascending_ order. + +If prefixed with a minus symbol ("-"), they will be sorted in _descending_ order. In Local API multiple fields can be specified by using an array of strings. In REST API multiple fields can be specified by separating fields with comma. The minus symbol can be in front of individual fields. Because sorting is handled by the database, the field cannot be a [Virtual Field](https://payloadcms.com/blog/learn-how-virtual-fields-can-help-solve-common-cms-challenges) unless it's [linked with a relationship field](/docs/fields/relationship#linking-virtual-fields-with-relationships). 
It must be stored in the database to be searchable. **Tip:** For performance reasons, it is recommended to enable `index: true` - for the fields that will be sorted upon. [More details](../fields/overview). + for the fields that will be sorted upon. [More details](../database/indexes). ## Local API From 5f019533d86373686edb7fbba70cb666525ffedc Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Tue, 15 Jul 2025 09:12:33 -0400 Subject: [PATCH 035/143] fix: types for RenderField fields prop (#13162) Fixes #7799 Fixes a type issue where all fields in RenderFields['fields'] admin properties were being marked as required since we were using `Pick`. Adds a helper type to allow extracting properties with correct optionality. --- packages/payload/src/fields/config/types.ts | 13 +++++++------ packages/payload/src/types/index.ts | 6 ++++++ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/payload/src/fields/config/types.ts b/packages/payload/src/fields/config/types.ts index 8ac63a8849..14f1fc7fae 100644 --- a/packages/payload/src/fields/config/types.ts +++ b/packages/payload/src/fields/config/types.ts @@ -142,6 +142,7 @@ import type { JsonObject, Operation, PayloadRequest, + PickPreserveOptional, Where, } from '../../types/index.js' import type { @@ -632,8 +633,8 @@ export type TextField = { Omit export type TextFieldClient = { - // @ts-expect-error - vestiges of when tsconfig was not strict. Feel free to improve - admin?: AdminClient & Pick + admin?: AdminClient & + PickPreserveOptional, 'autoComplete' | 'placeholder' | 'rtl'> } & FieldBaseClient & Pick @@ -653,8 +654,8 @@ export type EmailField = { } & Omit export type EmailFieldClient = { - // @ts-expect-error - vestiges of when tsconfig was not strict. Feel free to improve - admin?: AdminClient & Pick + admin?: AdminClient & + PickPreserveOptional, 'autoComplete' | 'placeholder'> } & FieldBaseClient & Pick @@ -677,8 +678,8 @@ export type TextareaField = { } & Omit export type TextareaFieldClient = { - // @ts-expect-error - vestiges of when tsconfig was not strict. Feel free to improve - admin?: AdminClient & Pick + admin?: AdminClient & + PickPreserveOptional, 'placeholder' | 'rows' | 'rtl'> } & FieldBaseClient & Pick diff --git a/packages/payload/src/types/index.ts b/packages/payload/src/types/index.ts index f10c14d4bf..b22bcbdde4 100644 --- a/packages/payload/src/types/index.ts +++ b/packages/payload/src/types/index.ts @@ -1,5 +1,6 @@ import type { I18n, TFunction } from '@payloadcms/translations' import type DataLoader from 'dataloader' +import type { OptionalKeys, RequiredKeys } from 'ts-essentials' import type { URL } from 'url' import type { @@ -262,3 +263,8 @@ export type TransformGlobalWithSelect< export type PopulateType = Partial export type ResolvedFilterOptions = { [collection: string]: Where } + +export type PickPreserveOptional = Partial< + Pick>> +> & + Pick>> From 64d76a38693cb99c8ba0feae2258b584618b8b93 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Tue, 15 Jul 2025 10:24:50 -0700 Subject: [PATCH 036/143] fix: cron jobs running when calling bin scripts, leading to db errors (#13135) Previously, we were always initializing cronjobs when calling `getPayload` or `payload.init`. This is undesired in bin scripts - we don't want cron jobs to start triggering db calls while we're running an initial migration using `payload migrate` for example. 
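As a rough sketch of the resulting opt-in behavior (the config import path below is hypothetical; the `cron` option itself comes from this PR):

```ts
import { getPayload } from 'payload'

import config from './payload.config.js' // hypothetical path to your Payload config

// One-off scripts (migrations, seeding, etc.): omit `cron`, so no autorun
// cron jobs are scheduled and no premature job queries hit the database.
const scriptPayload = await getPayload({ config })

// Long-running server contexts: pass `cron: true` to opt back in to the
// jobs.autoRun cron scheduling.
const serverPayload = await getPayload({ config, cron: true })
```

Previously there was no way to opt out: the autorun crons were started unconditionally on init.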
This has previously led to a race condition, triggering the following, occasional error, if job autoruns were enabled: ```ts DrizzleQueryError: Failed query: select "payload_jobs"."id", "payload_jobs"."input", "payload_jobs"."completed_at", "payload_jobs"."total_tried", "payload_jobs"."has_error", "payload_jobs"."error", "payload_jobs"."workflow_slug", "payload_jobs"."task_slug", "payload_jobs"."queue", "payload_jobs"."wait_until", "payload_jobs"."processing", "payload_jobs"."updated_at", "payload_jobs"."created_at", "payload_jobs_log"."data" as "log" from "payload_jobs" "payload_jobs" left join lateral (select coalesce(json_agg(json_build_array("payload_jobs_log"."_order", "payload_jobs_log"."id", "payload_jobs_log"."executed_at", "payload_jobs_log"."completed_at", "payload_jobs_log"."task_slug", "payload_jobs_log"."task_i_d", "payload_jobs_log"."input", "payload_jobs_log"."output", "payload_jobs_log"."state", "payload_jobs_log"."error") order by "payload_jobs_log"."_order" asc), '[]'::json) as "data" from (select * from "payload_jobs_log" "payload_jobs_log" where "payload_jobs_log"."_parent_id" = "payload_jobs"."id" order by "payload_jobs_log"."_order" asc) "payload_jobs_log") "payload_jobs_log" on true where ("payload_jobs"."completed_at" is null and ("payload_jobs"."has_error" is null or "payload_jobs"."has_error" <> $1) and "payload_jobs"."processing" = $2 and ("payload_jobs"."wait_until" is null or "payload_jobs"."wait_until" < $3) and "payload_jobs"."queue" = $4) order by "payload_jobs"."created_at" asc limit $5 params: true,false,2025-07-10T21:25:03.002Z,autorunSecond,100 at NodePgPreparedQuery.queryWithCache (/Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/drizzle-orm@0.44.2_@libsql+client@0.14.0_bufferutil@4.0.8_utf-8-validate@6.0.5__@opentelemetr_asjmtflojkxlnxrshoh4fj5f6u/node_modules/src/pg-core/session.ts:74:11) at processTicksAndRejections (node:internal/process/task_queues:105:5) at /Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/drizzle-orm@0.44.2_@libsql+client@0.14.0_bufferutil@4.0.8_utf-8-validate@6.0.5__@opentelemetr_asjmtflojkxlnxrshoh4fj5f6u/node_modules/src/node-postgres/session.ts:154:19 ... 6 lines matching cause stack trace ... 
at N._trigger (/Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/croner@9.0.0/node_modules/croner/dist/croner.cjs:1:16806) { query: `select "payload_jobs"."id", "payload_jobs"."input", "payload_jobs"."completed_at", "payload_jobs"."total_tried", "payload_jobs"."has_error", "payload_jobs"."error", "payload_jobs"."workflow_slug", "payload_jobs"."task_slug", "payload_jobs"."queue", "payload_jobs"."wait_until", "payload_jobs"."processing", "payload_jobs"."updated_at", "payload_jobs"."created_at", "payload_jobs_log"."data" as "log" from "payload_jobs" "payload_jobs" left join lateral (select coalesce(json_agg(json_build_array("payload_jobs_log"."_order", "payload_jobs_log"."id", "payload_jobs_log"."executed_at", "payload_jobs_log"."completed_at", "payload_jobs_log"."task_slug", "payload_jobs_log"."task_i_d", "payload_jobs_log"."input", "payload_jobs_log"."output", "payload_jobs_log"."state", "payload_jobs_log"."error") order by "payload_jobs_log"."_order" asc), '[]'::json) as "data" from (select * from "payload_jobs_log" "payload_jobs_log" where "payload_jobs_log"."_parent_id" = "payload_jobs"."id" order by "payload_jobs_log"."_order" asc) "payload_jobs_log") "payload_jobs_log" on true where ("payload_jobs"."completed_at" is null and ("payload_jobs"."has_error" is null or "payload_jobs"."has_error" <> $1) and "payload_jobs"."processing" = $2 and ("payload_jobs"."wait_until" is null or "payload_jobs"."wait_until" < $3) and "payload_jobs"."queue" = $4) order by "payload_jobs"."created_at" asc limit $5`, params: [ true, false, '2025-07-10T21:25:03.002Z', 'autorunSecond', 100 ], cause: error: relation "payload_jobs" does not exist at /Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/pg@8.16.3/node_modules/pg/lib/client.js:545:17 at processTicksAndRejections (node:internal/process/task_queues:105:5) at /Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/drizzle-orm@0.44.2_@libsql+client@0.14.0_bufferutil@4.0.8_utf-8-validate@6.0.5__@opentelemetr_asjmtflojkxlnxrshoh4fj5f6u/node_modules/src/node-postgres/session.ts:161:13 at NodePgPreparedQuery.queryWithCache (/Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/drizzle-orm@0.44.2_@libsql+client@0.14.0_bufferutil@4.0.8_utf-8-validate@6.0.5__@opentelemetr_asjmtflojkxlnxrshoh4fj5f6u/node_modules/src/pg-core/session.ts:72:12) at /Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/drizzle-orm@0.44.2_@libsql+client@0.14.0_bufferutil@4.0.8_utf-8-validate@6.0.5__@opentelemetr_asjmtflojkxlnxrshoh4fj5f6u/node_modules/src/node-postgres/session.ts:154:19 at find (/Users/alessio/Documents/GitHub/payload2/packages/drizzle/src/find/findMany.ts:162:19) at Object.updateMany (/Users/alessio/Documents/GitHub/payload2/packages/drizzle/src/updateJobs.ts:26:16) at updateJobs (/Users/alessio/Documents/GitHub/payload2/packages/payload/src/queues/utilities/updateJob.ts:102:37) at runJobs (/Users/alessio/Documents/GitHub/payload2/packages/payload/src/queues/operations/runJobs/index.ts:181:25) at Object.run (/Users/alessio/Documents/GitHub/payload2/packages/payload/src/queues/localAPI.ts:137:12) at N.fn (/Users/alessio/Documents/GitHub/payload2/packages/payload/src/index.ts:866:13) at N._trigger (/Users/alessio/Documents/GitHub/payload2/node_modules/.pnpm/croner@9.0.0/node_modules/croner/dist/croner.cjs:1:16806) { length: 112, severity: 'ERROR', code: '42P01', detail: undefined, hint: undefined, position: '406', internalPosition: undefined, internalQuery: undefined, where: undefined, schema: undefined, table: undefined, column: 
undefined, dataType: undefined, constraint: undefined, file: 'parse_relation.c', line: '1449', routine: 'parserOpenTable' } } ``` This PR makes running crons opt-in using a new `cron` flag. By default, no cron jobs will be created. --- docs/jobs-queue/queues.mdx | 2 +- packages/next/src/auth/login.ts | 2 +- packages/next/src/auth/logout.ts | 2 +- packages/next/src/auth/refresh.ts | 2 +- packages/next/src/utilities/initReq.ts | 2 +- packages/payload/src/bin/index.ts | 2 +- packages/payload/src/config/types.ts | 8 ++++- packages/payload/src/index.ts | 29 ++++++++++--------- .../payload/src/queues/config/types/index.ts | 1 + .../src/utilities/createPayloadRequest.ts | 2 +- packages/payload/src/utilities/routeError.ts | 2 +- test/helpers/initPayloadInt.ts | 2 +- test/queues/int.spec.ts | 24 ++++++++++++++- 13 files changed, 55 insertions(+), 25 deletions(-) diff --git a/docs/jobs-queue/queues.mdx b/docs/jobs-queue/queues.mdx index b67014c560..6b0172df2c 100644 --- a/docs/jobs-queue/queues.mdx +++ b/docs/jobs-queue/queues.mdx @@ -51,7 +51,7 @@ export default buildConfig({ // add as many cron jobs as you want ], shouldAutoRun: async (payload) => { - // Tell Payload if it should run jobs or not. + // Tell Payload if it should run jobs or not. This function is optional and will return true by default. // This function will be invoked each time Payload goes to pick up and run jobs. // If this function ever returns false, the cron schedule will be stopped. return true diff --git a/packages/next/src/auth/login.ts b/packages/next/src/auth/login.ts index 5bed900a94..9b5a7cb74d 100644 --- a/packages/next/src/auth/login.ts +++ b/packages/next/src/auth/login.ts @@ -27,7 +27,7 @@ export async function login({ collection, config, email, password, username }: L token?: string user: any }> { - const payload = await getPayload({ config }) + const payload = await getPayload({ config, cron: true }) const authConfig = payload.collections[collection]?.config.auth diff --git a/packages/next/src/auth/logout.ts b/packages/next/src/auth/logout.ts index 192e293580..f1684dd507 100644 --- a/packages/next/src/auth/logout.ts +++ b/packages/next/src/auth/logout.ts @@ -14,7 +14,7 @@ export async function logout({ allSessions?: boolean config: Promise | SanitizedConfig }) { - const payload = await getPayload({ config }) + const payload = await getPayload({ config, cron: true }) const headers = await nextHeaders() const authResult = await payload.auth({ headers }) diff --git a/packages/next/src/auth/refresh.ts b/packages/next/src/auth/refresh.ts index 9ece3e97c7..05fc5964d1 100644 --- a/packages/next/src/auth/refresh.ts +++ b/packages/next/src/auth/refresh.ts @@ -9,7 +9,7 @@ import { getExistingAuthToken } from '../utilities/getExistingAuthToken.js' import { setPayloadAuthCookie } from '../utilities/setPayloadAuthCookie.js' export async function refresh({ config }: { config: any }) { - const payload = await getPayload({ config }) + const payload = await getPayload({ config, cron: true }) const headers = await nextHeaders() const result = await payload.auth({ headers }) diff --git a/packages/next/src/utilities/initReq.ts b/packages/next/src/utilities/initReq.ts index 62c6cd98dc..aa9b29b71c 100644 --- a/packages/next/src/utilities/initReq.ts +++ b/packages/next/src/utilities/initReq.ts @@ -66,7 +66,7 @@ export const initReq = async function ({ const partialResult = await partialReqCache.get(async () => { const config = await configPromise - const payload = await getPayload({ config, importMap }) + const payload = await 
getPayload({ config, cron: true, importMap }) const languageCode = getRequestLanguage({ config, cookies, diff --git a/packages/payload/src/bin/index.ts b/packages/payload/src/bin/index.ts index 81d06a78bd..b1b973d0ef 100755 --- a/packages/payload/src/bin/index.ts +++ b/packages/payload/src/bin/index.ts @@ -107,7 +107,7 @@ export const bin = async () => { } if (script === 'jobs:run') { - const payload = await getPayload({ config }) + const payload = await getPayload({ config }) // Do not setup crons here - this bin script can set up its own crons const limit = args.limit ? parseInt(args.limit, 10) : undefined const queue = args.queue ? args.queue : undefined const allQueues = !!args.allQueues diff --git a/packages/payload/src/config/types.ts b/packages/payload/src/config/types.ts index ee1b3bc251..fdd2e460f6 100644 --- a/packages/payload/src/config/types.ts +++ b/packages/payload/src/config/types.ts @@ -257,6 +257,13 @@ export type InitOptions = { * and the backend functionality */ config: Promise | SanitizedConfig + /** + * If set to `true`, payload will initialize crons for things like autorunning jobs on initialization. + * + * @default false + */ + cron?: boolean + /** * Disable connect to the database on init */ @@ -268,7 +275,6 @@ export type InitOptions = { disableOnInit?: boolean importMap?: ImportMap - /** * A function that is called immediately following startup that receives the Payload instance as it's only argument. */ diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index b768b4bde0..0c2ea26805 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -836,7 +836,7 @@ export class BasePayload { throw error } - if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild()) { + if (this.config.jobs.enabled && this.config.jobs.autoRun && !isNextBuild() && options.cron) { const DEFAULT_CRON = '* * * * *' const DEFAULT_LIMIT = 10 @@ -974,7 +974,7 @@ export const reload = async ( } export const getPayload = async ( - options: Pick, + options: Pick, ): Promise => { if (!options?.config) { throw new Error('Error: the payload config is required for getPayload to work.') @@ -1109,6 +1109,8 @@ export { generateImportMap } from './bin/generateImportMap/index.js' export type { ImportMap } from './bin/generateImportMap/index.js' export { genImportMapIterateFields } from './bin/generateImportMap/iterateFields.js' +export { migrate as migrateCLI } from './bin/migrate.js' + export { type ClientCollectionConfig, createClientCollectionConfig, @@ -1155,7 +1157,6 @@ export type { } from './collections/config/types.js' export type { CompoundIndex } from './collections/config/types.js' - export type { SanitizedCompoundIndex } from './collections/config/types.js' export { createDataloaderCacheKey, getDataLoader } from './collections/dataloader.js' export { countOperation } from './collections/operations/count.js' @@ -1171,6 +1172,7 @@ export { findVersionsOperation } from './collections/operations/findVersions.js' export { restoreVersionOperation } from './collections/operations/restoreVersion.js' export { updateOperation } from './collections/operations/update.js' export { updateByIDOperation } from './collections/operations/updateByID.js' + export { buildConfig } from './config/build.js' export { @@ -1180,7 +1182,6 @@ export { serverOnlyConfigProperties, type UnsanitizedClientConfig, } from './config/client.js' - export { defaults } from './config/defaults.js' export { type OrderableEndpointBody } from './config/orderable/index.js' 
export { sanitizeConfig } from './config/sanitize.js' @@ -1297,10 +1298,11 @@ export { ValidationError, ValidationErrorName, } from './errors/index.js' -export type { ValidationFieldError } from './errors/index.js' +export type { ValidationFieldError } from './errors/index.js' export { baseBlockFields } from './fields/baseFields/baseBlockFields.js' export { baseIDField } from './fields/baseFields/baseIDField.js' + export { createClientField, createClientFields, @@ -1308,10 +1310,10 @@ export { type ServerOnlyFieldProperties, } from './fields/config/client.js' -export { sanitizeFields } from './fields/config/sanitize.js' - export interface FieldCustom extends Record {} +export { sanitizeFields } from './fields/config/sanitize.js' + export type { AdminClient, ArrayField, @@ -1421,14 +1423,13 @@ export type { } from './fields/config/types.js' export { getDefaultValue } from './fields/getDefaultValue.js' - export { traverseFields as afterChangeTraverseFields } from './fields/hooks/afterChange/traverseFields.js' export { promise as afterReadPromise } from './fields/hooks/afterRead/promise.js' export { traverseFields as afterReadTraverseFields } from './fields/hooks/afterRead/traverseFields.js' export { traverseFields as beforeChangeTraverseFields } from './fields/hooks/beforeChange/traverseFields.js' export { traverseFields as beforeValidateTraverseFields } from './fields/hooks/beforeValidate/traverseFields.js' -export { sortableFieldTypes } from './fields/sortableFieldTypes.js' +export { sortableFieldTypes } from './fields/sortableFieldTypes.js' export { validations } from './fields/validations.js' export type { ArrayFieldValidation, @@ -1481,8 +1482,8 @@ export type { GlobalConfig, SanitizedGlobalConfig, } from './globals/config/types.js' -export { docAccessOperation as docAccessOperationGlobal } from './globals/operations/docAccess.js' +export { docAccessOperation as docAccessOperationGlobal } from './globals/operations/docAccess.js' export { findOneOperation } from './globals/operations/findOne.js' export { findVersionByIDOperation as findVersionByIDOperationGlobal } from './globals/operations/findVersionByID.js' export { findVersionsOperation as findVersionsOperationGlobal } from './globals/operations/findVersions.js' @@ -1505,8 +1506,8 @@ export type { } from './preferences/types.js' export type { QueryPreset } from './query-presets/types.js' export { jobAfterRead } from './queues/config/index.js' -export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js' +export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js' export type { RunInlineTaskFunction, RunTaskFunction, @@ -1530,14 +1531,14 @@ export type { WorkflowHandler, WorkflowTypes, } from './queues/config/types/workflowTypes.js' -export { importHandlerPath } from './queues/operations/runJobs/runJob/importHandlerPath.js' +export { importHandlerPath } from './queues/operations/runJobs/runJob/importHandlerPath.js' export { getLocalI18n } from './translations/getLocalI18n.js' export * from './types/index.js' export { getFileByPath } from './uploads/getFileByPath.js' export { _internal_safeFetchGlobal } from './uploads/safeFetch.js' -export type * from './uploads/types.js' +export type * from './uploads/types.js' export { addDataAndFileToRequest } from './utilities/addDataAndFileToRequest.js' export { addLocalesToRequestFromData, sanitizeLocales } from './utilities/addLocalesToRequest.js' export { commitTransaction } from './utilities/commitTransaction.js' @@ 
-1609,8 +1610,8 @@ export { versionDefaults } from './versions/defaults.js' export { deleteCollectionVersions } from './versions/deleteCollectionVersions.js' export { appendVersionToQueryKey } from './versions/drafts/appendVersionToQueryKey.js' export { getQueryDraftsSort } from './versions/drafts/getQueryDraftsSort.js' -export { enforceMaxVersions } from './versions/enforceMaxVersions.js' +export { enforceMaxVersions } from './versions/enforceMaxVersions.js' export { getLatestCollectionVersion } from './versions/getLatestCollectionVersion.js' export { getLatestGlobalVersion } from './versions/getLatestGlobalVersion.js' export { saveVersion } from './versions/saveVersion.js' diff --git a/packages/payload/src/queues/config/types/index.ts b/packages/payload/src/queues/config/types/index.ts index 2e6eafc72f..6bf730f44f 100644 --- a/packages/payload/src/queues/config/types/index.ts +++ b/packages/payload/src/queues/config/types/index.ts @@ -121,6 +121,7 @@ export type JobsConfig = { /** * A function that will be executed before Payload picks up jobs which are configured by the `jobs.autorun` function. * If this function returns true, jobs will be queried and picked up. If it returns false, jobs will not be run. + * @default undefined - if this function is not defined, jobs will be run - as if () => true was passed. * @param payload * @returns boolean */ diff --git a/packages/payload/src/utilities/createPayloadRequest.ts b/packages/payload/src/utilities/createPayloadRequest.ts index f87550cf70..ac262ce8a6 100644 --- a/packages/payload/src/utilities/createPayloadRequest.ts +++ b/packages/payload/src/utilities/createPayloadRequest.ts @@ -27,7 +27,7 @@ export const createPayloadRequest = async ({ request, }: Args): Promise => { const cookies = parseCookies(request.headers) - const payload = await getPayload({ config: configPromise }) + const payload = await getPayload({ config: configPromise, cron: true }) const { config } = payload const localization = config.localization diff --git a/packages/payload/src/utilities/routeError.ts b/packages/payload/src/utilities/routeError.ts index ce435a25ec..1d66ed3185 100644 --- a/packages/payload/src/utilities/routeError.ts +++ b/packages/payload/src/utilities/routeError.ts @@ -39,7 +39,7 @@ export const routeError = async ({ if (!payload) { try { - payload = await getPayload({ config: configArg }) + payload = await getPayload({ config: configArg, cron: true }) } catch (ignore) { return Response.json( { diff --git a/test/helpers/initPayloadInt.ts b/test/helpers/initPayloadInt.ts index 184db198a3..2801b5985c 100644 --- a/test/helpers/initPayloadInt.ts +++ b/test/helpers/initPayloadInt.ts @@ -29,7 +29,7 @@ export async function initPayloadInt { } }) }) + +describe('Queues - CLI', () => { + let config: SanitizedConfig + beforeAll(async () => { + ;({ config } = await initPayloadInt(dirname, undefined, false)) + }) + it('can run migrate CLI without jobs attempting to run', async () => { + await migrateCLI({ + config, + parsedArgs: { + _: ['migrate'], + }, + }) + + // Wait 3 seconds to let potential autorun crons trigger + await new Promise((resolve) => setTimeout(resolve, 3000)) + + // Expect no errors. 
Previously, this would throw an "error: relation "payload_jobs" does not exist" error + expect(true).toBe(true) + }) +}) From 2a59c5bf8cf23577145438239f7d328755c2d0da Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Tue, 15 Jul 2025 16:41:07 -0400 Subject: [PATCH 037/143] fix(plugin-import-export): export field dropdown to properly label and path fields in named/unnamed tabs (#13180) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What? Fixes the export field selection dropdown to correctly differentiate between fields in named and unnamed tabs. ### Why? Previously, when a `tabs` field contained both named and unnamed tabs, subfields with the same `name` would appear as duplicates in the dropdown (e.g. `Tab To CSV`, `Tab To CSV`). Additionally, selecting a field from a named tab would incorrectly map it to the unnamed version due to shared labels and missing path prefixes. ### How? - Updated the `reduceFields` utility to manually construct the field path and label using the tab’s `name` if present. - Ensured unnamed tabs treat subfields as top-level and skip prefixing altogether. - Adjusted label prefix logic to show `Named Tab > Field Name` when appropriate. #### Before Screenshot 2025-07-15 at 2 55 14 PM #### After Screenshot 2025-07-15 at 2 50 38 PM --- .../components/FieldsToExport/reduceFields.tsx | 15 +++++++++++++-- test/plugin-import-export/collections/Pages.ts | 1 - 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx index 4a678a0503..37c2a47f48 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx @@ -83,13 +83,24 @@ export const reduceFields = ({ (tabFields, tab) => { if ('fields' in tab) { const isNamedTab = 'name' in tab && tab.name + + const newPath = isNamedTab ? `${path}${path ? '.' : ''}${tab.name}` : path + return [ ...tabFields, ...reduceFields({ disabledFields, fields: tab.fields, - labelPrefix, - path: isNamedTab ? createNestedClientFieldPath(path, field) : path, + labelPrefix: isNamedTab + ? combineLabel({ + field: { + name: tab.name, + label: tab.label ?? tab.name, + } as any, + prefix: labelPrefix, + }) + : labelPrefix, + path: newPath, }), ] } diff --git a/test/plugin-import-export/collections/Pages.ts b/test/plugin-import-export/collections/Pages.ts index 6eb177326b..35f38032fd 100644 --- a/test/plugin-import-export/collections/Pages.ts +++ b/test/plugin-import-export/collections/Pages.ts @@ -95,7 +95,6 @@ export const Pages: CollectionConfig = { ], }, { - name: 'tabs', type: 'tabs', tabs: [ { From 841bf891d074fba69560d5200fcba210590e1de6 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Wed, 16 Jul 2025 07:53:45 +0300 Subject: [PATCH 038/143] feat: atomic number field updates (#13118) Based on https://github.com/payloadcms/payload/pull/13060 which should be merged first This PR adds ability to update number fields atomically, which could be important with parallel writes. For now we support this only via `payload.db.updateOne`. 
For example: ```js // increment by 10 const res = await payload.db.updateOne({ data: { number: { $inc: 10, }, }, collection: 'posts', where: { id: { equals: post.id } }, }) // decrement by 3 const res2 = await payload.db.updateOne({ data: { number: { $inc: -3, }, }, collection: 'posts', where: { id: { equals: post.id } }, }) ``` --- packages/db-mongodb/src/updateOne.ts | 13 ++++++--- .../db-mongodb/src/utilities/transform.ts | 20 +++++++++++-- packages/drizzle/src/transform/write/index.ts | 3 ++ .../src/transform/write/traverseFields.ts | 22 +++++++++++++-- packages/drizzle/src/updateOne.ts | 1 + packages/drizzle/src/upsertRow/index.ts | 1 + .../payload/src/utilities/traverseFields.ts | 28 +++++++++++++++++-- test/database/int.spec.ts | 28 +++++++++++++++++++ 8 files changed, 106 insertions(+), 10 deletions(-) diff --git a/packages/db-mongodb/src/updateOne.ts b/packages/db-mongodb/src/updateOne.ts index 3fd4a0a516..20816512ad 100644 --- a/packages/db-mongodb/src/updateOne.ts +++ b/packages/db-mongodb/src/updateOne.ts @@ -1,4 +1,4 @@ -import type { MongooseUpdateQueryOptions } from 'mongoose' +import type { MongooseUpdateQueryOptions, UpdateQuery } from 'mongoose' import type { UpdateOne } from 'payload' import type { MongooseAdapter } from './index.js' @@ -50,15 +50,20 @@ export const updateOne: UpdateOne = async function updateOne( let result - transform({ adapter: this, data, fields, operation: 'write' }) + const $inc: Record = {} + let updateData: UpdateQuery = data + transform({ $inc, adapter: this, data, fields, operation: 'write' }) + if (Object.keys($inc).length) { + updateData = { $inc, $set: updateData } + } try { if (returning === false) { - await Model.updateOne(query, data, options) + await Model.updateOne(query, updateData, options) transform({ adapter: this, data, fields, operation: 'read' }) return null } else { - result = await Model.findOneAndUpdate(query, data, options) + result = await Model.findOneAndUpdate(query, updateData, options) } } catch (error) { handleError({ collection: collectionSlug, error, req }) diff --git a/packages/db-mongodb/src/utilities/transform.ts b/packages/db-mongodb/src/utilities/transform.ts index 74b6e7b93a..7318c29cee 100644 --- a/packages/db-mongodb/src/utilities/transform.ts +++ b/packages/db-mongodb/src/utilities/transform.ts @@ -208,6 +208,7 @@ const sanitizeDate = ({ } type Args = { + $inc?: Record /** instance of the adapter */ adapter: MongooseAdapter /** data to transform, can be an array of documents or a single document */ @@ -396,6 +397,7 @@ const stripFields = ({ } export const transform = ({ + $inc, adapter, data, fields, @@ -406,7 +408,7 @@ export const transform = ({ }: Args) => { if (Array.isArray(data)) { for (const item of data) { - transform({ adapter, data: item, fields, globalSlug, operation, validateRelationships }) + transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships }) } return } @@ -438,13 +440,27 @@ export const transform = ({ data.globalType = globalSlug } - const sanitize: TraverseFieldsCallback = ({ field, ref: incomingRef }) => { + const sanitize: TraverseFieldsCallback = ({ field, parentPath, ref: incomingRef }) => { if (!incomingRef || typeof incomingRef !== 'object') { return } const ref = incomingRef as Record + if ( + $inc && + field.type === 'number' && + operation === 'write' && + field.name in ref && + ref[field.name] + ) { + const value = ref[field.name] + if (value && typeof value === 'object' && '$inc' in value && typeof value.$inc === 'number') { + 
$inc[`${parentPath}${field.name}`] = value.$inc + delete ref[field.name] + } + } + if (field.type === 'date' && operation === 'read' && field.name in ref && ref[field.name]) { if (config.localization && fieldShouldBeLocalized({ field, parentIsLocalized })) { const fieldRef = ref[field.name] as Record diff --git a/packages/drizzle/src/transform/write/index.ts b/packages/drizzle/src/transform/write/index.ts index e70b91ff8c..5d875162da 100644 --- a/packages/drizzle/src/transform/write/index.ts +++ b/packages/drizzle/src/transform/write/index.ts @@ -8,6 +8,7 @@ import { traverseFields } from './traverseFields.js' type Args = { adapter: DrizzleAdapter data: Record + enableAtomicWrites?: boolean fields: FlattenedField[] parentIsLocalized?: boolean path?: string @@ -17,6 +18,7 @@ type Args = { export const transformForWrite = ({ adapter, data, + enableAtomicWrites, fields, parentIsLocalized, path = '', @@ -48,6 +50,7 @@ export const transformForWrite = ({ blocksToDelete: rowToInsert.blocksToDelete, columnPrefix: '', data, + enableAtomicWrites, fieldPrefix: '', fields, locales: rowToInsert.locales, diff --git a/packages/drizzle/src/transform/write/traverseFields.ts b/packages/drizzle/src/transform/write/traverseFields.ts index e815733efa..feb3b17662 100644 --- a/packages/drizzle/src/transform/write/traverseFields.ts +++ b/packages/drizzle/src/transform/write/traverseFields.ts @@ -1,6 +1,5 @@ -import type { FlattenedField } from 'payload' - import { sql } from 'drizzle-orm' +import { APIError, type FlattenedField } from 'payload' import { fieldIsVirtual, fieldShouldBeLocalized } from 'payload/shared' import toSnakeCase from 'to-snake-case' @@ -41,6 +40,7 @@ type Args = { */ columnPrefix: string data: Record + enableAtomicWrites?: boolean existingLocales?: Record[] /** * A prefix that will retain camel-case formatting, representing prior fields @@ -87,6 +87,7 @@ export const traverseFields = ({ blocksToDelete, columnPrefix, data, + enableAtomicWrites, existingLocales, fieldPrefix, fields, @@ -268,6 +269,7 @@ export const traverseFields = ({ blocksToDelete, columnPrefix: `${columnName}_`, data: localeData as Record, + enableAtomicWrites, existingLocales, fieldPrefix: `${fieldName}_`, fields: field.flattenedFields, @@ -553,6 +555,22 @@ export const traverseFields = ({ formattedValue = JSON.stringify(value) } + if ( + field.type === 'number' && + value && + typeof value === 'object' && + '$inc' in value && + typeof value.$inc === 'number' + ) { + if (!enableAtomicWrites) { + throw new APIError( + 'The passed data must not contain any nested fields for atomic writes', + ) + } + + formattedValue = sql.raw(`${columnName} + ${value.$inc}`) + } + if (field.type === 'date') { if (typeof value === 'number' && !Number.isNaN(value)) { formattedValue = new Date(value).toISOString() diff --git a/packages/drizzle/src/updateOne.ts b/packages/drizzle/src/updateOne.ts index ef451c9436..3bd37e4682 100644 --- a/packages/drizzle/src/updateOne.ts +++ b/packages/drizzle/src/updateOne.ts @@ -151,6 +151,7 @@ export const updateOne: UpdateOne = async function updateOne( const { row } = transformForWrite({ adapter: this, data, + enableAtomicWrites: true, fields: collection.flattenedFields, tableName, }) diff --git a/packages/drizzle/src/upsertRow/index.ts b/packages/drizzle/src/upsertRow/index.ts index 34abf51075..ad10c5fd14 100644 --- a/packages/drizzle/src/upsertRow/index.ts +++ b/packages/drizzle/src/upsertRow/index.ts @@ -44,6 +44,7 @@ export const upsertRow = async | TypeWithID>( const rowToInsert = transformForWrite({ 
adapter, data, + enableAtomicWrites: false, fields, path, tableName, diff --git a/packages/payload/src/utilities/traverseFields.ts b/packages/payload/src/utilities/traverseFields.ts index 4f8408735f..f3981c6811 100644 --- a/packages/payload/src/utilities/traverseFields.ts +++ b/packages/payload/src/utilities/traverseFields.ts @@ -5,6 +5,7 @@ import { fieldAffectsData, fieldHasSubFields, fieldShouldBeLocalized, + tabHasName, } from '../fields/config/types.js' const traverseArrayOrBlocksField = ({ @@ -16,6 +17,7 @@ const traverseArrayOrBlocksField = ({ fillEmpty, leavesFirst, parentIsLocalized, + parentPath, parentRef, }: { callback: TraverseFieldsCallback @@ -26,6 +28,7 @@ const traverseArrayOrBlocksField = ({ fillEmpty: boolean leavesFirst: boolean parentIsLocalized: boolean + parentPath?: string parentRef?: unknown }) => { if (fillEmpty) { @@ -38,6 +41,7 @@ const traverseArrayOrBlocksField = ({ isTopLevel: false, leavesFirst, parentIsLocalized: parentIsLocalized || field.localized, + parentPath: `${parentPath}${field.name}.`, parentRef, }) } @@ -55,6 +59,7 @@ const traverseArrayOrBlocksField = ({ isTopLevel: false, leavesFirst, parentIsLocalized: parentIsLocalized || field.localized, + parentPath: `${parentPath}${field.name}.`, parentRef, }) } @@ -88,6 +93,7 @@ const traverseArrayOrBlocksField = ({ isTopLevel: false, leavesFirst, parentIsLocalized: parentIsLocalized || field.localized, + parentPath: `${parentPath}${field.name}.`, parentRef, ref, }) @@ -105,6 +111,7 @@ export type TraverseFieldsCallback = (args: { */ next?: () => void parentIsLocalized: boolean + parentPath: string /** * The parent reference object */ @@ -130,6 +137,7 @@ type TraverseFieldsArgs = { */ leavesFirst?: boolean parentIsLocalized?: boolean + parentPath?: string parentRef?: Record | unknown ref?: Record | unknown } @@ -152,6 +160,7 @@ export const traverseFields = ({ isTopLevel = true, leavesFirst = false, parentIsLocalized, + parentPath = '', parentRef = {}, ref = {}, }: TraverseFieldsArgs): void => { @@ -172,12 +181,19 @@ export const traverseFields = ({ if ( !leavesFirst && callback && - callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref }) + callback({ field, next, parentIsLocalized: parentIsLocalized!, parentPath, parentRef, ref }) ) { return true } else if (leavesFirst) { callbackStack.push(() => - callback({ field, next, parentIsLocalized: parentIsLocalized!, parentRef, ref }), + callback({ + field, + next, + parentIsLocalized: parentIsLocalized!, + parentPath, + parentRef, + ref, + }), ) } @@ -220,6 +236,7 @@ export const traverseFields = ({ field: { ...tab, type: 'tab' }, next, parentIsLocalized: parentIsLocalized!, + parentPath, parentRef: currentParentRef, ref: tabRef, }) @@ -231,6 +248,7 @@ export const traverseFields = ({ field: { ...tab, type: 'tab' }, next, parentIsLocalized: parentIsLocalized!, + parentPath, parentRef: currentParentRef, ref: tabRef, }), @@ -254,6 +272,7 @@ export const traverseFields = ({ isTopLevel: false, leavesFirst, parentIsLocalized: true, + parentPath: `${parentPath}${tab.name}.`, parentRef: currentParentRef, ref: tabRef[key as keyof typeof tabRef], }) @@ -268,6 +287,7 @@ export const traverseFields = ({ field: { ...tab, type: 'tab' }, next, parentIsLocalized: parentIsLocalized!, + parentPath, parentRef: currentParentRef, ref: tabRef, }) @@ -279,6 +299,7 @@ export const traverseFields = ({ field: { ...tab, type: 'tab' }, next, parentIsLocalized: parentIsLocalized!, + parentPath, parentRef: currentParentRef, ref: tabRef, }), @@ -296,6 +317,7 @@ 
export const traverseFields = ({ isTopLevel: false, leavesFirst, parentIsLocalized: false, + parentPath: tabHasName(tab) ? `${parentPath}${tab.name}` : parentPath, parentRef: currentParentRef, ref: tabRef, }) @@ -352,6 +374,7 @@ export const traverseFields = ({ isTopLevel: false, leavesFirst, parentIsLocalized: true, + parentPath: field.name ? `${parentPath}${field.name}` : parentPath, parentRef: currentParentRef, ref: currentRef[key as keyof typeof currentRef], }) @@ -426,6 +449,7 @@ export const traverseFields = ({ isTopLevel: false, leavesFirst, parentIsLocalized, + parentPath, parentRef: currentParentRef, ref: currentRef, }) diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 34bcc13da9..ecaf364acb 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -2836,6 +2836,34 @@ describe('database', () => { expect(res.arrayWithIDs[0].text).toBe('some text') }) + it('should allow incremental number update', async () => { + const post = await payload.create({ collection: 'posts', data: { number: 1, title: 'post' } }) + + const res = await payload.db.updateOne({ + data: { + number: { + $inc: 10, + }, + }, + collection: 'posts', + where: { id: { equals: post.id } }, + }) + + expect(res.number).toBe(11) + + const res2 = await payload.db.updateOne({ + data: { + number: { + $inc: -3, + }, + }, + collection: 'posts', + where: { id: { equals: post.id } }, + }) + + expect(res2.number).toBe(8) + }) + it('should support x3 nesting blocks', async () => { const res = await payload.create({ collection: 'posts', From be8e8d9c7f0d4dd292169f6b84bba6270d3c7d5c Mon Sep 17 00:00:00 2001 From: jangir-ritik <115213651+jangir-ritik@users.noreply.github.com> Date: Wed, 16 Jul 2025 11:16:17 +0530 Subject: [PATCH 039/143] docs: fix minor typo (#13185) --- docs/performance/overview.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/performance/overview.mdx b/docs/performance/overview.mdx index 457d74d033..274312d4ce 100644 --- a/docs/performance/overview.mdx +++ b/docs/performance/overview.mdx @@ -58,7 +58,7 @@ To learn more, see the [Custom Components Performance](../admin/custom-component ### Block references -Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can can significantly reduce the amount of data sent from the server to the client in the Admin Panel. +Use [Block References](../fields/blocks#block-references) to share the same block across multiple fields without bloating the config. This will reduce the number of fields to traverse when processing permissions, etc. and can significantly reduce the amount of data sent from the server to the client in the Admin Panel. For example, if you have a block that is used in multiple fields, you can define it once and reference it in each field. From 7cd682c66af5f9e61fbacf6e368b8988c13a829b Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 16 Jul 2025 09:45:02 -0700 Subject: [PATCH 040/143] perf(drizzle): further optimize postgres row updates (#13184) This is a follow-up to https://github.com/payloadcms/payload/pull/13060. There are a bunch of other db adapter methods that use `upsertRow` for updates: `updateGlobal`, `updateGlobalVersion`, `updateJobs`, `updateMany`, `updateVersion`. The previous PR had the logic for using the optimized row updating logic inside the `updateOne` adapter. 
This PR moves that logic to the original `upsertRow` function. Benefits: - all the other db methods will benefit from this massive optimization as well. This will be especially relevant for optimizing postgres job queue initial updates - we should be able to close https://github.com/payloadcms/payload/pull/11865 after another follow-up PR - easier to read db adapter methods due to less code. --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210803039809810 --- packages/drizzle/src/updateOne.ts | 119 +-- packages/drizzle/src/upsertRow/index.ts | 775 +++++++++--------- .../upsertRow/shouldUseOptimizedUpsertRow.ts | 52 ++ test/database/int.spec.ts | 67 +- 4 files changed, 518 insertions(+), 495 deletions(-) create mode 100644 packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts diff --git a/packages/drizzle/src/updateOne.ts b/packages/drizzle/src/updateOne.ts index 3bd37e4682..8fddd9378f 100644 --- a/packages/drizzle/src/updateOne.ts +++ b/packages/drizzle/src/updateOne.ts @@ -1,67 +1,15 @@ import type { LibSQLDatabase } from 'drizzle-orm/libsql' -import type { FlattenedField, UpdateOne } from 'payload' +import type { UpdateOne } from 'payload' -import { eq } from 'drizzle-orm' import toSnakeCase from 'to-snake-case' import type { DrizzleAdapter } from './types.js' -import { buildFindManyArgs } from './find/buildFindManyArgs.js' import { buildQuery } from './queries/buildQuery.js' import { selectDistinct } from './queries/selectDistinct.js' -import { transform } from './transform/read/index.js' -import { transformForWrite } from './transform/write/index.js' import { upsertRow } from './upsertRow/index.js' import { getTransaction } from './utilities/getTransaction.js' -/** - * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call. - * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships. 
- */ -const shouldUseUpsertRow = ({ - data, - fields, -}: { - data: Record - fields: FlattenedField[] -}) => { - for (const key in data) { - const value = data[key] - const field = fields.find((each) => each.name === key) - - if (!field) { - continue - } - - if ( - field.type === 'array' || - field.type === 'blocks' || - ((field.type === 'text' || - field.type === 'relationship' || - field.type === 'upload' || - field.type === 'select' || - field.type === 'number') && - field.hasMany) || - ((field.type === 'relationship' || field.type === 'upload') && - Array.isArray(field.relationTo)) || - field.localized - ) { - return true - } - - if ( - (field.type === 'group' || field.type === 'tab') && - value && - typeof value === 'object' && - shouldUseUpsertRow({ data: value as Record, fields: field.flattenedFields }) - ) { - return true - } - } - - return false -} - export const updateOne: UpdateOne = async function updateOne( this: DrizzleAdapter, { @@ -126,72 +74,23 @@ export const updateOne: UpdateOne = async function updateOne( return null } - if (!idToUpdate || shouldUseUpsertRow({ data, fields: collection.flattenedFields })) { - const result = await upsertRow({ - id: idToUpdate, - adapter: this, - data, - db, - fields: collection.flattenedFields, - ignoreResult: returning === false, - joinQuery, - operation: 'update', - req, - select, - tableName, - }) - - if (returning === false) { - return null - } - - return result - } - - const { row } = transformForWrite({ + const result = await upsertRow({ + id: idToUpdate, adapter: this, data, - enableAtomicWrites: true, + db, fields: collection.flattenedFields, + ignoreResult: returning === false, + joinQuery, + operation: 'update', + req, + select, tableName, }) - const drizzle = db as LibSQLDatabase - await drizzle - .update(this.tables[tableName]) - .set(row) - // TODO: we can skip fetching idToUpdate here with using the incoming where - .where(eq(this.tables[tableName].id, idToUpdate)) - if (returning === false) { return null } - const findManyArgs = buildFindManyArgs({ - adapter: this, - depth: 0, - fields: collection.flattenedFields, - joinQuery: false, - select, - tableName, - }) - - findManyArgs.where = eq(this.tables[tableName].id, idToUpdate) - - const doc = await db.query[tableName].findFirst(findManyArgs) - - // ////////////////////////////////// - // TRANSFORM DATA - // ////////////////////////////////// - - const result = transform({ - adapter: this, - config: this.payload.config, - data: doc, - fields: collection.flattenedFields, - joinQuery: false, - tableName, - }) - return result } diff --git a/packages/drizzle/src/upsertRow/index.ts b/packages/drizzle/src/upsertRow/index.ts index ad10c5fd14..72f89435ec 100644 --- a/packages/drizzle/src/upsertRow/index.ts +++ b/packages/drizzle/src/upsertRow/index.ts @@ -1,3 +1,4 @@ +import type { LibSQLDatabase } from 'drizzle-orm/libsql' import type { TypeWithID } from 'payload' import { eq } from 'drizzle-orm' @@ -12,13 +13,14 @@ import { transformForWrite } from '../transform/write/index.js' import { deleteExistingArrayRows } from './deleteExistingArrayRows.js' import { deleteExistingRowsByPath } from './deleteExistingRowsByPath.js' import { insertArrays } from './insertArrays.js' +import { shouldUseOptimizedUpsertRow } from './shouldUseOptimizedUpsertRow.js' /** * If `id` is provided, it will update the row with that ID. * If `where` is provided, it will update the row that matches the `where` * If neither `id` nor `where` is provided, it will create a new row. 
* - * This function replaces the entire row and does not support partial updates. + * adapter function replaces the entire row and does not support partial updates. */ export const upsertRow = async | TypeWithID>({ id, @@ -39,429 +41,446 @@ export const upsertRow = async | TypeWithID>( upsertTarget, where, }: Args): Promise => { - // Split out the incoming data into the corresponding: - // base row, locales, relationships, blocks, and arrays - const rowToInsert = transformForWrite({ - adapter, - data, - enableAtomicWrites: false, - fields, - path, - tableName, - }) - - // First, we insert the main row - let insertedRow: Record - - try { - if (operation === 'update') { - const target = upsertTarget || adapter.tables[tableName].id - - if (id) { - rowToInsert.row.id = id - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target }, - tableName, - values: rowToInsert.row, - }) - } else { - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target, where }, - tableName, - values: rowToInsert.row, - }) - } - } else { - if (adapter.allowIDOnCreate && data.id) { - rowToInsert.row.id = data.id - } - ;[insertedRow] = await adapter.insert({ - db, - tableName, - values: rowToInsert.row, - }) - } - - const localesToInsert: Record[] = [] - const relationsToInsert: Record[] = [] - const textsToInsert: Record[] = [] - const numbersToInsert: Record[] = [] - const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} - const selectsToInsert: { [selectTableName: string]: Record[] } = {} - - // If there are locale rows with data, add the parent and locale to each - if (Object.keys(rowToInsert.locales).length > 0) { - Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { - localeRow._parentID = insertedRow.id - localeRow._locale = locale - localesToInsert.push(localeRow) - }) - } - - // If there are relationships, add parent to each - if (rowToInsert.relationships.length > 0) { - rowToInsert.relationships.forEach((relation) => { - relation.parent = insertedRow.id - relationsToInsert.push(relation) - }) - } - - // If there are texts, add parent to each - if (rowToInsert.texts.length > 0) { - rowToInsert.texts.forEach((textRow) => { - textRow.parent = insertedRow.id - textsToInsert.push(textRow) - }) - } - - // If there are numbers, add parent to each - if (rowToInsert.numbers.length > 0) { - rowToInsert.numbers.forEach((numberRow) => { - numberRow.parent = insertedRow.id - numbersToInsert.push(numberRow) - }) - } - - // If there are selects, add parent to each, and then - // store by table name and rows - if (Object.keys(rowToInsert.selects).length > 0) { - Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { - selectsToInsert[selectTableName] = [] - - selectRows.forEach((row) => { - if (typeof row.parent === 'undefined') { - row.parent = insertedRow.id - } - - selectsToInsert[selectTableName].push(row) - }) - }) - } - - // If there are blocks, add parent to each, and then - // store by table name and rows - Object.keys(rowToInsert.blocks).forEach((tableName) => { - rowToInsert.blocks[tableName].forEach((blockRow) => { - blockRow.row._parentID = insertedRow.id - if (!blocksToInsert[tableName]) { - blocksToInsert[tableName] = [] - } - if (blockRow.row.uuid) { - delete blockRow.row.uuid - } - blocksToInsert[tableName].push(blockRow) - }) + let insertedRow: Record = { id } + if (id && shouldUseOptimizedUpsertRow({ data, fields })) { + const { row } = transformForWrite({ + 
adapter, + data, + enableAtomicWrites: true, + fields, + tableName, }) - // ////////////////////////////////// - // INSERT LOCALES - // ////////////////////////////////// + const drizzle = db as LibSQLDatabase - if (localesToInsert.length > 0) { - const localeTableName = `${tableName}${adapter.localesSuffix}` - const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + await drizzle + .update(adapter.tables[tableName]) + .set(row) + // TODO: we can skip fetching idToUpdate here with using the incoming where + .where(eq(adapter.tables[tableName].id, id)) + } else { + // Split out the incoming data into the corresponding: + // base row, locales, relationships, blocks, and arrays + const rowToInsert = transformForWrite({ + adapter, + data, + enableAtomicWrites: false, + fields, + path, + tableName, + }) + // First, we insert the main row + try { if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: localeTableName, - where: eq(localeTable._parentID, insertedRow.id), - }) - } + const target = upsertTarget || adapter.tables[tableName].id - await adapter.insert({ - db, - tableName: localeTableName, - values: localesToInsert, - }) - } - - // ////////////////////////////////// - // INSERT RELATIONSHIPS - // ////////////////////////////////// - - const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], - tableName: relationshipsTableName, - }) - } - - if (relationsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: relationshipsTableName, - values: relationsToInsert, - }) - } - - // ////////////////////////////////// - // INSERT hasMany TEXTS - // ////////////////////////////////// - - const textsTableName = `${tableName}_texts` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...textsToInsert, ...rowToInsert.textsToDelete], - tableName: textsTableName, - }) - } - - if (textsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: textsTableName, - values: textsToInsert, - }) - } - - // ////////////////////////////////// - // INSERT hasMany NUMBERS - // ////////////////////////////////// - - const numbersTableName = `${tableName}_numbers` - - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], - tableName: numbersTableName, - }) - } - - if (numbersToInsert.length > 0) { - await adapter.insert({ - db, - tableName: numbersTableName, - values: numbersToInsert, - }) - } - - // ////////////////////////////////// - // INSERT BLOCKS - // ////////////////////////////////// - - const insertedBlockRows: Record[]> = {} - - if (operation === 'update') { - for (const tableName of rowToInsert.blocksToDelete) { - const blockTable = adapter.tables[tableName] - await adapter.deleteWhere({ + if (id) { + rowToInsert.row.id = id + ;[insertedRow] = await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target }, + tableName, + values: rowToInsert.row, + }) + } else { + ;[insertedRow] = 
await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target, where }, + tableName, + values: rowToInsert.row, + }) + } + } else { + if (adapter.allowIDOnCreate && data.id) { + rowToInsert.row.id = data.id + } + ;[insertedRow] = await adapter.insert({ db, tableName, - where: eq(blockTable._parentID, insertedRow.id), + values: rowToInsert.row, }) } - } - // When versions are enabled, this is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. - const arraysBlocksUUIDMap: Record = {} + const localesToInsert: Record[] = [] + const relationsToInsert: Record[] = [] + const textsToInsert: Record[] = [] + const numbersToInsert: Record[] = [] + const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} + const selectsToInsert: { [selectTableName: string]: Record[] } = {} - for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { - insertedBlockRows[tableName] = await adapter.insert({ - db, - tableName, - values: blockRows.map(({ row }) => row), - }) + // If there are locale rows with data, add the parent and locale to each + if (Object.keys(rowToInsert.locales).length > 0) { + Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { + localeRow._parentID = insertedRow.id + localeRow._locale = locale + localesToInsert.push(localeRow) + }) + } - insertedBlockRows[tableName].forEach((row, i) => { - blockRows[i].row = row - if ( - typeof row._uuid === 'string' && - (typeof row.id === 'string' || typeof row.id === 'number') - ) { - arraysBlocksUUIDMap[row._uuid] = row.id - } - }) + // If there are relationships, add parent to each + if (rowToInsert.relationships.length > 0) { + rowToInsert.relationships.forEach((relation) => { + relation.parent = insertedRow.id + relationsToInsert.push(relation) + }) + } - const blockLocaleIndexMap: number[] = [] + // If there are texts, add parent to each + if (rowToInsert.texts.length > 0) { + rowToInsert.texts.forEach((textRow) => { + textRow.parent = insertedRow.id + textsToInsert.push(textRow) + }) + } - const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { - if (Object.entries(blockRow.locales).length > 0) { - Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { - if (Object.keys(blockLocaleData).length > 0) { - blockLocaleData._parentID = blockRow.row.id - blockLocaleData._locale = blockLocale - acc.push(blockLocaleData) - blockLocaleIndexMap.push(i) + // If there are numbers, add parent to each + if (rowToInsert.numbers.length > 0) { + rowToInsert.numbers.forEach((numberRow) => { + numberRow.parent = insertedRow.id + numbersToInsert.push(numberRow) + }) + } + + // If there are selects, add parent to each, and then + // store by table name and rows + if (Object.keys(rowToInsert.selects).length > 0) { + Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { + selectsToInsert[selectTableName] = [] + + selectRows.forEach((row) => { + if (typeof row.parent === 'undefined') { + row.parent = insertedRow.id } + + selectsToInsert[selectTableName].push(row) + }) + }) + } + + // If there are blocks, add parent to each, and then + // store by table name and rows + Object.keys(rowToInsert.blocks).forEach((tableName) => { + rowToInsert.blocks[tableName].forEach((blockRow) => { + blockRow.row._parentID = insertedRow.id + if (!blocksToInsert[tableName]) { + blocksToInsert[tableName] = [] + } + if (blockRow.row.uuid) { + delete 
blockRow.row.uuid + } + blocksToInsert[tableName].push(blockRow) + }) + }) + + // ////////////////////////////////// + // INSERT LOCALES + // ////////////////////////////////// + + if (localesToInsert.length > 0) { + const localeTableName = `${tableName}${adapter.localesSuffix}` + const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: localeTableName, + where: eq(localeTable._parentID, insertedRow.id), }) } - return acc - }, []) - - if (blockLocaleRowsToInsert.length > 0) { await adapter.insert({ db, - tableName: `${tableName}${adapter.localesSuffix}`, - values: blockLocaleRowsToInsert, + tableName: localeTableName, + values: localesToInsert, }) } + // ////////////////////////////////// + // INSERT RELATIONSHIPS + // ////////////////////////////////// + + const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], + tableName: relationshipsTableName, + }) + } + + if (relationsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: relationshipsTableName, + values: relationsToInsert, + }) + } + + // ////////////////////////////////// + // INSERT hasMany TEXTS + // ////////////////////////////////// + + const textsTableName = `${tableName}_texts` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...textsToInsert, ...rowToInsert.textsToDelete], + tableName: textsTableName, + }) + } + + if (textsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: textsTableName, + values: textsToInsert, + }) + } + + // ////////////////////////////////// + // INSERT hasMany NUMBERS + // ////////////////////////////////// + + const numbersTableName = `${tableName}_numbers` + + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], + tableName: numbersTableName, + }) + } + + if (numbersToInsert.length > 0) { + await adapter.insert({ + db, + tableName: numbersTableName, + values: numbersToInsert, + }) + } + + // ////////////////////////////////// + // INSERT BLOCKS + // ////////////////////////////////// + + const insertedBlockRows: Record[]> = {} + + if (operation === 'update') { + for (const tableName of rowToInsert.blocksToDelete) { + const blockTable = adapter.tables[tableName] + await adapter.deleteWhere({ + db, + tableName, + where: eq(blockTable._parentID, insertedRow.id), + }) + } + } + + // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. 
+ const arraysBlocksUUIDMap: Record = {} + + for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { + insertedBlockRows[tableName] = await adapter.insert({ + db, + tableName, + values: blockRows.map(({ row }) => row), + }) + + insertedBlockRows[tableName].forEach((row, i) => { + blockRows[i].row = row + if ( + typeof row._uuid === 'string' && + (typeof row.id === 'string' || typeof row.id === 'number') + ) { + arraysBlocksUUIDMap[row._uuid] = row.id + } + }) + + const blockLocaleIndexMap: number[] = [] + + const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { + if (Object.entries(blockRow.locales).length > 0) { + Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { + if (Object.keys(blockLocaleData).length > 0) { + blockLocaleData._parentID = blockRow.row.id + blockLocaleData._locale = blockLocale + acc.push(blockLocaleData) + blockLocaleIndexMap.push(i) + } + }) + } + + return acc + }, []) + + if (blockLocaleRowsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: `${tableName}${adapter.localesSuffix}`, + values: blockLocaleRowsToInsert, + }) + } + + await insertArrays({ + adapter, + arrays: blockRows.map(({ arrays }) => arrays), + db, + parentRows: insertedBlockRows[tableName], + uuidMap: arraysBlocksUUIDMap, + }) + } + + // ////////////////////////////////// + // INSERT ARRAYS RECURSIVELY + // ////////////////////////////////// + + if (operation === 'update') { + for (const arrayTableName of Object.keys(rowToInsert.arrays)) { + await deleteExistingArrayRows({ + adapter, + db, + parentID: insertedRow.id, + tableName: arrayTableName, + }) + } + } + await insertArrays({ adapter, - arrays: blockRows.map(({ arrays }) => arrays), + arrays: [rowToInsert.arrays], db, - parentRows: insertedBlockRows[tableName], + parentRows: [insertedRow], uuidMap: arraysBlocksUUIDMap, }) - } - // ////////////////////////////////// - // INSERT ARRAYS RECURSIVELY - // ////////////////////////////////// + // ////////////////////////////////// + // INSERT hasMany SELECTS + // ////////////////////////////////// - if (operation === 'update') { - for (const arrayTableName of Object.keys(rowToInsert.arrays)) { - await deleteExistingArrayRows({ - adapter, - db, - parentID: insertedRow.id, - tableName: arrayTableName, - }) - } - } + for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { + const selectTable = adapter.tables[selectTableName] + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: selectTableName, + where: eq(selectTable.parent, insertedRow.id), + }) + } - await insertArrays({ - adapter, - arrays: [rowToInsert.arrays], - db, - parentRows: [insertedRow], - uuidMap: arraysBlocksUUIDMap, - }) + if (Object.keys(arraysBlocksUUIDMap).length > 0) { + tableRows.forEach((row: any) => { + if (row.parent in arraysBlocksUUIDMap) { + row.parent = arraysBlocksUUIDMap[row.parent] + } + }) + } - // ////////////////////////////////// - // INSERT hasMany SELECTS - // ////////////////////////////////// - - for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { - const selectTable = adapter.tables[selectTableName] - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: selectTableName, - where: eq(selectTable.parent, insertedRow.id), - }) + if (tableRows.length) { + await adapter.insert({ + db, + tableName: selectTableName, + values: tableRows, + }) + } } - if (Object.keys(arraysBlocksUUIDMap).length > 0) { - tableRows.forEach((row: any) => { - if (row.parent 
in arraysBlocksUUIDMap) { - row.parent = arraysBlocksUUIDMap[row.parent] + // ////////////////////////////////// + // Error Handling + // ////////////////////////////////// + } catch (caughtError) { + // Unique constraint violation error + // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite + + let error = caughtError + if (typeof caughtError === 'object' && 'cause' in caughtError) { + error = caughtError.cause + } + + if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + let fieldName: null | string = null + // We need to try and find the right constraint for the field but if we can't we fallback to a generic message + if (error.code === '23505') { + // For PostgreSQL, we can try to extract the field name from the error constraint + if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { + fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] + } else { + const replacement = `${tableName}_` + + if (error.constraint.includes(replacement)) { + const replacedConstraint = error.constraint.replace(replacement, '') + + if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { + fieldName = adapter.fieldConstraints[tableName][replacedConstraint] + } + } } - }) - } - if (tableRows.length) { - await adapter.insert({ - db, - tableName: selectTableName, - values: tableRows, - }) - } - } + if (!fieldName) { + // Last case scenario we extract the key and value from the detail on the error + const detail = error.detail + const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ + const match: string[] = detail.match(regex) - // ////////////////////////////////// - // Error Handling - // ////////////////////////////////// - } catch (caughtError) { - // Unique constraint violation error - // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite + if (match && match[1]) { + const key = match[1] - let error = caughtError - if (typeof caughtError === 'object' && 'cause' in caughtError) { - error = caughtError.cause - } + fieldName = key + } + } + } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + /** + * For SQLite, we can try to extract the field name from the error message + * The message typically looks like: + * "UNIQUE constraint failed: table_name.field_name" + */ + const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ + const match: string[] = error.message.match(regex) - if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - let fieldName: null | string = null - // We need to try and find the right constraint for the field but if we can't we fallback to a generic message - if (error.code === '23505') { - // For PostgreSQL, we can try to extract the field name from the error constraint - if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { - fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] - } else { - const replacement = `${tableName}_` + if (match && match[2]) { + if (adapter.fieldConstraints[tableName]) { + fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] + } - if (error.constraint.includes(replacement)) { - const replacedConstraint = error.constraint.replace(replacement, '') - - if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { - fieldName = adapter.fieldConstraints[tableName][replacedConstraint] + if (!fieldName) { + fieldName = match[2] } } } - if (!fieldName) { - // Last case scenario we extract the key and value from the detail on the error - const detail = 
error.detail - const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ - const match: string[] = detail.match(regex) - - if (match && match[1]) { - const key = match[1] - - fieldName = key - } - } - } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - /** - * For SQLite, we can try to extract the field name from the error message - * The message typically looks like: - * "UNIQUE constraint failed: table_name.field_name" - */ - const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ - const match: string[] = error.message.match(regex) - - if (match && match[2]) { - if (adapter.fieldConstraints[tableName]) { - fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] - } - - if (!fieldName) { - fieldName = match[2] - } - } + throw new ValidationError( + { + id, + errors: [ + { + message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', + path: fieldName, + }, + ], + req, + }, + req?.t, + ) + } else { + throw error } - - throw new ValidationError( - { - id, - errors: [ - { - message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: fieldName, - }, - ], - req, - }, - req?.t, - ) - } else { - throw error } } diff --git a/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts b/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts new file mode 100644 index 0000000000..096d22a5cf --- /dev/null +++ b/packages/drizzle/src/upsertRow/shouldUseOptimizedUpsertRow.ts @@ -0,0 +1,52 @@ +import type { FlattenedField } from 'payload' + +/** + * Checks whether we should use the upsertRow function for the passed data and otherwise use a simple SQL SET call. + * We need to use upsertRow only when the data has arrays, blocks, hasMany select/text/number, localized fields, complex relationships. + */ +export const shouldUseOptimizedUpsertRow = ({ + data, + fields, +}: { + data: Record + fields: FlattenedField[] +}) => { + for (const key in data) { + const value = data[key] + const field = fields.find((each) => each.name === key) + + if (!field) { + continue + } + + if ( + field.type === 'array' || + field.type === 'blocks' || + ((field.type === 'text' || + field.type === 'relationship' || + field.type === 'upload' || + field.type === 'select' || + field.type === 'number') && + field.hasMany) || + ((field.type === 'relationship' || field.type === 'upload') && + Array.isArray(field.relationTo)) || + field.localized + ) { + return false + } + + if ( + (field.type === 'group' || field.type === 'tab') && + value && + typeof value === 'object' && + !shouldUseOptimizedUpsertRow({ + data: value as Record, + fields: field.flattenedFields, + }) + ) { + return false + } + } + + return true +} diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index ecaf364acb..9bd4ae5418 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -1,7 +1,13 @@ import type { MongooseAdapter } from '@payloadcms/db-mongodb' import type { PostgresAdapter } from '@payloadcms/db-postgres/types' import type { NextRESTClient } from 'helpers/NextRESTClient.js' -import type { Payload, PayloadRequest, TypeWithID, ValidationError } from 'payload' +import type { + DataFromCollectionSlug, + Payload, + PayloadRequest, + TypeWithID, + ValidationError, +} from 'payload' import { migrateRelationshipsV2_V3, @@ -2807,7 +2813,7 @@ describe('database', () => { } }) - it('should update simple', async () => { + it('should use optimized updateOne', async () => { const post = await payload.create({ collection: 'posts', data: { @@ -2818,7 +2824,7 @@ describe('database', 
() => { arrayWithIDs: [{ text: 'some text' }], }, }) - const res = await payload.db.updateOne({ + const res = (await payload.db.updateOne({ where: { id: { equals: post.id } }, data: { title: 'hello updated', @@ -2826,14 +2832,61 @@ describe('database', () => { tab: { text: 'in tab updated' }, }, collection: 'posts', - }) + })) as unknown as DataFromCollectionSlug<'posts'> expect(res.title).toBe('hello updated') expect(res.text).toBe('other text (should not be nuked)') - expect(res.group.text).toBe('in group updated') - expect(res.tab.text).toBe('in tab updated') + expect(res.group?.text).toBe('in group updated') + expect(res.tab?.text).toBe('in tab updated') expect(res.arrayWithIDs).toHaveLength(1) - expect(res.arrayWithIDs[0].text).toBe('some text') + expect(res.arrayWithIDs?.[0]?.text).toBe('some text') + }) + + it('should use optimized updateMany', async () => { + const post1 = await payload.create({ + collection: 'posts', + data: { + text: 'other text (should not be nuked)', + title: 'hello', + group: { text: 'in group' }, + tab: { text: 'in tab' }, + arrayWithIDs: [{ text: 'some text' }], + }, + }) + const post2 = await payload.create({ + collection: 'posts', + data: { + text: 'other text 2 (should not be nuked)', + title: 'hello', + group: { text: 'in group' }, + tab: { text: 'in tab' }, + arrayWithIDs: [{ text: 'some text' }], + }, + }) + + const res = (await payload.db.updateMany({ + where: { id: { in: [post1.id, post2.id] } }, + data: { + title: 'hello updated', + group: { text: 'in group updated' }, + tab: { text: 'in tab updated' }, + }, + collection: 'posts', + })) as unknown as Array> + + expect(res).toHaveLength(2) + const resPost1 = res?.find((r) => r.id === post1.id) + const resPost2 = res?.find((r) => r.id === post2.id) + expect(resPost1?.text).toBe('other text (should not be nuked)') + expect(resPost2?.text).toBe('other text 2 (should not be nuked)') + + for (const post of res) { + expect(post.title).toBe('hello updated') + expect(post.group?.text).toBe('in group updated') + expect(post.tab?.text).toBe('in tab updated') + expect(post.arrayWithIDs).toHaveLength(1) + expect(post.arrayWithIDs?.[0]?.text).toBe('some text') + } }) it('should allow incremental number update', async () => { From e6da384a439cc68ce1216c491ae2c3df9ae571c7 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Wed, 16 Jul 2025 12:56:42 -0400 Subject: [PATCH 041/143] ci: disable bundle analysis for forks (#13198) The bundle analysis action requires comment permissions which are not available to PRs from forks. This PR disables bundle analysis until we can implement this in a separate workflow as shown in [the docs here](https://github.com/exoego/esbuild-bundle-analyzer?tab=readme-ov-file#github-action-setup-for-public-repositories). 
--- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9ca829bcbe..bf9fe7d359 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -718,6 +718,8 @@ jobs: DO_NOT_TRACK: 1 # Disable Turbopack telemetry - name: Analyze esbuild bundle size + # Temporarily disable this for community PRs until this can be implemented in a separate workflow + if: github.event.pull_request.head.repo.fork == false uses: exoego/esbuild-bundle-analyzer@v1 with: metafiles: 'packages/payload/meta_index.json,packages/payload/meta_shared.json,packages/ui/meta_client.json,packages/ui/meta_shared.json,packages/next/meta_index.json,packages/richtext-lexical/meta_client.json' From 41cff6d436d2a4ee0920caacdbfa60f6ed1397da Mon Sep 17 00:00:00 2001 From: Elliott W Date: Thu, 17 Jul 2025 01:02:43 +0545 Subject: [PATCH 042/143] fix(db-mongodb): improve compatibility with Firestore database (#12763) ### What? Adds four more arguments to the `mongooseAdapter`: ```typescript useJoinAggregations?: boolean /* The big one */ useAlternativeDropDatabase?: boolean useBigIntForNumberIDs?: boolean usePipelineInSortLookup?: boolean ``` Also export a new `compatabilityOptions` object from `@payloadcms/db-mongodb` where each key is a mongo-compatible database and the value is the recommended `mongooseAdapter` settings for compatability. ### Why? When using firestore and visiting `/admin/collections/media/payload-folders`, we get: ``` MongoServerError: invalid field(s) in lookup: [let, pipeline], only lookup(from, localField, foreignField, as) is supported ``` Firestore doesn't support the full MongoDB aggregation API used by Payload which gets used when building aggregations for populating join fields. There are several other compatability issues with Firestore: - The invalid `pipeline` property is used in the `$lookup` aggregation in `buildSortParams` - Firestore only supports number IDs of type `Long`, but Mongoose converts custom ID fields of type number to `Double` - Firestore does not support the `dropDatabase` command - Firestore does not support the `createIndex` command (not addressed in this PR) ### How? ```typescript useJoinAggregations?: boolean /* The big one */ ``` When this is `false` we skip the `buildJoinAggregation()` pipeline and resolve the join fields through multiple queries. This can potentially be used with AWS DocumentDB and Azure Cosmos DB to support join fields, but I have not tested with either of these databases. ```typescript useAlternativeDropDatabase?: boolean ``` When `true`, monkey-patch (replace) the `dropDatabase` function so that it calls `collection.deleteMany({})` on every collection instead of sending a single `dropDatabase` command to the database ```typescript useBigIntForNumberIDs?: boolean ``` When `true`, use `mongoose.Schema.Types.BigInt` for custom ID fields of type `number` which converts to a firestore `Long` behind the scenes ```typescript usePipelineInSortLookup?: boolean ``` When `false`, modify the sortAggregation pipeline in `buildSortParams()` so that we don't use the `pipeline` property in the `$lookup` aggregation. Results in slightly worse performance when sorting by relationship properties. ### Limitations This PR does not add support for transactions or creating indexes in firestore. ### Fixes Fixed a bug (and added a test) where you weren't able to sort by multiple properties on a relationship field. ### Future work 1. 
Firestore supports simple `$lookup` aggregations but other databases might not. Could add a `useSortAggregations` property which can be used to disable aggregations in sorting. --------- Co-authored-by: Claude Co-authored-by: Sasha <64744993+r1tsuu@users.noreply.github.com> --- .github/workflows/main.yml | 1 + docs/database/mongodb.mdx | 50 +- package.json | 1 + packages/db-mongodb/src/connect.ts | 19 + packages/db-mongodb/src/find.ts | 11 + packages/db-mongodb/src/findOne.ts | 11 + packages/db-mongodb/src/index.ts | 41 ++ packages/db-mongodb/src/models/buildSchema.ts | 13 +- .../db-mongodb/src/queries/buildSortParam.ts | 58 +- packages/db-mongodb/src/queryDrafts.ts | 12 + .../src/utilities/aggregatePaginate.ts | 6 +- .../src/utilities/buildJoinAggregation.ts | 3 + .../src/utilities/compatabilityOptions.ts | 25 + .../db-mongodb/src/utilities/resolveJoins.ts | 647 ++++++++++++++++++ .../db-mongodb/src/utilities/transform.ts | 5 + test/generateDatabaseAdapter.ts | 19 + test/generateDatabaseSchema.ts | 2 +- test/helpers/isMongoose.ts | 5 +- test/helpers/startMemoryDB.ts | 6 +- test/relationships/int.spec.ts | 43 +- 20 files changed, 938 insertions(+), 40 deletions(-) create mode 100644 packages/db-mongodb/src/utilities/compatabilityOptions.ts create mode 100644 packages/db-mongodb/src/utilities/resolveJoins.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bf9fe7d359..60b6ac9655 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -153,6 +153,7 @@ jobs: matrix: database: - mongodb + - firestore - postgres - postgres-custom-schema - postgres-uuid diff --git a/docs/database/mongodb.mdx b/docs/database/mongodb.mdx index 16958cd1c6..26a139bae3 100644 --- a/docs/database/mongodb.mdx +++ b/docs/database/mongodb.mdx @@ -30,18 +30,22 @@ export default buildConfig({ ## Options -| Option | Description | -| -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. | -| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. | -| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. | -| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false | -| `migrationDir` | Customize the directory that migrations are stored. | -| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. | -| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. 
For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). | -| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. | -| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. | -| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. | +| Option | Description | +| ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `autoPluralization` | Tell Mongoose to auto-pluralize any collection names if it encounters any singular words used as collection `slug`s. | +| `connectOptions` | Customize MongoDB connection options. Payload will connect to your MongoDB database using default options which you can override and extend to include all the [options](https://mongoosejs.com/docs/connections.html#options) available to mongoose. | +| `collectionsSchemaOptions` | Customize Mongoose schema options for collections. | +| `disableIndexHints` | Set to true to disable hinting to MongoDB to use 'id' as index. This is currently done when counting documents for pagination, as it increases the speed of the count function used in that query. Disabling this optimization might fix some problems with AWS DocumentDB. Defaults to false | +| `migrationDir` | Customize the directory that migrations are stored. | +| `transactionOptions` | An object with configuration properties used in [transactions](https://www.mongodb.com/docs/manual/core/transactions/) or `false` which will disable the use of transactions. | +| `collation` | Enable language-specific string comparison with customizable options. Available on MongoDB 3.4+. Defaults locale to "en". Example: `{ strength: 3 }`. For a full list of collation options and their definitions, see the [MongoDB documentation](https://www.mongodb.com/docs/manual/reference/collation/). | +| `allowAdditionalKeys` | By default, Payload strips all additional keys from MongoDB data that don't exist in the Payload schema. If you have some data that you want to include to the result but it doesn't exist in Payload, you can set this to `true`. Be careful as Payload access control _won't_ work for this data. | +| `allowIDOnCreate` | Set to `true` to use the `id` passed in data on the create API operations without using a custom ID field. | +| `disableFallbackSort` | Set to `true` to disable the adapter adding a fallback sort when sorting by non-unique fields, this can affect performance in some cases but it ensures a consistent order of results. | +| `useAlternativeDropDatabase` | Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. Payload only uses `dropDatabase` for testing purposes. Defaults to `false`. 
| +| `useBigIntForNumberIDs` | Set to `true` to use `BigInt` for custom ID fields of type `'number'`. Useful for databases that don't support `double` or `int32` IDs. Defaults to `false`. | +| `useJoinAggregations` | Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. Defaults to `true`. | +| `usePipelineInSortLookup` | Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. Defaults to `true`. | ## Access to Mongoose models @@ -56,9 +60,21 @@ You can access Mongoose models as follows: ## Using other MongoDB implementations -Limitations with [DocumentDB](https://aws.amazon.com/documentdb/) and [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db): +You can import the `compatabilityOptions` object to get the recommended settings for other MongoDB implementations. Since these databases aren't officially supported by payload, you may still encounter issues even with these settings (please create an issue or PR if you believe these options should be updated): -- For Azure Cosmos DB you must pass `transactionOptions: false` to the adapter options. Azure Cosmos DB does not support transactions that update two and more documents in different collections, which is a common case when using Payload (via hooks). -- For Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`. -- The [Join Field](../fields/join) is not supported in DocumentDB and Azure Cosmos DB, as we internally use MongoDB aggregations to query data for that field, which are limited there. This can be changed in the future. -- For DocumentDB pass `disableIndexHints: true` to disable hinting to the DB to use `id` as index which can cause problems with DocumentDB. +```ts +import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb' + +export default buildConfig({ + db: mongooseAdapter({ + url: process.env.DATABASE_URI, + // For example, if you're using firestore: + ...compatabilityOptions.firestore, + }), +}) +``` + +We export compatability options for [DocumentDB](https://aws.amazon.com/documentdb/), [Azure Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db) and [Firestore](https://cloud.google.com/firestore/mongodb-compatibility/docs/overview). Known limitations: + +- Azure Cosmos DB does not support transactions that update two or more documents in different collections, which is a common case when using Payload (via hooks). +- Azure Cosmos DB the root config property `indexSortableFields` must be set to `true`. 
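
The preset objects are plain `Partial<Args>` values, so they compose with explicit options via object spread, and keys written after the spread take precedence. A minimal, illustrative sketch (the `transactionOptions: false` override layered on the DocumentDB preset is a hypothetical tweak for demonstration, not a recommendation from this PR, and the config fragment follows the same abbreviated style as the docs examples above):

```ts
import { compatabilityOptions, mongooseAdapter } from '@payloadcms/db-mongodb'
import { buildConfig } from 'payload'

export default buildConfig({
  db: mongooseAdapter({
    url: process.env.DATABASE_URI,
    // Start from the exported DocumentDB preset ({ disableIndexHints: true })...
    ...compatabilityOptions.documentdb,
    // ...then toggle an individual flag on top of it (hypothetical override, shown only to
    // illustrate that explicit options after the spread win over the preset values).
    transactionOptions: false,
  }),
})
```

This way a preset can be adopted wholesale while still re-enabling or disabling individual behaviors as a given cluster requires.
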
diff --git a/package.json b/package.json index d00e17bd8d..7bd509f311 100644 --- a/package.json +++ b/package.json @@ -112,6 +112,7 @@ "test:e2e:prod:ci": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod", "test:e2e:prod:ci:noturbo": "pnpm prepare-run-test-against-prod:ci && pnpm runts ./test/runE2E.ts --prod --no-turbo", "test:int": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", + "test:int:firestore": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=firestore DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:int:postgres": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=postgres DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:int:sqlite": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" NODE_NO_WARNINGS=1 PAYLOAD_DATABASE=sqlite DISABLE_LOGGING=true jest --forceExit --detectOpenHandles --config=test/jest.config.js --runInBand", "test:types": "tstyche", diff --git a/packages/db-mongodb/src/connect.ts b/packages/db-mongodb/src/connect.ts index 6210bde286..ba2c9c4db3 100644 --- a/packages/db-mongodb/src/connect.ts +++ b/packages/db-mongodb/src/connect.ts @@ -36,6 +36,25 @@ export const connect: Connect = async function connect( try { this.connection = (await mongoose.connect(urlToConnect, connectionOptions)).connection + if (this.useAlternativeDropDatabase) { + if (this.connection.db) { + // Firestore doesn't support dropDatabase, so we monkey patch + // dropDatabase to delete all documents from all collections instead + this.connection.db.dropDatabase = async function (): Promise { + const existingCollections = await this.listCollections().toArray() + await Promise.all( + existingCollections.map(async (collectionInfo) => { + const collection = this.collection(collectionInfo.name) + await collection.deleteMany({}) + }), + ) + return true + } + this.connection.dropDatabase = async function () { + await this.db?.dropDatabase() + } + } + } // If we are running a replica set with MongoDB Memory Server, // wait until the replica set elects a primary before proceeding diff --git a/packages/db-mongodb/src/find.ts b/packages/db-mongodb/src/find.ts index 938940c513..6f1124e503 100644 --- a/packages/db-mongodb/src/find.ts +++ b/packages/db-mongodb/src/find.ts @@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const find: Find = async function find( @@ -155,6 +156,16 @@ export const find: Find = async function find( result = await Model.paginate(query, paginationOptions) } + if (!this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: result.docs as Record[], + joins, + locale, + }) + } + transform({ adapter: this, data: result.docs, diff --git a/packages/db-mongodb/src/findOne.ts b/packages/db-mongodb/src/findOne.ts index 0ffe97b108..cf6edb34f0 100644 --- 
a/packages/db-mongodb/src/findOne.ts +++ b/packages/db-mongodb/src/findOne.ts @@ -10,6 +10,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const findOne: FindOne = async function findOne( @@ -67,6 +68,16 @@ export const findOne: FindOne = async function findOne( doc = await Model.findOne(query, {}, options) } + if (doc && !this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: [doc] as Record[], + joins, + locale, + }) + } + if (!doc) { return null } diff --git a/packages/db-mongodb/src/index.ts b/packages/db-mongodb/src/index.ts index de2dc1c862..f2f152533a 100644 --- a/packages/db-mongodb/src/index.ts +++ b/packages/db-mongodb/src/index.ts @@ -143,6 +143,29 @@ export interface Args { /** The URL to connect to MongoDB or false to start payload and prevent connecting */ url: false | string + + /** + * Set to `true` to use an alternative `dropDatabase` implementation that calls `collection.deleteMany({})` on every collection instead of sending a raw `dropDatabase` command. + * Payload only uses `dropDatabase` for testing purposes. + * @default false + */ + useAlternativeDropDatabase?: boolean + /** + * Set to `true` to use `BigInt` for custom ID fields of type `'number'`. + * Useful for databases that don't support `double` or `int32` IDs. + * @default false + */ + useBigIntForNumberIDs?: boolean + /** + * Set to `false` to disable join aggregations (which use correlated subqueries) and instead populate join fields via multiple `find` queries. + * @default true + */ + useJoinAggregations?: boolean + /** + * Set to `false` to disable the use of `pipeline` in the `$lookup` aggregation in sorting. + * @default true + */ + usePipelineInSortLookup?: boolean } export type MongooseAdapter = { @@ -159,6 +182,10 @@ export type MongooseAdapter = { up: (args: MigrateUpArgs) => Promise }[] sessions: Record + useAlternativeDropDatabase: boolean + useBigIntForNumberIDs: boolean + useJoinAggregations: boolean + usePipelineInSortLookup: boolean versions: { [slug: string]: CollectionModel } @@ -194,6 +221,10 @@ declare module 'payload' { updateVersion: ( args: { options?: QueryOptions } & UpdateVersionArgs, ) => Promise> + useAlternativeDropDatabase: boolean + useBigIntForNumberIDs: boolean + useJoinAggregations: boolean + usePipelineInSortLookup: boolean versions: { [slug: string]: CollectionModel } @@ -214,6 +245,10 @@ export function mongooseAdapter({ prodMigrations, transactionOptions = {}, url, + useAlternativeDropDatabase = false, + useBigIntForNumberIDs = false, + useJoinAggregations = true, + usePipelineInSortLookup = true, }: Args): DatabaseAdapterObj { function adapter({ payload }: { payload: Payload }) { const migrationDir = findMigrationDir(migrationDirArg) @@ -279,6 +314,10 @@ export function mongooseAdapter({ updateOne, updateVersion, upsert, + useAlternativeDropDatabase, + useBigIntForNumberIDs, + useJoinAggregations, + usePipelineInSortLookup, }) } @@ -290,6 +329,8 @@ export function mongooseAdapter({ } } +export { compatabilityOptions } from './utilities/compatabilityOptions.js' + /** * Attempt to find migrations directory. 
* diff --git a/packages/db-mongodb/src/models/buildSchema.ts b/packages/db-mongodb/src/models/buildSchema.ts index 56e2cf1130..719f474ef7 100644 --- a/packages/db-mongodb/src/models/buildSchema.ts +++ b/packages/db-mongodb/src/models/buildSchema.ts @@ -143,7 +143,12 @@ export const buildSchema = (args: { const idField = schemaFields.find((field) => fieldAffectsData(field) && field.name === 'id') if (idField) { fields = { - _id: idField.type === 'number' ? Number : String, + _id: + idField.type === 'number' + ? payload.db.useBigIntForNumberIDs + ? mongoose.Schema.Types.BigInt + : Number + : String, } schemaFields = schemaFields.filter( (field) => !(fieldAffectsData(field) && field.name === 'id'), @@ -900,7 +905,11 @@ const getRelationshipValueType = (field: RelationshipField | UploadField, payloa } if (customIDType === 'number') { - return mongoose.Schema.Types.Number + if (payload.db.useBigIntForNumberIDs) { + return mongoose.Schema.Types.BigInt + } else { + return mongoose.Schema.Types.Number + } } return mongoose.Schema.Types.String diff --git a/packages/db-mongodb/src/queries/buildSortParam.ts b/packages/db-mongodb/src/queries/buildSortParam.ts index 0133736932..551d5bfad7 100644 --- a/packages/db-mongodb/src/queries/buildSortParam.ts +++ b/packages/db-mongodb/src/queries/buildSortParam.ts @@ -99,31 +99,57 @@ const relationshipSort = ({ sortFieldPath = foreignFieldPath.localizedPath.replace('', locale) } - if ( - !sortAggregation.some((each) => { - return '$lookup' in each && each.$lookup.as === `__${path}` - }) - ) { + const as = `__${relationshipPath.replace(/\./g, '__')}` + + // If we have not already sorted on this relationship yet, we need to add a lookup stage + if (!sortAggregation.some((each) => '$lookup' in each && each.$lookup.as === as)) { + let localField = versions ? `version.${relationshipPath}` : relationshipPath + + if (adapter.usePipelineInSortLookup) { + const flattenedField = `__${localField.replace(/\./g, '__')}_lookup` + sortAggregation.push({ + $addFields: { + [flattenedField]: `$${localField}`, + }, + }) + localField = flattenedField + } + sortAggregation.push({ $lookup: { - as: `__${path}`, + as, foreignField: '_id', from: foreignCollection.Model.collection.name, - localField: versions ? 
`version.${relationshipPath}` : relationshipPath, - pipeline: [ - { - $project: { - [sortFieldPath]: true, + localField, + ...(!adapter.usePipelineInSortLookup && { + pipeline: [ + { + $project: { + [sortFieldPath]: true, + }, }, - }, - ], + ], + }), }, }) - sort[`__${path}.${sortFieldPath}`] = sortDirection - - return true + if (adapter.usePipelineInSortLookup) { + sortAggregation.push({ + $unset: localField, + }) + } } + + if (!adapter.usePipelineInSortLookup) { + const lookup = sortAggregation.find( + (each) => '$lookup' in each && each.$lookup.as === as, + ) as PipelineStage.Lookup + const pipeline = lookup.$lookup.pipeline![0] as PipelineStage.Project + pipeline.$project[sortFieldPath] = true + } + + sort[`${as}.${sortFieldPath}`] = sortDirection + return true } } diff --git a/packages/db-mongodb/src/queryDrafts.ts b/packages/db-mongodb/src/queryDrafts.ts index c43e0c52f4..1dd0e84daf 100644 --- a/packages/db-mongodb/src/queryDrafts.ts +++ b/packages/db-mongodb/src/queryDrafts.ts @@ -12,6 +12,7 @@ import { buildJoinAggregation } from './utilities/buildJoinAggregation.js' import { buildProjectionFromSelect } from './utilities/buildProjectionFromSelect.js' import { getCollection } from './utilities/getEntity.js' import { getSession } from './utilities/getSession.js' +import { resolveJoins } from './utilities/resolveJoins.js' import { transform } from './utilities/transform.js' export const queryDrafts: QueryDrafts = async function queryDrafts( @@ -158,6 +159,17 @@ export const queryDrafts: QueryDrafts = async function queryDrafts( result = await Model.paginate(versionQuery, paginationOptions) } + if (!this.useJoinAggregations) { + await resolveJoins({ + adapter: this, + collectionSlug, + docs: result.docs as Record[], + joins, + locale, + versions: true, + }) + } + transform({ adapter: this, data: result.docs, diff --git a/packages/db-mongodb/src/utilities/aggregatePaginate.ts b/packages/db-mongodb/src/utilities/aggregatePaginate.ts index 237d0a00c9..5e0b6d1de3 100644 --- a/packages/db-mongodb/src/utilities/aggregatePaginate.ts +++ b/packages/db-mongodb/src/utilities/aggregatePaginate.ts @@ -76,7 +76,11 @@ export const aggregatePaginate = async ({ countPromise = Model.estimatedDocumentCount(query) } else { const hint = adapter.disableIndexHints !== true ? { _id: 1 } : undefined - countPromise = Model.countDocuments(query, { collation, hint, session }) + countPromise = Model.countDocuments(query, { + collation, + session, + ...(hint ? { hint } : {}), + }) } } diff --git a/packages/db-mongodb/src/utilities/buildJoinAggregation.ts b/packages/db-mongodb/src/utilities/buildJoinAggregation.ts index 0d8afb3688..da737d62fc 100644 --- a/packages/db-mongodb/src/utilities/buildJoinAggregation.ts +++ b/packages/db-mongodb/src/utilities/buildJoinAggregation.ts @@ -44,6 +44,9 @@ export const buildJoinAggregation = async ({ projection, versions, }: BuildJoinAggregationArgs): Promise => { + if (!adapter.useJoinAggregations) { + return + } if ( (Object.keys(collectionConfig.joins).length === 0 && collectionConfig.polymorphicJoins.length == 0) || diff --git a/packages/db-mongodb/src/utilities/compatabilityOptions.ts b/packages/db-mongodb/src/utilities/compatabilityOptions.ts new file mode 100644 index 0000000000..bf797895b7 --- /dev/null +++ b/packages/db-mongodb/src/utilities/compatabilityOptions.ts @@ -0,0 +1,25 @@ +import type { Args } from '../index.js' + +/** + * Each key is a mongo-compatible database and the value + * is the recommended `mongooseAdapter` settings for compatability. 
+ */ +export const compatabilityOptions = { + cosmosdb: { + transactionOptions: false, + useJoinAggregations: false, + usePipelineInSortLookup: false, + }, + documentdb: { + disableIndexHints: true, + }, + firestore: { + disableIndexHints: true, + ensureIndexes: false, + transactionOptions: false, + useAlternativeDropDatabase: true, + useBigIntForNumberIDs: true, + useJoinAggregations: false, + usePipelineInSortLookup: false, + }, +} satisfies Record> diff --git a/packages/db-mongodb/src/utilities/resolveJoins.ts b/packages/db-mongodb/src/utilities/resolveJoins.ts new file mode 100644 index 0000000000..fa28c63d76 --- /dev/null +++ b/packages/db-mongodb/src/utilities/resolveJoins.ts @@ -0,0 +1,647 @@ +import type { JoinQuery, SanitizedJoins, Where } from 'payload' + +import { + appendVersionToQueryKey, + buildVersionCollectionFields, + combineQueries, + getQueryDraftsSort, +} from 'payload' +import { fieldShouldBeLocalized } from 'payload/shared' + +import type { MongooseAdapter } from '../index.js' + +import { buildQuery } from '../queries/buildQuery.js' +import { buildSortParam } from '../queries/buildSortParam.js' +import { transform } from './transform.js' + +export type ResolveJoinsArgs = { + /** The MongoDB adapter instance */ + adapter: MongooseAdapter + /** The slug of the collection being queried */ + collectionSlug: string + /** Array of documents to resolve joins for */ + docs: Record[] + /** Join query specifications (which joins to resolve and how) */ + joins?: JoinQuery + /** Optional locale for localized queries */ + locale?: string + /** Optional projection for the join query */ + projection?: Record + /** Whether to resolve versions instead of published documents */ + versions?: boolean +} + +/** + * Resolves join relationships for a collection of documents. + * This function fetches related documents based on join configurations and + * attaches them to the original documents with pagination support. 
+ */ +export async function resolveJoins({ + adapter, + collectionSlug, + docs, + joins, + locale, + projection, + versions = false, +}: ResolveJoinsArgs): Promise { + // Early return if no joins are specified or no documents to process + if (!joins || docs.length === 0) { + return + } + + // Get the collection configuration from the adapter + const collectionConfig = adapter.payload.collections[collectionSlug]?.config + if (!collectionConfig) { + return + } + + // Build a map of join paths to their configurations for quick lookup + // This flattens the nested join structure into a single map keyed by join path + const joinMap: Record = {} + + // Add regular joins + for (const [target, joinList] of Object.entries(collectionConfig.joins)) { + for (const join of joinList) { + joinMap[join.joinPath] = { ...join, targetCollection: target } + } + } + + // Add polymorphic joins + for (const join of collectionConfig.polymorphicJoins || []) { + // For polymorphic joins, we use the collections array as the target + joinMap[join.joinPath] = { ...join, targetCollection: join.field.collection as string } + } + + // Process each requested join concurrently + const joinPromises = Object.entries(joins).map(async ([joinPath, joinQuery]) => { + if (!joinQuery) { + return null + } + + // If a projection is provided, and the join path is not in the projection, skip it + if (projection && !projection[joinPath]) { + return null + } + + // Get the join definition from our map + const joinDef = joinMap[joinPath] + if (!joinDef) { + return null + } + + // Normalize collections to always be an array for unified processing + const allCollections = Array.isArray(joinDef.field.collection) + ? joinDef.field.collection + : [joinDef.field.collection] + + // Use the provided locale or fall back to the default locale for localized fields + const localizationConfig = adapter.payload.config.localization + const effectiveLocale = + locale || + (typeof localizationConfig === 'object' && + localizationConfig && + localizationConfig.defaultLocale) + + // Extract relationTo filter from the where clause to determine which collections to query + const relationToFilter = extractRelationToFilter(joinQuery.where || {}) + + // Determine which collections to query based on relationTo filter + const collections = relationToFilter + ? allCollections.filter((col) => relationToFilter.includes(col)) + : allCollections + + // Check if this is a polymorphic collection join (where field.collection is an array) + const isPolymorphicJoin = Array.isArray(joinDef.field.collection) + + // Apply pagination settings + const limit = joinQuery.limit ?? joinDef.field.defaultLimit ?? 10 + const page = joinQuery.page ?? 1 + const skip = (page - 1) * limit + + // Process collections concurrently + const collectionPromises = collections.map(async (joinCollectionSlug) => { + const targetConfig = adapter.payload.collections[joinCollectionSlug]?.config + if (!targetConfig) { + return null + } + + const useDrafts = versions && Boolean(targetConfig.versions?.drafts) + let JoinModel + if (useDrafts) { + JoinModel = adapter.versions[targetConfig.slug] + } else { + JoinModel = adapter.collections[targetConfig.slug] + } + + if (!JoinModel) { + return null + } + + // Extract all parent document IDs to use in the join query + const parentIDs = docs.map((d) => (versions ? (d.parent ?? d._id ?? d.id) : (d._id ?? d.id))) + + // Build the base query + let whereQuery: null | Record = null + whereQuery = isPolymorphicJoin + ? 
filterWhereForCollection( + joinQuery.where || {}, + targetConfig.flattenedFields, + true, // exclude relationTo for individual collections + ) + : joinQuery.where || {} + + // Skip this collection if the WHERE clause cannot be satisfied for polymorphic collection joins + if (whereQuery === null) { + return null + } + whereQuery = useDrafts + ? await JoinModel.buildQuery({ + locale, + payload: adapter.payload, + where: combineQueries(appendVersionToQueryKey(whereQuery as Where), { + latest: { + equals: true, + }, + }), + }) + : await buildQuery({ + adapter, + collectionSlug: joinCollectionSlug, + fields: targetConfig.flattenedFields, + locale, + where: whereQuery as Where, + }) + + // Handle localized paths and version prefixes + let dbFieldName = joinDef.field.on + + if (effectiveLocale && typeof localizationConfig === 'object' && localizationConfig) { + const pathSegments = joinDef.field.on.split('.') + const transformedSegments: string[] = [] + const fields = useDrafts + ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true) + : targetConfig.flattenedFields + + for (let i = 0; i < pathSegments.length; i++) { + const segment = pathSegments[i]! + transformedSegments.push(segment) + + // Check if this segment corresponds to a localized field + const fieldAtSegment = fields.find((f) => f.name === segment) + if (fieldAtSegment && fieldAtSegment.localized) { + transformedSegments.push(effectiveLocale) + } + } + + dbFieldName = transformedSegments.join('.') + } + + // Add version prefix for draft queries + if (useDrafts) { + dbFieldName = `version.${dbFieldName}` + } + + // Check if the target field is a polymorphic relationship + const isPolymorphic = joinDef.targetField + ? Array.isArray(joinDef.targetField.relationTo) + : false + + if (isPolymorphic) { + // For polymorphic relationships, we need to match both relationTo and value + whereQuery[`${dbFieldName}.relationTo`] = collectionSlug + whereQuery[`${dbFieldName}.value`] = { $in: parentIDs } + } else { + // For regular relationships and polymorphic collection joins + whereQuery[dbFieldName] = { $in: parentIDs } + } + + // Build the sort parameters for the query + const fields = useDrafts + ? buildVersionCollectionFields(adapter.payload.config, targetConfig, true) + : targetConfig.flattenedFields + + const sort = buildSortParam({ + adapter, + config: adapter.payload.config, + fields, + locale, + sort: useDrafts + ? getQueryDraftsSort({ + collectionConfig: targetConfig, + sort: joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort, + }) + : joinQuery.sort || joinDef.field.defaultSort || targetConfig.defaultSort, + timestamps: true, + }) + + const projection = buildJoinProjection(dbFieldName, useDrafts, sort) + + const [results, dbCount] = await Promise.all([ + JoinModel.find(whereQuery, projection, { + sort, + ...(isPolymorphicJoin ? {} : { limit, skip }), + }).lean(), + isPolymorphicJoin ? Promise.resolve(0) : JoinModel.countDocuments(whereQuery), + ]) + + const count = isPolymorphicJoin ? results.length : dbCount + + transform({ + adapter, + data: results, + fields: useDrafts + ? 
buildVersionCollectionFields(adapter.payload.config, targetConfig, false) + : targetConfig.fields, + operation: 'read', + }) + + // Return results with collection info for grouping + return { + collectionSlug: joinCollectionSlug, + count, + dbFieldName, + results, + sort, + useDrafts, + } + }) + + const collectionResults = await Promise.all(collectionPromises) + + // Group the results by parent ID + const grouped: Record< + string, + { + docs: Record[] + sort: Record + } + > = {} + + let totalCount = 0 + for (const collectionResult of collectionResults) { + if (!collectionResult) { + continue + } + + const { collectionSlug, count, dbFieldName, results, sort, useDrafts } = collectionResult + + totalCount += count + + for (const result of results) { + if (useDrafts) { + result.id = result.parent + } + + const parentValues = getByPathWithArrays(result, dbFieldName) as ( + | { relationTo: string; value: number | string } + | number + | string + )[] + + if (parentValues.length === 0) { + continue + } + + for (let parentValue of parentValues) { + if (!parentValue) { + continue + } + + if (typeof parentValue === 'object') { + parentValue = parentValue.value + } + + const joinData = { + relationTo: collectionSlug, + value: result.id, + } + + const parentKey = parentValue as string + if (!grouped[parentKey]) { + grouped[parentKey] = { + docs: [], + sort, + } + } + + // Always store the ObjectID reference in polymorphic format + grouped[parentKey].docs.push({ + ...result, + __joinData: joinData, + }) + } + } + } + + for (const results of Object.values(grouped)) { + results.docs.sort((a, b) => { + for (const [fieldName, sortOrder] of Object.entries(results.sort)) { + const sort = sortOrder === 'asc' ? 1 : -1 + const aValue = a[fieldName] as Date | number | string + const bValue = b[fieldName] as Date | number | string + if (aValue < bValue) { + return -1 * sort + } + if (aValue > bValue) { + return 1 * sort + } + } + return 0 + }) + results.docs = results.docs.map( + (doc) => (isPolymorphicJoin ? doc.__joinData : doc.id) as Record, + ) + } + + // Determine if the join field should be localized + const localeSuffix = + fieldShouldBeLocalized({ + field: joinDef.field, + parentIsLocalized: joinDef.parentIsLocalized, + }) && + adapter.payload.config.localization && + effectiveLocale + ? `.${effectiveLocale}` + : '' + + // Adjust the join path with locale suffix if needed + const localizedJoinPath = `${joinPath}${localeSuffix}` + + return { + grouped, + isPolymorphicJoin, + joinQuery, + limit, + localizedJoinPath, + page, + skip, + totalCount, + } + }) + + // Wait for all join operations to complete + const joinResults = await Promise.all(joinPromises) + + // Process the results and attach them to documents + for (const joinResult of joinResults) { + if (!joinResult) { + continue + } + + const { grouped, isPolymorphicJoin, joinQuery, limit, localizedJoinPath, skip, totalCount } = + joinResult + + // Attach the joined data to each parent document + for (const doc of docs) { + const id = (versions ? (doc.parent ?? doc._id ?? doc.id) : (doc._id ?? doc.id)) as string + const all = grouped[id]?.docs || [] + + // Calculate the slice for pagination + // When limit is 0, it means unlimited - return all results + const slice = isPolymorphicJoin + ? limit === 0 + ? 
all + : all.slice(skip, skip + limit) + : // For non-polymorphic joins, we assume that page and limit were applied at the database level + all + + // Create the join result object with pagination metadata + const value: Record = { + docs: slice, + hasNextPage: limit === 0 ? false : totalCount > skip + slice.length, + } + + // Include total count if requested + if (joinQuery.count) { + value.totalDocs = totalCount + } + + // Navigate to the correct nested location in the document and set the join data + // This handles nested join paths like "user.posts" by creating intermediate objects + const segments = localizedJoinPath.split('.') + let ref: Record + if (versions) { + if (!doc.version) { + doc.version = {} + } + ref = doc.version as Record + } else { + ref = doc + } + + for (let i = 0; i < segments.length - 1; i++) { + const seg = segments[i]! + if (!ref[seg]) { + ref[seg] = {} + } + ref = ref[seg] as Record + } + // Set the final join data at the target path + ref[segments[segments.length - 1]!] = value + } + } +} + +/** + * Extracts relationTo filter values from a WHERE clause + * @param where - The WHERE clause to search + * @returns Array of collection slugs if relationTo filter found, null otherwise + */ +function extractRelationToFilter(where: Record): null | string[] { + if (!where || typeof where !== 'object') { + return null + } + + // Check for direct relationTo conditions + if (where.relationTo && typeof where.relationTo === 'object') { + const relationTo = where.relationTo as Record + if (relationTo.in && Array.isArray(relationTo.in)) { + return relationTo.in as string[] + } + if (relationTo.equals) { + return [relationTo.equals as string] + } + } + + // Check for relationTo in logical operators + if (where.and && Array.isArray(where.and)) { + for (const condition of where.and) { + const result = extractRelationToFilter(condition) + if (result) { + return result + } + } + } + + if (where.or && Array.isArray(where.or)) { + for (const condition of where.or) { + const result = extractRelationToFilter(condition) + if (result) { + return result + } + } + } + + return null +} + +/** + * Filters a WHERE clause to only include fields that exist in the target collection + * This is needed for polymorphic joins where different collections have different fields + * @param where - The original WHERE clause + * @param availableFields - The fields available in the target collection + * @param excludeRelationTo - Whether to exclude relationTo field (for individual collections) + * @returns A filtered WHERE clause, or null if the query cannot match this collection + */ +function filterWhereForCollection( + where: Record, + availableFields: Array<{ name: string }>, + excludeRelationTo: boolean = false, +): null | Record { + if (!where || typeof where !== 'object') { + return where + } + + const fieldNames = new Set(availableFields.map((f) => f.name)) + // Add special fields that are available in polymorphic relationships + if (!excludeRelationTo) { + fieldNames.add('relationTo') + } + + const filtered: Record = {} + + for (const [key, value] of Object.entries(where)) { + if (key === 'and') { + // Handle AND operator - all conditions must be satisfiable + if (Array.isArray(value)) { + const filteredConditions: Record[] = [] + + for (const condition of value) { + const filteredCondition = filterWhereForCollection( + condition, + availableFields, + excludeRelationTo, + ) + + // If any condition in AND cannot be satisfied, the whole AND fails + if (filteredCondition === null) { + return null + } 
+ + if (Object.keys(filteredCondition).length > 0) { + filteredConditions.push(filteredCondition) + } + } + + if (filteredConditions.length > 0) { + filtered[key] = filteredConditions + } + } + } else if (key === 'or') { + // Handle OR operator - at least one condition must be satisfiable + if (Array.isArray(value)) { + const filteredConditions = value + .map((condition) => + filterWhereForCollection(condition, availableFields, excludeRelationTo), + ) + .filter((condition) => condition !== null && Object.keys(condition).length > 0) + + if (filteredConditions.length > 0) { + filtered[key] = filteredConditions + } + // If no OR conditions can be satisfied, we still continue (OR is more permissive) + } + } else if (key === 'relationTo' && excludeRelationTo) { + // Skip relationTo field for non-polymorphic collections + continue + } else if (fieldNames.has(key)) { + // Include the condition if the field exists in this collection + filtered[key] = value + } else { + // Field doesn't exist in this collection - this makes the query unsatisfiable + return null + } + } + + return filtered +} + +type SanitizedJoin = SanitizedJoins[string][number] + +/** + * Builds projection for join queries + */ +function buildJoinProjection( + baseFieldName: string, + useDrafts: boolean, + sort: Record, +): Record { + const projection: Record = { + _id: 1, + [baseFieldName]: 1, + } + + if (useDrafts) { + projection.parent = 1 + } + + for (const fieldName of Object.keys(sort)) { + projection[fieldName] = 1 + } + + return projection +} + +/** + * Enhanced utility function to safely traverse nested object properties using dot notation + * Handles arrays by searching through array elements for matching values + * @param doc - The document to traverse + * @param path - Dot-separated path (e.g., "array.category") + * @returns Array of values found at the specified path (for arrays) or single value + */ +function getByPathWithArrays(doc: unknown, path: string): unknown[] { + const segments = path.split('.') + let current = doc + + for (let i = 0; i < segments.length; i++) { + const segment = segments[i]! + + if (current === undefined || current === null) { + return [] + } + + // Get the value at the current segment + const value = (current as Record)[segment] + + if (value === undefined || value === null) { + return [] + } + + // If this is the last segment, return the value(s) + if (i === segments.length - 1) { + return Array.isArray(value) ? 
value : [value] + } + + // If the value is an array and we have more segments to traverse + if (Array.isArray(value)) { + const remainingPath = segments.slice(i + 1).join('.') + const results: unknown[] = [] + + // Search through each array element + for (const item of value) { + if (item && typeof item === 'object') { + const subResults = getByPathWithArrays(item, remainingPath) + results.push(...subResults) + } + } + + return results + } + + // Continue traversing + current = value + } + + return [] +} diff --git a/packages/db-mongodb/src/utilities/transform.ts b/packages/db-mongodb/src/utilities/transform.ts index 7318c29cee..24113806ae 100644 --- a/packages/db-mongodb/src/utilities/transform.ts +++ b/packages/db-mongodb/src/utilities/transform.ts @@ -426,6 +426,11 @@ export const transform = ({ data.id = data.id.toHexString() } + // Handle BigInt conversion for custom ID fields of type 'number' + if (adapter.useBigIntForNumberIDs && typeof data.id === 'bigint') { + data.id = Number(data.id) + } + if (!adapter.allowAdditionalKeys) { stripFields({ config, diff --git a/test/generateDatabaseAdapter.ts b/test/generateDatabaseAdapter.ts index 5d28069b8b..dd079d4f6a 100644 --- a/test/generateDatabaseAdapter.ts +++ b/test/generateDatabaseAdapter.ts @@ -21,6 +21,25 @@ export const allDatabaseAdapters = { strength: 1, }, })`, + firestore: ` + import { mongooseAdapter, compatabilityOptions } from '@payloadcms/db-mongodb' + + export const databaseAdapter = mongooseAdapter({ + ...compatabilityOptions.firestore, + url: + process.env.DATABASE_URI || + process.env.MONGODB_MEMORY_SERVER_URI || + 'mongodb://127.0.0.1/payloadtests', + collation: { + strength: 1, + }, + // The following options prevent some tests from failing. + // More work needed to get tests succeeding without these options. 
+ ensureIndexes: true, + transactionOptions: {}, + disableIndexHints: false, + useAlternativeDropDatabase: false, + })`, postgres: ` import { postgresAdapter } from '@payloadcms/db-postgres' diff --git a/test/generateDatabaseSchema.ts b/test/generateDatabaseSchema.ts index a7a84621d7..1adff62d77 100644 --- a/test/generateDatabaseSchema.ts +++ b/test/generateDatabaseSchema.ts @@ -13,7 +13,7 @@ const dirname = path.dirname(filename) const writeDBAdapter = process.env.WRITE_DB_ADAPTER !== 'false' process.env.PAYLOAD_DROP_DATABASE = process.env.PAYLOAD_DROP_DATABASE || 'true' -if (process.env.PAYLOAD_DATABASE === 'mongodb') { +if (process.env.PAYLOAD_DATABASE === 'mongodb' || process.env.PAYLOAD_DATABASE === 'firestore') { throw new Error('Not supported') } diff --git a/test/helpers/isMongoose.ts b/test/helpers/isMongoose.ts index 965e83851f..2f1b7e152f 100644 --- a/test/helpers/isMongoose.ts +++ b/test/helpers/isMongoose.ts @@ -1,5 +1,8 @@ import type { Payload } from 'payload' export function isMongoose(_payload?: Payload) { - return _payload?.db?.name === 'mongoose' || ['mongodb'].includes(process.env.PAYLOAD_DATABASE) + return ( + _payload?.db?.name === 'mongoose' || + ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE) + ) } diff --git a/test/helpers/startMemoryDB.ts b/test/helpers/startMemoryDB.ts index a090524021..17d2a89d45 100644 --- a/test/helpers/startMemoryDB.ts +++ b/test/helpers/startMemoryDB.ts @@ -14,13 +14,17 @@ declare global { */ // eslint-disable-next-line no-restricted-exports export default async () => { + if (process.env.DATABASE_URI) { + return + } process.env.NODE_ENV = 'test' process.env.PAYLOAD_DROP_DATABASE = 'true' process.env.NODE_OPTIONS = '--no-deprecation' process.env.DISABLE_PAYLOAD_HMR = 'true' if ( - (!process.env.PAYLOAD_DATABASE || process.env.PAYLOAD_DATABASE === 'mongodb') && + (!process.env.PAYLOAD_DATABASE || + ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE)) && !global._mongoMemoryServer ) { console.log('Starting memory db...') diff --git a/test/relationships/int.spec.ts b/test/relationships/int.spec.ts index da6189c663..e02ecf86f0 100644 --- a/test/relationships/int.spec.ts +++ b/test/relationships/int.spec.ts @@ -38,7 +38,7 @@ const dirname = path.dirname(filename) type EasierChained = { id: string; relation: EasierChained } -const mongoIt = process.env.PAYLOAD_DATABASE === 'mongodb' ? it : it.skip +const mongoIt = ['firestore', 'mongodb'].includes(process.env.PAYLOAD_DATABASE || '') ? 
it : it.skip describe('Relationships', () => { beforeAll(async () => { @@ -791,6 +791,47 @@ describe('Relationships', () => { expect(localized_res_2.docs).toStrictEqual([movie_1, movie_2]) }) + it('should sort by multiple properties of a relationship', async () => { + await payload.delete({ collection: 'directors', where: {} }) + await payload.delete({ collection: 'movies', where: {} }) + + const createDirector = { + collection: 'directors', + data: { + name: 'Dan', + }, + } as const + + const director_1 = await payload.create(createDirector) + const director_2 = await payload.create(createDirector) + + const movie_1 = await payload.create({ + collection: 'movies', + depth: 0, + data: { director: director_1.id, name: 'Some Movie 1' }, + }) + + const movie_2 = await payload.create({ + collection: 'movies', + depth: 0, + data: { director: director_2.id, name: 'Some Movie 2' }, + }) + + const res_1 = await payload.find({ + collection: 'movies', + sort: ['director.name', 'director.createdAt'], + depth: 0, + }) + const res_2 = await payload.find({ + collection: 'movies', + sort: ['director.name', '-director.createdAt'], + depth: 0, + }) + + expect(res_1.docs).toStrictEqual([movie_1, movie_2]) + expect(res_2.docs).toStrictEqual([movie_2, movie_1]) + }) + it('should sort by a property of a hasMany relationship', async () => { const movie1 = await payload.create({ collection: 'movies', From cab7ba4a8aae9c806bf64fb2b23bb2ec95534e60 Mon Sep 17 00:00:00 2001 From: Sean Zubrickas Date: Wed, 16 Jul 2025 12:36:32 -0700 Subject: [PATCH 043/143] =?UTF-8?q?fix:=20Enhances=20field-level=20access?= =?UTF-8?q?=20controls=20on=20Users=20collection=20to=20address=20s?= =?UTF-8?q?=E2=80=A6=20(#13197)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enhance field-level access controls on Users collection to address security concerns - Restricted read/update access on `email` field to admins and the user themselves - Locked down `roles` field so only admins can create, read, or update it --- examples/auth/src/collections/Users.ts | 35 ++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/examples/auth/src/collections/Users.ts b/examples/auth/src/collections/Users.ts index 67f00ebdd4..a0f9d334a0 100644 --- a/examples/auth/src/collections/Users.ts +++ b/examples/auth/src/collections/Users.ts @@ -6,6 +6,8 @@ import { anyone } from './access/anyone' import { checkRole } from './access/checkRole' import { loginAfterCreate } from './hooks/loginAfterCreate' import { protectRoles } from './hooks/protectRoles' +import { access } from 'fs' +import { create } from 'domain' export const Users: CollectionConfig = { slug: 'users', @@ -32,6 +34,34 @@ export const Users: CollectionConfig = { afterChange: [loginAfterCreate], }, fields: [ + { + name: 'email', + type: 'email', + required: true, + unique: true, + access: { + read: adminsAndUser, + update: adminsAndUser, + }, + }, + { + name: 'password', + type: 'password', + required: true, + admin: { + description: 'Leave blank to keep the current password.', + }, + }, + { + name: 'resetPasswordToken', + type: 'text', + hidden: true, + }, + { + name: 'resetPasswordExpiration', + type: 'date', + hidden: true, + }, { name: 'firstName', type: 'text', @@ -45,6 +75,11 @@ export const Users: CollectionConfig = { type: 'select', hasMany: true, saveToJWT: true, + access: { + read: admins, + update: admins, + create: admins, + }, hooks: { beforeChange: [protectRoles], }, From a20b43624b108412aef646f6628c6ce07ad64e55 Mon Sep 17 
00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 17 Jul 2025 00:18:14 +0300 Subject: [PATCH 044/143] feat: add `findDistinct` operation (#13102) Adds a new operation findDistinct that can give you distinct values of a field for a given collection Example: Assume you have a collection posts with multiple documents, and some of them share the same title: ```js // Example dataset (some titles appear multiple times) [ { title: 'title-1' }, { title: 'title-2' }, { title: 'title-1' }, { title: 'title-3' }, { title: 'title-2' }, { title: 'title-4' }, { title: 'title-5' }, { title: 'title-6' }, { title: 'title-7' }, { title: 'title-8' }, { title: 'title-9' }, ] ``` You can now retrieve all unique title values using findDistinct: ```js const result = await payload.findDistinct({ collection: 'posts', field: 'title', }) console.log(result.values) // Output: // [ // 'title-1', // 'title-2', // 'title-3', // 'title-4', // 'title-5', // 'title-6', // 'title-7', // 'title-8', // 'title-9' // ] ``` You can also limit the number of distinct results: ```js const limitedResult = await payload.findDistinct({ collection: 'posts', field: 'title', sortOrder: 'desc', limit: 3, }) console.log(limitedResult.values) // Output: // [ // 'title-1', // 'title-2', // 'title-3' // ] ``` You can also pass a `where` query to filter the documents. --- docs/local-api/overview.mdx | 21 ++ packages/db-mongodb/src/findDistinct.ts | 141 +++++++++++++ packages/db-mongodb/src/index.ts | 2 + packages/db-postgres/src/index.ts | 2 + packages/db-sqlite/src/countDistinct.ts | 6 +- packages/db-sqlite/src/index.ts | 2 + packages/db-sqlite/src/types.ts | 2 + packages/db-vercel-postgres/src/index.ts | 2 + packages/drizzle/src/findDistinct.ts | 108 ++++++++++ packages/drizzle/src/index.ts | 1 + .../drizzle/src/postgres/countDistinct.ts | 6 +- packages/drizzle/src/postgres/types.ts | 2 + packages/drizzle/src/queries/parseParams.ts | 18 +- .../drizzle/src/queries/selectDistinct.ts | 4 +- packages/drizzle/src/types.ts | 1 + .../drizzle/src/utilities/rawConstraint.ts | 2 + .../payload/src/collections/config/types.ts | 1 + .../src/collections/endpoints/findDistinct.ts | 46 +++++ .../src/collections/endpoints/index.ts | 7 + .../collections/operations/findDistinct.ts | 189 ++++++++++++++++++ .../operations/local/findDistinct.ts | 138 +++++++++++++ .../src/collections/operations/utils.ts | 7 + packages/payload/src/database/types.ts | 37 +++- packages/payload/src/index.ts | 24 ++- test/database/int.spec.ts | 112 +++++++++++ ...50707_123508.json => 20250714_201659.json} | 2 +- ...{20250707_123508.ts => 20250714_201659.ts} | 0 .../up-down-migration/migrations/index.ts | 10 +- test/helpers/NextRESTClient.ts | 2 +- 29 files changed, 869 insertions(+), 26 deletions(-) create mode 100644 packages/db-mongodb/src/findDistinct.ts create mode 100644 packages/drizzle/src/findDistinct.ts create mode 100644 packages/payload/src/collections/endpoints/findDistinct.ts create mode 100644 packages/payload/src/collections/operations/findDistinct.ts create mode 100644 packages/payload/src/collections/operations/local/findDistinct.ts rename test/database/up-down-migration/migrations/{20250707_123508.json => 20250714_201659.json} (99%) rename test/database/up-down-migration/migrations/{20250707_123508.ts => 20250714_201659.ts} (100%) diff --git a/docs/local-api/overview.mdx b/docs/local-api/overview.mdx index cb2e74f4fa..4d39424b5f 100644 --- a/docs/local-api/overview.mdx +++ b/docs/local-api/overview.mdx @@ -194,6 +194,27 @@ const result = 
await payload.count({ }) ``` +### FindDistinct#collection-find-distinct + +```js +// Result will be an object with: +// { +// values: ['value-1', 'value-2'], // array of distinct values, +// field: 'title', // the field +// totalDocs: 10, // count of the distinct values satisfies query, +// perPage: 10, // count of distinct values per page (based on provided limit) +// } +const result = await payload.findDistinct({ + collection: 'posts', // required + locale: 'en', + where: {}, // pass a `where` query here + user: dummyUser, + overrideAccess: false, + field: 'title', + sort: 'title', +}) +``` + ### Update by ID#collection-update-by-id ```js diff --git a/packages/db-mongodb/src/findDistinct.ts b/packages/db-mongodb/src/findDistinct.ts new file mode 100644 index 0000000000..bc77a8cab4 --- /dev/null +++ b/packages/db-mongodb/src/findDistinct.ts @@ -0,0 +1,141 @@ +import type { PipelineStage } from 'mongoose' + +import { type FindDistinct, getFieldByPath } from 'payload' + +import type { MongooseAdapter } from './index.js' + +import { buildQuery } from './queries/buildQuery.js' +import { buildSortParam } from './queries/buildSortParam.js' +import { getCollection } from './utilities/getEntity.js' +import { getSession } from './utilities/getSession.js' + +export const findDistinct: FindDistinct = async function (this: MongooseAdapter, args) { + const { collectionConfig, Model } = getCollection({ + adapter: this, + collectionSlug: args.collection, + }) + + const session = await getSession(this, args.req) + + const { where = {} } = args + + const sortAggregation: PipelineStage[] = [] + + const sort = buildSortParam({ + adapter: this, + config: this.payload.config, + fields: collectionConfig.flattenedFields, + locale: args.locale, + sort: args.sort ?? args.field, + sortAggregation, + timestamps: true, + }) + + const query = await buildQuery({ + adapter: this, + collectionSlug: args.collection, + fields: collectionConfig.flattenedFields, + locale: args.locale, + where, + }) + + const fieldPathResult = getFieldByPath({ + fields: collectionConfig.flattenedFields, + path: args.field, + }) + let fieldPath = args.field + if (fieldPathResult?.pathHasLocalized && args.locale) { + fieldPath = fieldPathResult.localizedPath.replace('', args.locale) + } + + const page = args.page || 1 + + const sortProperty = Object.keys(sort)[0]! // assert because buildSortParam always returns at least 1 key. + const sortDirection = sort[sortProperty] === 'asc' ? 1 : -1 + + const pipeline: PipelineStage[] = [ + { + $match: query, + }, + ...(sortAggregation.length > 0 ? sortAggregation : []), + + { + $group: { + _id: { + _field: `$${fieldPath}`, + ...(sortProperty === fieldPath + ? {} + : { + _sort: `$${sortProperty}`, + }), + }, + }, + }, + { + $sort: { + [sortProperty === fieldPath ? '_id._field' : '_id._sort']: sortDirection, + }, + }, + ] + + const getValues = async () => { + return Model.aggregate(pipeline, { session }).then((res) => + res.map((each) => ({ + [args.field]: JSON.parse(JSON.stringify(each._id._field)), + })), + ) + } + + if (args.limit) { + pipeline.push({ + $skip: (page - 1) * args.limit, + }) + pipeline.push({ $limit: args.limit }) + const totalDocs = await Model.aggregate( + [ + { + $match: query, + }, + { + $group: { + _id: `$${fieldPath}`, + }, + }, + { $count: 'count' }, + ], + { + session, + }, + ).then((res) => res[0]?.count ?? 
0) + const totalPages = Math.ceil(totalDocs / args.limit) + const hasPrevPage = page > 1 + const hasNextPage = totalPages > page + const pagingCounter = (page - 1) * args.limit + 1 + + return { + hasNextPage, + hasPrevPage, + limit: args.limit, + nextPage: hasNextPage ? page + 1 : null, + page, + pagingCounter, + prevPage: hasPrevPage ? page - 1 : null, + totalDocs, + totalPages, + values: await getValues(), + } + } + + const values = await getValues() + + return { + hasNextPage: false, + hasPrevPage: false, + limit: 0, + page: 1, + pagingCounter: 1, + totalDocs: values.length, + totalPages: 1, + values, + } +} diff --git a/packages/db-mongodb/src/index.ts b/packages/db-mongodb/src/index.ts index f2f152533a..08c8e6cb6f 100644 --- a/packages/db-mongodb/src/index.ts +++ b/packages/db-mongodb/src/index.ts @@ -42,6 +42,7 @@ import { deleteOne } from './deleteOne.js' import { deleteVersions } from './deleteVersions.js' import { destroy } from './destroy.js' import { find } from './find.js' +import { findDistinct } from './findDistinct.js' import { findGlobal } from './findGlobal.js' import { findGlobalVersions } from './findGlobalVersions.js' import { findOne } from './findOne.js' @@ -297,6 +298,7 @@ export function mongooseAdapter({ destroy, disableFallbackSort, find, + findDistinct, findGlobal, findGlobalVersions, findOne, diff --git a/packages/db-postgres/src/index.ts b/packages/db-postgres/src/index.ts index a6769cb735..df431424bd 100644 --- a/packages/db-postgres/src/index.ts +++ b/packages/db-postgres/src/index.ts @@ -17,6 +17,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -120,6 +121,7 @@ export function postgresAdapter(args: Args): DatabaseAdapterObj json: true, }, fieldConstraints: {}, + findDistinct, generateSchema: createSchemaGenerator({ columnToCodeConverter, corePackageSuffix: 'pg-core', diff --git a/packages/db-sqlite/src/countDistinct.ts b/packages/db-sqlite/src/countDistinct.ts index ae729138f0..cbb51cee1d 100644 --- a/packages/db-sqlite/src/countDistinct.ts +++ b/packages/db-sqlite/src/countDistinct.ts @@ -6,13 +6,13 @@ import type { CountDistinct, SQLiteAdapter } from './types.js' export const countDistinct: CountDistinct = async function countDistinct( this: SQLiteAdapter, - { db, joins, tableName, where }, + { column, db, joins, tableName, where }, ) { // When we don't have any joins - use a simple COUNT(*) query. if (joins.length === 0) { const countResult = await db .select({ - count: count(), + count: column ? count(sql`DISTINCT ${column}`) : count(), }) .from(this.tables[tableName]) .where(where) @@ -25,7 +25,7 @@ export const countDistinct: CountDistinct = async function countDistinct( }) .from(this.tables[tableName]) .where(where) - .groupBy(this.tables[tableName].id) + .groupBy(column ?? 
this.tables[tableName].id) .limit(1) .$dynamic() diff --git a/packages/db-sqlite/src/index.ts b/packages/db-sqlite/src/index.ts index 015ce9ba92..0cae319680 100644 --- a/packages/db-sqlite/src/index.ts +++ b/packages/db-sqlite/src/index.ts @@ -18,6 +18,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -101,6 +102,7 @@ export function sqliteAdapter(args: Args): DatabaseAdapterObj { json: true, }, fieldConstraints: {}, + findDistinct, generateSchema: createSchemaGenerator({ columnToCodeConverter, corePackageSuffix: 'sqlite-core', diff --git a/packages/db-sqlite/src/types.ts b/packages/db-sqlite/src/types.ts index 568f3a4dc3..5aa84c9935 100644 --- a/packages/db-sqlite/src/types.ts +++ b/packages/db-sqlite/src/types.ts @@ -5,6 +5,7 @@ import type { DrizzleConfig, Relation, Relations, SQL } from 'drizzle-orm' import type { LibSQLDatabase } from 'drizzle-orm/libsql' import type { AnySQLiteColumn, + SQLiteColumn, SQLiteInsertOnConflictDoUpdateConfig, SQLiteTableWithColumns, SQLiteTransactionConfig, @@ -87,6 +88,7 @@ export type GenericTable = SQLiteTableWithColumns<{ export type GenericRelation = Relations>> export type CountDistinct = (args: { + column?: SQLiteColumn db: LibSQLDatabase joins: BuildQueryJoinAliases tableName: string diff --git a/packages/db-vercel-postgres/src/index.ts b/packages/db-vercel-postgres/src/index.ts index a9fd65f63c..155bdc2a2d 100644 --- a/packages/db-vercel-postgres/src/index.ts +++ b/packages/db-vercel-postgres/src/index.ts @@ -18,6 +18,7 @@ import { deleteVersions, destroy, find, + findDistinct, findGlobal, findGlobalVersions, findMigrationDir, @@ -174,6 +175,7 @@ export function vercelPostgresAdapter(args: Args = {}): DatabaseAdapterObj { + query = query.orderBy(() => orderBy.map(({ column, order }) => order(column))) + + if (args.limit) { + if (offset) { + query = query.offset(offset) + } + + query = query.limit(args.limit) + } + + return query + }, + selectFields: { + _selected: selectFields['_selected'], + ...(orderBy[0].column === selectFields['_selected'] ? {} : { _order: orderBy[0].column }), + } as Record, + tableName, + where, + }) + + const values = selectDistinctResult.map((each) => ({ + [args.field]: (each as Record)._selected, + })) + + if (args.limit) { + const totalDocs = await this.countDistinct({ + column: selectFields['_selected'], + db, + joins, + tableName, + where, + }) + + const totalPages = Math.ceil(totalDocs / args.limit) + const hasPrevPage = page > 1 + const hasNextPage = totalPages > page + const pagingCounter = (page - 1) * args.limit + 1 + + return { + hasNextPage, + hasPrevPage, + limit: args.limit, + nextPage: hasNextPage ? page + 1 : null, + page, + pagingCounter, + prevPage: hasPrevPage ? 
page - 1 : null, + totalDocs, + totalPages, + values, + } + } + + return { + hasNextPage: false, + hasPrevPage: false, + limit: 0, + page: 1, + pagingCounter: 1, + totalDocs: values.length, + totalPages: 1, + values, + } +} diff --git a/packages/drizzle/src/index.ts b/packages/drizzle/src/index.ts index 6650b26178..dd1055bdfc 100644 --- a/packages/drizzle/src/index.ts +++ b/packages/drizzle/src/index.ts @@ -12,6 +12,7 @@ export { deleteVersions } from './deleteVersions.js' export { destroy } from './destroy.js' export { find } from './find.js' export { chainMethods } from './find/chainMethods.js' +export { findDistinct } from './findDistinct.js' export { findGlobal } from './findGlobal.js' export { findGlobalVersions } from './findGlobalVersions.js' export { findMigrationDir } from './findMigrationDir.js' diff --git a/packages/drizzle/src/postgres/countDistinct.ts b/packages/drizzle/src/postgres/countDistinct.ts index 04d7559fcf..55f4ea8ad9 100644 --- a/packages/drizzle/src/postgres/countDistinct.ts +++ b/packages/drizzle/src/postgres/countDistinct.ts @@ -6,13 +6,13 @@ import type { BasePostgresAdapter, CountDistinct } from './types.js' export const countDistinct: CountDistinct = async function countDistinct( this: BasePostgresAdapter, - { db, joins, tableName, where }, + { column, db, joins, tableName, where }, ) { // When we don't have any joins - use a simple COUNT(*) query. if (joins.length === 0) { const countResult = await db .select({ - count: count(), + count: column ? count(sql`DISTINCT ${column}`) : count(), }) .from(this.tables[tableName]) .where(where) @@ -26,7 +26,7 @@ export const countDistinct: CountDistinct = async function countDistinct( }) .from(this.tables[tableName]) .where(where) - .groupBy(this.tables[tableName].id) + .groupBy(column || this.tables[tableName].id) .limit(1) .$dynamic() diff --git a/packages/drizzle/src/postgres/types.ts b/packages/drizzle/src/postgres/types.ts index 696d13797d..60ed3a0749 100644 --- a/packages/drizzle/src/postgres/types.ts +++ b/packages/drizzle/src/postgres/types.ts @@ -20,6 +20,7 @@ import type { UniqueConstraintBuilder, } from 'drizzle-orm/pg-core' import type { PgTableFn } from 'drizzle-orm/pg-core/table' +import type { SQLiteColumn } from 'drizzle-orm/sqlite-core' import type { Payload, PayloadRequest } from 'payload' import type { ClientConfig, QueryResult } from 'pg' @@ -64,6 +65,7 @@ export type GenericRelation = Relations> export type PostgresDB = NodePgDatabase> export type CountDistinct = (args: { + column?: PgColumn | SQLiteColumn db: PostgresDB | TransactionPg joins: BuildQueryJoinAliases tableName: string diff --git a/packages/drizzle/src/queries/parseParams.ts b/packages/drizzle/src/queries/parseParams.ts index a5b88d4d74..b43dad70a4 100644 --- a/packages/drizzle/src/queries/parseParams.ts +++ b/packages/drizzle/src/queries/parseParams.ts @@ -10,6 +10,7 @@ import type { DrizzleAdapter, GenericColumn } from '../types.js' import type { BuildQueryJoinAliases } from './buildQuery.js' import { getNameFromDrizzleTable } from '../utilities/getNameFromDrizzleTable.js' +import { DistinctSymbol } from '../utilities/rawConstraint.js' import { buildAndOrConditions } from './buildAndOrConditions.js' import { getTableColumnFromPath } from './getTableColumnFromPath.js' import { sanitizeQueryValue } from './sanitizeQueryValue.js' @@ -108,6 +109,17 @@ export function parseParams({ value: val, }) + const resolvedColumn = + rawColumn || + (aliasTable && tableName === getNameFromDrizzleTable(table) + ? 
aliasTable[columnName] + : table[columnName]) + + if (val === DistinctSymbol) { + selectFields['_selected'] = resolvedColumn + break + } + queryConstraints.forEach(({ columnName: col, table: constraintTable, value }) => { if (typeof value === 'string' && value.indexOf('%') > -1) { constraints.push(adapter.operators.like(constraintTable[col], value)) @@ -281,12 +293,6 @@ export function parseParams({ break } - const resolvedColumn = - rawColumn || - (aliasTable && tableName === getNameFromDrizzleTable(table) - ? aliasTable[columnName] - : table[columnName]) - if (queryOperator === 'not_equals' && queryValue !== null) { constraints.push( or( diff --git a/packages/drizzle/src/queries/selectDistinct.ts b/packages/drizzle/src/queries/selectDistinct.ts index 7cb6b5fc0f..25bf75ba4d 100644 --- a/packages/drizzle/src/queries/selectDistinct.ts +++ b/packages/drizzle/src/queries/selectDistinct.ts @@ -14,6 +14,7 @@ import type { BuildQueryJoinAliases } from './buildQuery.js' type Args = { adapter: DrizzleAdapter db: DrizzleAdapter['drizzle'] | DrizzleTransaction + forceRun?: boolean joins: BuildQueryJoinAliases query?: (args: { query: SQLiteSelect }) => SQLiteSelect selectFields: Record @@ -27,13 +28,14 @@ type Args = { export const selectDistinct = ({ adapter, db, + forceRun, joins, query: queryModifier = ({ query }) => query, selectFields, tableName, where, }: Args): QueryPromise<{ id: number | string }[] & Record> => { - if (Object.keys(joins).length > 0) { + if (forceRun || Object.keys(joins).length > 0) { let query: SQLiteSelect const table = adapter.tables[tableName] diff --git a/packages/drizzle/src/types.ts b/packages/drizzle/src/types.ts index 42f01b7ce8..84dd5f1e74 100644 --- a/packages/drizzle/src/types.ts +++ b/packages/drizzle/src/types.ts @@ -89,6 +89,7 @@ export type TransactionPg = PgTransaction< export type DrizzleTransaction = TransactionPg | TransactionSQLite export type CountDistinct = (args: { + column?: PgColumn | SQLiteColumn db: DrizzleTransaction | LibSQLDatabase | PostgresDB joins: BuildQueryJoinAliases tableName: string diff --git a/packages/drizzle/src/utilities/rawConstraint.ts b/packages/drizzle/src/utilities/rawConstraint.ts index f47ceed9c0..2105532e3b 100644 --- a/packages/drizzle/src/utilities/rawConstraint.ts +++ b/packages/drizzle/src/utilities/rawConstraint.ts @@ -1,5 +1,7 @@ const RawConstraintSymbol = Symbol('RawConstraint') +export const DistinctSymbol = Symbol('DistinctSymbol') + /** * You can use this to inject a raw query to where */ diff --git a/packages/payload/src/collections/config/types.ts b/packages/payload/src/collections/config/types.ts index 43f80ae91d..4414715544 100644 --- a/packages/payload/src/collections/config/types.ts +++ b/packages/payload/src/collections/config/types.ts @@ -82,6 +82,7 @@ export type HookOperationType = | 'forgotPassword' | 'login' | 'read' + | 'readDistinct' | 'refresh' | 'resetPassword' | 'update' diff --git a/packages/payload/src/collections/endpoints/findDistinct.ts b/packages/payload/src/collections/endpoints/findDistinct.ts new file mode 100644 index 0000000000..3a7eb4b927 --- /dev/null +++ b/packages/payload/src/collections/endpoints/findDistinct.ts @@ -0,0 +1,46 @@ +import { status as httpStatus } from 'http-status' + +import type { PayloadHandler } from '../../config/types.js' +import type { Where } from '../../types/index.js' + +import { APIError } from '../../errors/APIError.js' +import { getRequestCollection } from '../../utilities/getRequestEntity.js' +import { headersWithCors } from 
'../../utilities/headersWithCors.js' +import { isNumber } from '../../utilities/isNumber.js' +import { findDistinctOperation } from '../operations/findDistinct.js' + +export const findDistinctHandler: PayloadHandler = async (req) => { + const collection = getRequestCollection(req) + const { depth, field, limit, page, sort, where } = req.query as { + depth?: string + field?: string + limit?: string + page?: string + sort?: string + sortOrder?: string + where?: Where + } + + if (!field) { + throw new APIError('field must be specified', httpStatus.BAD_REQUEST) + } + + const result = await findDistinctOperation({ + collection, + depth: isNumber(depth) ? Number(depth) : undefined, + field, + limit: isNumber(limit) ? Number(limit) : undefined, + page: isNumber(page) ? Number(page) : undefined, + req, + sort: typeof sort === 'string' ? sort.split(',') : undefined, + where, + }) + + return Response.json(result, { + headers: headersWithCors({ + headers: new Headers(), + req, + }), + status: httpStatus.OK, + }) +} diff --git a/packages/payload/src/collections/endpoints/index.ts b/packages/payload/src/collections/endpoints/index.ts index bab76e2db5..368cd58eb6 100644 --- a/packages/payload/src/collections/endpoints/index.ts +++ b/packages/payload/src/collections/endpoints/index.ts @@ -9,6 +9,7 @@ import { docAccessHandler } from './docAccess.js' import { duplicateHandler } from './duplicate.js' import { findHandler } from './find.js' import { findByIDHandler } from './findByID.js' +import { findDistinctHandler } from './findDistinct.js' import { findVersionByIDHandler } from './findVersionByID.js' import { findVersionsHandler } from './findVersions.js' import { previewHandler } from './preview.js' @@ -48,6 +49,12 @@ export const defaultCollectionEndpoints: Endpoint[] = [ method: 'get', path: '/versions', }, + // Might be uncommented in the future + // { + // handler: findDistinctHandler, + // method: 'get', + // path: '/distinct', + // }, { handler: duplicateHandler, method: 'post', diff --git a/packages/payload/src/collections/operations/findDistinct.ts b/packages/payload/src/collections/operations/findDistinct.ts new file mode 100644 index 0000000000..2c814f5f4f --- /dev/null +++ b/packages/payload/src/collections/operations/findDistinct.ts @@ -0,0 +1,189 @@ +import httpStatus from 'http-status' + +import type { AccessResult } from '../../config/types.js' +import type { PaginatedDistinctDocs } from '../../database/types.js' +import type { PayloadRequest, PopulateType, Sort, Where } from '../../types/index.js' +import type { Collection } from '../config/types.js' + +import { executeAccess } from '../../auth/executeAccess.js' +import { combineQueries } from '../../database/combineQueries.js' +import { validateQueryPaths } from '../../database/queryValidation/validateQueryPaths.js' +import { sanitizeWhereQuery } from '../../database/sanitizeWhereQuery.js' +import { APIError } from '../../errors/APIError.js' +import { Forbidden } from '../../errors/Forbidden.js' +import { relationshipPopulationPromise } from '../../fields/hooks/afterRead/relationshipPopulationPromise.js' +import { getFieldByPath } from '../../utilities/getFieldByPath.js' +import { killTransaction } from '../../utilities/killTransaction.js' +import { buildAfterOperation } from './utils.js' + +export type Arguments = { + collection: Collection + depth?: number + disableErrors?: boolean + field: string + limit?: number + locale?: string + overrideAccess?: boolean + page?: number + populate?: PopulateType + req?: PayloadRequest + 
showHiddenFields?: boolean + sort?: Sort + where?: Where +} +export const findDistinctOperation = async ( + incomingArgs: Arguments, +): Promise>> => { + let args = incomingArgs + + try { + // ///////////////////////////////////// + // beforeOperation - Collection + // ///////////////////////////////////// + + if (args.collection.config.hooks?.beforeOperation?.length) { + for (const hook of args.collection.config.hooks.beforeOperation) { + args = + (await hook({ + args, + collection: args.collection.config, + context: args.req!.context, + operation: 'readDistinct', + req: args.req!, + })) || args + } + } + + const { + collection: { config: collectionConfig }, + disableErrors, + overrideAccess, + populate, + showHiddenFields = false, + where, + } = args + + const req = args.req! + const { locale, payload } = req + + // ///////////////////////////////////// + // Access + // ///////////////////////////////////// + + let accessResult: AccessResult + + if (!overrideAccess) { + accessResult = await executeAccess({ disableErrors, req }, collectionConfig.access.read) + + // If errors are disabled, and access returns false, return empty results + if (accessResult === false) { + return { + hasNextPage: false, + hasPrevPage: false, + limit: args.limit || 0, + nextPage: null, + page: 1, + pagingCounter: 1, + prevPage: null, + totalDocs: 0, + totalPages: 0, + values: [], + } + } + } + + // ///////////////////////////////////// + // Find Distinct + // ///////////////////////////////////// + + const fullWhere = combineQueries(where!, accessResult!) + sanitizeWhereQuery({ fields: collectionConfig.flattenedFields, payload, where: fullWhere }) + + await validateQueryPaths({ + collectionConfig, + overrideAccess: overrideAccess!, + req, + where: where ?? {}, + }) + + const fieldResult = getFieldByPath({ + fields: collectionConfig.flattenedFields, + path: args.field, + }) + + if (!fieldResult) { + throw new APIError( + `Field ${args.field} was not found in the collection ${collectionConfig.slug}`, + httpStatus.BAD_REQUEST, + ) + } + + if (fieldResult.field.hidden && !showHiddenFields) { + throw new Forbidden(req.t) + } + + if (fieldResult.field.access?.read) { + const hasAccess = await fieldResult.field.access.read({ req }) + if (!hasAccess) { + throw new Forbidden(req.t) + } + } + + let result = await payload.db.findDistinct({ + collection: collectionConfig.slug, + field: args.field, + limit: args.limit, + locale: locale!, + page: args.page, + req, + sort: args.sort, + where: fullWhere, + }) + + if ( + (fieldResult.field.type === 'relationship' || fieldResult.field.type === 'upload') && + args.depth + ) { + const populationPromises: Promise[] = [] + for (const doc of result.values) { + populationPromises.push( + relationshipPopulationPromise({ + currentDepth: 0, + depth: args.depth, + draft: false, + fallbackLocale: req.fallbackLocale || null, + field: fieldResult.field, + locale: req.locale || null, + overrideAccess: args.overrideAccess ?? 
true, + parentIsLocalized: false, + populate, + req, + showHiddenFields: false, + siblingDoc: doc, + }), + ) + } + await Promise.all(populationPromises) + } + + // ///////////////////////////////////// + // afterOperation - Collection + // ///////////////////////////////////// + + result = await buildAfterOperation({ + args, + collection: collectionConfig, + operation: 'findDistinct', + result, + }) + + // ///////////////////////////////////// + // Return results + // ///////////////////////////////////// + + return result + } catch (error: unknown) { + await killTransaction(args.req!) + throw error + } +} diff --git a/packages/payload/src/collections/operations/local/findDistinct.ts b/packages/payload/src/collections/operations/local/findDistinct.ts new file mode 100644 index 0000000000..2a0ca5cd73 --- /dev/null +++ b/packages/payload/src/collections/operations/local/findDistinct.ts @@ -0,0 +1,138 @@ +import type { + CollectionSlug, + DataFromCollectionSlug, + Document, + PaginatedDistinctDocs, + Payload, + PayloadRequest, + PopulateType, + RequestContext, + Sort, + TypedLocale, + Where, +} from '../../../index.js' +import type { CreateLocalReqOptions } from '../../../utilities/createLocalReq.js' + +import { APIError, createLocalReq } from '../../../index.js' +import { findDistinctOperation } from '../findDistinct.js' + +export type Options< + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug, +> = { + /** + * the Collection slug to operate against. + */ + collection: TSlug + /** + * [Context](https://payloadcms.com/docs/hooks/context), which will then be passed to `context` and `req.context`, + * which can be read by hooks. Useful if you want to pass additional information to the hooks which + * shouldn't be necessarily part of the document, for example a `triggerBeforeChange` option which can be read by the BeforeChange hook + * to determine if it should run or not. + */ + context?: RequestContext + /** + * [Control auto-population](https://payloadcms.com/docs/queries/depth) of nested relationship and upload fields. + */ + depth?: number + /** + * When set to `true`, errors will not be thrown. + */ + disableErrors?: boolean + /** + * The field to get distinct values for + */ + field: TField + /** + * The maximum distinct field values to be returned. + * By default the operation returns all the values. + */ + limit?: number + /** + * Specify [locale](https://payloadcms.com/docs/configuration/localization) for any returned documents. + */ + locale?: 'all' | TypedLocale + /** + * Skip access control. + * Set to `false` if you want to respect Access Control for the operation, for example when fetching data for the fron-end. + * @default true + */ + overrideAccess?: boolean + /** + * Get a specific page number (if limit is specified) + * @default 1 + */ + page?: number + /** + * Specify [populate](https://payloadcms.com/docs/queries/select#populate) to control which fields to include to the result from populated documents. + */ + populate?: PopulateType + /** + * The `PayloadRequest` object. You can pass it to thread the current [transaction](https://payloadcms.com/docs/database/transactions), user and locale to the operation. + * Recommended to pass when using the Local API from hooks, as usually you want to execute the operation within the current transaction. + */ + req?: Partial + /** + * Opt-in to receiving hidden fields. By default, they are hidden from returned documents in accordance to your config. 
+ * @default false + */ + showHiddenFields?: boolean + /** + * Sort the documents, can be a string or an array of strings + * @example '-createdAt' // Sort DESC by createdAt + * @example ['group', '-createdAt'] // sort by 2 fields, ASC group and DESC createdAt + */ + sort?: Sort + /** + * If you set `overrideAccess` to `false`, you can pass a user to use against the access control checks. + */ + user?: Document + /** + * A filter [query](https://payloadcms.com/docs/queries/overview) + */ + where?: Where +} + +export async function findDistinct< + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug & string, +>( + payload: Payload, + options: Options, +): Promise[TField]>>> { + const { + collection: collectionSlug, + depth = 0, + disableErrors, + field, + limit, + overrideAccess = true, + page, + populate, + showHiddenFields, + sort, + where, + } = options + const collection = payload.collections[collectionSlug] + + if (!collection) { + throw new APIError( + `The collection with slug ${String(collectionSlug)} can't be found. Find Operation.`, + ) + } + + return findDistinctOperation({ + collection, + depth, + disableErrors, + field, + limit, + overrideAccess, + page, + populate, + req: await createLocalReq(options as CreateLocalReqOptions, payload), + showHiddenFields, + sort, + where, + }) as Promise[TField]>>> +} diff --git a/packages/payload/src/collections/operations/utils.ts b/packages/payload/src/collections/operations/utils.ts index 6ea8497248..faade92d48 100644 --- a/packages/payload/src/collections/operations/utils.ts +++ b/packages/payload/src/collections/operations/utils.ts @@ -12,6 +12,7 @@ import type { deleteOperation } from './delete.js' import type { deleteByIDOperation } from './deleteByID.js' import type { findOperation } from './find.js' import type { findByIDOperation } from './findByID.js' +import type { findDistinctOperation } from './findDistinct.js' import type { updateOperation } from './update.js' import type { updateByIDOperation } from './updateByID.js' @@ -30,6 +31,7 @@ export type AfterOperationMap = { boolean, SelectFromCollectionSlug > + findDistinct: typeof findDistinctOperation forgotPassword: typeof forgotPasswordOperation login: typeof loginOperation refresh: typeof refreshOperation @@ -81,6 +83,11 @@ export type AfterOperationArg = { operation: 'findByID' result: Awaited['findByID']>> } + | { + args: Parameters['findDistinct']>[0] + operation: 'findDistinct' + result: Awaited['findDistinct']>> + } | { args: Parameters['forgotPassword']>[0] operation: 'forgotPassword' diff --git a/packages/payload/src/database/types.ts b/packages/payload/src/database/types.ts index ca94f76997..c88b8324c8 100644 --- a/packages/payload/src/database/types.ts +++ b/packages/payload/src/database/types.ts @@ -63,6 +63,8 @@ export interface BaseDatabaseAdapter { find: Find + findDistinct: FindDistinct + findGlobal: FindGlobal findGlobalVersions: FindGlobalVersions @@ -82,16 +84,15 @@ export interface BaseDatabaseAdapter { * Run any migration up functions that have not yet been performed and update the status */ migrate: (args?: { migrations?: Migration[] }) => Promise - /** * Run any migration down functions that have been performed */ migrateDown: () => Promise + /** * Drop the current database and run all migrate up functions */ migrateFresh: (args: { forceAcceptWarning?: boolean }) => Promise - /** * Run all migration down functions before running up */ @@ -104,6 +105,7 @@ export interface BaseDatabaseAdapter { * Read the current state of migrations 
and output the result to show which have been run */ migrateStatus: () => Promise + /** * Path to read and write migration files from */ @@ -113,7 +115,6 @@ export interface BaseDatabaseAdapter { * The name of the database adapter */ name: string - /** * Full package name of the database adapter * @@ -124,6 +125,7 @@ export interface BaseDatabaseAdapter { * reference to the instance of payload */ payload: Payload + queryDrafts: QueryDrafts /** @@ -151,7 +153,6 @@ export interface BaseDatabaseAdapter { updateMany: UpdateMany updateOne: UpdateOne - updateVersion: UpdateVersion upsert: Upsert } @@ -481,6 +482,34 @@ export type CreateArgs = { select?: SelectType } +export type FindDistinctArgs = { + collection: CollectionSlug + field: string + limit?: number + locale?: string + page?: number + req?: Partial + sort?: Sort + where?: Where +} + +export type PaginatedDistinctDocs> = { + hasNextPage: boolean + hasPrevPage: boolean + limit: number + nextPage?: null | number | undefined + page: number + pagingCounter: number + prevPage?: null | number | undefined + totalDocs: number + totalPages: number + values: T[] +} + +export type FindDistinct = ( + args: FindDistinctArgs, +) => Promise>> + export type Create = (args: CreateArgs) => Promise export type UpdateOneArgs = { diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 0c2ea26805..5909e20dd6 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -40,7 +40,7 @@ import { } from './auth/operations/local/verifyEmail.js' export type { FieldState } from './admin/forms/Form.js' import type { InitOptions, SanitizedConfig } from './config/types.js' -import type { BaseDatabaseAdapter, PaginatedDocs } from './database/types.js' +import type { BaseDatabaseAdapter, PaginatedDistinctDocs, PaginatedDocs } from './database/types.js' import type { InitializedEmailAdapter } from './email/types.js' import type { DataFromGlobalSlug, Globals, SelectFromGlobalSlug } from './globals/config/types.js' import type { @@ -72,6 +72,10 @@ import { findByIDLocal, type Options as FindByIDOptions, } from './collections/operations/local/findByID.js' +import { + findDistinct as findDistinctLocal, + type Options as FindDistinctOptions, +} from './collections/operations/local/findDistinct.js' import { findVersionByIDLocal, type Options as FindVersionByIDOptions, @@ -464,6 +468,20 @@ export class BasePayload { return findByIDLocal(this, options) } + /** + * @description Find distinct field values + * @param options + * @returns result with distinct field values + */ + findDistinct = async < + TSlug extends CollectionSlug, + TField extends keyof DataFromCollectionSlug & string, + >( + options: FindDistinctOptions, + ): Promise[TField]>>> => { + return findDistinctLocal(this, options) + } + findGlobal = async >( options: FindGlobalOptions, ): Promise> => { @@ -1174,7 +1192,6 @@ export { updateOperation } from './collections/operations/update.js' export { updateByIDOperation } from './collections/operations/updateByID.js' export { buildConfig } from './config/build.js' - export { type ClientConfig, createClientConfig, @@ -1183,6 +1200,7 @@ export { type UnsanitizedClientConfig, } from './config/client.js' export { defaults } from './config/defaults.js' + export { type OrderableEndpointBody } from './config/orderable/index.js' export { sanitizeConfig } from './config/sanitize.js' export type * from './config/types.js' @@ -1237,6 +1255,7 @@ export type { Destroy, Find, FindArgs, + FindDistinct, FindGlobal, FindGlobalArgs, 
FindGlobalVersions, @@ -1250,6 +1269,7 @@ export type { Migration, MigrationData, MigrationTemplateArgs, + PaginatedDistinctDocs, PaginatedDocs, QueryDrafts, QueryDraftsArgs, diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 9bd4ae5418..8cc74b84f7 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -385,6 +385,118 @@ describe('database', () => { }) }) + it('should find distinct field values of the collection', async () => { + await payload.delete({ collection: 'posts', where: {} }) + const titles = [ + 'title-1', + 'title-2', + 'title-3', + 'title-4', + 'title-5', + 'title-6', + 'title-7', + 'title-8', + 'title-9', + ].map((title) => ({ title })) + + for (const { title } of titles) { + // eslint-disable-next-line jest/no-conditional-in-test + const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 2 : 1 + for (let i = 0; i < docsCount; i++) { + await payload.create({ collection: 'posts', data: { title } }) + } + } + + const res = await payload.findDistinct({ + collection: 'posts', + field: 'title', + }) + + expect(res.values).toStrictEqual(titles) + + // const resREST = await restClient + // .GET('/posts/distinct', { + // headers: { + // Authorization: `Bearer ${token}`, + // }, + // query: { sortOrder: 'asc', field: 'title' }, + // }) + // .then((res) => res.json()) + + // expect(resREST.values).toEqual(titles) + + const resLimit = await payload.findDistinct({ + collection: 'posts', + field: 'title', + limit: 3, + }) + + expect(resLimit.values).toStrictEqual( + ['title-1', 'title-2', 'title-3'].map((title) => ({ title })), + ) + // count is still 9 + expect(resLimit.totalDocs).toBe(9) + + const resDesc = await payload.findDistinct({ + collection: 'posts', + sort: '-title', + field: 'title', + }) + + expect(resDesc.values).toStrictEqual(titles.toReversed()) + + const resAscDefault = await payload.findDistinct({ + collection: 'posts', + field: 'title', + }) + + expect(resAscDefault.values).toStrictEqual(titles) + }) + + it('should populate distinct relationships when depth>0', async () => { + await payload.delete({ collection: 'posts', where: {} }) + + const categories = ['category-1', 'category-2', 'category-3', 'category-4'].map((title) => ({ + title, + })) + + const categoriesIDS: { category: string }[] = [] + + for (const { title } of categories) { + const doc = await payload.create({ collection: 'categories', data: { title } }) + categoriesIDS.push({ category: doc.id }) + } + + for (const { category } of categoriesIDS) { + // eslint-disable-next-line jest/no-conditional-in-test + const docsCount = Math.random() > 0.5 ? 3 : Math.random() > 0.5 ? 
2 : 1 + for (let i = 0; i < docsCount; i++) { + await payload.create({ collection: 'posts', data: { title: randomUUID(), category } }) + } + } + + const resultDepth0 = await payload.findDistinct({ + collection: 'posts', + sort: 'category.title', + field: 'category', + }) + expect(resultDepth0.values).toStrictEqual(categoriesIDS) + const resultDepth1 = await payload.findDistinct({ + depth: 1, + collection: 'posts', + field: 'category', + sort: 'category.title', + }) + + for (let i = 0; i < resultDepth1.values.length; i++) { + const fromRes = resultDepth1.values[i] as any + const id = categoriesIDS[i].category as any + const title = categories[i]?.title + expect(fromRes.category.title).toBe(title) + expect(fromRes.category.id).toBe(id) + } + }) + describe('Compound Indexes', () => { beforeEach(async () => { await payload.delete({ collection: 'compound-indexes', where: {} }) diff --git a/test/database/up-down-migration/migrations/20250707_123508.json b/test/database/up-down-migration/migrations/20250714_201659.json similarity index 99% rename from test/database/up-down-migration/migrations/20250707_123508.json rename to test/database/up-down-migration/migrations/20250714_201659.json index f541345218..18d7fcf69c 100644 --- a/test/database/up-down-migration/migrations/20250707_123508.json +++ b/test/database/up-down-migration/migrations/20250714_201659.json @@ -1,5 +1,5 @@ { - "id": "bf183b76-944c-4e83-bd58-4aa993885106", + "id": "80e7a0d2-ffb3-4f22-8597-0442b3ab8102", "prevId": "00000000-0000-0000-0000-000000000000", "version": "7", "dialect": "postgresql", diff --git a/test/database/up-down-migration/migrations/20250707_123508.ts b/test/database/up-down-migration/migrations/20250714_201659.ts similarity index 100% rename from test/database/up-down-migration/migrations/20250707_123508.ts rename to test/database/up-down-migration/migrations/20250714_201659.ts diff --git a/test/database/up-down-migration/migrations/index.ts b/test/database/up-down-migration/migrations/index.ts index 0c0f710443..fea58e46c2 100644 --- a/test/database/up-down-migration/migrations/index.ts +++ b/test/database/up-down-migration/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250707_123508 from './20250707_123508.js' +import * as migration_20250714_201659 from './20250714_201659.js'; export const migrations = [ { - up: migration_20250707_123508.up, - down: migration_20250707_123508.down, - name: '20250707_123508', + up: migration_20250714_201659.up, + down: migration_20250714_201659.down, + name: '20250714_201659' }, -] +]; diff --git a/test/helpers/NextRESTClient.ts b/test/helpers/NextRESTClient.ts index d49d11f33b..30c65eec53 100644 --- a/test/helpers/NextRESTClient.ts +++ b/test/helpers/NextRESTClient.ts @@ -16,7 +16,7 @@ import { devUser } from '../credentials.js' type ValidPath = `/${string}` type RequestOptions = { auth?: boolean - query?: { + query?: { [key: string]: unknown } & { depth?: number fallbackLocale?: string joins?: JoinQuery From 6ae730b33b7124d7c343f968ce67106004d5c6cc Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Thu, 17 Jul 2025 06:24:37 -0700 Subject: [PATCH 045/143] feat(richtext-lexical): export $createLinkNode and $isLinkNode for server use (#13205) Exports `$createLinkNode`, `$isLinkNode` and the equivalent modules for autolinks. 
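These exports make it possible to build or inspect link nodes from server-side code, for example when constructing Lexical editor state programmatically. A minimal sketch of the idea — the `{ fields: { ... } }` argument shape is an assumption about the package's `LinkNode` API and editor setup is omitted, so verify against the actual export signatures before relying on it:

```ts
import { $createLinkNode, $isLinkNode } from '@payloadcms/richtext-lexical'
import { $createParagraphNode, $createTextNode, $getRoot } from 'lexical'

// Intended to run inside editor.update() on a (headless) Lexical editor
// that is configured with Payload's link nodes -- setup not shown here.
function appendLinkParagraph(url: string, label: string) {
  // Assumed fields shape for Payload's LinkNode (url / newTab / linkType).
  const link = $createLinkNode({
    fields: { linkType: 'custom', newTab: false, url },
  })
  link.append($createTextNode(label))

  const paragraph = $createParagraphNode()
  paragraph.append(link)
  $getRoot().append(paragraph)
}

// Elsewhere, $isLinkNode acts as a type guard when walking nodes:
// if ($isLinkNode(node)) { /* node is narrowed to a LinkNode here */ }
```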
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210710489889573 --- packages/richtext-lexical/src/index.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/richtext-lexical/src/index.ts b/packages/richtext-lexical/src/index.ts index d10a51c166..1256d68880 100644 --- a/packages/richtext-lexical/src/index.ts +++ b/packages/richtext-lexical/src/index.ts @@ -925,11 +925,16 @@ export { HeadingFeature, type HeadingFeatureProps } from './features/heading/ser export { HorizontalRuleFeature } from './features/horizontalRule/server/index.js' export { IndentFeature } from './features/indent/server/index.js' -export { AutoLinkNode } from './features/link/nodes/AutoLinkNode.js' -export { LinkNode } from './features/link/nodes/LinkNode.js' +export { + $createAutoLinkNode, + $isAutoLinkNode, + AutoLinkNode, +} from './features/link/nodes/AutoLinkNode.js' +export { $createLinkNode, $isLinkNode, LinkNode } from './features/link/nodes/LinkNode.js' export type { LinkFields } from './features/link/nodes/types.js' export { LinkFeature, type LinkFeatureServerProps } from './features/link/server/index.js' + export { ChecklistFeature } from './features/lists/checklist/server/index.js' export { OrderedListFeature } from './features/lists/orderedList/server/index.js' From 12539c61d4d47aa6caac217414b3dfad676bb2f3 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Thu, 17 Jul 2025 13:24:22 -0400 Subject: [PATCH 046/143] feat(ui): supports collection scoped folders (#12797) As discussed in [this RFC](https://github.com/payloadcms/payload/discussions/12729), this PR supports collection-scoped folders. You can scope folders to multiple collection types or just one. This makes it possible to have folders on a per-collection basis instead of always sharing them across every collection. You can combine this feature with `browseByFolder: false` to completely isolate a collection from other collections.
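For illustration only (the `posts` and `media` slugs are placeholders, and the exact option shape should be checked against the folders docs), a config sketch combining per-collection opt-in with the root-level options:

```ts
import type { Config } from 'payload'

// Only the folder-related parts are shown; the rest of the config is elided.
export const folderConfigSketch: Partial<Config> = {
  collections: [
    {
      slug: 'posts',
      fields: [{ name: 'title', type: 'text' }],
      // Opt this collection into folders.
      folders: true,
    },
    {
      slug: 'media',
      fields: [],
      upload: true,
      // Folders enabled, but excluded from the shared Browse-by-folder view,
      // which keeps this collection's folders isolated from other collections.
      folders: { browseByFolder: false },
    },
  ],
  folders: {
    // Allow scoping individual folders to specific collections (default: true).
    collectionSpecific: true,
  },
}
```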
Things left to do: - [x] ~~Create a custom react component for the selecting of collectionSlugs to filter out available options based on the current folders parameters~~ https://github.com/user-attachments/assets/14cb1f09-8d70-4cb9-b1e2-09da89302995 --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210564397815557 --- .github/workflows/main.yml | 2 + .../src/views/BrowseByFolder/buildView.tsx | 88 +++- .../src/views/CollectionFolders/buildView.tsx | 28 +- packages/payload/src/admin/functions/index.ts | 32 +- .../payload/src/admin/views/folderList.ts | 1 + packages/payload/src/config/defaults.ts | 17 +- packages/payload/src/config/sanitize.ts | 43 +- .../src/folders/addFolderCollection.ts | 51 ++ .../src/folders/addFolderCollections.ts | 56 --- .../src/folders/addFolderFieldToCollection.ts | 33 ++ .../payload/src/folders/buildFolderField.ts | 108 +++++ .../src/folders/createFolderCollection.ts | 163 ++++--- .../folders/endpoints/populateFolderData.ts | 135 ------ .../hooks/ensureSafeCollectionsChange.ts | 144 ++++++ packages/payload/src/folders/types.ts | 15 +- .../utils/formatFolderOrDocumentItem.ts | 1 + .../src/folders/utils/getFolderBreadcrumbs.ts | 2 + .../src/folders/utils/getFolderData.ts | 45 +- .../utils/getFoldersAndDocumentsFromJoin.ts | 8 +- .../utilities/combineWhereConstraints.spec.ts | 86 ++++ .../src/utilities/combineWhereConstraints.ts | 25 +- packages/translations/src/clientKeys.ts | 1 + packages/translations/src/languages/ar.ts | 1 + packages/translations/src/languages/az.ts | 1 + packages/translations/src/languages/bg.ts | 2 + packages/translations/src/languages/bnBd.ts | 2 + packages/translations/src/languages/bnIn.ts | 2 + packages/translations/src/languages/ca.ts | 2 + packages/translations/src/languages/cs.ts | 2 + packages/translations/src/languages/da.ts | 2 + packages/translations/src/languages/de.ts | 2 + packages/translations/src/languages/en.ts | 2 + packages/translations/src/languages/es.ts | 2 + packages/translations/src/languages/et.ts | 1 + packages/translations/src/languages/fa.ts | 1 + packages/translations/src/languages/fr.ts | 2 + packages/translations/src/languages/he.ts | 1 + packages/translations/src/languages/hr.ts | 2 + packages/translations/src/languages/hu.ts | 2 + packages/translations/src/languages/hy.ts | 2 + packages/translations/src/languages/it.ts | 2 + packages/translations/src/languages/ja.ts | 2 + packages/translations/src/languages/ko.ts | 1 + packages/translations/src/languages/lt.ts | 2 + packages/translations/src/languages/lv.ts | 2 + packages/translations/src/languages/my.ts | 1 + packages/translations/src/languages/nb.ts | 1 + packages/translations/src/languages/nl.ts | 2 + packages/translations/src/languages/pl.ts | 2 + packages/translations/src/languages/pt.ts | 2 + packages/translations/src/languages/ro.ts | 2 + packages/translations/src/languages/rs.ts | 2 + .../translations/src/languages/rsLatin.ts | 2 + packages/translations/src/languages/ru.ts | 2 + packages/translations/src/languages/sk.ts | 2 + packages/translations/src/languages/sl.ts | 2 + packages/translations/src/languages/sv.ts | 1 + packages/translations/src/languages/th.ts | 1 + packages/translations/src/languages/tr.ts | 2 + packages/translations/src/languages/uk.ts | 2 + packages/translations/src/languages/vi.ts | 1 + packages/translations/src/languages/zh.ts | 1 + packages/translations/src/languages/zhTw.ts | 1 + .../elements/FolderView/Breadcrumbs/index.tsx | 9 +- 
.../FolderView/CurrentFolderActions/index.tsx | 1 + .../FolderView/DragOverlaySelection/index.tsx | 12 +- .../FolderView/DraggableTableRow/index.tsx | 1 - .../FolderView/DraggableWithClick/index.scss | 2 +- .../FolderView/DraggableWithClick/index.tsx | 20 +- .../Drawers/MoveToFolder/index.scss | 4 + .../FolderView/Drawers/MoveToFolder/index.tsx | 16 +- .../index.scss | 0 .../index.tsx | 2 +- .../{Field => FolderField}/index.scss | 0 .../{Field => FolderField}/index.server.tsx | 2 +- .../FolderView/FolderFileCard/index.scss | 48 +- .../FolderView/FolderFileCard/index.tsx | 75 ++- .../FolderView/FolderFileTable/index.tsx | 32 +- .../FolderView/FolderTypeField/index.tsx | 140 ++++++ .../FolderView/MoveDocToFolder/index.tsx | 5 +- .../elements/FolderView/SortByPill/index.tsx | 31 +- .../ListCreateNewDocInFolderButton.tsx | 5 + packages/ui/src/exports/client/index.ts | 3 +- packages/ui/src/exports/rsc/index.ts | 2 +- packages/ui/src/fields/Checkbox/Input.tsx | 6 +- packages/ui/src/fields/Select/index.tsx | 2 +- .../Folders/groupItemIDsByRelation.ts | 15 + packages/ui/src/providers/Folders/index.tsx | 455 +++++++++++++----- .../ui/src/providers/Folders/selection.ts | 52 -- .../getFolderResultsComponentAndData.tsx | 75 ++- .../ui/src/views/BrowseByFolder/index.tsx | 46 +- .../CollectionFolder/ListSelection/index.tsx | 7 +- .../ui/src/views/CollectionFolder/index.tsx | 23 +- test/folders/e2e.spec.ts | 339 +++++++++---- test/folders/int.spec.ts | 169 ++++++- test/folders/payload-types.ts | 2 + test/folders/tsconfig.json | 3 + .../folders/applyBrowseByFolderTypeFilter.ts | 41 ++ test/helpers/folders/clickFolderCard.ts | 22 +- test/helpers/folders/createFolder.ts | 24 +- test/helpers/folders/createFolderDoc.ts | 26 + test/helpers/folders/createFolderFromDoc.ts | 25 +- 102 files changed, 2127 insertions(+), 768 deletions(-) create mode 100644 packages/payload/src/folders/addFolderCollection.ts delete mode 100644 packages/payload/src/folders/addFolderCollections.ts create mode 100644 packages/payload/src/folders/addFolderFieldToCollection.ts create mode 100644 packages/payload/src/folders/buildFolderField.ts delete mode 100644 packages/payload/src/folders/endpoints/populateFolderData.ts create mode 100644 packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts create mode 100644 packages/payload/src/utilities/combineWhereConstraints.spec.ts rename packages/ui/src/elements/FolderView/{CollectionTypePill => FilterFolderTypePill}/index.scss (100%) rename packages/ui/src/elements/FolderView/{CollectionTypePill => FilterFolderTypePill}/index.tsx (97%) rename packages/ui/src/elements/FolderView/{Field => FolderField}/index.scss (100%) rename packages/ui/src/elements/FolderView/{Field => FolderField}/index.server.tsx (87%) create mode 100644 packages/ui/src/elements/FolderView/FolderTypeField/index.tsx create mode 100644 packages/ui/src/providers/Folders/groupItemIDsByRelation.ts delete mode 100644 packages/ui/src/providers/Folders/selection.ts create mode 100644 test/folders/tsconfig.json create mode 100644 test/helpers/folders/applyBrowseByFolderTypeFilter.ts create mode 100644 test/helpers/folders/createFolderDoc.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 60b6ac9655..e10abad457 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -284,6 +284,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - folders - hooks - lexical__collections__Lexical__e2e__main - 
lexical__collections__Lexical__e2e__blocks @@ -418,6 +419,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - folders - hooks - lexical__collections__Lexical__e2e__main - lexical__collections__Lexical__e2e__blocks diff --git a/packages/next/src/views/BrowseByFolder/buildView.tsx b/packages/next/src/views/BrowseByFolder/buildView.tsx index b57da8a60a..59775ad7d3 100644 --- a/packages/next/src/views/BrowseByFolder/buildView.tsx +++ b/packages/next/src/views/BrowseByFolder/buildView.tsx @@ -58,20 +58,45 @@ export const buildBrowseByFolderView = async ( throw new Error('not-found') } - const browseByFolderSlugs = browseByFolderSlugsFromArgs.filter( + const foldersSlug = config.folders.slug + + /** + * All visible folder enabled collection slugs that the user has read permissions for. + */ + const allowReadCollectionSlugs = browseByFolderSlugsFromArgs.filter( (collectionSlug) => permissions?.collections?.[collectionSlug]?.read && visibleEntities.collections.includes(collectionSlug), ) - const query = queryFromArgs || queryFromReq - const activeCollectionFolderSlugs: string[] = - Array.isArray(query?.relationTo) && query.relationTo.length - ? query.relationTo.filter( - (slug) => - browseByFolderSlugs.includes(slug) || (config.folders && slug === config.folders.slug), - ) - : [...browseByFolderSlugs, config.folders.slug] + const query = + queryFromArgs || + ((queryFromReq + ? { + ...queryFromReq, + relationTo: + typeof queryFromReq?.relationTo === 'string' + ? JSON.parse(queryFromReq.relationTo) + : undefined, + } + : {}) as ListQuery) + + /** + * If a folderID is provided and the relationTo query param exists, + * we filter the collection slugs to only those that are allowed to be read. + * + * If no folderID is provided, only folders should be active and displayed (the root view). + */ + let collectionsToDisplay: string[] = [] + if (folderID && Array.isArray(query?.relationTo)) { + collectionsToDisplay = query.relationTo.filter( + (slug) => allowReadCollectionSlugs.includes(slug) || slug === foldersSlug, + ) + } else if (folderID) { + collectionsToDisplay = [...allowReadCollectionSlugs, foldersSlug] + } else { + collectionsToDisplay = [foldersSlug] + } const { routes: { admin: adminRoute }, } = config @@ -93,14 +118,15 @@ export const buildBrowseByFolderView = async ( }, }) - const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || '_folderOrDocumentTitle' + const sortPreference: FolderSortKeys = browseByFolderPreferences?.sort || 'name' const viewPreference = browseByFolderPreferences?.viewPreference || 'grid' - const { breadcrumbs, documents, FolderResultsComponent, subfolders } = + const { breadcrumbs, documents, folderAssignedCollections, FolderResultsComponent, subfolders } = await getFolderResultsComponentAndData({ - activeCollectionSlugs: activeCollectionFolderSlugs, - browseByFolder: false, + browseByFolder: true, + collectionsToDisplay, displayAs: viewPreference, + folderAssignedCollections: collectionsToDisplay.filter((slug) => slug !== foldersSlug) || [], folderID, req: initPageResult.req, sort: sortPreference, @@ -142,10 +168,33 @@ export const buildBrowseByFolderView = async ( // serverProps, // }) - // documents cannot be created without a parent folder in this view - const allowCreateCollectionSlugs = resolvedFolderID ?
[config.folders.slug, ...browseByFolderSlugs] : [config.folders.slug] + // Filter down allCollectionFolderSlugs by the ones the current folder is assigned to + const allAvailableCollectionSlugs = + folderID && Array.isArray(folderAssignedCollections) && folderAssignedCollections.length + ? allowReadCollectionSlugs.filter((slug) => folderAssignedCollections.includes(slug)) + : allowReadCollectionSlugs + + // Filter down activeCollectionFolderSlugs by the ones the current folder is assigned to + const availableActiveCollectionFolderSlugs = collectionsToDisplay.filter((slug) => { + if (slug === foldersSlug) { + return permissions?.collections?.[foldersSlug]?.read + } else { + return !folderAssignedCollections || folderAssignedCollections.includes(slug) + } + }) + + // Documents cannot be created without a parent folder in this view + const allowCreateCollectionSlugs = ( + resolvedFolderID ? [foldersSlug, ...allAvailableCollectionSlugs] : [foldersSlug] + ).filter((collectionSlug) => { + if (collectionSlug === foldersSlug) { + return permissions?.collections?.[foldersSlug]?.create + } + return ( + permissions?.collections?.[collectionSlug]?.create && + visibleEntities.collections.includes(collectionSlug) + ) + }) return { View: ( @@ -154,8 +203,8 @@ {RenderServerComponent({ clientProps: { // ...folderViewSlots, - activeCollectionFolderSlugs, - allCollectionFolderSlugs: browseByFolderSlugs, + activeCollectionFolderSlugs: availableActiveCollectionFolderSlugs, + allCollectionFolderSlugs: allAvailableCollectionSlugs, allowCreateCollectionSlugs, baseFolderPath: `/browse-by-folder`, breadcrumbs, @@ -163,6 +212,7 @@ disableBulkEdit, documents, enableRowSelections, + folderAssignedCollections, folderFieldName: config.folders.fieldName, folderID: resolvedFolderID || null, FolderResultsComponent, diff --git a/packages/next/src/views/CollectionFolders/buildView.tsx b/packages/next/src/views/CollectionFolders/buildView.tsx index e06bfcc2c0..8a110f20f7 100644 --- a/packages/next/src/views/CollectionFolders/buildView.tsx +++ b/packages/next/src/views/CollectionFolders/buildView.tsx @@ -97,23 +97,28 @@ export const buildCollectionFolderView = async ( }, }) - const sortPreference: FolderSortKeys = - collectionFolderPreferences?.sort || '_folderOrDocumentTitle' + const sortPreference: FolderSortKeys = collectionFolderPreferences?.sort || 'name' const viewPreference = collectionFolderPreferences?.viewPreference || 'grid' const { routes: { admin: adminRoute }, } = config - const { breadcrumbs, documents, FolderResultsComponent, subfolders } = - await getFolderResultsComponentAndData({ - activeCollectionSlugs: [config.folders.slug, collectionSlug], - browseByFolder: false, - displayAs: viewPreference, - folderID, - req: initPageResult.req, - sort: sortPreference, - }) + const { + breadcrumbs, + documents, + folderAssignedCollections, + FolderResultsComponent, + subfolders, + } = await getFolderResultsComponentAndData({ + browseByFolder: false, + collectionsToDisplay: [config.folders.slug, collectionSlug], + displayAs: viewPreference, + folderAssignedCollections: [collectionSlug], + folderID, + req: initPageResult.req, + sort: sortPreference, + }) const resolvedFolderID = breadcrumbs[breadcrumbs.length - 1]?.id @@ -182,6 +187,7 @@ disableBulkEdit, documents, enableRowSelections, + folderAssignedCollections, folderFieldName: config.folders.fieldName, folderID:
resolvedFolderID || null, FolderResultsComponent, diff --git a/packages/payload/src/admin/functions/index.ts b/packages/payload/src/admin/functions/index.ts index e3676a10a7..d14fd04d3a 100644 --- a/packages/payload/src/admin/functions/index.ts +++ b/packages/payload/src/admin/functions/index.ts @@ -1,7 +1,7 @@ import type { ImportMap } from '../../bin/generateImportMap/index.js' import type { SanitizedConfig } from '../../config/types.js' import type { PaginatedDocs } from '../../database/types.js' -import type { CollectionSlug, ColumnPreference } from '../../index.js' +import type { CollectionSlug, ColumnPreference, FolderSortKeys } from '../../index.js' import type { PayloadRequest, Sort, Where } from '../../types/index.js' import type { ColumnsFromURL } from '../../utilities/transformColumnPreferences.js' @@ -78,10 +78,36 @@ export type BuildCollectionFolderViewResult = { } export type GetFolderResultsComponentAndDataArgs = { - activeCollectionSlugs: CollectionSlug[] + /** + * If true and no folderID is provided, only folders will be returned. + * If false, the results will include documents from the active collections. + */ browseByFolder: boolean + /** + * Used to filter document types to include in the results/display. + * + * i.e. ['folders', 'posts'] will only include folders and posts in the results. + * + * collectionsToQuery? + */ + collectionsToDisplay: CollectionSlug[] + /** + * Used to determine how the results should be displayed. + */ displayAs: 'grid' | 'list' + /** + * Used to filter folders by the collections they are assigned to. + * + * i.e. ['posts'] will only include folders that are assigned to the posts collections. + */ + folderAssignedCollections: CollectionSlug[] + /** + * The ID of the folder to filter results by. + */ folderID: number | string | undefined req: PayloadRequest - sort: string + /** + * The sort order for the results. + */ + sort: FolderSortKeys } diff --git a/packages/payload/src/admin/views/folderList.ts b/packages/payload/src/admin/views/folderList.ts index b1074fe467..18b9aac736 100644 --- a/packages/payload/src/admin/views/folderList.ts +++ b/packages/payload/src/admin/views/folderList.ts @@ -30,6 +30,7 @@ export type FolderListViewClientProps = { disableBulkEdit?: boolean documents: FolderOrDocument[] enableRowSelections?: boolean + folderAssignedCollections?: SanitizedCollectionConfig['slug'][] folderFieldName: string folderID: null | number | string FolderResultsComponent: React.ReactNode diff --git a/packages/payload/src/config/defaults.ts b/packages/payload/src/config/defaults.ts index b5a4063bb3..77dc94677c 100644 --- a/packages/payload/src/config/defaults.ts +++ b/packages/payload/src/config/defaults.ts @@ -163,14 +163,17 @@ export const addDefaultsToConfig = (config: Config): Config => { ...(config.auth || {}), } - const hasFolderCollections = config.collections.some((collection) => Boolean(collection.folders)) - if (hasFolderCollections) { + if ( + config.folders !== false && + config.collections.some((collection) => Boolean(collection.folders)) + ) { config.folders = { - slug: foldersSlug, - browseByFolder: true, - debug: false, - fieldName: parentFolderFieldName, - ...(config.folders || {}), + slug: config.folders?.slug ?? foldersSlug, + browseByFolder: config.folders?.browseByFolder ?? true, + collectionOverrides: config.folders?.collectionOverrides || undefined, + collectionSpecific: config.folders?.collectionSpecific ?? true, + debug: config.folders?.debug ?? false, + fieldName: config.folders?.fieldName ?? 
parentFolderFieldName, } } else { config.folders = false diff --git a/packages/payload/src/config/sanitize.ts b/packages/payload/src/config/sanitize.ts index d79b2d73fc..c90ee9703b 100644 --- a/packages/payload/src/config/sanitize.ts +++ b/packages/payload/src/config/sanitize.ts @@ -3,6 +3,7 @@ import type { AcceptedLanguages } from '@payloadcms/translations' import { en } from '@payloadcms/translations/languages/en' import { deepMergeSimple } from '@payloadcms/translations/utilities' +import type { CollectionSlug, GlobalSlug, SanitizedCollectionConfig } from '../index.js' import type { SanitizedJobsConfig } from '../queues/config/types/index.js' import type { Config, @@ -18,15 +19,10 @@ import { sanitizeCollection } from '../collections/config/sanitize.js' import { migrationsCollection } from '../database/migrations/migrationsCollection.js' import { DuplicateCollection, InvalidConfiguration } from '../errors/index.js' import { defaultTimezones } from '../fields/baseFields/timezone/defaultTimezones.js' -import { addFolderCollections } from '../folders/addFolderCollections.js' +import { addFolderCollection } from '../folders/addFolderCollection.js' +import { addFolderFieldToCollection } from '../folders/addFolderFieldToCollection.js' import { sanitizeGlobal } from '../globals/config/sanitize.js' -import { - baseBlockFields, - type CollectionSlug, - formatLabels, - type GlobalSlug, - sanitizeFields, -} from '../index.js' +import { baseBlockFields, formatLabels, sanitizeFields } from '../index.js' import { getLockedDocumentsCollection, lockedDocumentsCollectionSlug, @@ -191,8 +187,6 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise() - await addFolderCollections(config as unknown as Config) - const validRelationships = [ ...(config.collections?.map((c) => c.slug) ?? 
[]), jobsCollectionSlug, @@ -200,6 +194,10 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise 0) { @@ -332,6 +345,16 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise + folderEnabledCollections: CollectionConfig[] + richTextSanitizationPromises?: Array<(config: SanitizedConfig) => Promise> + validRelationships?: string[] +}): Promise { + if (config.folders === false) { + return + } + + let folderCollectionConfig = createFolderCollection({ + slug: config.folders!.slug as string, + collectionSpecific, + debug: config.folders!.debug, + folderEnabledCollections, + folderFieldName: config.folders!.fieldName as string, + }) + + const collectionIndex = config.collections!.push(folderCollectionConfig) + + if ( + Array.isArray(config.folders?.collectionOverrides) && + config?.folders.collectionOverrides.length + ) { + for (const override of config.folders.collectionOverrides) { + folderCollectionConfig = await override({ collection: folderCollectionConfig }) + } + } + + const sanitizedCollectionWithOverrides = await sanitizeCollection( + config as unknown as Config, + folderCollectionConfig, + richTextSanitizationPromises, + validRelationships, + ) + + config.collections![collectionIndex - 1] = sanitizedCollectionWithOverrides +} diff --git a/packages/payload/src/folders/addFolderCollections.ts b/packages/payload/src/folders/addFolderCollections.ts deleted file mode 100644 index deb9323197..0000000000 --- a/packages/payload/src/folders/addFolderCollections.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type { Config } from '../config/types.js' -import type { CollectionSlug } from '../index.js' - -import { createFolderCollection } from './createFolderCollection.js' - -export async function addFolderCollections(config: NonNullable): Promise { - if (!config.collections || !config.folders) { - return - } - - const enabledCollectionSlugs: CollectionSlug[] = [] - const debug = Boolean(config?.folders?.debug) - const folderFieldName = config?.folders?.fieldName as unknown as string - const folderSlug = config?.folders?.slug as unknown as CollectionSlug - - for (let i = 0; i < config.collections.length; i++) { - const collection = config.collections[i] - if (collection && collection?.folders) { - collection.fields.push({ - name: folderFieldName, - type: 'relationship', - admin: { - allowCreate: false, - allowEdit: false, - components: { - Cell: '@payloadcms/ui/rsc#FolderTableCell', - Field: '@payloadcms/ui/rsc#FolderEditField', - }, - }, - index: true, - label: 'Folder', - relationTo: folderSlug, - }) - enabledCollectionSlugs.push(collection.slug) - } - } - - if (enabledCollectionSlugs.length) { - let folderCollection = createFolderCollection({ - slug: folderSlug, - collectionSlugs: enabledCollectionSlugs, - debug, - folderFieldName, - }) - - if ( - Array.isArray(config?.folders?.collectionOverrides) && - config?.folders.collectionOverrides.length - ) { - for (const override of config.folders.collectionOverrides) { - folderCollection = await override({ collection: folderCollection }) - } - } - config.collections.push(folderCollection) - } -} diff --git a/packages/payload/src/folders/addFolderFieldToCollection.ts b/packages/payload/src/folders/addFolderFieldToCollection.ts new file mode 100644 index 0000000000..a4aa6c6860 --- /dev/null +++ b/packages/payload/src/folders/addFolderFieldToCollection.ts @@ -0,0 +1,33 @@ +import type { SanitizedCollectionConfig } from '../index.js' + +import { buildFolderField } from './buildFolderField.js' + +export const 
addFolderFieldToCollection = ({ + collection, + collectionSpecific, + folderFieldName, + folderSlug, +}: { + collection: SanitizedCollectionConfig + collectionSpecific: boolean + folderFieldName: string + folderSlug: string +}): void => { + collection.fields.push( + buildFolderField({ + collectionSpecific, + folderFieldName, + folderSlug, + overrides: { + admin: { + allowCreate: false, + allowEdit: false, + components: { + Cell: '@payloadcms/ui/rsc#FolderTableCell', + Field: '@payloadcms/ui/rsc#FolderField', + }, + }, + }, + }), + ) +} diff --git a/packages/payload/src/folders/buildFolderField.ts b/packages/payload/src/folders/buildFolderField.ts new file mode 100644 index 0000000000..c3920a4d58 --- /dev/null +++ b/packages/payload/src/folders/buildFolderField.ts @@ -0,0 +1,108 @@ +import type { SingleRelationshipField } from '../fields/config/types.js' +import type { Document } from '../types/index.js' + +import { extractID } from '../utilities/extractID.js' + +export const buildFolderField = ({ + collectionSpecific, + folderFieldName, + folderSlug, + overrides = {}, +}: { + collectionSpecific: boolean + folderFieldName: string + folderSlug: string + overrides?: Partial +}): SingleRelationshipField => { + const field: SingleRelationshipField = { + name: folderFieldName, + type: 'relationship', + admin: {}, + hasMany: false, + index: true, + label: 'Folder', + relationTo: folderSlug, + validate: async (value, { collectionSlug, data, overrideAccess, previousValue, req }) => { + if (!collectionSpecific) { + // if collection scoping is not enabled, no validation required since folders can contain any type of document + return true + } + + if (!value) { + // no folder, no validation required + return true + } + + const newID = extractID(value) + if (previousValue && extractID(previousValue) === newID) { + // value did not change, no validation required + return true + } else { + // need to validate that the folder value allows this collection type + let parentFolder: Document = null + if (typeof value === 'string' || typeof value === 'number') { + // need to populate the value with the document + parentFolder = await req.payload.findByID({ + id: newID, + collection: folderSlug, + depth: 0, // no need to populate nested folders + overrideAccess, + req, + select: { + folderType: true, // only need to check folderType + }, + user: req.user, + }) + } + + if (parentFolder && collectionSlug) { + const parentFolderTypes: string[] = (parentFolder.folderType as string[]) || [] + + // if the parent folder has no folder types, it accepts all collections + if (parentFolderTypes.length === 0) { + return true + } + + // validation for a folder document + if (collectionSlug === folderSlug) { + // ensure the parent accepts ALL folder types + const folderTypes: string[] = 'folderType' in data ?
(data.folderType as string[]) : [] + const invalidSlugs = folderTypes.filter((validCollectionSlug: string) => { + return !parentFolderTypes.includes(validCollectionSlug) + }) + if (invalidSlugs.length === 0) { + return true + } else { + return `Folder with ID ${newID} does not allow documents of type ${invalidSlugs.join(', ')}` + } + } + + // validation for a non-folder document + if (parentFolderTypes.includes(collectionSlug)) { + return true + } else { + return `Folder with ID ${newID} does not allow documents of type ${collectionSlug}` + } + } else { + return `Folder with ID ${newID} not found in collection ${folderSlug}` + } + } + }, + } + + if (overrides?.admin) { + field.admin = { + ...field.admin, + ...(overrides.admin || {}), + } + + if (overrides.admin.components) { + field.admin.components = { + ...field.admin.components, + ...(overrides.admin.components || {}), + } + } + } + + return field +} diff --git a/packages/payload/src/folders/createFolderCollection.ts b/packages/payload/src/folders/createFolderCollection.ts index 4da3e3bee7..9e1b8e93cd 100644 --- a/packages/payload/src/folders/createFolderCollection.ts +++ b/packages/payload/src/folders/createFolderCollection.ts @@ -1,74 +1,129 @@ import type { CollectionConfig } from '../collections/config/types.js' +import type { Field, Option, SelectField } from '../fields/config/types.js' -import { populateFolderDataEndpoint } from './endpoints/populateFolderData.js' +import { defaultAccess } from '../auth/defaultAccess.js' +import { buildFolderField } from './buildFolderField.js' +import { foldersSlug } from './constants.js' import { deleteSubfoldersBeforeDelete } from './hooks/deleteSubfoldersAfterDelete.js' import { dissasociateAfterDelete } from './hooks/dissasociateAfterDelete.js' +import { ensureSafeCollectionsChange } from './hooks/ensureSafeCollectionsChange.js' import { reparentChildFolder } from './hooks/reparentChildFolder.js' type CreateFolderCollectionArgs = { - collectionSlugs: string[] + collectionSpecific: boolean debug?: boolean + folderEnabledCollections: CollectionConfig[] folderFieldName: string slug: string } export const createFolderCollection = ({ slug, - collectionSlugs, + collectionSpecific, debug, + folderEnabledCollections, folderFieldName, -}: CreateFolderCollectionArgs): CollectionConfig => ({ - slug, - admin: { - hidden: !debug, - useAsTitle: 'name', - }, - endpoints: [populateFolderDataEndpoint], - fields: [ - { - name: 'name', - type: 'text', - index: true, - required: true, +}: CreateFolderCollectionArgs): CollectionConfig => { + const { collectionOptions, collectionSlugs } = folderEnabledCollections.reduce( + (acc, collection: CollectionConfig) => { + acc.collectionSlugs.push(collection.slug) + acc.collectionOptions.push({ + label: collection.labels?.plural || collection.slug, + value: collection.slug, + }) + + return acc }, { - name: folderFieldName, - type: 'relationship', - admin: { - hidden: !debug, + collectionOptions: [] as Option[], + collectionSlugs: [] as string[], + }, + ) + + return { + slug, + access: { + create: defaultAccess, + delete: defaultAccess, + read: defaultAccess, + readVersions: defaultAccess, + update: defaultAccess, + }, + admin: { + hidden: !debug, + useAsTitle: 'name', + }, + fields: [ + { + name: 'name', + type: 'text', + index: true, + required: true, }, - index: true, - relationTo: slug, - }, - { - name: 'documentsAndFolders', - type: 'join', - admin: { - hidden: !debug, + buildFolderField({ + collectionSpecific, + folderFieldName, + folderSlug: slug, + overrides: { + 
admin: { + hidden: !debug, + }, + }, + }), + { + name: 'documentsAndFolders', + type: 'join', + admin: { + hidden: !debug, + }, + collection: [slug, ...collectionSlugs], + hasMany: true, + on: folderFieldName, }, - collection: [slug, ...collectionSlugs], - hasMany: true, - on: folderFieldName, + ...(collectionSpecific + ? [ + { + name: 'folderType', + type: 'select', + admin: { + components: { + Field: { + clientProps: { + options: collectionOptions, + }, + path: '@payloadcms/ui#FolderTypeField', + }, + }, + position: 'sidebar', + }, + hasMany: true, + options: collectionOptions, + } satisfies SelectField, + ] + : ([] as Field[])), + ], + hooks: { + afterChange: [ + reparentChildFolder({ + folderFieldName, + }), + ], + afterDelete: [ + dissasociateAfterDelete({ + collectionSlugs, + folderFieldName, + }), + ], + beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })], + beforeValidate: [ + ...(collectionSpecific ? [ensureSafeCollectionsChange({ foldersSlug })] : []), + ], }, - ], - hooks: { - afterChange: [ - reparentChildFolder({ - folderFieldName, - }), - ], - afterDelete: [ - dissasociateAfterDelete({ - collectionSlugs, - folderFieldName, - }), - ], - beforeDelete: [deleteSubfoldersBeforeDelete({ folderFieldName, folderSlug: slug })], - }, - labels: { - plural: 'Folders', - singular: 'Folder', - }, - typescript: { - interface: 'FolderInterface', - }, -}) + labels: { + plural: 'Folders', + singular: 'Folder', + }, + typescript: { + interface: 'FolderInterface', + }, + } +} diff --git a/packages/payload/src/folders/endpoints/populateFolderData.ts b/packages/payload/src/folders/endpoints/populateFolderData.ts deleted file mode 100644 index 9347602a9e..0000000000 --- a/packages/payload/src/folders/endpoints/populateFolderData.ts +++ /dev/null @@ -1,135 +0,0 @@ -import httpStatus from 'http-status' - -import type { Endpoint, Where } from '../../index.js' - -import { buildFolderWhereConstraints } from '../utils/buildFolderWhereConstraints.js' -import { getFolderData } from '../utils/getFolderData.js' - -export const populateFolderDataEndpoint: Endpoint = { - handler: async (req) => { - if (!req?.user) { - return Response.json( - { - message: 'Unauthorized request.', - }, - { - status: httpStatus.UNAUTHORIZED, - }, - ) - } - - if ( - !( - req.payload.config.folders && - Boolean(req.payload.collections?.[req.payload.config.folders.slug]) - ) - ) { - return Response.json( - { - message: 'Folders are not configured', - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - // if collectionSlug exists, we need to create constraints for that _specific collection_ and the folder collection - // if collectionSlug does not exist, we need to create constraints for _all folder enabled collections_ and the folder collection - let documentWhere: undefined | Where - let folderWhere: undefined | Where - const collectionSlug = req.searchParams?.get('collectionSlug') - - if (collectionSlug) { - const collectionConfig = req.payload.collections?.[collectionSlug]?.config - - if (!collectionConfig) { - return Response.json( - { - message: `Collection with slug "${collectionSlug}" not found`, - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - const collectionConstraints = await buildFolderWhereConstraints({ - collectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? 
req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - sort: req.searchParams?.get('sort') || undefined, - }) - - if (collectionConstraints) { - documentWhere = collectionConstraints - } - } else { - // loop over all folder enabled collections and build constraints for each - for (const collectionSlug of Object.keys(req.payload.collections)) { - const collectionConfig = req.payload.collections[collectionSlug]?.config - - if (collectionConfig?.folders) { - const collectionConstraints = await buildFolderWhereConstraints({ - collectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - }) - - if (collectionConstraints) { - if (!documentWhere) { - documentWhere = { or: [] } - } - if (!Array.isArray(documentWhere.or)) { - documentWhere.or = [documentWhere] - } else if (Array.isArray(documentWhere.or)) { - documentWhere.or.push(collectionConstraints) - } - } - } - } - } - - const folderCollectionConfig = - req.payload.collections?.[req.payload.config.folders.slug]?.config - - if (!folderCollectionConfig) { - return Response.json( - { - message: 'Folder collection not found', - }, - { - status: httpStatus.NOT_FOUND, - }, - ) - } - - const folderConstraints = await buildFolderWhereConstraints({ - collectionConfig: folderCollectionConfig, - folderID: req.searchParams?.get('folderID') || undefined, - localeCode: typeof req?.locale === 'string' ? req.locale : undefined, - req, - search: req.searchParams?.get('search') || undefined, - }) - - if (folderConstraints) { - folderWhere = folderConstraints - } - - const data = await getFolderData({ - collectionSlug: req.searchParams?.get('collectionSlug') || undefined, - documentWhere: documentWhere ? documentWhere : undefined, - folderID: req.searchParams?.get('folderID') || undefined, - folderWhere, - req, - }) - - return Response.json(data) - }, - method: 'get', - path: '/populate-folder-data', -} diff --git a/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts b/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts new file mode 100644 index 0000000000..cd8e87858f --- /dev/null +++ b/packages/payload/src/folders/hooks/ensureSafeCollectionsChange.ts @@ -0,0 +1,144 @@ +import { APIError, type CollectionBeforeValidateHook, type CollectionSlug } from '../../index.js' +import { extractID } from '../../utilities/extractID.js' +import { getTranslatedLabel } from '../../utilities/getTranslatedLabel.js' + +export const ensureSafeCollectionsChange = + ({ foldersSlug }: { foldersSlug: CollectionSlug }): CollectionBeforeValidateHook => + async ({ data, originalDoc, req }) => { + const currentFolderID = extractID(originalDoc || {}) + const parentFolderID = extractID(data?.folder || originalDoc?.folder || {}) + if (Array.isArray(data?.folderType) && data.folderType.length > 0) { + const folderType = data.folderType as string[] + const currentlyAssignedCollections: string[] | undefined = + Array.isArray(originalDoc?.folderType) && originalDoc.folderType.length > 0 + ? originalDoc.folderType + : undefined + /** + * Check if the assigned collections have changed. + * example: + * - originalAssignedCollections: ['posts', 'pages'] + * - folderType: ['posts'] + * + * The user is narrowing the types of documents that can be associated with this folder. 
+ * If the user is only expanding the types of documents that can be associated with this folder, + * we do not need to do anything. + */ + const newCollections = currentlyAssignedCollections + ? // user is narrowing the current scope of the folder + currentlyAssignedCollections.filter((c) => !folderType.includes(c)) + : // user is adding a scope to the folder + folderType + + if (newCollections && newCollections.length > 0) { + let hasDependentDocuments = false + if (typeof currentFolderID === 'string' || typeof currentFolderID === 'number') { + const childDocumentsResult = await req.payload.findByID({ + id: currentFolderID, + collection: foldersSlug, + joins: { + documentsAndFolders: { + limit: 100_000_000, + where: { + or: [ + { + relationTo: { + in: newCollections, + }, + }, + ], + }, + }, + }, + overrideAccess: true, + req, + }) + + hasDependentDocuments = childDocumentsResult.documentsAndFolders.docs.length > 0 + } + + // matches folders that are directly related to the removed collections + let hasDependentFolders = false + if ( + !hasDependentDocuments && + (typeof currentFolderID === 'string' || typeof currentFolderID === 'number') + ) { + const childFoldersResult = await req.payload.find({ + collection: foldersSlug, + limit: 1, + req, + where: { + and: [ + { + folderType: { + in: newCollections, + }, + }, + { + folder: { + equals: currentFolderID, + }, + }, + ], + }, + }) + hasDependentFolders = childFoldersResult.totalDocs > 0 + } + + if (hasDependentDocuments || hasDependentFolders) { + const translatedLabels = newCollections.map((collectionSlug) => { + if (req.payload.collections[collectionSlug]?.config.labels.singular) { + return getTranslatedLabel( + req.payload.collections[collectionSlug]?.config.labels.plural, + req.i18n, + ) + } + return collectionSlug + }) + + throw new APIError( + `The folder "${data.name || originalDoc.name}" contains ${hasDependentDocuments ? 'documents' : 'folders'} that still belong to the following collections: ${translatedLabels.join(', ')}`, + 400, + ) + } + return data + } + } else if ( + (data?.folderType === null || + (Array.isArray(data?.folderType) && data?.folderType.length === 0)) && + parentFolderID + ) { + // attempting to set the folderType to catch-all, so we need to ensure that the parent allows this + let parentFolder + if (typeof parentFolderID === 'string' || typeof parentFolderID === 'number') { + try { + parentFolder = await req.payload.findByID({ + id: parentFolderID, + collection: foldersSlug, + overrideAccess: true, + req, + select: { + name: true, + folderType: true, + }, + user: req.user, + }) + } catch (_) { + // parent folder does not exist + } + } + + if ( + parentFolder && + parentFolder?.folderType && + Array.isArray(parentFolder.folderType) && + parentFolder.folderType.length > 0 + ) { + throw new APIError( + `The folder "${data?.name || originalDoc.name}" must have folder-type set since its parent folder ${parentFolder?.name ? 
`"${parentFolder?.name}" ` : ''}has a folder-type set.`, + 400, + ) + } + } + + return data + } diff --git a/packages/payload/src/folders/types.ts b/packages/payload/src/folders/types.ts index 3b7b23793e..6ec48abef1 100644 --- a/packages/payload/src/folders/types.ts +++ b/packages/payload/src/folders/types.ts @@ -10,10 +10,12 @@ export type FolderInterface = { }[] } folder?: FolderInterface | (number | string | undefined) + folderType: CollectionSlug[] name: string } & TypeWithID export type FolderBreadcrumb = { + folderType?: CollectionSlug[] id: null | number | string name: string } @@ -58,6 +60,7 @@ export type FolderOrDocument = { _folderOrDocumentTitle: string createdAt?: string folderID?: number | string + folderType: CollectionSlug[] id: number | string updatedAt?: string } & DocumentMediaData @@ -66,6 +69,7 @@ export type FolderOrDocument = { export type GetFolderDataResult = { breadcrumbs: FolderBreadcrumb[] | null documents: FolderOrDocument[] + folderAssignedCollections: CollectionSlug[] | undefined subfolders: FolderOrDocument[] } @@ -85,6 +89,12 @@ export type RootFoldersConfiguration = { }: { collection: CollectionConfig }) => CollectionConfig | Promise)[] + /** + * If true, you can scope folders to specific collections. + * + * @default true + */ + collectionSpecific?: boolean /** * Ability to view hidden fields and collections related to folders * @@ -114,9 +124,6 @@ export type CollectionFoldersConfiguration = { browseByFolder?: boolean } -type BaseFolderSortKeys = keyof Pick< - FolderOrDocument['value'], - '_folderOrDocumentTitle' | 'createdAt' | 'updatedAt' -> +type BaseFolderSortKeys = 'createdAt' | 'name' | 'updatedAt' export type FolderSortKeys = `-${BaseFolderSortKeys}` | BaseFolderSortKeys diff --git a/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts b/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts index 825dbb9545..4f13d17083 100644 --- a/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts +++ b/packages/payload/src/folders/utils/formatFolderOrDocumentItem.ts @@ -23,6 +23,7 @@ export function formatFolderOrDocumentItem({ _folderOrDocumentTitle: String((useAsTitle && value?.[useAsTitle]) || value['id']), createdAt: value?.createdAt, folderID: value?.[folderFieldName], + folderType: value?.folderType || [], updatedAt: value?.updatedAt, } diff --git a/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts b/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts index c2cb4c097a..5e9c2a0102 100644 --- a/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts +++ b/packages/payload/src/folders/utils/getFolderBreadcrumbs.ts @@ -27,6 +27,7 @@ export const getFolderBreadcrumbs = async ({ select: { name: true, [folderFieldName]: true, + folderType: true, }, user, where: { @@ -42,6 +43,7 @@ export const getFolderBreadcrumbs = async ({ breadcrumbs.push({ id: folder.id, name: folder.name, + folderType: folder.folderType, }) if (folder[folderFieldName]) { return getFolderBreadcrumbs({ diff --git a/packages/payload/src/folders/utils/getFolderData.ts b/packages/payload/src/folders/utils/getFolderData.ts index d5efa40ef0..6acfcf49bb 100644 --- a/packages/payload/src/folders/utils/getFolderData.ts +++ b/packages/payload/src/folders/utils/getFolderData.ts @@ -1,6 +1,6 @@ import type { CollectionSlug } from '../../index.js' import type { PayloadRequest, Where } from '../../types/index.js' -import type { GetFolderDataResult } from '../types.js' +import type { FolderOrDocument, FolderSortKeys, GetFolderDataResult } from 
'../types.js' import { parseDocumentID } from '../../index.js' import { getFolderBreadcrumbs } from './getFolderBreadcrumbs.js' @@ -29,6 +29,7 @@ type Args = { */ folderWhere?: Where req: PayloadRequest + sort: FolderSortKeys } /** * Query for documents, subfolders and breadcrumbs for a given folder @@ -39,6 +40,7 @@ export const getFolderData = async ({ folderID: _folderID, folderWhere, req, + sort = 'name', }: Args): Promise => { const { payload } = req @@ -65,15 +67,16 @@ export const getFolderData = async ({ parentFolderID, req, }) - const [breadcrumbs, documentsAndSubfolders] = await Promise.all([ + const [breadcrumbs, result] = await Promise.all([ breadcrumbsPromise, documentAndSubfolderPromise, ]) return { breadcrumbs, - documents: documentsAndSubfolders.documents, - subfolders: documentsAndSubfolders.subfolders, + documents: sortDocs({ docs: result.documents, sort }), + folderAssignedCollections: result.folderAssignedCollections, + subfolders: sortDocs({ docs: result.subfolders, sort }), } } else { // subfolders and documents are queried separately @@ -96,10 +99,40 @@ export const getFolderData = async ({ subfoldersPromise, documentsPromise, ]) + return { breadcrumbs, - documents, - subfolders, + documents: sortDocs({ docs: documents, sort }), + folderAssignedCollections: collectionSlug ? [collectionSlug] : undefined, + subfolders: sortDocs({ docs: subfolders, sort }), } } } + +function sortDocs({ + docs, + sort, +}: { + docs: FolderOrDocument[] + sort?: FolderSortKeys +}): FolderOrDocument[] { + if (!sort) { + return docs + } + const isDesc = typeof sort === 'string' && sort.startsWith('-') + const sortKey = (isDesc ? sort.slice(1) : sort) as FolderSortKeys + + return docs.sort((a, b) => { + let result = 0 + if (sortKey === 'name') { + result = a.value._folderOrDocumentTitle.localeCompare(b.value._folderOrDocumentTitle) + } else if (sortKey === 'createdAt') { + result = + new Date(a.value.createdAt || '').getTime() - new Date(b.value.createdAt || '').getTime() + } else if (sortKey === 'updatedAt') { + result = + new Date(a.value.updatedAt || '').getTime() - new Date(b.value.updatedAt || '').getTime() + } + return isDesc ? 
-result : result + }) +} diff --git a/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts b/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts index ea3ef47af9..98b40276c4 100644 --- a/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts +++ b/packages/payload/src/folders/utils/getFoldersAndDocumentsFromJoin.ts @@ -1,4 +1,5 @@ import type { PaginatedDocs } from '../../database/types.js' +import type { CollectionSlug } from '../../index.js' import type { Document, PayloadRequest, Where } from '../../types/index.js' import type { FolderOrDocument } from '../types.js' @@ -8,6 +9,7 @@ import { formatFolderOrDocumentItem } from './formatFolderOrDocumentItem.js' type QueryDocumentsAndFoldersResults = { documents: FolderOrDocument[] + folderAssignedCollections: CollectionSlug[] subfolders: FolderOrDocument[] } type QueryDocumentsAndFoldersArgs = { @@ -85,5 +87,9 @@ export async function queryDocumentsAndFoldersFromJoin({ }, ) - return results + return { + documents: results.documents, + folderAssignedCollections: subfolderDoc?.docs[0]?.folderType || [], + subfolders: results.subfolders, + } } diff --git a/packages/payload/src/utilities/combineWhereConstraints.spec.ts b/packages/payload/src/utilities/combineWhereConstraints.spec.ts new file mode 100644 index 0000000000..c852a9477b --- /dev/null +++ b/packages/payload/src/utilities/combineWhereConstraints.spec.ts @@ -0,0 +1,86 @@ +import { Where } from '../types/index.js' +import { combineWhereConstraints } from './combineWhereConstraints.js' + +describe('combineWhereConstraints', () => { + it('should merge matching constraint keys', async () => { + const constraint: Where = { + test: { + equals: 'value', + }, + } + + // should merge and queries + const andConstraint: Where = { + and: [constraint], + } + expect(combineWhereConstraints([andConstraint], 'and')).toEqual(andConstraint) + // should merge multiple and queries + expect(combineWhereConstraints([andConstraint, andConstraint], 'and')).toEqual({ + and: [constraint, constraint], + }) + + // should merge or queries + const orConstraint: Where = { + or: [constraint], + } + expect(combineWhereConstraints([orConstraint], 'or')).toEqual(orConstraint) + // should merge multiple or queries + expect(combineWhereConstraints([orConstraint, orConstraint], 'or')).toEqual({ + or: [constraint, constraint], + }) + }) + + it('should push mismatching constraints keys into `as` key', async () => { + const constraint: Where = { + test: { + equals: 'value', + }, + } + + // should push `and` into `or` key + const andConstraint: Where = { + and: [constraint], + } + expect(combineWhereConstraints([andConstraint], 'or')).toEqual({ + or: [andConstraint], + }) + + // should push `or` into `and` key + const orConstraint: Where = { + or: [constraint], + } + expect(combineWhereConstraints([orConstraint], 'and')).toEqual({ + and: [orConstraint], + }) + + // should merge `and` but push `or` into `and` key + expect(combineWhereConstraints([andConstraint, orConstraint], 'and')).toEqual({ + and: [constraint, orConstraint], + }) + }) + + it('should push non and/or constraint key into `as` key', async () => { + const basicConstraint: Where = { + test: { + equals: 'value', + }, + } + + expect(combineWhereConstraints([basicConstraint], 'and')).toEqual({ + and: [basicConstraint], + }) + expect(combineWhereConstraints([basicConstraint], 'or')).toEqual({ + or: [basicConstraint], + }) + }) + + it('should return an empty object when no constraints are provided', async () => { + 
expect(combineWhereConstraints([], 'and')).toEqual({}) + expect(combineWhereConstraints([], 'or')).toEqual({}) + }) + + it('should return an empty object when all constraints are empty', async () => { + expect(combineWhereConstraints([{}, {}, undefined], 'and')).toEqual({}) + expect(combineWhereConstraints([{}, {}, undefined], 'or')).toEqual({}) + }) +}) diff --git a/packages/payload/src/utilities/combineWhereConstraints.ts b/packages/payload/src/utilities/combineWhereConstraints.ts index 4363835aee..2a1b979b04 100644 --- a/packages/payload/src/utilities/combineWhereConstraints.ts +++ b/packages/payload/src/utilities/combineWhereConstraints.ts @@ -8,12 +8,27 @@ export function combineWhereConstraints( return {} } - return { - [as]: constraints.filter((constraint): constraint is Where => { + const reducedConstraints = constraints.reduce>( + (acc: Partial, constraint) => { if (constraint && typeof constraint === 'object' && Object.keys(constraint).length > 0) { - return true + if (as in constraint) { + // merge the objects under the shared key + acc[as] = [...(acc[as] as Where[]), ...(constraint[as] as Where[])] + } else { + // the constraint does not share the key + acc[as]?.push(constraint) + } } - return false - }), + + return acc + }, + { [as]: [] } satisfies Where, + ) + + if (reducedConstraints[as]?.length === 0) { + // If there are no constraints, return an empty object + return {} } + + return reducedConstraints as Where } diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index 0a9c986847..f50aa54f8f 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -134,6 +134,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'folder:browseByFolder', 'folder:deleteFolder', 'folder:folders', + 'folder:folderTypeDescription', 'folder:folderName', 'folder:itemsMovedToFolder', 'folder:itemsMovedToRoot', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index 6a33e7ecbe..ce23c1780c 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -183,6 +183,7 @@ export const arTranslations: DefaultTranslationsObject = { deleteFolder: 'حذف المجلد', folderName: 'اسم المجلد', folders: 'مجلدات', + folderTypeDescription: 'حدد نوع المستندات التي يجب السماح بها في هذا المجلد من المجموعات.', itemHasBeenMoved: 'تم نقل {{title}} إلى {{folderName}}', itemHasBeenMovedToRoot: 'تم نقل {{title}} إلى المجلد الجذر', itemsMovedToFolder: '{{title}} تم نقله إلى {{folderName}}', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index 59726751e4..bc2ecb7ab7 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -186,6 +186,7 @@ export const azTranslations: DefaultTranslationsObject = { deleteFolder: 'Qovluğu Sil', folderName: 'Qovluq Adı', folders: 'Qovluqlar', + folderTypeDescription: 'Bu qovluqda hangi tip kolleksiya sənədlərinə icazə verilməlidir seçin.', itemHasBeenMoved: '{{title}} {{folderName}} qovluğuna köçürüldü.', itemHasBeenMovedToRoot: '{{title}} kök qovluğa köçürüldü.', itemsMovedToFolder: '{{title}} {{folderName}} qovluğuna köçürüldü', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index b507d73d69..308778b051 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -186,6 +186,8 @@ export const 
bgTranslations: DefaultTranslationsObject = { deleteFolder: 'Изтрий папка', folderName: 'Име на папка', folders: 'Папки', + folderTypeDescription: + 'Изберете кой тип документи от колекциите трябва да се допускат в тази папка.', itemHasBeenMoved: '{{title}} е преместен в {{folderName}}', itemHasBeenMovedToRoot: '{{title}} беше преместено в основната папка', itemsMovedToFolder: '{{title}} беше преместен в {{folderName}}', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 50a3462030..9a41c809f9 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -187,6 +187,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { deleteFolder: 'ফোল্ডার মুছুন', folderName: 'ফোল্ডারের নাম', folders: 'ফোল্ডারগুলি', + folderTypeDescription: + 'এই ফোল্ডারে কোন ধরনের সংগ্রহ নথিপত্র অনুমোদিত হওয়া উচিত তা নির্বাচন করুন।', itemHasBeenMoved: '{{title}} কে {{folderName}} এ সরানো হয়েছে', itemHasBeenMovedToRoot: '{{title}} কে মূল ফোল্ডারে সরানো হয়েছে', itemsMovedToFolder: '{{title}} কে {{folderName}} এ সরানো হয়েছে', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 97e1a90f76..8c01eb2f78 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -187,6 +187,8 @@ export const bnInTranslations: DefaultTranslationsObject = { deleteFolder: 'ফোল্ডার মুছুন', folderName: 'ফোল্ডারের নাম', folders: 'ফোল্ডারগুলি', + folderTypeDescription: + 'এই ফোল্ডারে কোন ধরণের কালেকশন ডকুমেন্টস অনুমতি দেওয়া উচিত তা নির্বাচন করুন।', itemHasBeenMoved: '{{title}} কে {{folderName}} এ সরানো হয়েছে', itemHasBeenMovedToRoot: '{{title}} কে মূল ফোল্ডারে সরানো হয়েছে', itemsMovedToFolder: '{{title}} কে {{folderName}} এ সরানো হয়েছে', diff --git a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index 36a9a5823c..c3c2ecead5 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -187,6 +187,8 @@ export const caTranslations: DefaultTranslationsObject = { deleteFolder: 'Esborra la carpeta', folderName: 'Nom de la Carpeta', folders: 'Carpetes', + folderTypeDescription: + 'Seleccioneu quin tipus de documents de la col·lecció haurien de ser permesos en aquesta carpeta.', itemHasBeenMoved: "{{title}} s'ha traslladat a {{folderName}}", itemHasBeenMovedToRoot: "{{title}} s'ha mogut a la carpeta arrel", itemsMovedToFolder: "{{title}} s'ha traslladat a {{folderName}}", diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index 4808651d5e..7f8304f59b 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -186,6 +186,8 @@ export const csTranslations: DefaultTranslationsObject = { deleteFolder: 'Smazat složku', folderName: 'Název složky', folders: 'Složky', + folderTypeDescription: + 'Vyberte, který typ dokumentů ze sbírky by měl být dovolen v této složce.', itemHasBeenMoved: '{{title}} bylo přesunuto do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} byl přesunut do kořenové složky', itemsMovedToFolder: '{{title}} přesunuto do {{folderName}}', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index 0f449e1c2a..ec1ef4b6ef 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -185,6 +185,8 @@ export const daTranslations: DefaultTranslationsObject = 
{ deleteFolder: 'Slet mappe', folderName: 'Mappenavn', folders: 'Mapper', + folderTypeDescription: + 'Vælg hvilken type samling af dokumenter der bør være tilladt i denne mappe.', itemHasBeenMoved: '{{title}} er blevet flyttet til {{folderName}}', itemHasBeenMovedToRoot: '{{title}} er blevet flyttet til rodmappen', itemsMovedToFolder: '{{title}} flyttet til {{folderName}}', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index 9c2fd199d3..ae924bb363 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -191,6 +191,8 @@ export const deTranslations: DefaultTranslationsObject = { deleteFolder: 'Ordner löschen', folderName: 'Ordnername', folders: 'Ordner', + folderTypeDescription: + 'Wählen Sie aus, welche Art von Sammlungsdokumenten in diesem Ordner zugelassen sein sollte.', itemHasBeenMoved: '{{title}} wurde in {{folderName}} verschoben.', itemHasBeenMovedToRoot: '{{title}} wurde in den Hauptordner verschoben', itemsMovedToFolder: '{{title}} wurde in {{folderName}} verschoben.', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index 0b2d0f7694..e1600e45ae 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -186,6 +186,8 @@ export const enTranslations = { deleteFolder: 'Delete Folder', folderName: 'Folder Name', folders: 'Folders', + folderTypeDescription: + 'Select which type of collection documents should be allowed in this folder.', itemHasBeenMoved: '{{title}} has been moved to {{folderName}}', itemHasBeenMovedToRoot: '{{title}} has been moved to the root folder', itemsMovedToFolder: '{{title}} moved to {{folderName}}', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index 311848771a..d91a45c21e 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -190,6 +190,8 @@ export const esTranslations: DefaultTranslationsObject = { deleteFolder: 'Eliminar Carpeta', folderName: 'Nombre de la Carpeta', folders: 'Carpetas', + folderTypeDescription: + 'Seleccione qué tipo de documentos de la colección se deben permitir en esta carpeta.', itemHasBeenMoved: '{{title}} se ha movido a {{folderName}}', itemHasBeenMovedToRoot: '{{title}} se ha movido a la carpeta raíz', itemsMovedToFolder: '{{title}} movido a {{folderName}}', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 1a72d3e422..15c77ebea4 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -185,6 +185,7 @@ export const etTranslations: DefaultTranslationsObject = { deleteFolder: 'Kustuta kaust', folderName: 'Kausta nimi', folders: 'Kaustad', + folderTypeDescription: 'Valige, millist tüüpi kogumiku dokumente peaks selles kaustas lubama.', itemHasBeenMoved: '{{title}} on teisaldatud kausta {{folderName}}', itemHasBeenMovedToRoot: '{{title}} on teisaldatud juurkausta', itemsMovedToFolder: '{{title}} viidi üle kausta {{folderName}}', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index b0c0012eef..1284b1d942 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -184,6 +184,7 @@ export const faTranslations: DefaultTranslationsObject = { deleteFolder: 'حذف پوشه', folderName: 'نام پوشه', folders: 'پوشه‌ها', + folderTypeDescription: 'انتخاب 
کنید که کدام نوع اسناد مجموعه باید در این پوشه مجاز باشند.', itemHasBeenMoved: '{{title}} به {{folderName}} منتقل شده است.', itemHasBeenMovedToRoot: '{{title}} به پوشه اصلی انتقال یافته است.', itemsMovedToFolder: '{{title}} به {{folderName}} منتقل شد.', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index 24ec5fd7b0..c5eab55fda 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -192,6 +192,8 @@ export const frTranslations: DefaultTranslationsObject = { deleteFolder: 'Supprimer le dossier', folderName: 'Nom du dossier', folders: 'Dossiers', + folderTypeDescription: + 'Sélectionnez le type de documents de collection qui devraient être autorisés dans ce dossier.', itemHasBeenMoved: '{{title}} a été déplacé vers {{folderName}}', itemHasBeenMovedToRoot: '{{title}} a été déplacé dans le dossier racine', itemsMovedToFolder: '{{title}} déplacé vers {{folderName}}', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index 32d5ad7200..f7a8d4ff93 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -181,6 +181,7 @@ export const heTranslations: DefaultTranslationsObject = { deleteFolder: 'מחק תיקייה', folderName: 'שם תיקייה', folders: 'תיקיות', + folderTypeDescription: 'בחר איזה סוג של מסמכים מהאוסף יותרו להיות בתיקייה זו.', itemHasBeenMoved: '"{{title}}" הועבר ל- "{{folderName}}"', itemHasBeenMovedToRoot: '"{{title}}" הועבר לתיקיית השורש', itemsMovedToFolder: '{{title}} הועבר אל {{folderName}}', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index 7271e0ac06..320217c8ef 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -187,6 +187,8 @@ export const hrTranslations: DefaultTranslationsObject = { deleteFolder: 'Izbriši mapu', folderName: 'Naziv mape', folders: 'Mape', + folderTypeDescription: + 'Odaberite koja vrsta dokumenata kolekcije treba biti dozvoljena u ovoj mapi.', itemHasBeenMoved: '{{title}} je premješten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premješten u korijensku mapu.', itemsMovedToFolder: '{{title}} premješteno u {{folderName}}', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index eac7af8c04..8aaa81144b 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -188,6 +188,8 @@ export const huTranslations: DefaultTranslationsObject = { deleteFolder: 'Mappa törlése', folderName: 'Mappa neve', folders: 'Mappák', + folderTypeDescription: + 'Válassza ki, hogy milyen típusú dokumentumokat engedélyez ebben a mappában.', itemHasBeenMoved: '{{title}} át lett helyezve a {{folderName}} nevű mappába.', itemHasBeenMovedToRoot: 'A(z) {{title}} át lett helyezve a gyökérmappába.', itemsMovedToFolder: '{{title}} áthelyezve a(z) {{folderName}} mappába', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 925181f244..704b20d8e1 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -186,6 +186,8 @@ export const hyTranslations: DefaultTranslationsObject = { deleteFolder: 'Ջնջել թղթապանակը', folderName: 'Տեսակավորման անվանում', folders: 'Պատուհաններ', + folderTypeDescription: + 'Ընտրեք, թե որն է հավաքածուի փաստաթղթերը, որոնք պետք է թույլատրվեն այս պանակում:', itemHasBeenMoved: 
'{{title}}-ը տեղափոխվել է {{folderName}}-ում', itemHasBeenMovedToRoot: '«{{title}}» տեղափոխվել է արմատային պանակ։', itemsMovedToFolder: '{{title}} տեղափոխվեց {{folderName}}', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index 55bfdbcd8e..3a51ef09b2 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -190,6 +190,8 @@ export const itTranslations: DefaultTranslationsObject = { deleteFolder: 'Elimina cartella', folderName: 'Nome Cartella', folders: 'Cartelle', + folderTypeDescription: + 'Seleziona quale tipo di documenti della collezione dovrebbero essere consentiti in questa cartella.', itemHasBeenMoved: '{{title}} è stato spostato in {{folderName}}', itemHasBeenMovedToRoot: '{{title}} è stato spostato nella cartella principale', itemsMovedToFolder: '{{title}} spostato in {{folderName}}', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 51d9284b96..024cf4e1fe 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -187,6 +187,8 @@ export const jaTranslations: DefaultTranslationsObject = { deleteFolder: 'フォルダを削除する', folderName: 'フォルダ名', folders: 'フォルダー', + folderTypeDescription: + 'このフォルダーに許可されるコレクションドキュメントのタイプを選択してください。', itemHasBeenMoved: '{{title}}は{{folderName}}に移動されました', itemHasBeenMovedToRoot: '{{title}}はルートフォルダに移動されました', itemsMovedToFolder: '{{title}}は{{folderName}}に移動されました', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index e053968388..0d5af0445e 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -186,6 +186,7 @@ export const koTranslations: DefaultTranslationsObject = { deleteFolder: '폴더 삭제', folderName: '폴더 이름', folders: '폴더들', + folderTypeDescription: '이 폴더에서 어떤 유형의 컬렉션 문서가 허용되어야 하는지 선택하세요.', itemHasBeenMoved: '{{title}}는 {{folderName}}로 이동되었습니다.', itemHasBeenMovedToRoot: '{{title}}이(가) 루트 폴더로 이동되었습니다.', itemsMovedToFolder: '{{title}}이(가) {{folderName}}로 이동되었습니다.', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 0a9b605a10..94048058cd 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -188,6 +188,8 @@ export const ltTranslations: DefaultTranslationsObject = { deleteFolder: 'Ištrinti aplanką', folderName: 'Aplanko pavadinimas', folders: 'Aplankai', + folderTypeDescription: + 'Pasirinkite, kokio tipo rinkinio dokumentai turėtų būti leidžiami šiame aplanke.', itemHasBeenMoved: '{{title}} buvo perkeltas į {{folderName}}', itemHasBeenMovedToRoot: '{{title}} buvo perkeltas į pagrindinį katalogą', itemsMovedToFolder: '{{title}} perkeltas į {{folderName}}', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index e7fb84bb10..0dcd973687 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -186,6 +186,8 @@ export const lvTranslations: DefaultTranslationsObject = { deleteFolder: 'Dzēst mapi', folderName: 'Mapes nosaukums', folders: 'Mapes', + folderTypeDescription: + 'Izvēlieties, kāda veida kolekcijas dokumentiem jābūt atļautiem šajā mapē.', itemHasBeenMoved: '{{title}} ir pārvietots uz {{folderName}}', itemHasBeenMovedToRoot: '{{title}} ir pārvietots uz saknes mapi', itemsMovedToFolder: '{{title}} pārvietots uz {{folderName}}', diff --git 
a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index c05bf18710..78c87fa725 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -187,6 +187,7 @@ export const myTranslations: DefaultTranslationsObject = { deleteFolder: 'Padam Folder', folderName: 'ဖိုင်နာမည်', folders: 'Fail', + folderTypeDescription: 'Pilih jenis dokumen koleksi yang harus diizinkan dalam folder ini.', itemHasBeenMoved: '{{title}} telah dipindahkan ke {{folderName}}', itemHasBeenMovedToRoot: '"{{title}}" က ဗဟိုဖိုလ်ဒါသို့ရွှေ့ပြီးပါပြီ။', itemsMovedToFolder: '{{title}} သို့ {{folderName}} သို့ ရွှေ့လိုက်သွားပါပယ်', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index 90c796312c..291b85b6f7 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -186,6 +186,7 @@ export const nbTranslations: DefaultTranslationsObject = { deleteFolder: 'Slett mappe', folderName: 'Mappenavn', folders: 'Mapper', + folderTypeDescription: 'Velg hvilken type samling dokumenter som skal tillates i denne mappen.', itemHasBeenMoved: '{{title}} er flyttet til {{folderName}}', itemHasBeenMovedToRoot: '{{title}} er flyttet til rotmappen', itemsMovedToFolder: '{{title}} flyttet til {{folderName}}', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index 316cc69508..1ba7d51a26 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -188,6 +188,8 @@ export const nlTranslations: DefaultTranslationsObject = { deleteFolder: 'Verwijder map', folderName: 'Mapnaam', folders: 'Mappen', + folderTypeDescription: + 'Selecteer welk type verzameldocumenten toegestaan zou moeten zijn in deze map.', itemHasBeenMoved: '{{title}} is verplaatst naar {{folderName}}', itemHasBeenMovedToRoot: '{{title}} is verplaatst naar de hoofdmap', itemsMovedToFolder: '{{title}} verplaatst naar {{folderName}}', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index 2c12ba691b..1e60b6ac79 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -185,6 +185,8 @@ export const plTranslations: DefaultTranslationsObject = { deleteFolder: 'Usuń folder', folderName: 'Nazwa folderu', folders: 'Foldery', + folderTypeDescription: + 'Wybierz, które typy dokumentów z kolekcji powinny być dozwolone w tym folderze.', itemHasBeenMoved: '{{title}} został przeniesiony do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} został przeniesiony do folderu głównego', itemsMovedToFolder: '{{title}} przeniesiono do {{folderName}}', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index 8b8f95fa9a..ac01e47c00 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -186,6 +186,8 @@ export const ptTranslations: DefaultTranslationsObject = { deleteFolder: 'Apagar Pasta', folderName: 'Nome da Pasta', folders: 'Pastas', + folderTypeDescription: + 'Selecione qual tipo de documentos da coleção devem ser permitidos nesta pasta.', itemHasBeenMoved: '{{title}} foi movido para {{folderName}}', itemHasBeenMovedToRoot: '{{title}} foi movido para a pasta raiz', itemsMovedToFolder: '{{title}} movido para {{folderName}}', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 
38825ec119..34bae916f4 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -190,6 +190,8 @@ export const roTranslations: DefaultTranslationsObject = { deleteFolder: 'Ștergeți dosarul', folderName: 'Nume dosar', folders: 'Dosare', + folderTypeDescription: + 'Selectați ce tip de documente din colecție ar trebui să fie permise în acest dosar.', itemHasBeenMoved: '{{title}} a fost mutat în {{folderName}}', itemHasBeenMovedToRoot: '{{title}} a fost mutat în dosarul rădăcină', itemsMovedToFolder: '{{title}} a fost mutat în {{folderName}}', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index 78803c8e58..1f0701c3f4 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -187,6 +187,8 @@ export const rsTranslations: DefaultTranslationsObject = { deleteFolder: 'Obriši fasciklu', folderName: 'Ime fascikle', folders: 'Fascikle', + folderTypeDescription: + 'Odaberite koja vrsta dokumenata iz kolekcije treba biti dozvoljena u ovom folderu.', itemHasBeenMoved: '{{title}} je premješten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premešten u osnovni direktorijum.', itemsMovedToFolder: '{{title}} premešten u {{folderName}}', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index 31c321059b..2ae83c93db 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -187,6 +187,8 @@ export const rsLatinTranslations: DefaultTranslationsObject = { deleteFolder: 'Obriši mapu', folderName: 'Naziv fascikle', folders: 'Fascikle', + folderTypeDescription: + 'Odaberite koja vrsta dokumenta iz kolekcije bi trebala biti dozvoljena u ovoj fascikli.', itemHasBeenMoved: '{{title}} je premesten u {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je premešten u osnovnu fasciklu', itemsMovedToFolder: '{{title}} premešteno u {{folderName}}', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index e881a1c1d4..b23b2ef9fd 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -188,6 +188,8 @@ export const ruTranslations: DefaultTranslationsObject = { deleteFolder: 'Удалить папку', folderName: 'Название папки', folders: 'Папки', + folderTypeDescription: + 'Выберите, какие типы документов коллекции должны быть разрешены в этой папке.', itemHasBeenMoved: '{{title}} был перемещен в {{folderName}}', itemHasBeenMovedToRoot: '{{title}} был перемещен в корневую папку', itemsMovedToFolder: '{{title}} перемещен в {{folderName}}', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index 44713ae7fe..4c24b250d3 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -189,6 +189,8 @@ export const skTranslations: DefaultTranslationsObject = { deleteFolder: 'Odstrániť priečinok', folderName: 'Názov priečinka', folders: 'Priečinky', + folderTypeDescription: + 'Vyberte, ktorý typ dokumentov z kolekcie by mal byť povolený v tejto zložke.', itemHasBeenMoved: '{{title}} bol presunutý do {{folderName}}', itemHasBeenMovedToRoot: '{{title}} bol presunutý do koreňového priečinka', itemsMovedToFolder: '{{title}} presunuté do {{folderName}}', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index 45954b2d29..02e046a58b 100644 --- 
a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -186,6 +186,8 @@ export const slTranslations: DefaultTranslationsObject = { deleteFolder: 'Izbriši mapo', folderName: 'Ime mape', folders: 'Mape', + folderTypeDescription: + 'Izberite, katere vrste dokumentov zbirke naj bodo dovoljene v tej mapi.', itemHasBeenMoved: '{{title}} je bil premaknjen v {{folderName}}', itemHasBeenMovedToRoot: '{{title}} je bil premaknjen v korensko mapo.', itemsMovedToFolder: '{{title}} premaknjeno v {{folderName}}', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index 3a9b1e12d2..caef27df3b 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -186,6 +186,7 @@ export const svTranslations: DefaultTranslationsObject = { deleteFolder: 'Ta bort mapp', folderName: 'Mappnamn', folders: 'Mappar', + folderTypeDescription: 'Välj vilken typ av samlingsdokument som ska tillåtas i denna mapp.', itemHasBeenMoved: '{{title}} har flyttats till {{folderName}}', itemHasBeenMovedToRoot: '{{title}} har flyttats till rotmappen', itemsMovedToFolder: '{{title}} flyttad till {{folderName}}', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index 8d53deae75..41cb9878b8 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -183,6 +183,7 @@ export const thTranslations: DefaultTranslationsObject = { deleteFolder: 'ลบโฟลเดอร์', folderName: 'ชื่อโฟลเดอร์', folders: 'โฟลเดอร์', + folderTypeDescription: 'เลือกประเภทของเอกสารคอลเลกชันที่ควรอนุญาตในโฟลเดอร์นี้', itemHasBeenMoved: '{{title}} ได้ถูกย้ายไปที่ {{folderName}}', itemHasBeenMovedToRoot: '"{{title}}" ได้ถูกย้ายไปยังโฟลเดอร์ราก', itemsMovedToFolder: '{{title}} ถูกย้ายไปยัง {{folderName}}', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index 1630721cf9..1daaae2925 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -188,6 +188,8 @@ export const trTranslations: DefaultTranslationsObject = { deleteFolder: 'Klasörü Sil', folderName: 'Klasör Adı', folders: 'Klasörler', + folderTypeDescription: + 'Bu klasörde hangi türden koleksiyon belgelerine izin verilmesi gerektiğini seçin.', itemHasBeenMoved: '{{title}} {{folderName}} klasörüne taşındı.', itemHasBeenMovedToRoot: '{{title}} kök klasöre taşındı.', itemsMovedToFolder: "{{title}} {{folderName}}'ye taşındı.", diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index e76e29e6be..eb33c1daac 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -187,6 +187,8 @@ export const ukTranslations: DefaultTranslationsObject = { deleteFolder: 'Видалити папку', folderName: 'Назва папки', folders: 'Папки', + folderTypeDescription: + 'Виберіть, який тип документів колекції повинен бути дозволений у цій папці.', itemHasBeenMoved: '{{title}} було переміщено до {{folderName}}', itemHasBeenMovedToRoot: '{{title}} був переміщений до кореневої папки', itemsMovedToFolder: '{{title}} перенесено до {{folderName}}', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index ae280b4fc0..7f747ef15f 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -186,6 +186,7 @@ export const viTranslations: DefaultTranslationsObject = { 
deleteFolder: 'Xóa Thư mục', folderName: 'Tên thư mục', folders: 'Thư mục', + folderTypeDescription: 'Chọn loại tài liệu bộ sưu tập nào nên được cho phép trong thư mục này.', itemHasBeenMoved: '{{title}} đã được chuyển đến {{folderName}}', itemHasBeenMovedToRoot: '{{title}} đã được chuyển đến thư mục gốc', itemsMovedToFolder: '{{title}} đã được di chuyển vào {{folderName}}', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index 296612d0e1..84a477ba71 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -179,6 +179,7 @@ export const zhTranslations: DefaultTranslationsObject = { deleteFolder: '删除文件夹', folderName: '文件夹名称', folders: '文件夹', + folderTypeDescription: '在此文件夹中选择应允许哪种类型的集合文档。', itemHasBeenMoved: '{{title}}已被移至{{folderName}}', itemHasBeenMovedToRoot: '{{title}}已被移至根文件夹', itemsMovedToFolder: '{{title}}已移至{{folderName}}', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index 6cf18d9773..e659462f6b 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -178,6 +178,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { deleteFolder: '刪除資料夾', folderName: '資料夾名稱', folders: '資料夾', + folderTypeDescription: '在此文件夾中選擇應允許的集合文件類型。', itemHasBeenMoved: '{{title}}已被移至{{folderName}}', itemHasBeenMovedToRoot: '{{title}}已被移至根文件夾', itemsMovedToFolder: '{{title}} 已移至 {{folderName}}', diff --git a/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx b/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx index 9fc54d7eb2..881a85d830 100644 --- a/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx +++ b/packages/ui/src/elements/FolderView/Breadcrumbs/index.tsx @@ -48,10 +48,11 @@ export function DroppableBreadcrumb({ children, className, onClick, -}: { children: React.ReactNode; className?: string; onClick: () => void } & Pick< - FolderBreadcrumb, - 'id' ->) { +}: { + children: React.ReactNode + className?: string + onClick: () => void +} & Pick) { const { isOver, setNodeRef } = useDroppable({ id: `folder-${id}`, data: { diff --git a/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx b/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx index df8b79865b..d0d54ecf00 100644 --- a/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx +++ b/packages/ui/src/elements/FolderView/CurrentFolderActions/index.tsx @@ -107,6 +107,7 @@ export function CurrentFolderActions({ className }: Props) {
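Note on the localization hunks above: every bundled language gains the same `folderTypeDescription` string. Below is a minimal consistency-check sketch; the deep-import specifiers and the serialized-object lookup are assumptions for illustration only, since the key may sit inside a nested group of `DefaultTranslationsObject`.

```ts
// Sketch only: the import specifiers are assumed; adjust to however the
// language objects are actually exported from @payloadcms/translations.
import { deTranslations } from '@payloadcms/translations/languages/de'
import { enTranslations } from '@payloadcms/translations/languages/en'

const languages = { de: deTranslations, en: enTranslations }

// The key may be nested under a group, so search the serialized object rather
// than assuming a specific path.
for (const [locale, translations] of Object.entries(languages)) {
  if (!JSON.stringify(translations).includes('"folderTypeDescription"')) {
    console.warn(`${locale} is missing folderTypeDescription`)
  }
}
```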
    diff --git a/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx b/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx index f34c70f9bd..4ded3ccd1f 100644 --- a/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx +++ b/packages/ui/src/elements/FolderView/DraggableTableRow/index.tsx @@ -69,7 +69,6 @@ export function DraggableTableRow({ ] .filter(Boolean) .join(' ')} - id={itemKey} key={itemKey} onClick={onClick} onKeyDown={onKeyDown} diff --git a/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss b/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss index 1fdff6a1d3..e2d7f25d64 100644 --- a/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss +++ b/packages/ui/src/elements/FolderView/DraggableWithClick/index.scss @@ -1,5 +1,5 @@ @layer payload-default { - .draggable-with-click { + .draggable-with-click:not(.draggable-with-click--disabled) { user-select: none; } } diff --git a/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx b/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx index d99d2e70d9..aced1e06eb 100644 --- a/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx +++ b/packages/ui/src/elements/FolderView/DraggableWithClick/index.tsx @@ -1,5 +1,5 @@ import { useDraggable } from '@dnd-kit/core' -import React, { useRef } from 'react' +import React, { useId, useRef } from 'react' import './index.scss' @@ -9,7 +9,7 @@ type Props = { readonly as?: React.ElementType readonly children?: React.ReactNode readonly className?: string - readonly id: string + readonly disabled?: boolean readonly onClick: (e: React.MouseEvent) => void readonly onKeyDown?: (e: React.KeyboardEvent) => void readonly ref?: React.RefObject @@ -17,16 +17,17 @@ type Props = { } export const DraggableWithClick = ({ - id, as = 'div', children, className, + disabled = false, onClick, onKeyDown, ref, thresholdPixels = 3, }: Props) => { - const { attributes, listeners, setNodeRef } = useDraggable({ id }) + const id = useId() + const { attributes, listeners, setNodeRef } = useDraggable({ id, disabled }) const initialPos = useRef({ x: 0, y: 0 }) const isDragging = useRef(false) @@ -75,10 +76,15 @@ export const DraggableWithClick = ({ role="button" tabIndex={0} {...attributes} - className={`${baseClass} ${className || ''}`.trim()} - onKeyDown={onKeyDown} - onPointerDown={onClick ? handlePointerDown : undefined} + className={[baseClass, className, disabled ? `${baseClass}--disabled` : ''] + .filter(Boolean) + .join(' ')} + onKeyDown={disabled ? undefined : onKeyDown} + onPointerDown={disabled ? undefined : onClick ? 
handlePointerDown : undefined} ref={(node) => { + if (disabled) { + return + } setNodeRef(node) if (ref) { ref.current = node diff --git a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss index 216c9b4c92..e4b74b8a78 100644 --- a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss +++ b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.scss @@ -22,5 +22,9 @@ align-items: center; gap: calc(var(--base) / 2); } + + .item-card-grid__title { + display: none; + } } } diff --git a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx index 614182936a..ba0601174f 100644 --- a/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx +++ b/packages/ui/src/elements/FolderView/Drawers/MoveToFolder/index.tsx @@ -41,6 +41,7 @@ type ActionProps = } export type MoveToFolderDrawerProps = { readonly drawerSlug: string + readonly folderAssignedCollections: CollectionSlug[] readonly folderCollectionSlug: string readonly folderFieldName: string readonly fromFolderID?: number | string @@ -86,11 +87,13 @@ function LoadFolderData(props: MoveToFolderDrawerProps) { async (folderIDToPopulate: null | number | string) => { try { const result = await getFolderResultsComponentAndData({ - activeCollectionSlugs: [props.folderCollectionSlug], browseByFolder: false, + collectionsToDisplay: [props.folderCollectionSlug], displayAs: 'grid', + // todo: should be able to pass undefined, empty array or null and get all folders. Need to look at API for this in the server function + folderAssignedCollections: props.folderAssignedCollections, folderID: folderIDToPopulate, - sort: '_folderOrDocumentTitle', + sort: 'name', }) setBreadcrumbs(result.breadcrumbs || []) @@ -107,7 +110,7 @@ function LoadFolderData(props: MoveToFolderDrawerProps) { hasLoadedRef.current = true }, - [getFolderResultsComponentAndData, props.folderCollectionSlug], + [getFolderResultsComponentAndData, props.folderAssignedCollections, props.folderCollectionSlug], ) React.useEffect(() => { @@ -167,6 +170,7 @@ function Content({ folderFieldName, folderID, FolderResultsComponent, + folderType, getSelectedItems, subfolders, } = useFolder() @@ -229,7 +233,7 @@ function Content({ }, [drawerSlug, isModalOpen, clearRouteCache, folderAddedToUnderlyingFolder]) return ( - <> +
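For context on the `DraggableWithClick` hunk above: the required `id` prop is replaced by React's `useId()` and a `disabled` flag is forwarded to dnd-kit. A minimal sketch of that pattern in isolation, assuming only `@dnd-kit/core` and React; the component and prop names here are illustrative, not part of the patch.

```tsx
import { useDraggable } from '@dnd-kit/core'
import React, { useId } from 'react'

// Illustrative component: the draggable id is generated with useId() instead of
// being passed in, and `disabled` is forwarded so dnd-kit skips the interaction.
export function DraggableExample({
  children,
  disabled = false,
}: {
  children: React.ReactNode
  disabled?: boolean
}) {
  const id = useId()
  const { attributes, listeners, setNodeRef } = useDraggable({ disabled, id })

  return (
    <div ref={disabled ? undefined : setNodeRef} {...attributes} {...listeners}>
      {children}
    </div>
  )
}
```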
    { closeModal(drawerSlug) @@ -298,6 +302,7 @@ function Content({ { void onCreateSuccess({ @@ -321,6 +326,7 @@ function Content({ )} - +
    ) } diff --git a/packages/ui/src/elements/FolderView/CollectionTypePill/index.scss b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.scss similarity index 100% rename from packages/ui/src/elements/FolderView/CollectionTypePill/index.scss rename to packages/ui/src/elements/FolderView/FilterFolderTypePill/index.scss diff --git a/packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx similarity index 97% rename from packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx rename to packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx index 38a05855ba..7c0fbd0462 100644 --- a/packages/ui/src/elements/FolderView/CollectionTypePill/index.tsx +++ b/packages/ui/src/elements/FolderView/FilterFolderTypePill/index.tsx @@ -12,7 +12,7 @@ import './index.scss' const baseClass = 'collection-type' -export function CollectionTypePill() { +export function FilterFolderTypePill() { const { activeCollectionFolderSlugs: visibleCollectionSlugs, allCollectionFolderSlugs: folderCollectionSlugs, diff --git a/packages/ui/src/elements/FolderView/Field/index.scss b/packages/ui/src/elements/FolderView/FolderField/index.scss similarity index 100% rename from packages/ui/src/elements/FolderView/Field/index.scss rename to packages/ui/src/elements/FolderView/FolderField/index.scss diff --git a/packages/ui/src/elements/FolderView/Field/index.server.tsx b/packages/ui/src/elements/FolderView/FolderField/index.server.tsx similarity index 87% rename from packages/ui/src/elements/FolderView/Field/index.server.tsx rename to packages/ui/src/elements/FolderView/FolderField/index.server.tsx index 42da4dd077..ce051fe0b0 100644 --- a/packages/ui/src/elements/FolderView/Field/index.server.tsx +++ b/packages/ui/src/elements/FolderView/FolderField/index.server.tsx @@ -6,7 +6,7 @@ import './index.scss' const baseClass = 'folder-edit-field' -export const FolderEditField = (props: RelationshipFieldServerProps) => { +export const FolderField = (props: RelationshipFieldServerProps) => { if (props.payload.config.folders === false) { return null } diff --git a/packages/ui/src/elements/FolderView/FolderFileCard/index.scss b/packages/ui/src/elements/FolderView/FolderFileCard/index.scss index ef6dfe134b..69ec82e70a 100644 --- a/packages/ui/src/elements/FolderView/FolderFileCard/index.scss +++ b/packages/ui/src/elements/FolderView/FolderFileCard/index.scss @@ -10,6 +10,7 @@ --card-titlebar-icon-color: var(--theme-elevation-300); --card-label-color: var(--theme-text); --card-preview-icon-color: var(--theme-elevation-400); + --assigned-collections-color: var(--theme-elevation-900); position: relative; display: grid; @@ -61,6 +62,7 @@ --card-label-color: var(--theme-success-800); --card-preview-icon-color: var(--theme-success-800); --accessibility-outline: 2px solid var(--theme-success-600); + --assigned-collections-color: var(--theme-success-850); .popup:hover:not(.popup--active) { --card-icon-dots-bg-color: var(--theme-success-100); @@ -74,12 +76,25 @@ } .folder-file-card__icon-wrap .icon { - opacity: 50%; + opacity: 0.5; } .folder-file-card__preview-area .icon { opacity: 0.7; } + + .folder-file-card__preview-area .thumbnail { + &:after { + content: ''; + position: absolute; + top: 0; + left: 0; + background: var(--theme-success-150); + width: 100%; + height: 100%; + mix-blend-mode: hard-light; + } + } } &:not(.folder-file-card--selected) { @@ -104,22 +119,6 @@ } } - &__drag-handle { - position: absolute; - top: 0; - width: 100%; - 
height: 100%; - cursor: pointer; - background: none; - border: none; - padding: 0; - outline-offset: var(--accessibility-outline-offset); - - &:focus-visible { - outline: var(--accessibility-outline); - } - } - &__drop-area { position: absolute; top: 0; @@ -195,13 +194,15 @@ &__titlebar-area { position: relative; pointer-events: none; + display: flex; + flex-direction: column; grid-area: details; border-radius: inherit; display: grid; grid-template-columns: auto 1fr auto; gap: 1rem; align-items: center; - padding: 1rem; + padding: calc(var(--base) / 2); background-color: var(--card-bg-color); .popup { @@ -209,6 +210,10 @@ } } + &__titlebar-labels { + display: grid; + } + &__name { overflow: hidden; font-weight: bold; @@ -219,6 +224,13 @@ color: var(--card-label-color); } + &__assigned-collections { + color: var(--assigned-collections-color); + opacity: 0.5; + margin-top: 4px; + line-height: normal; + } + &__icon-wrap .icon { flex-shrink: 0; color: var(--card-titlebar-icon-color); diff --git a/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx b/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx index f9bb5a9cfc..cfabb80825 100644 --- a/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx +++ b/packages/ui/src/elements/FolderView/FolderFileCard/index.tsx @@ -3,11 +3,14 @@ import type { FolderOrDocument } from 'payload/shared' import { useDroppable } from '@dnd-kit/core' +import { getTranslation } from '@payloadcms/translations' import React from 'react' import { DocumentIcon } from '../../../icons/Document/index.js' import { ThreeDotsIcon } from '../../../icons/ThreeDots/index.js' +import { useConfig } from '../../../providers/Config/index.js' import { useFolder } from '../../../providers/Folders/index.js' +import { useTranslation } from '../../../providers/Translation/index.js' import { Popup } from '../../Popup/index.js' import { Thumbnail } from '../../Thumbnail/index.js' import { ColoredFolderIcon } from '../ColoredFolderIcon/index.js' @@ -19,6 +22,7 @@ const baseClass = 'folder-file-card' type Props = { readonly className?: string readonly disabled?: boolean + readonly folderType?: string[] readonly id: number | string readonly isDeleting?: boolean readonly isFocused?: boolean @@ -37,6 +41,7 @@ export function FolderFileCard({ type, className = '', disabled = false, + folderType, isDeleting = false, isFocused = false, isSelected = false, @@ -54,6 +59,7 @@ export function FolderFileCard({ data: { id, type, + folderType, }, disabled: disableDrop, }) @@ -75,7 +81,7 @@ export function FolderFileCard({ }, [isFocused]) return ( -
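The `FolderFileCard` hunk above now attaches `folderType` to the card's droppable `data`. The patch does not show the consumer, but one plausible use is drop validation; the sketch below is an assumption-level illustration of reading that data back in a drag-end handler. The function name and the "accepts everything when unset" rule are hypothetical.

```ts
import type { DragEndEvent } from '@dnd-kit/core'

// Hypothetical helper, not part of this patch. It only shows how the folderType
// attached to the droppable's data could be read back after a drag ends.
function canDropOnFolder(event: DragEndEvent, draggedCollectionSlug: string): boolean {
  const target = event.over?.data.current as
    | { folderType?: string[]; id: number | string; type: 'file' | 'folder' }
    | undefined

  if (!target || target.type !== 'folder') {
    return false
  }

  // Assumption: a folder without an explicit folderType accepts any collection.
  if (!target.folderType || target.folderType.length === 0) {
    return true
  }

  return target.folderType.includes(draggedCollectionSlug)
}
```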
    - {!disabled && (onClick || onKeyDown) && ( - - )} {!disableDrop ?
    : null} {type === 'file' ? ( @@ -112,9 +112,14 @@ export function FolderFileCard({
    {type === 'file' ? : }
    -

    - {title} -

    +
    +

    + {title} +

    + {folderType && folderType.length > 0 ? ( + + ) : null} +
    {PopupActions ? ( } @@ -127,7 +132,33 @@ export function FolderFileCard({ ) : null}
    -
    + + ) +} + +function AssignedCollections({ folderType }: { folderType: string[] }) { + const { config } = useConfig() + const { i18n } = useTranslation() + + const collectionsDisplayText = React.useMemo(() => { + return folderType.reduce((acc, collection) => { + const collectionConfig = config.collections?.find((c) => c.slug === collection) + if (collectionConfig) { + return [...acc, getTranslation(collectionConfig.labels.plural, i18n)] + } + return acc + }, []) + }, [folderType, config.collections, i18n]) + + return ( +

    + {collectionsDisplayText.map((label, index) => ( + + {label} + {index < folderType.length - 1 ? ', ' : ''} + + ))} +

    ) } @@ -138,20 +169,16 @@ type ContextCardProps = { readonly type: 'file' | 'folder' } export function ContextFolderFileCard({ type, className, index, item }: ContextCardProps) { - const { - focusedRowIndex, - isDragging, - itemKeysToMove, - onItemClick, - onItemKeyPress, - selectedItemKeys, - } = useFolder() + const { checkIfItemIsDisabled, focusedRowIndex, onItemClick, onItemKeyPress, selectedItemKeys } = + useFolder() const isSelected = selectedItemKeys.has(item.itemKey) + const isDisabled = checkIfItemIsDisabled(item) return ( { - const map: Record = {} + const map: Record = {} config.collections.forEach((collection) => { - map[collection.slug] = getTranslation(collection.labels?.singular, i18n) + map[collection.slug] = { + plural: getTranslation(collection.labels?.plural, i18n), + singular: getTranslation(collection.labels?.singular, i18n), + } }) return map }) @@ -94,7 +97,22 @@ export function FolderFileTable({ showRelationCell = true }: Props) { } if (name === 'type') { - cellValue = relationToMap[relationTo] || relationTo + cellValue = ( + <> + {relationToMap[relationTo]?.singular || relationTo} + {Array.isArray(subfolder.value?.folderType) + ? subfolder.value?.folderType.reduce((acc, slug, index) => { + if (index === 0) { + return ` — ${relationToMap[slug]?.plural || slug}` + } + if (index > 0) { + return `${acc}, ${relationToMap[slug]?.plural || slug}` + } + return acc + }, '') + : ''} + + ) } if (index === 0) { @@ -108,7 +126,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { return cellValue } })} - disabled={isDragging && selectedItemKeys?.has(itemKey)} + disabled={checkIfItemIsDisabled(subfolder)} dragData={{ id: subfolderID, type: 'folder', @@ -160,7 +178,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { } if (name === 'type') { - cellValue = relationToMap[relationTo] || relationTo + cellValue = relationToMap[relationTo]?.singular || relationTo } if (index === 0) { @@ -174,7 +192,7 @@ export function FolderFileTable({ showRelationCell = true }: Props) { return cellValue } })} - disabled={isDragging || selectedItemKeys?.has(itemKey)} + disabled={checkIfItemIsDisabled(document)} dragData={{ id: documentID, type: 'document', diff --git a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx new file mode 100644 index 0000000000..2592eff529 --- /dev/null +++ b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx @@ -0,0 +1,140 @@ +import type { Option, OptionObject, SelectFieldClientProps } from 'payload' + +import React from 'react' + +import type { ReactSelectAdapterProps } from '../../ReactSelect/types.js' + +import { mergeFieldStyles } from '../../../fields/mergeFieldStyles.js' +import { formatOptions } from '../../../fields/Select/index.js' +import { SelectInput } from '../../../fields/Select/Input.js' +import { useField } from '../../../forms/useField/index.js' +import { useFolder } from '../../../providers/Folders/index.js' +import { useTranslation } from '../../../providers/Translation/index.js' + +export const FolderTypeField = ({ + options: allSelectOptions, + ...props +}: { options: Option[] } & SelectFieldClientProps) => { + const { + field, + field: { + name, + admin: { + className, + isClearable = true, + isSortable = true, + placeholder, + } = {} as SelectFieldClientProps['field']['admin'], + hasMany = false, + label, + localized, + required, + }, + onChange: onChangeFromProps, + path: pathFromProps, + readOnly, + validate, + } = 
props + const { t } = useTranslation() + + const { folderType } = useFolder() + + const options = React.useMemo(() => { + if (!folderType || folderType.length === 0) { + return formatOptions(allSelectOptions) + } + return formatOptions( + allSelectOptions.filter((option) => { + if (typeof option === 'object' && option.value) { + return folderType.includes(option.value) + } + return true + }), + ) + }, [allSelectOptions, folderType]) + + const memoizedValidate = React.useCallback( + (value, validationOptions) => { + if (typeof validate === 'function') { + return validate(value, { ...validationOptions, hasMany, options, required }) + } + }, + [validate, required, hasMany, options], + ) + + const { + customComponents: { AfterInput, BeforeInput, Description, Error, Label } = {}, + disabled, + path, + selectFilterOptions, + setValue, + showError, + value, + } = useField({ + potentiallyStalePath: pathFromProps, + validate: memoizedValidate, + }) + + const onChange: ReactSelectAdapterProps['onChange'] = React.useCallback( + (selectedOption: OptionObject | OptionObject[]) => { + if (!readOnly || disabled) { + let newValue: string | string[] = null + if (selectedOption && hasMany) { + if (Array.isArray(selectedOption)) { + newValue = selectedOption.map((option) => option.value) + } else { + newValue = [] + } + } else if (selectedOption && !Array.isArray(selectedOption)) { + newValue = selectedOption.value + } + + if (typeof onChangeFromProps === 'function') { + onChangeFromProps(newValue) + } + + setValue(newValue) + } + }, + [readOnly, disabled, hasMany, setValue, onChangeFromProps], + ) + + const styles = React.useMemo(() => mergeFieldStyles(field), [field]) + + return ( +
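The `options` memo in `FolderTypeField` above narrows the selectable collections to whatever `folderType` the folder context already enforces. Restated as a standalone helper for clarity (the function name is illustrative; `Option` is the same type the new file imports from `payload`):

```ts
import type { Option } from 'payload'

// Standalone restatement of the filtering rule: an empty constraint passes all
// options through; otherwise only options whose value is an allowed slug remain.
// Plain string options are kept, mirroring the component above.
function filterOptionsByFolderType(options: Option[], folderType?: string[]): Option[] {
  if (!folderType || folderType.length === 0) {
    return options
  }

  return options.filter((option) => {
    if (typeof option === 'object' && option.value) {
      return folderType.includes(option.value)
    }
    return true
  })
}
```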
    + + selectFilterOptions?.some( + (option) => (typeof option === 'string' ? option : option.value) === value, + ) + : undefined + } + hasMany={hasMany} + isClearable={isClearable} + isSortable={isSortable} + Label={Label} + label={label} + localized={localized} + name={name} + onChange={onChange} + options={options} + path={path} + placeholder={placeholder} + readOnly={readOnly || disabled} + required={required || (Array.isArray(folderType) && folderType.length > 0)} + showError={showError} + style={styles} + value={value as string | string[]} + /> +
    + ) +} diff --git a/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx b/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx index 1b20c13213..2493c04f57 100644 --- a/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx +++ b/packages/ui/src/elements/FolderView/MoveDocToFolder/index.tsx @@ -1,5 +1,6 @@ 'use client' +import type { CollectionSlug } from 'payload' import type { FolderOrDocument } from 'payload/shared' import { useModal } from '@faceless-ui/modal' @@ -16,8 +17,8 @@ import { useDocumentInfo } from '../../../providers/DocumentInfo/index.js' import { useTranslation } from '../../../providers/Translation/index.js' import { Button } from '../../Button/index.js' import { formatDrawerSlug, useDrawerDepth } from '../../Drawer/index.js' -import { MoveItemsToFolderDrawer } from '../Drawers/MoveToFolder/index.js' import './index.scss' +import { MoveItemsToFolderDrawer } from '../Drawers/MoveToFolder/index.js' const baseClass = 'move-doc-to-folder' @@ -151,6 +152,8 @@ export const MoveDocToFolderButton = ({ React.ReactNode - value: keyof FolderOrDocument['value'] + value: FolderSortKeys }[] = [ - { label: (t) => t('general:name'), value: '_folderOrDocumentTitle' }, + { label: (t) => t('general:name'), value: 'name' }, { label: (t) => t('general:createdAt'), value: 'createdAt' }, { label: (t) => t('general:updatedAt'), value: 'updatedAt' }, ] @@ -48,9 +48,9 @@ export function SortByPill() { const { refineFolderData, sort } = useFolder() const { t } = useTranslation() const sortDirection = sort.startsWith('-') ? 'desc' : 'asc' - const [selectedSortOption] = sortOnOptions.filter( - ({ value }) => value === (sort.startsWith('-') ? sort.slice(1) : sort), - ) + const [selectedSortOption] = + sortOnOptions.filter(({ value }) => value === (sort.startsWith('-') ? sort.slice(1) : sort)) || + sortOnOptions const [selectedOrderOption] = orderOnOptions.filter(({ value }) => value === sortDirection) return ( @@ -62,7 +62,7 @@ export function SortByPill() { ) : ( )} - {selectedSortOption.label(t)} + {selectedSortOption?.label(t)} } className={baseClass} @@ -73,12 +73,13 @@ export function SortByPill() { {sortOnOptions.map(({ label, value }) => ( { refineFolderData({ query: { - sort: value, + page: '1', + sort: sortDirection === 'desc' ? `-${value}` : value, }, updateURL: true, }) @@ -94,19 +95,23 @@ export function SortByPill() { {orderOnOptions.map(({ label, value }) => ( { - if (value === 'asc') { + if (sortDirection !== value) { refineFolderData({ query: { - sort: value === 'asc' ? `-${sort}` : sort, + page: '1', + sort: + value === 'desc' + ? 
`-${selectedSortOption?.value}` + : selectedSortOption?.value, }, updateURL: true, }) + close() } - close() }} > {label(t)} diff --git a/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx b/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx index ff8fe24c9a..626ff0c649 100644 --- a/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx +++ b/packages/ui/src/elements/ListHeader/TitleActions/ListCreateNewDocInFolderButton.tsx @@ -18,11 +18,13 @@ const baseClass = 'create-new-doc-in-folder' export function ListCreateNewDocInFolderButton({ buttonLabel, collectionSlugs, + folderAssignedCollections, onCreateSuccess, slugPrefix, }: { buttonLabel: string collectionSlugs: CollectionSlug[] + folderAssignedCollections: CollectionSlug[] onCreateSuccess: (args: { collectionSlug: CollectionSlug doc: Record @@ -133,6 +135,9 @@ export function ListCreateNewDocInFolderButton({ { await onCreateSuccess({ diff --git a/packages/ui/src/exports/client/index.ts b/packages/ui/src/exports/client/index.ts index e7db91cd0d..f33f1981cb 100644 --- a/packages/ui/src/exports/client/index.ts +++ b/packages/ui/src/exports/client/index.ts @@ -123,8 +123,9 @@ export { SaveDraftButton } from '../../elements/SaveDraftButton/index.js' // folder elements export { FolderProvider, useFolder } from '../../providers/Folders/index.js' export { BrowseByFolderButton } from '../../elements/FolderView/BrowseByFolderButton/index.js' -export { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' +export { FolderTypeField } from '../../elements/FolderView/FolderTypeField/index.js' export { FolderFileTable } from '../../elements/FolderView/FolderFileTable/index.js' +export { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' export { type Option as ReactSelectOption, ReactSelect } from '../../elements/ReactSelect/index.js' export { ReactSelect as Select } from '../../elements/ReactSelect/index.js' diff --git a/packages/ui/src/exports/rsc/index.ts b/packages/ui/src/exports/rsc/index.ts index 7e0b00208f..155cd026fe 100644 --- a/packages/ui/src/exports/rsc/index.ts +++ b/packages/ui/src/exports/rsc/index.ts @@ -1,7 +1,7 @@ export { FieldDiffContainer } from '../../elements/FieldDiffContainer/index.js' export { FieldDiffLabel } from '../../elements/FieldDiffLabel/index.js' export { FolderTableCell } from '../../elements/FolderView/Cell/index.server.js' -export { FolderEditField } from '../../elements/FolderView/Field/index.server.js' +export { FolderField } from '../../elements/FolderView/FolderField/index.server.js' export { getHTMLDiffComponents } from '../../elements/HTMLDiff/index.js' export { File } from '../../graphics/File/index.js' export { CheckIcon } from '../../icons/Check/index.js' diff --git a/packages/ui/src/fields/Checkbox/Input.tsx b/packages/ui/src/fields/Checkbox/Input.tsx index 149712a716..43ff3c412b 100644 --- a/packages/ui/src/fields/Checkbox/Input.tsx +++ b/packages/ui/src/fields/Checkbox/Input.tsx @@ -1,7 +1,7 @@ 'use client' import type { StaticLabel } from 'payload' -import React from 'react' +import React, { useId } from 'react' import { RenderCustomComponent } from '../../elements/RenderCustomComponent/index.js' import { FieldLabel } from '../../fields/FieldLabel/index.js' @@ -28,7 +28,7 @@ export type CheckboxInputProps = { export const inputBaseClass = 'checkbox-input' export const CheckboxInput: React.FC = ({ - id, + id: idFromProps, name, AfterInput, BeforeInput, @@ -43,6 +43,8 @@ 
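`SortByPill` above encodes sort direction with a leading `-` on the field name and now resets `page` to `'1'` whenever the sort changes. The helpers below simply restate that encoding; the names are illustrative, not part of the patch.

```ts
// Illustrative helpers mirroring the sort handling above: a leading '-' marks
// descending order, and toggling rebuilds the string from the base field name.
function parseSort(sort: string): { direction: 'asc' | 'desc'; field: string } {
  return sort.startsWith('-')
    ? { direction: 'desc', field: sort.slice(1) }
    : { direction: 'asc', field: sort }
}

function withDirection(field: string, direction: 'asc' | 'desc'): string {
  return direction === 'desc' ? `-${field}` : field
}

// parseSort('-name')                 -> { direction: 'desc', field: 'name' }
// withDirection('createdAt', 'desc') -> '-createdAt'
```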
export const CheckboxInput: React.FC<CheckboxInputProps> = ({ readOnly, required, }) => { + const fallbackID = useId() + const id = idFromProps || fallbackID return (
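The two lines added to `CheckboxInput` above use React's `useId()` as a fallback so the label/input pairing still works when no `id` prop is supplied, while keeping the hook call unconditional. A minimal sketch of the same pattern with an illustrative component name:

```tsx
import React, { useId } from 'react'

// Illustrative only: useId() is always called so hook order stays stable, and
// an id passed by the caller wins over the generated fallback.
export function LabelledCheckbox({ id: idFromProps, label }: { id?: string; label: string }) {
  const fallbackID = useId()
  const id = idFromProps || fallbackID

  return (
    <>
      <label htmlFor={id}>{label}</label>
      <input id={id} type="checkbox" />
    </>
  )
}
```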
    +export const formatOptions = (options: Option[]): OptionObject[] => options.map((option) => { if (typeof option === 'object' && (option.value || option.value === '')) { return option diff --git a/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts b/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts new file mode 100644 index 0000000000..145a8d0935 --- /dev/null +++ b/packages/ui/src/providers/Folders/groupItemIDsByRelation.ts @@ -0,0 +1,15 @@ +import type { FolderOrDocument } from 'payload/shared' + +export function groupItemIDsByRelation(items: FolderOrDocument[]) { + return items.reduce( + (acc, item) => { + if (!acc[item.relationTo]) { + acc[item.relationTo] = [] + } + acc[item.relationTo].push(item.value.id) + + return acc + }, + {} as Record, + ) +} diff --git a/packages/ui/src/providers/Folders/index.tsx b/packages/ui/src/providers/Folders/index.tsx index 62d47883e4..2da311857e 100644 --- a/packages/ui/src/providers/Folders/index.tsx +++ b/packages/ui/src/providers/Folders/index.tsx @@ -14,7 +14,7 @@ import { parseSearchParams } from '../../utilities/parseSearchParams.js' import { useConfig } from '../Config/index.js' import { useRouteTransition } from '../RouteTransition/index.js' import { useTranslation } from '../Translation/index.js' -import { getMetaSelection, getShiftSelection, groupItemIDsByRelation } from './selection.js' +import { groupItemIDsByRelation } from './groupItemIDsByRelation.js' type FolderQueryParams = { page?: string @@ -43,20 +43,22 @@ export type FolderContextValue = { readonly allCollectionFolderSlugs?: CollectionSlug[] allowCreateCollectionSlugs: CollectionSlug[] breadcrumbs?: FolderBreadcrumb[] + checkIfItemIsDisabled: (item: FolderOrDocument) => boolean clearSelections: () => void currentFolder?: FolderOrDocument | null documents?: FolderOrDocument[] + dragOverlayItem?: FolderOrDocument | undefined focusedRowIndex: number folderCollectionConfig: ClientCollectionConfig folderCollectionSlug: string folderFieldName: string folderID?: number | string FolderResultsComponent: React.ReactNode + folderType: CollectionSlug[] | undefined getFolderRoute: (toFolderID?: number | string) => string getSelectedItems?: () => FolderOrDocument[] isDragging: boolean itemKeysToMove?: Set - lastSelectedIndex: null | number moveToFolder: (args: { itemsToMove: FolderOrDocument[] toFolderID?: number | string @@ -69,6 +71,7 @@ export type FolderContextValue = { }) => void refineFolderData: (args: { query?: FolderQueryParams; updateURL: boolean }) => void search: string + selectedFolderCollections?: CollectionSlug[] readonly selectedItemKeys: Set setBreadcrumbs: React.Dispatch> setFocusedRowIndex: React.Dispatch> @@ -82,30 +85,33 @@ const Context = React.createContext({ allCollectionFolderSlugs: [], allowCreateCollectionSlugs: [], breadcrumbs: [], + checkIfItemIsDisabled: () => false, clearSelections: () => {}, currentFolder: null, documents: [], + dragOverlayItem: undefined, focusedRowIndex: -1, folderCollectionConfig: null, folderCollectionSlug: '', folderFieldName: 'folder', folderID: undefined, FolderResultsComponent: null, + folderType: undefined, getFolderRoute: () => '', getSelectedItems: () => [], isDragging: false, itemKeysToMove: undefined, - lastSelectedIndex: null, moveToFolder: () => Promise.resolve(undefined), onItemClick: () => undefined, onItemKeyPress: () => undefined, refineFolderData: () => undefined, search: '', + selectedFolderCollections: undefined, selectedItemKeys: new Set(), setBreadcrumbs: () => {}, setFocusedRowIndex: () => -1, 
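The new `groupItemIDsByRelation` utility above turns a flat selection into per-collection ID lists, which is the shape a bulk move or update typically needs. A hedged usage sketch follows; the selection array and the example slugs are hypothetical.

```ts
import type { FolderOrDocument } from 'payload/shared'

import { groupItemIDsByRelation } from './groupItemIDsByRelation.js'

// Hypothetical selection: in the provider this would come from getSelectedItems().
declare const selectedItems: FolderOrDocument[]

const grouped = groupItemIDsByRelation(selectedItems)
// grouped might look like: { pages: ['1', '2'], posts: ['9'] }

for (const [collectionSlug, ids] of Object.entries(grouped)) {
  console.log(`moving ${ids.length} item(s) from ${collectionSlug}`)
}
```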
setIsDragging: () => false, - sort: '_folderOrDocumentTitle', + sort: 'name', subfolders: [], }) @@ -191,7 +197,7 @@ export function FolderProvider({ FolderResultsComponent: InitialFolderResultsComponent, onItemClick: onItemClickFromProps, search, - sort = '_folderOrDocumentTitle', + sort = 'name', subfolders, }: FolderProviderProps) { const parentFolderContext = useFolder() @@ -202,6 +208,11 @@ export function FolderProvider({ const router = useRouter() const { startRouteTransition } = useRouteTransition() + const currentlySelectedIndexes = React.useRef(new Set()) + + const [selectedFolderCollections, setSelectedFolderCollections] = React.useState< + CollectionSlug[] + >([]) const [FolderResultsComponent, setFolderResultsComponent] = React.useState( InitialFolderResultsComponent || (() => null), ) @@ -221,7 +232,8 @@ export function FolderProvider({ () => new Set(), ) const [focusedRowIndex, setFocusedRowIndex] = React.useState(-1) - const [lastSelectedIndex, setLastSelectedIndex] = React.useState(null) + // This is used to determine what data to display on the drag overlay + const [dragOverlayItem, setDragOverlayItem] = React.useState() const [breadcrumbs, setBreadcrumbs] = React.useState(_breadcrumbsFromProps) const lastClickTime = React.useRef(null) @@ -230,7 +242,8 @@ export function FolderProvider({ const clearSelections = React.useCallback(() => { setFocusedRowIndex(-1) setSelectedItemKeys(new Set()) - setLastSelectedIndex(undefined) + setDragOverlayItem(undefined) + currentlySelectedIndexes.current = new Set() }, []) const mergeQuery = React.useCallback( @@ -245,6 +258,7 @@ export function FolderProvider({ ...currentQuery, ...newQuery, page, + relationTo: 'relationTo' in newQuery ? newQuery.relationTo : currentQuery?.relationTo, search: 'search' in newQuery ? newQuery.search : currentQuery?.search, sort: 'sort' in newQuery ? newQuery.sort : (currentQuery?.sort ?? 
undefined), } @@ -258,8 +272,11 @@ export function FolderProvider({ ({ query, updateURL }) => { if (updateURL) { const newQuery = mergeQuery(query) + startRouteTransition(() => - router.replace(`${qs.stringify(newQuery, { addQueryPrefix: true })}`), + router.replace( + `${qs.stringify({ ...newQuery, relationTo: JSON.stringify(newQuery.relationTo) }, { addQueryPrefix: true })}`, + ), ) setCurrentQuery(newQuery) @@ -301,10 +318,12 @@ export function FolderProvider({ ({ collectionSlug, docID }: { collectionSlug: string; docID?: number | string }) => { if (drawerDepth === 1) { // not in a drawer (default is 1) - clearSelections() if (collectionSlug === folderCollectionSlug) { // clicked on folder, take the user to the folder view - startRouteTransition(() => router.push(getFolderRoute(docID))) + startRouteTransition(() => { + router.push(getFolderRoute(docID)) + clearSelections() + }) } else if (collectionSlug) { // clicked on document, take the user to the documet view startRouteTransition(() => { @@ -314,8 +333,11 @@ export function FolderProvider({ path: `/collections/${collectionSlug}/${docID}`, }), ) + clearSelections() }) } + } else { + clearSelections() } if (typeof onItemClickFromProps === 'function') { @@ -335,97 +357,205 @@ export function FolderProvider({ ], ) + const handleShiftSelection = React.useCallback( + (targetIndex: number) => { + const allItems = [...subfolders, ...documents] + + // Find existing selection boundaries + const existingIndexes = allItems.reduce((acc, item, idx) => { + if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []) + + if (existingIndexes.length === 0) { + // No existing selection, just select target + return [targetIndex] + } + + const firstSelectedIndex = Math.min(...existingIndexes) + const lastSelectedIndex = Math.max(...existingIndexes) + const isWithinBounds = targetIndex >= firstSelectedIndex && targetIndex <= lastSelectedIndex + + // Choose anchor based on whether we're contracting or extending + let anchorIndex = targetIndex + if (isWithinBounds) { + // Contracting: if target is at a boundary, use target as anchor + // Otherwise, use furthest boundary to maintain opposite edge + if (targetIndex === firstSelectedIndex || targetIndex === lastSelectedIndex) { + anchorIndex = targetIndex + } else { + const distanceToFirst = Math.abs(targetIndex - firstSelectedIndex) + const distanceToLast = Math.abs(targetIndex - lastSelectedIndex) + anchorIndex = distanceToFirst >= distanceToLast ? firstSelectedIndex : lastSelectedIndex + } + } else { + // Extending: use closest boundary + const distanceToFirst = Math.abs(targetIndex - firstSelectedIndex) + const distanceToLast = Math.abs(targetIndex - lastSelectedIndex) + anchorIndex = distanceToFirst <= distanceToLast ? 
firstSelectedIndex : lastSelectedIndex + } + + // Create range from anchor to target + const startIndex = Math.min(anchorIndex, targetIndex) + const endIndex = Math.max(anchorIndex, targetIndex) + const newRangeIndexes = Array.from( + { length: endIndex - startIndex + 1 }, + (_, i) => startIndex + i, + ) + + if (isWithinBounds) { + // Contracting: replace with new range + return newRangeIndexes + } else { + // Extending: union with existing + return [...new Set([...existingIndexes, ...newRangeIndexes])] + } + }, + [subfolders, documents, selectedItemKeys], + ) + + const updateSelections = React.useCallback( + ({ indexes }: { indexes: number[] }) => { + const allItems = [...subfolders, ...documents] + const { newSelectedFolderCollections, newSelectedItemKeys } = allItems.reduce( + (acc, item, index) => { + if (indexes.includes(index)) { + acc.newSelectedItemKeys.add(item.itemKey) + if (item.relationTo === folderCollectionSlug) { + item.value.folderType?.forEach((collectionSlug) => { + if (!acc.newSelectedFolderCollections.includes(collectionSlug)) { + acc.newSelectedFolderCollections.push(collectionSlug) + } + }) + } else { + if (!acc.newSelectedFolderCollections.includes(item.relationTo)) { + acc.newSelectedFolderCollections.push(item.relationTo) + } + } + } + return acc + }, + { + newSelectedFolderCollections: [] satisfies CollectionSlug[], + newSelectedItemKeys: new Set(), + }, + ) + + setSelectedFolderCollections(newSelectedFolderCollections) + setSelectedItemKeys(newSelectedItemKeys) + }, + [documents, folderCollectionSlug, subfolders], + ) + const onItemKeyPress: FolderContextValue['onItemKeyPress'] = React.useCallback( - ({ event, index, item }) => { + ({ event, item: currentItem }) => { const { code, ctrlKey, metaKey, shiftKey } = event const isShiftPressed = shiftKey const isCtrlPressed = ctrlKey || metaKey - let newSelectedIndexes: Set | undefined = undefined + const isCurrentlySelected = selectedItemKeys.has(currentItem.itemKey) + const allItems = [...subfolders, ...documents] + const currentItemIndex = allItems.findIndex((item) => item.itemKey === currentItem.itemKey) switch (code) { - case 'ArrowDown': { - event.preventDefault() - const nextIndex = Math.min(index + 1, totalCount - 1) - setFocusedRowIndex(nextIndex) - - if (isCtrlPressed) { - break - } - - if (allowMultiSelection && isShiftPressed) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: Math.min(lastSelectedIndex, totalCount), - selectToIndex: Math.min(nextIndex, totalCount), - }) - } else { - setLastSelectedIndex(nextIndex) - newSelectedIndexes = new Set([nextIndex]) - } - break - } + case 'ArrowDown': + case 'ArrowLeft': + case 'ArrowRight': case 'ArrowUp': { event.preventDefault() - const prevIndex = Math.max(index - 1, 0) - setFocusedRowIndex(prevIndex) + + if (currentItemIndex === -1) { + break + } + + const isBackward = code === 'ArrowLeft' || code === 'ArrowUp' + const newItemIndex = isBackward ? 
currentItemIndex - 1 : currentItemIndex + 1 + + if (newItemIndex < 0 || newItemIndex > totalCount - 1) { + // out of bounds, keep current selection + return + } + + setFocusedRowIndex(newItemIndex) if (isCtrlPressed) { break } - if (allowMultiSelection && isShiftPressed) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: lastSelectedIndex, - selectToIndex: prevIndex, - }) - } else { - setLastSelectedIndex(prevIndex) - newSelectedIndexes = new Set([prevIndex]) + if (isShiftPressed && allowMultiSelection) { + const selectedIndexes = handleShiftSelection(newItemIndex) + updateSelections({ indexes: selectedIndexes }) + return } + + // Single selection without shift + if (!isShiftPressed) { + const newItem = allItems[newItemIndex] + setSelectedItemKeys(new Set([newItem.itemKey])) + } + break } case 'Enter': { if (selectedItemKeys.size === 1) { - newSelectedIndexes = new Set([]) setFocusedRowIndex(undefined) + navigateAfterSelection({ + collectionSlug: currentItem.relationTo, + docID: extractID(currentItem.value), + }) + return } break } case 'Escape': { - setFocusedRowIndex(undefined) - newSelectedIndexes = new Set([]) + clearSelections() break } case 'KeyA': { if (allowMultiSelection && isCtrlPressed) { event.preventDefault() setFocusedRowIndex(totalCount - 1) - newSelectedIndexes = new Set(Array.from({ length: totalCount }, (_, i) => i)) + updateSelections({ + indexes: Array.from({ length: totalCount }, (_, i) => i), + }) } break } case 'Space': { if (allowMultiSelection && isShiftPressed) { event.preventDefault() - newSelectedIndexes = getMetaSelection({ - currentSelection: newSelectedIndexes, - toggleIndex: index, + const allItems = [...subfolders, ...documents] + updateSelections({ + indexes: allItems.reduce((acc, item, idx) => { + if (item.itemKey === currentItem.itemKey) { + if (isCurrentlySelected) { + return acc + } else { + acc.push(idx) + } + } else if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []), }) - setLastSelectedIndex(index) } else { event.preventDefault() - newSelectedIndexes = new Set([index]) - setLastSelectedIndex(index) + updateSelections({ + indexes: isCurrentlySelected ? 
[] : [currentItemIndex], + }) } break } case 'Tab': { if (allowMultiSelection && isShiftPressed) { - const prevIndex = index - 1 - if (prevIndex < 0 && newSelectedIndexes?.size > 0) { + const prevIndex = currentItemIndex - 1 + if (prevIndex < 0 && selectedItemKeys?.size > 0) { setFocusedRowIndex(prevIndex) } } else { - const nextIndex = index + 1 + const nextIndex = currentItemIndex + 1 if (nextIndex === totalCount && selectedItemKeys.size > 0) { setFocusedRowIndex(totalCount - 1) } @@ -433,101 +563,100 @@ export function FolderProvider({ break } } - - if (!newSelectedIndexes) { - return - } - - setSelectedItemKeys( - [...subfolders, ...documents].reduce((acc, item, index) => { - if (newSelectedIndexes?.size && newSelectedIndexes.has(index)) { - acc.add(item.itemKey) - } - return acc - }, new Set()), - ) - - if (selectedItemKeys.size === 1 && code === 'Enter') { - navigateAfterSelection({ - collectionSlug: item.relationTo, - docID: extractID(item.value), - }) - } }, [ - allowMultiSelection, - documents, - lastSelectedIndex, - navigateAfterSelection, - subfolders, - totalCount, selectedItemKeys, + subfolders, + documents, + allowMultiSelection, + handleShiftSelection, + updateSelections, + navigateAfterSelection, + clearSelections, + totalCount, ], ) const onItemClick: FolderContextValue['onItemClick'] = React.useCallback( - ({ event, index, item }) => { + ({ event, item: clickedItem }) => { let doubleClicked: boolean = false const isCtrlPressed = event.ctrlKey || event.metaKey const isShiftPressed = event.shiftKey - let newSelectedIndexes: Set | undefined = undefined + const isCurrentlySelected = selectedItemKeys.has(clickedItem.itemKey) + const allItems = [...subfolders, ...documents] + const currentItemIndex = allItems.findIndex((item) => item.itemKey === clickedItem.itemKey) if (allowMultiSelection && isCtrlPressed) { - newSelectedIndexes = getMetaSelection({ - currentSelection: newSelectedIndexes, - toggleIndex: index, - }) - } else if (allowMultiSelection && isShiftPressed && lastSelectedIndex !== undefined) { - newSelectedIndexes = getShiftSelection({ - selectFromIndex: lastSelectedIndex, - selectToIndex: index, - }) + event.preventDefault() + let overlayItemKey: FolderDocumentItemKey | undefined + const indexes = allItems.reduce((acc, item, idx) => { + if (item.itemKey === clickedItem.itemKey) { + if (isCurrentlySelected && event.type !== 'pointermove') { + return acc + } else { + acc.push(idx) + overlayItemKey = item.itemKey + } + } else if (selectedItemKeys.has(item.itemKey)) { + acc.push(idx) + } + return acc + }, []) + + updateSelections({ indexes }) + + if (overlayItemKey) { + setDragOverlayItem(getItem(overlayItemKey)) + } + } else if (allowMultiSelection && isShiftPressed) { + if (currentItemIndex !== -1) { + const selectedIndexes = handleShiftSelection(currentItemIndex) + updateSelections({ indexes: selectedIndexes }) + } } else if (allowMultiSelection && event.type === 'pointermove') { // on drag start of an unselected item - if (!selectedItemKeys.has(item.itemKey)) { - newSelectedIndexes = new Set([index]) + if (!isCurrentlySelected) { + updateSelections({ + indexes: allItems.reduce((acc, item, idx) => { + if (item.itemKey === clickedItem.itemKey) { + acc.push(idx) + } + return acc + }, []), + }) } - setLastSelectedIndex(index) + setDragOverlayItem(getItem(clickedItem.itemKey)) } else { // Normal click - select single item - newSelectedIndexes = new Set([index]) const now = Date.now() - doubleClicked = now - lastClickTime.current < 400 && lastSelectedIndex === index + 
doubleClicked = + now - lastClickTime.current < 400 && dragOverlayItem?.itemKey === clickedItem.itemKey lastClickTime.current = now - setLastSelectedIndex(index) - } - - if (!newSelectedIndexes) { - setFocusedRowIndex(undefined) - } else { - setFocusedRowIndex(index) - } - - if (newSelectedIndexes) { - setSelectedItemKeys( - [...subfolders, ...documents].reduce((acc, item, index) => { - if (newSelectedIndexes.size && newSelectedIndexes.has(index)) { - acc.add(item.itemKey) - } - return acc - }, new Set()), - ) + if (!doubleClicked) { + updateSelections({ + indexes: isCurrentlySelected && selectedItemKeys.size === 1 ? [] : [currentItemIndex], + }) + } + setDragOverlayItem(getItem(clickedItem.itemKey)) } if (doubleClicked) { navigateAfterSelection({ - collectionSlug: item.relationTo, - docID: extractID(item.value), + collectionSlug: clickedItem.relationTo, + docID: extractID(clickedItem.value), }) } }, [ selectedItemKeys, - allowMultiSelection, - lastSelectedIndex, subfolders, documents, + allowMultiSelection, + dragOverlayItem, + getItem, + updateSelections, navigateAfterSelection, + handleShiftSelection, ], ) @@ -602,6 +731,70 @@ export function FolderProvider({ [folderID, clearSelections, folderCollectionSlug, folderFieldName, routes.api, serverURL, t], ) + const checkIfItemIsDisabled: FolderContextValue['checkIfItemIsDisabled'] = React.useCallback( + (item) => { + function folderAcceptsItem({ + item, + selectedFolderCollections, + }: { + item: FolderOrDocument + selectedFolderCollections: string[] + }): boolean { + if ( + !item.value.folderType || + (Array.isArray(item.value.folderType) && item.value.folderType.length === 0) + ) { + // Enable folder that accept all collections + return false + } + + if (selectedFolderCollections.length === 0) { + // If no collections are selected, enable folders that accept all collections + return Boolean(item.value.folderType || item.value.folderType.length > 0) + } + + // Disable folders that do not accept all of the selected collections + return selectedFolderCollections.some((slug) => { + return !item.value.folderType.includes(slug) + }) + } + + if (isDragging) { + const isSelected = selectedItemKeys.has(item.itemKey) + if (isSelected) { + return true + } else if (item.relationTo === folderCollectionSlug) { + return folderAcceptsItem({ item, selectedFolderCollections }) + } else { + // Non folder items are disabled on drag + return true + } + } else if (parentFolderContext?.selectedItemKeys?.size) { + // Disable selected items from being navigated to in move to drawer + if (parentFolderContext.selectedItemKeys.has(item.itemKey)) { + return true + } + // Moving items to folder + if (item.relationTo === folderCollectionSlug) { + return folderAcceptsItem({ + item, + selectedFolderCollections: parentFolderContext.selectedFolderCollections, + }) + } + // If the item is not a folder, it is disabled on move + return true + } + }, + [ + selectedFolderCollections, + isDragging, + selectedItemKeys, + folderCollectionSlug, + parentFolderContext?.selectedFolderCollections, + parentFolderContext?.selectedItemKeys, + ], + ) + // If a new component is provided, update the state so children can re-render with the new component React.useEffect(() => { if (InitialFolderResultsComponent) { @@ -616,33 +809,37 @@ export function FolderProvider({ allCollectionFolderSlugs, allowCreateCollectionSlugs, breadcrumbs, + checkIfItemIsDisabled, clearSelections, - currentFolder: breadcrumbs?.[0]?.id - ? 
formatFolderOrDocumentItem({ - folderFieldName, - isUpload: false, - relationTo: folderCollectionSlug, - useAsTitle: folderCollectionConfig.admin.useAsTitle, - value: breadcrumbs[breadcrumbs.length - 1], - }) - : null, + currentFolder: + breadcrumbs?.[breadcrumbs.length - 1]?.id !== undefined + ? formatFolderOrDocumentItem({ + folderFieldName, + isUpload: false, + relationTo: folderCollectionSlug, + useAsTitle: folderCollectionConfig.admin.useAsTitle, + value: breadcrumbs[breadcrumbs.length - 1], + }) + : null, documents, + dragOverlayItem, focusedRowIndex, folderCollectionConfig, folderCollectionSlug, folderFieldName, folderID, FolderResultsComponent, + folderType: breadcrumbs?.[breadcrumbs.length - 1]?.folderType, getFolderRoute, getSelectedItems, isDragging, itemKeysToMove: parentFolderContext.selectedItemKeys, - lastSelectedIndex, moveToFolder, onItemClick, onItemKeyPress, refineFolderData, search, + selectedFolderCollections, selectedItemKeys, setBreadcrumbs, setFocusedRowIndex, diff --git a/packages/ui/src/providers/Folders/selection.ts b/packages/ui/src/providers/Folders/selection.ts deleted file mode 100644 index b3b1f932ca..0000000000 --- a/packages/ui/src/providers/Folders/selection.ts +++ /dev/null @@ -1,52 +0,0 @@ -import type { FolderOrDocument } from 'payload/shared' - -export function getShiftSelection({ - selectFromIndex, - selectToIndex, -}: { - selectFromIndex: number - selectToIndex: number -}): Set { - if (selectFromIndex === null || selectFromIndex === undefined) { - return new Set([selectToIndex]) - } - - const start = Math.min(selectToIndex, selectFromIndex) - const end = Math.max(selectToIndex, selectFromIndex) - const rangeSelection = new Set( - Array.from({ length: Math.max(start, end) + 1 }, (_, i) => i).filter((index) => { - return index >= start && index <= end - }), - ) - return rangeSelection -} - -export function getMetaSelection({ - currentSelection, - toggleIndex, -}: { - currentSelection: Set - toggleIndex: number -}): Set { - const newSelection = new Set(currentSelection) - if (newSelection.has(toggleIndex)) { - newSelection.delete(toggleIndex) - } else { - newSelection.add(toggleIndex) - } - return newSelection -} - -export function groupItemIDsByRelation(items: FolderOrDocument[]) { - return items.reduce( - (acc, item) => { - if (!acc[item.relationTo]) { - acc[item.relationTo] = [] - } - acc[item.relationTo].push(item.value.id) - - return acc - }, - {} as Record, - ) -} diff --git a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx index 30c6830de0..69378293b6 100644 --- a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx +++ b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx @@ -8,7 +8,7 @@ import type { import type { FolderBreadcrumb, FolderOrDocument } from 'payload/shared' import { APIError, formatErrors, getFolderData } from 'payload' -import { buildFolderWhereConstraints } from 'payload/shared' +import { buildFolderWhereConstraints, combineWhereConstraints } from 'payload/shared' import { FolderFileTable, @@ -19,6 +19,7 @@ import { type GetFolderResultsComponentAndDataResult = { breadcrumbs?: FolderBreadcrumb[] documents?: FolderOrDocument[] + folderAssignedCollections?: CollectionSlug[] FolderResultsComponent: React.ReactNode subfolders?: FolderOrDocument[] } @@ -45,17 +46,10 @@ export const getFolderResultsComponentAndDataHandler: ServerFunction< const res = await getFolderResultsComponentAndData(args) return res } catch (err) { - 
req.payload.logger.error({ err, msg: `There was an error building form state` }) - - if (err.message === 'Could not find field schema for given path') { - return { - message: err.message, - } - } - - if (err.message === 'Unauthorized') { - return null - } + req.payload.logger.error({ + err, + msg: `There was an error getting the folder results component and data`, + }) return formatErrors(err) } @@ -64,16 +58,12 @@ export const getFolderResultsComponentAndDataHandler: ServerFunction< /** * This function is responsible for fetching folder data, building the results component * and returns the data and component together. - * - * - * Open ended questions: - * - If we rerender the results section, does the provider update?? I dont think so, if the provider is on the server. - * Maybe we should move the provider to the client. */ export const getFolderResultsComponentAndData = async ({ - activeCollectionSlugs, - browseByFolder, + browseByFolder = false, + collectionsToDisplay: activeCollectionSlugs, displayAs, + folderAssignedCollections, folderID = undefined, req, sort, @@ -84,9 +74,17 @@ export const getFolderResultsComponentAndData = async ({ throw new APIError('Folders are not enabled in the configuration.') } + const emptyQuery = { + id: { + exists: false, + }, + } + let collectionSlug: CollectionSlug | undefined = undefined - let documentWhere: undefined | Where = undefined - let folderWhere: undefined | Where = undefined + let documentWhere: undefined | Where = + Array.isArray(activeCollectionSlugs) && !activeCollectionSlugs.length ? emptyQuery : undefined + let folderWhere: undefined | Where = + Array.isArray(activeCollectionSlugs) && !activeCollectionSlugs.length ? emptyQuery : undefined // todo(perf): - collect promises and resolve them in parallel for (const activeCollectionSlug of activeCollectionSlugs) { @@ -103,6 +101,39 @@ export const getFolderResultsComponentAndData = async ({ if (folderCollectionConstraints) { folderWhere = folderCollectionConstraints } + + folderWhere = combineWhereConstraints([ + folderWhere, + Array.isArray(folderAssignedCollections) && + folderAssignedCollections.length && + payload.config.folders.collectionSpecific + ? 
{ + or: [ + { + folderType: { + in: folderAssignedCollections, + }, + }, + // if the folderType is not set, it means it accepts all collections and should appear in the results + { + folderType: { + exists: false, + }, + }, + { + folderType: { + equals: [], + }, + }, + { + folderType: { + equals: null, + }, + }, + ], + } + : undefined, + ]) } else if ((browseByFolder && folderID) || !browseByFolder) { if (!browseByFolder) { collectionSlug = activeCollectionSlug @@ -135,6 +166,7 @@ export const getFolderResultsComponentAndData = async ({ folderID, folderWhere, req, + sort, }) let FolderResultsComponent = null @@ -167,6 +199,7 @@ export const getFolderResultsComponentAndData = async ({ return { breadcrumbs: folderData.breadcrumbs, documents: folderData.documents, + folderAssignedCollections: folderData.folderAssignedCollections, FolderResultsComponent, subfolders: folderData.subfolders, } diff --git a/packages/ui/src/views/BrowseByFolder/index.tsx b/packages/ui/src/views/BrowseByFolder/index.tsx index 3e9771e6bb..f2688189e7 100644 --- a/packages/ui/src/views/BrowseByFolder/index.tsx +++ b/packages/ui/src/views/BrowseByFolder/index.tsx @@ -9,10 +9,10 @@ import { useRouter } from 'next/navigation.js' import React, { Fragment } from 'react' import { DroppableBreadcrumb } from '../../elements/FolderView/Breadcrumbs/index.js' -import { CollectionTypePill } from '../../elements/FolderView/CollectionTypePill/index.js' import { ColoredFolderIcon } from '../../elements/FolderView/ColoredFolderIcon/index.js' import { CurrentFolderActions } from '../../elements/FolderView/CurrentFolderActions/index.js' import { DragOverlaySelection } from '../../elements/FolderView/DragOverlaySelection/index.js' +import { FilterFolderTypePill } from '../../elements/FolderView/FilterFolderTypePill/index.js' import { FolderFileTable } from '../../elements/FolderView/FolderFileTable/index.js' import { ItemCardGrid } from '../../elements/FolderView/ItemCardGrid/index.js' import { SortByPill } from '../../elements/FolderView/SortByPill/index.js' @@ -92,6 +92,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { Description, disableBulkDelete, disableBulkEdit, + folderAssignedCollections, viewPreference, } = props @@ -111,11 +112,12 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { allowCreateCollectionSlugs, breadcrumbs, documents, + dragOverlayItem, folderCollectionConfig, folderID, + folderType, getFolderRoute, getSelectedItems, - lastSelectedIndex, moveToFolder, refineFolderData, search, @@ -236,6 +238,10 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { } }, [breadcrumbs, drawerDepth, getFolderRoute, router, setStepNav, startRouteTransition, t]) + const nonFolderCollectionSlugs = allowCreateCollectionSlugs.filter( + (slug) => slug !== folderCollectionConfig.slug, + ) + return ( @@ -248,6 +254,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { ), @@ -259,6 +266,7 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { , - folderID && , + folderID && , ), - folderID && - allowCreateCollectionSlugs.filter((slug) => slug !== folderCollectionConfig.slug) - .length > 0 && ( - slug !== folderCollectionConfig.slug, - )} - key="create-document" - onCreateSuccess={clearRouteCache} - slugPrefix="create-document--no-results" - /> - ), + folderID && nonFolderCollectionSlugs.length > 0 && ( + + ), ].filter(Boolean)} Message={

    @@ -347,11 +353,9 @@ function BrowseByFolderViewInContext(props: BrowseByFolderViewInContextProps) { {AfterFolderList}

    - + {selectedItemKeys.size > 0 && dragOverlayItem && ( + + )} ) } diff --git a/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx b/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx index cef1fffdf0..3ab40ab3b5 100644 --- a/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx +++ b/packages/ui/src/views/CollectionFolder/ListSelection/index.tsx @@ -1,5 +1,7 @@ 'use client' +import type { CollectionSlug } from 'payload' + import { useModal } from '@faceless-ui/modal' import { extractID } from 'payload/shared' import React, { Fragment } from 'react' @@ -30,16 +32,17 @@ type GroupedSelections = { export type ListSelectionProps = { disableBulkDelete?: boolean disableBulkEdit?: boolean + folderAssignedCollections: CollectionSlug[] } export const ListSelection: React.FC = ({ disableBulkDelete, disableBulkEdit, + folderAssignedCollections, }) => { const { clearSelections, currentFolder, - folderCollectionConfig, folderCollectionSlug, folderFieldName, folderID, @@ -135,6 +138,7 @@ export const ListSelection: React.FC = ({ = ({ ) } + clearRouteCache() closeModal(moveToFolderDrawerSlug) }} /> diff --git a/packages/ui/src/views/CollectionFolder/index.tsx b/packages/ui/src/views/CollectionFolder/index.tsx index 98c58b60b7..12cc5eefcb 100644 --- a/packages/ui/src/views/CollectionFolder/index.tsx +++ b/packages/ui/src/views/CollectionFolder/index.tsx @@ -107,11 +107,12 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps allowCreateCollectionSlugs, breadcrumbs, documents, + dragOverlayItem, folderCollectionConfig, folderCollectionSlug, FolderResultsComponent, + folderType, getSelectedItems, - lastSelectedIndex, moveToFolder, refineFolderData, selectedItemKeys, @@ -265,6 +266,9 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps ), @@ -284,6 +288,9 @@ function CollectionFolderViewInContext(props: CollectionFolderViewInContextProps {AfterFolderList} - + {selectedItemKeys.size > 0 && dragOverlayItem && ( + + )} ) } diff --git a/test/folders/e2e.spec.ts b/test/folders/e2e.spec.ts index aaa1a61aee..2e433de469 100644 --- a/test/folders/e2e.spec.ts +++ b/test/folders/e2e.spec.ts @@ -1,19 +1,26 @@ import type { Page } from '@playwright/test' import { expect, test } from '@playwright/test' -import { reInitializeDB } from 'helpers/reInitializeDB.js' import * as path from 'path' import { fileURLToPath } from 'url' import { ensureCompilationIsDone, initPageConsoleErrorCatch, saveDocAndAssert } from '../helpers.js' import { AdminUrlUtil } from '../helpers/adminUrlUtil.js' +import { + getSelectInputOptions, + getSelectInputValue, + openSelectMenu, +} from '../helpers/e2e/selectInput.js' +import { applyBrowseByFolderTypeFilter } from '../helpers/folders/applyBrowseByFolderTypeFilter.js' import { clickFolderCard } from '../helpers/folders/clickFolderCard.js' import { createFolder } from '../helpers/folders/createFolder.js' +import { createFolderDoc } from '../helpers/folders/createFolderDoc.js' import { createFolderFromDoc } from '../helpers/folders/createFolderFromDoc.js' import { expectNoResultsAndCreateFolderButton } from '../helpers/folders/expectNoResultsAndCreateFolderButton.js' import { selectFolderAndConfirmMove } from '../helpers/folders/selectFolderAndConfirmMove.js' import { selectFolderAndConfirmMoveFromList } from '../helpers/folders/selectFolderAndConfirmMoveFromList.js' import { initPayloadE2ENoConfig } from '../helpers/initPayloadE2ENoConfig.js' +import { reInitializeDB } from 
'../helpers/reInitializeDB.js' import { TEST_TIMEOUT_LONG } from '../playwright.config.js' import { omittedFromBrowseBySlug, postSlug } from './shared.js' @@ -93,16 +100,15 @@ test.describe('Folders', () => { await page.goto(`${serverURL}/admin/browse-by-folder`) await createFolder({ folderName: 'Test Folder', page }) await clickFolderCard({ folderName: 'Test Folder', page }) - const renameButton = page.locator('.list-selection__actions button', { - hasText: 'Rename', + const editFolderDocButton = page.locator('.list-selection__actions button', { + hasText: 'Edit', + }) + await editFolderDocButton.click() + await createFolderDoc({ + page, + folderName: 'Renamed Folder', + folderType: ['Posts'], }) - await renameButton.click() - const folderNameInput = page.locator('input[id="field-name"]') - await folderNameInput.fill('Renamed Folder') - const applyChangesButton = page.locator( - 'dialog#rename-folder--list button[aria-label="Apply Changes"]', - ) - await applyChangesButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') const renamedFolderCard = page .locator('.folder-file-card__name', { @@ -165,16 +171,12 @@ test.describe('Folders', () => { hasText: 'Move', }) await moveButton.click() - const destinationFolder = page - .locator('dialog#move-to-folder--list .folder-file-card') - .filter({ - has: page.locator('.folder-file-card__name', { hasText: 'Move Into This Folder' }), - }) - .first() - const destinationFolderButton = destinationFolder.locator( - 'div[role="button"].folder-file-card__drag-handle', - ) - await destinationFolderButton.click() + await clickFolderCard({ + folderName: 'Move Into This Folder', + page, + doubleClick: true, + rootLocator: page.locator('dialog#move-to-folder--list'), + }) const selectButton = page.locator( 'dialog#move-to-folder--list button[aria-label="Apply Changes"]', ) @@ -193,7 +195,11 @@ test.describe('Folders', () => { // this test currently fails in postgres test('should create new document from folder', async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) - await createFolder({ folderName: 'Create New Here', page }) + await createFolder({ + folderName: 'Create New Here', + page, + folderType: ['Posts', 'Drafts'], + }) await clickFolderCard({ folderName: 'Create New Here', page, doubleClick: true }) const createDocButton = page.locator('.create-new-doc-in-folder__popup-button', { hasText: 'Create document', @@ -231,22 +237,12 @@ test.describe('Folders', () => { await expect(createFolderButton).toBeVisible() await createFolderButton.click() - const drawerHeader = page.locator( - 'dialog#create-folder--no-results-new-folder-drawer h1.drawerHeader__title', - ) - await expect(drawerHeader).toHaveText('New Folder') + await createFolderDoc({ + page, + folderName: 'Nested Folder', + folderType: ['Posts'], + }) - const titleField = page.locator( - 'dialog#create-folder--no-results-new-folder-drawer input[id="field-name"]', - ) - await titleField.fill('Nested Folder') - const createButton = page - .locator( - 'dialog#create-folder--no-results-new-folder-drawer button[aria-label="Apply Changes"]', - ) - .filter({ hasText: 'Create' }) - .first() - await createButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') await expect(page.locator('dialog#create-folder--no-results-new-folder-drawer')).toBeHidden() }) @@ -296,12 +292,11 @@ test.describe('Folders', () => { await createNewDropdown.click() const createFolderButton = 
page.locator('.popup-button-list__button').first() await createFolderButton.click() - const folderNameInput = page.locator('input[id="field-name"]') - await folderNameInput.fill('Nested Folder') - const createButton = page - .locator('.drawerHeader button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() + await createFolderDoc({ + page, + folderName: 'Nested Folder', + folderType: ['Posts'], + }) await expect(page.locator('.folder-file-card__name')).toHaveText('Nested Folder') await createNewDropdown.click() @@ -314,18 +309,28 @@ test.describe('Folders', () => { await saveButton.click() await expect(page.locator('.payload-toast-container')).toContainText('successfully') - const typeButton = page.locator('.popup-button', { hasText: 'Type' }) - await typeButton.click() - const folderCheckbox = page.locator('.checkbox-popup__options .checkbox-input__input').first() - await folderCheckbox.click() + // should filter out folders and only show posts + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Folders', value: 'payload-folders' }, + on: false, + }) const folderGroup = page.locator('.item-card-grid__title', { hasText: 'Folders' }) const postGroup = page.locator('.item-card-grid__title', { hasText: 'Documents' }) await expect(folderGroup).toBeHidden() await expect(postGroup).toBeVisible() - await folderCheckbox.click() - const postCheckbox = page.locator('.checkbox-popup__options .checkbox-input__input').nth(1) - await postCheckbox.click() + // should filter out posts and only show folders + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Folders', value: 'payload-folders' }, + on: true, + }) + await applyBrowseByFolderTypeFilter({ + page, + type: { label: 'Posts', value: 'posts' }, + on: false, + }) await expect(folderGroup).toBeVisible() await expect(postGroup).toBeHidden() @@ -389,7 +394,6 @@ test.describe('Folders', () => { test('should resolve folder pills and not get stuck as Loading...', async () => { await selectFolderAndConfirmMoveFromList({ folderName: 'Move Into This Folder', page }) const folderPill = page.locator('tbody .row-1 .move-doc-to-folder') - await page.reload() await expect(folderPill).not.toHaveText('Loading...') }) test('should show updated folder pill after folder change', async () => { @@ -402,10 +406,16 @@ test.describe('Folders', () => { const folderPill = page.locator('tbody .row-1 .move-doc-to-folder') await selectFolderAndConfirmMoveFromList({ folderName: 'Move Into This Folder', page }) await expect(folderPill).toHaveText('Move Into This Folder') - await page.reload() await folderPill.click() - const folderBreadcrumb = page.locator('.folderBreadcrumbs__crumb-item', { hasText: 'Folder' }) - await folderBreadcrumb.click() + const drawerLocator = page.locator('dialog .move-folder-drawer') + await drawerLocator + .locator('.droppable-button.folderBreadcrumbs__crumb-item', { + hasText: 'Folder', + }) + .click() + await expect( + drawerLocator.locator('.folder-file-card__name', { hasText: 'Move Into This Folder' }), + ).toBeVisible() await selectFolderAndConfirmMove({ page }) await expect(folderPill).toHaveText('No Folder') }) @@ -418,14 +428,11 @@ test.describe('Folders', () => { await createDropdown.click() const createFolderButton = page.locator('.popup-button-list__button', { hasText: 'Folder' }) await createFolderButton.click() - const drawerHeader = page.locator('.drawerHeader__title', { hasText: 'New Folder' }) - await expect(drawerHeader).toBeVisible() - const folderNameInput = 
page.locator('input[id="field-name"]') - await folderNameInput.fill('New Folder From Collection') - const createButton = page - .locator('.drawerHeader button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() + await createFolderDoc({ + page, + folderName: 'New Folder From Collection', + folderType: ['Posts'], + }) await expect(page.locator('.payload-toast-container')).toContainText('successfully') }) }) @@ -470,6 +477,58 @@ test.describe('Folders', () => { }) }) + test.describe('Collection with browse by folders disabled', () => { + test('should not show omitted collection documents in browse by folder view', async () => { + await page.goto(OmittedFromBrowseBy.byFolder) + const folderName = 'Folder without omitted Docs' + await page.goto(OmittedFromBrowseBy.byFolder) + await createFolder({ + folderName, + page, + fromDropdown: true, + folderType: ['Omitted From Browse By', 'Posts'], + }) + + // create document + await page.goto(OmittedFromBrowseBy.create) + const titleInput = page.locator('input[name="title"]') + await titleInput.fill('Omitted Doc') + await saveDocAndAssert(page) + + // assign to folder + const folderPill = page.locator('.doc-controls .move-doc-to-folder', { hasText: 'No Folder' }) + await folderPill.click() + await clickFolderCard({ folderName, page }) + const selectButton = page + .locator('button[aria-label="Apply Changes"]') + .filter({ hasText: 'Select' }) + await selectButton.click() + await saveDocAndAssert(page) + + // go to browse by folder view + await page.goto(`${serverURL}/admin/browse-by-folder`) + await clickFolderCard({ folderName, page, doubleClick: true }) + + // folder should be empty + await expectNoResultsAndCreateFolderButton({ page }) + }) + + test('should not show collection type in browse by folder view', async () => { + const folderName = 'omitted collection pill test folder' + await page.goto(`${serverURL}/admin/browse-by-folder`) + await createFolder({ folderName, page }) + await clickFolderCard({ folderName, page, doubleClick: true }) + + await page.locator('button:has(.collection-type__count)').click() + + await expect( + page.locator('.checkbox-input .field-label', { + hasText: 'Omitted From Browse By', + }), + ).toBeHidden() + }) + }) + test.describe('Multiple select options', () => { test.beforeEach(async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) @@ -545,48 +604,140 @@ test.describe('Folders', () => { }) }) - test.describe('Collection with browse by folders disabled', () => { - const folderName = 'Folder without omitted Docs' - test('should not show omitted collection documents in browse by folder view', async () => { - await page.goto(OmittedFromBrowseBy.byFolder) - await createFolder({ folderName, page, fromDropdown: true }) - - // create document - await page.goto(OmittedFromBrowseBy.create) - const titleInput = page.locator('input[name="title"]') - await titleInput.fill('Omitted Doc') - await saveDocAndAssert(page) - - // assign to folder - const folderPill = page.locator('.doc-controls .move-doc-to-folder', { hasText: 'No Folder' }) - await folderPill.click() - await clickFolderCard({ folderName, page }) - const selectButton = page - .locator('button[aria-label="Apply Changes"]') - .filter({ hasText: 'Select' }) - await selectButton.click() - - // go to browse by folder view + test.describe('should inherit folderType select values from parent folder', () => { + test('should scope folderType select options for: scoped > child folder', async () => { await 
page.goto(`${serverURL}/admin/browse-by-folder`) - await clickFolderCard({ folderName, page, doubleClick: true }) + await createFolder({ folderName: 'Posts and Media', page, folderType: ['Posts', 'Media'] }) + await clickFolderCard({ folderName: 'Posts and Media', page, doubleClick: true }) - // folder should be empty - await expectNoResultsAndCreateFolderButton({ page }) + const createNewDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + hasText: 'Create New', + }) + await createNewDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { hasText: 'Folder' }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should only allow Posts and Media') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should prefill with Posts and Media + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options.sort() + }) + .toEqual(['Posts', 'Media'].sort()) + + // should have no more select options available + await openSelectMenu({ selectLocator }) + await expect( + selectLocator.locator('.rs__menu-notice', { hasText: 'No options' }), + ).toBeVisible() }) - test('should not show collection type in browse by folder view', async () => { - const folderName = 'omitted collection pill test folder' + test('should scope folderType select options for: unscoped > scoped > child folder', async () => { await page.goto(`${serverURL}/admin/browse-by-folder`) - await createFolder({ folderName, page }) - await clickFolderCard({ folderName, page, doubleClick: true }) - await page.locator('button:has(.collection-type__count)').click() + // create an unscoped parent folder + await createFolder({ folderName: 'All collections', page, folderType: [] }) + await clickFolderCard({ folderName: 'All collections', page, doubleClick: true }) + + // create a scoped child folder + await createFolder({ + folderName: 'Posts and Media', + page, + folderType: ['Posts', 'Media'], + fromDropdown: true, + }) + await clickFolderCard({ folderName: 'Posts and Media', page, doubleClick: true }) await expect( - page.locator('.checkbox-input .field-label', { - hasText: 'Omitted From Browse By', + page.locator('.step-nav', { + hasText: 'Posts and Media', }), - ).toBeHidden() + ).toBeVisible() + + const titleActionsLocator = page.locator('.list-header__title-actions') + await expect(titleActionsLocator).toBeVisible() + const folderDropdown = page.locator( + '.list-header__title-actions .create-new-doc-in-folder__action-popup', + { + hasText: 'Create', + }, + ) + await expect(folderDropdown).toBeVisible() + await folderDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { + hasText: 'Folder', + }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should only allow posts and media') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should not prefill with any options + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options.sort() + }) + 
.toEqual(['Posts', 'Media'].sort()) + + // should have no more select options available + await openSelectMenu({ selectLocator }) + await expect( + selectLocator.locator('.rs__menu-notice', { hasText: 'No options' }), + ).toBeVisible() + }) + + test('should not scope child folder of an unscoped parent folder', async () => { + await page.goto(`${serverURL}/admin/browse-by-folder`) + await createFolder({ folderName: 'All collections', page, folderType: [] }) + await clickFolderCard({ folderName: 'All collections', page, doubleClick: true }) + + const createNewDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + hasText: 'Create New', + }) + await createNewDropdown.click() + const createFolderButton = page.locator( + '.list-header__title-actions .popup-button-list__button', + { hasText: 'Folder' }, + ) + await createFolderButton.click() + + const drawer = page.locator('dialog .collection-edit--payload-folders') + const titleInput = drawer.locator('#field-name') + await titleInput.fill('Should allow all collections') + const selectLocator = drawer.locator('#field-folderType') + await expect(selectLocator).toBeVisible() + + // should not prefill with any options + await expect + .poll(async () => { + const options = await getSelectInputValue({ selectLocator, multiSelect: true }) + return options + }) + .toEqual([]) + + // should have many options + await expect + .poll(async () => { + const options = await getSelectInputOptions({ selectLocator }) + return options.length + }) + .toBeGreaterThan(4) }) }) diff --git a/test/folders/int.spec.ts b/test/folders/int.spec.ts index 17afb242a8..6f10a6e733 100644 --- a/test/folders/int.spec.ts +++ b/test/folders/int.spec.ts @@ -3,18 +3,15 @@ import type { Payload } from 'payload' import path from 'path' import { fileURLToPath } from 'url' -import type { NextRESTClient } from '../helpers/NextRESTClient.js' - import { initPayloadInt } from '../helpers/initPayloadInt.js' let payload: Payload -let restClient: NextRESTClient const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) describe('folders', () => { beforeAll(async () => { - ;({ payload, restClient } = await initPayloadInt(dirname)) + ;({ payload } = await initPayloadInt(dirname)) }) afterAll(async () => { @@ -23,7 +20,7 @@ describe('folders', () => { beforeEach(async () => { await payload.delete({ - collection: 'posts', + collection: 'payload-folders', depth: 0, where: { id: { @@ -48,6 +45,7 @@ describe('folders', () => { collection: 'payload-folders', data: { name: 'Parent Folder', + folderType: ['posts'], }, }) const folderIDFromParams = parentFolder.id @@ -57,6 +55,7 @@ describe('folders', () => { data: { name: 'Nested 1', folder: folderIDFromParams, + folderType: ['posts'], }, }) @@ -65,6 +64,7 @@ describe('folders', () => { data: { name: 'Nested 2', folder: folderIDFromParams, + folderType: ['posts'], }, }) @@ -73,7 +73,7 @@ describe('folders', () => { id: folderIDFromParams, }) - expect(parentFolderQuery.documentsAndFolders.docs).toHaveLength(2) + expect(parentFolderQuery.documentsAndFolders?.docs).toHaveLength(2) }) }) @@ -82,6 +82,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -108,7 +109,7 @@ describe('folders', () => { id: folderIDFromParams, }) - expect(parentFolderQuery.documentsAndFolders.docs).toHaveLength(2) + expect(parentFolderQuery.documentsAndFolders?.docs).toHaveLength(2) }) }) @@ -117,6 +118,7 @@ 
describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -124,6 +126,7 @@ describe('folders', () => { const childFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Child Folder', folder: parentFolder, }, @@ -153,6 +156,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -168,6 +172,7 @@ describe('folders', () => { const parentFolder = await payload.create({ collection: 'payload-folders', data: { + folderType: ['posts'], name: 'Parent Folder', }, }) @@ -176,6 +181,7 @@ describe('folders', () => { data: { name: 'Child Folder', folder: parentFolder, + folderType: ['posts'], }, }) @@ -189,5 +195,154 @@ describe('folders', () => { }), ).resolves.toBeNull() }) + + describe('ensureSafeCollectionsChange', () => { + it('should prevent narrowing scope of a folder if it contains documents of a removed type', async () => { + const sharedFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Posts and Drafts Folder', + folderType: ['posts', 'drafts'], + }, + }) + + await payload.create({ + collection: 'posts', + data: { + title: 'Post 1', + folder: sharedFolder.id, + }, + }) + + await payload.create({ + collection: 'drafts', + data: { + title: 'Post 1', + folder: sharedFolder.id, + }, + }) + + try { + const updatedFolder = await payload.update({ + collection: 'payload-folders', + id: sharedFolder.id, + data: { + folderType: ['posts'], + }, + }) + + expect(updatedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Posts and Drafts Folder" contains documents that still belong to the following collections: Drafts', + ) + } + }) + + it('should prevent adding scope to a folder if it contains documents outside of the new scope', async () => { + const folderAcceptsAnything = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Anything Goes', + folderType: [], + }, + }) + + await payload.create({ + collection: 'posts', + data: { + title: 'Post 1', + folder: folderAcceptsAnything.id, + }, + }) + + try { + const scopedFolder = await payload.update({ + collection: 'payload-folders', + id: folderAcceptsAnything.id, + data: { + folderType: ['posts'], + }, + }) + + expect(scopedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Anything Goes" contains documents that still belong to the following collections: Posts', + ) + } + }) + + it('should prevent narrowing scope of a folder if subfolders are assigned to any of the removed types', async () => { + const parentFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Parent Folder', + folderType: ['posts', 'drafts'], + }, + }) + + await payload.create({ + collection: 'payload-folders', + data: { + name: 'Parent Folder', + folderType: ['posts', 'drafts'], + folder: parentFolder.id, + }, + }) + + try { + const updatedParent = await payload.update({ + collection: 'payload-folders', + id: parentFolder.id, + data: { + folderType: ['posts'], + }, + }) + + expect(updatedParent).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Parent Folder" contains folders that still belong to the following collections: Drafts', + ) + } + }) + + it('should prevent widening scope on a scoped subfolder', async () => { + const 
unscopedFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Parent Folder', + folderType: [], + }, + }) + + const level1Folder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Level 1 Folder', + folderType: ['posts', 'drafts'], + folder: unscopedFolder.id, + }, + }) + + try { + const level2UnscopedFolder = await payload.create({ + collection: 'payload-folders', + data: { + name: 'Level 2 Folder', + folder: level1Folder.id, + folderType: [], + }, + }) + + expect(level2UnscopedFolder).not.toBeDefined() + } catch (e: any) { + expect(e.message).toBe( + 'The folder "Level 2 Folder" must have folder-type set since its parent folder "Level 1 Folder" has a folder-type set.', + ) + } + }) + }) }) }) diff --git a/test/folders/payload-types.ts b/test/folders/payload-types.ts index 276727a036..8f1e60b6b5 100644 --- a/test/folders/payload-types.ts +++ b/test/folders/payload-types.ts @@ -201,6 +201,7 @@ export interface FolderInterface { hasNextPage?: boolean; totalDocs?: number; }; + folderType?: ('posts' | 'media' | 'drafts' | 'autosave' | 'omitted-from-browse-by')[] | null; folderSlug?: string | null; updatedAt: string; createdAt: string; @@ -419,6 +420,7 @@ export interface PayloadFoldersSelect { name?: T; folder?: T; documentsAndFolders?: T; + folderType?: T; folderSlug?: T; updatedAt?: T; createdAt?: T; diff --git a/test/folders/tsconfig.json b/test/folders/tsconfig.json new file mode 100644 index 0000000000..3c43903cfd --- /dev/null +++ b/test/folders/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../tsconfig.json" +} diff --git a/test/helpers/folders/applyBrowseByFolderTypeFilter.ts b/test/helpers/folders/applyBrowseByFolderTypeFilter.ts new file mode 100644 index 0000000000..1cdbefe63c --- /dev/null +++ b/test/helpers/folders/applyBrowseByFolderTypeFilter.ts @@ -0,0 +1,41 @@ +import type { Page } from '@playwright/test' + +export const applyBrowseByFolderTypeFilter = async ({ + page, + type, + on, +}: { + on: boolean + page: Page + type: { + label: string + value: string + } +}) => { + // Check if the popup is already active + let typePill = page.locator('.search-bar__actions .checkbox-popup.popup--active', { + hasText: 'Type', + }) + const isActive = (await typePill.count()) > 0 + + if (!isActive) { + typePill = page.locator('.search-bar__actions .checkbox-popup', { hasText: 'Type' }) + await typePill.locator('.popup-button', { hasText: 'Type' }).click() + } + + await typePill.locator('.field-label', { hasText: type.label }).click() + + await page.waitForURL((urlStr) => { + try { + const url = new URL(urlStr) + const relationTo = url.searchParams.get('relationTo') + if (on) { + return Boolean(relationTo?.includes(`"${type.value}"`)) + } else { + return Boolean(!relationTo?.includes(`"${type.value}"`)) + } + } catch { + return false + } + }) +} diff --git a/test/helpers/folders/clickFolderCard.ts b/test/helpers/folders/clickFolderCard.ts index b563122771..f145828420 100644 --- a/test/helpers/folders/clickFolderCard.ts +++ b/test/helpers/folders/clickFolderCard.ts @@ -1,27 +1,37 @@ -import type { Page } from '@playwright/test' +import type { Locator, Page } from '@playwright/test' + +import { expect } from '@playwright/test' type Args = { doubleClick?: boolean folderName: string page: Page + rootLocator?: Locator } export async function clickFolderCard({ page, folderName, doubleClick = false, + rootLocator, }: Args): Promise { - const folderCard = page - .locator('.folder-file-card') + const folderCard = (rootLocator || page) + 
.locator('div[role="button"].draggable-with-click') .filter({ has: page.locator('.folder-file-card__name', { hasText: folderName }), }) .first() - const dragHandleButton = folderCard.locator('div[role="button"].folder-file-card__drag-handle') + await folderCard.waitFor({ state: 'visible' }) if (doubleClick) { - await dragHandleButton.dblclick() + // Release any modifier keys that might be held down from previous tests + await page.keyboard.up('Shift') + await page.keyboard.up('Control') + await page.keyboard.up('Alt') + await page.keyboard.up('Meta') + await folderCard.dblclick() + await expect(folderCard).toBeHidden() } else { - await dragHandleButton.click() + await folderCard.click() } } diff --git a/test/helpers/folders/createFolder.ts b/test/helpers/folders/createFolder.ts index 5ac1d06e16..f3c4785a12 100644 --- a/test/helpers/folders/createFolder.ts +++ b/test/helpers/folders/createFolder.ts @@ -1,7 +1,10 @@ import { expect, type Page } from '@playwright/test' +import { createFolderDoc } from './createFolderDoc.js' + type Args = { folderName: string + folderType?: string[] fromDropdown?: boolean page: Page } @@ -9,13 +12,15 @@ export async function createFolder({ folderName, fromDropdown = false, page, + folderType = ['Posts'], }: Args): Promise { if (fromDropdown) { - const folderDropdown = page.locator('.create-new-doc-in-folder__popup-button', { + const titleActionsLocator = page.locator('.list-header__title-actions') + const folderDropdown = titleActionsLocator.locator('.create-new-doc-in-folder__action-popup', { hasText: 'Create', }) await folderDropdown.click() - const createFolderButton = page.locator('.popup-button-list__button', { + const createFolderButton = titleActionsLocator.locator('.popup-button-list__button', { hasText: 'Folder', }) await createFolderButton.click() @@ -26,16 +31,11 @@ export async function createFolder({ await createFolderButton.click() } - const folderNameInput = page.locator( - 'dialog#create-document--header-pill-new-folder-drawer div.drawer-content-container input#field-name', - ) - - await folderNameInput.fill(folderName) - - const createButton = page.getByRole('button', { name: 'Apply Changes' }) - await createButton.click() - - await expect(page.locator('.payload-toast-container')).toContainText('successfully') + await createFolderDoc({ + page, + folderName, + folderType, + }) const folderCard = page.locator('.folder-file-card__name', { hasText: folderName }).first() await expect(folderCard).toBeVisible() diff --git a/test/helpers/folders/createFolderDoc.ts b/test/helpers/folders/createFolderDoc.ts new file mode 100644 index 0000000000..4266755e7d --- /dev/null +++ b/test/helpers/folders/createFolderDoc.ts @@ -0,0 +1,26 @@ +import { expect, type Page } from '@playwright/test' + +import { selectInput } from '../../helpers/e2e/selectInput.js' +export const createFolderDoc = async ({ + folderName, + page, + folderType, +}: { + folderName: string + folderType: string[] + page: Page +}) => { + const drawer = page.locator('dialog .collection-edit--payload-folders') + await drawer.locator('input#field-name').fill(folderName) + + await selectInput({ + multiSelect: true, + options: folderType, + selectLocator: drawer.locator('#field-folderType'), + }) + + const createButton = drawer.getByRole('button', { name: 'Save' }) + await createButton.click() + + await expect(page.locator('.payload-toast-container')).toContainText('successfully') +} diff --git a/test/helpers/folders/createFolderFromDoc.ts b/test/helpers/folders/createFolderFromDoc.ts index 
fe8fdaabd4..b9ff977ba6 100644 --- a/test/helpers/folders/createFolderFromDoc.ts +++ b/test/helpers/folders/createFolderFromDoc.ts @@ -1,26 +1,29 @@ import { expect, type Page } from '@playwright/test' +import { createFolder } from './createFolder.js' +import { createFolderDoc } from './createFolderDoc.js' + type Args = { folderName: string + folderType?: string[] page: Page } -export async function createFolderFromDoc({ folderName, page }: Args): Promise { +export async function createFolderFromDoc({ + folderName, + page, + folderType = ['Posts'], +}: Args): Promise { const addFolderButton = page.locator('.create-new-doc-in-folder__button', { hasText: 'Create folder', }) await addFolderButton.click() - const folderNameInput = page.locator('div.drawer-content-container input#field-name') - - await folderNameInput.fill(folderName) - - const createButton = page - .locator('button[aria-label="Apply Changes"]') - .filter({ hasText: 'Create' }) - await createButton.click() - - await expect(page.locator('.payload-toast-container')).toContainText('successfully') + await createFolderDoc({ + page, + folderName, + folderType, + }) const folderCard = page.locator('.folder-file-card__name', { hasText: folderName }).first() await expect(folderCard).toBeVisible()

From 95e373e60b28a1c6fceedeef60ffb378e90e0773 Mon Sep 17 00:00:00 2001
From: Patrik <35232443+PatrikKozak@users.noreply.github.com>
Date: Thu, 17 Jul 2025 14:12:58 -0400
Subject: [PATCH 047/143] fix(plugin-import-export): disabled flag to cascade to nested fields from parent containers (#13199)

### What?

Fixes the `custom.plugin-import-export.disabled` flag to correctly disable fields in all nested structures, including:

- Groups
- Arrays
- Tabs
- Blocks

Previously, only top-level fields or direct children were respected. This update ensures nested paths (e.g. `group.array.field1`, `blocks.hero.title`, etc.) are matched and filtered from exports.

### Why?

To allow users to disable entire field groups or deeply nested fields in structured layouts.

### How?

Updated the regex logic in both `createExport` and the Preview component to recursively support:

- Indexed array fields (e.g. `array_0_field1`)
- Block fields with slugs (e.g. `blocks_0_hero_title`)
- Nested field accessors with correct part-by-part expansion
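Below is a minimal, illustrative sketch (not part of the patch itself) of the matching behavior described above. It mirrors the `buildDisabledFieldRegex` utility introduced in this change, and the sample disabled paths and flattened keys are taken from the examples in this description:

```ts
// Minimal sketch: how a disabled dot-notated path is matched against flattened
// CSV column keys. Mirrors the buildDisabledFieldRegex utility added in this patch.
const buildDisabledFieldRegex = (path: string): RegExp => {
  // Each segment may be followed by an array index (e.g. "_0") and/or a block slug (e.g. "_hero")
  const patternParts = path.split('.').map((part) => `${part}(?:_\\d+)?(?:_[^_]+)?`)
  return new RegExp(`^${patternParts.join('_')}(?:_.*)?$`)
}

const disabledRegexes = ['group.array.field1', 'blocks.hero.title'].map(buildDisabledFieldRegex)

// Flattened keys as produced for CSV export (examples from this description)
const keys = ['group_array_0_field1', 'blocks_0_hero_title', 'title']

// Keys matching any disabled path are filtered out of the export
const exported = keys.filter((key) => !disabledRegexes.some((regex) => regex.test(key)))
console.log(exported) // ['title']
```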
--- .../FieldsToExport/reduceFields.tsx | 6 +- .../src/components/Preview/index.tsx | 25 +++--- .../src/export/createExport.ts | 18 ++-- .../src/export/flattenObject.ts | 14 +++- packages/plugin-import-export/src/index.ts | 14 +--- .../src/utilities/buildDisabledFieldRegex.ts | 13 +++ .../utilities/collectDisabledFieldPaths.ts | 82 +++++++++++++++++++ .../src/utilities/getFlattenedFieldKeys.ts | 9 +- .../migrations/20250714_201659.ts | 2 +- .../up-down-migration/migrations/index.ts | 6 +- test/plugin-import-export/int.spec.ts | 8 +- 11 files changed, 156 insertions(+), 41 deletions(-) create mode 100644 packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts create mode 100644 packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts diff --git a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx index 37c2a47f48..a20568b436 100644 --- a/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx +++ b/packages/plugin-import-export/src/components/FieldsToExport/reduceFields.tsx @@ -114,7 +114,11 @@ export const reduceFields = ({ const val = createNestedClientFieldPath(path, field) // If the field is disabled, skip it - if (disabledFields.includes(val)) { + if ( + disabledFields.some( + (disabledField) => val === disabledField || val.startsWith(`${disabledField}.`), + ) + ) { return fieldsToUse } diff --git a/packages/plugin-import-export/src/components/Preview/index.tsx b/packages/plugin-import-export/src/components/Preview/index.tsx index 046b04c4a7..4cafe1f4fe 100644 --- a/packages/plugin-import-export/src/components/Preview/index.tsx +++ b/packages/plugin-import-export/src/components/Preview/index.tsx @@ -18,8 +18,9 @@ import type { PluginImportExportTranslations, } from '../../translations/index.js' -import { useImportExport } from '../ImportExportProvider/index.js' +import { buildDisabledFieldRegex } from '../../utilities/buildDisabledFieldRegex.js' import './index.scss' +import { useImportExport } from '../ImportExportProvider/index.js' const baseClass = 'preview' @@ -46,12 +47,11 @@ export const Preview = () => { (collection) => collection.slug === collectionSlug, ) - const disabledFieldsUnderscored = React.useMemo(() => { - return ( - collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields?.map((f: string) => - f.replace(/\./g, '_'), - ) ?? [] - ) + const disabledFieldRegexes: RegExp[] = React.useMemo(() => { + const disabledFieldPaths = + collectionConfig?.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] + + return disabledFieldPaths.map(buildDisabledFieldRegex) }, [collectionConfig]) const isCSV = format === 'csv' @@ -101,11 +101,16 @@ export const Preview = () => { Array.isArray(fields) && fields.length > 0 ? 
fields.flatMap((field) => { const regex = fieldToRegex(field) - return allKeys.filter((key) => regex.test(key)) + return allKeys.filter( + (key) => + regex.test(key) && + !disabledFieldRegexes.some((disabledRegex) => disabledRegex.test(key)), + ) }) : allKeys.filter( (key) => - !defaultMetaFields.includes(key) && !disabledFieldsUnderscored.includes(key), + !defaultMetaFields.includes(key) && + !disabledFieldRegexes.some((regex) => regex.test(key)), ) const fieldKeys = @@ -150,7 +155,7 @@ export const Preview = () => { }, [ collectionConfig, collectionSlug, - disabledFieldsUnderscored, + disabledFieldRegexes, draft, fields, i18n, diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 9868e0a965..2b4b05bff2 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -5,6 +5,7 @@ import { stringify } from 'csv-stringify/sync' import { APIError } from 'payload' import { Readable } from 'stream' +import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js' import { flattenObject } from './flattenObject.js' import { getCustomFieldFunctions } from './getCustomFieldFunctions.js' import { getFilename } from './getFilename.js' @@ -108,15 +109,22 @@ export const createExport = async (args: CreateExportArgs) => { fields: collectionConfig.flattenedFields, }) - const disabledFieldsDot = + const disabledFields = collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? [] - const disabledFields = disabledFieldsDot.map((f: string) => f.replace(/\./g, '_')) + + const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex) const filterDisabled = (row: Record): Record => { - for (const key of disabledFields) { - delete row[key] + const filtered: Record = {} + + for (const [key, value] of Object.entries(row)) { + const isDisabled = disabledRegexes.some((regex) => regex.test(key)) + if (!isDisabled) { + filtered[key] = value + } } - return row + + return filtered } if (download) { diff --git a/packages/plugin-import-export/src/export/flattenObject.ts b/packages/plugin-import-export/src/export/flattenObject.ts index 022238aacf..0801a2e5ef 100644 --- a/packages/plugin-import-export/src/export/flattenObject.ts +++ b/packages/plugin-import-export/src/export/flattenObject.ts @@ -24,6 +24,10 @@ export const flattenObject = ({ if (Array.isArray(value)) { value.forEach((item, index) => { if (typeof item === 'object' && item !== null) { + const blockType = typeof item.blockType === 'string' ? item.blockType : undefined + + const itemPrefix = blockType ? 
`${newKey}_${index}_${blockType}` : `${newKey}_${index}` + // Case: hasMany polymorphic relationships if ( 'relationTo' in item && @@ -31,12 +35,12 @@ export const flattenObject = ({ typeof item.value === 'object' && item.value !== null ) { - row[`${`${newKey}_${index}`}_relationTo`] = item.relationTo - row[`${`${newKey}_${index}`}_id`] = item.value.id + row[`${itemPrefix}_relationTo`] = item.relationTo + row[`${itemPrefix}_id`] = item.value.id return } - flatten(item, `${newKey}_${index}`) + flatten(item, itemPrefix) } else { if (toCSVFunctions?.[newKey]) { const columnName = `${newKey}_${index}` @@ -54,7 +58,9 @@ export const flattenObject = ({ } } catch (error) { throw new Error( - `Error in toCSVFunction for array item "${columnName}": ${JSON.stringify(item)}\n${(error as Error).message}`, + `Error in toCSVFunction for array item "${columnName}": ${JSON.stringify(item)}\n${ + (error as Error).message + }`, ) } } else { diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index 27dd52bd83..a64e80bf15 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -1,6 +1,6 @@ import type { Config, FlattenedField } from 'payload' -import { addDataAndFileToRequest, deepMergeSimple, flattenTopLevelFields } from 'payload' +import { addDataAndFileToRequest, deepMergeSimple } from 'payload' import type { PluginDefaultTranslationsObject } from './translations/types.js' import type { ImportExportPluginConfig, ToCSVFunction } from './types.js' @@ -11,6 +11,7 @@ import { getCustomFieldFunctions } from './export/getCustomFieldFunctions.js' import { getSelect } from './export/getSelect.js' import { getExportCollection } from './getExportCollection.js' import { translations } from './translations/index.js' +import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js' import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js' export const importExportPlugin = @@ -59,15 +60,8 @@ export const importExportPlugin = path: '@payloadcms/plugin-import-export/rsc#ExportListMenuItem', }) - // Flatten top-level fields to expose nested fields for export config - const flattenedFields = flattenTopLevelFields(collection.fields, { - moveSubFieldsToTop: true, - }) - - // Find fields explicitly marked as disabled for import/export - const disabledFieldAccessors = flattenedFields - .filter((field) => field.custom?.['plugin-import-export']?.disabled) - .map((field) => field.accessor || field.name) + // // Find fields explicitly marked as disabled for import/export + const disabledFieldAccessors = collectDisabledFieldPaths(collection.fields) // Store disabled field accessors in the admin config for use in the UI collection.admin.custom = { diff --git a/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts b/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts new file mode 100644 index 0000000000..41e44ad72e --- /dev/null +++ b/packages/plugin-import-export/src/utilities/buildDisabledFieldRegex.ts @@ -0,0 +1,13 @@ +/** + * Builds a RegExp that matches flattened field keys from a given dot-notated path. 
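+ *
+ * For example, a path like `blocks.title` yields a pattern that matches flattened keys such as
+ * `blocks_title`, `blocks_0_title`, or `blocks_0_hero_title` (array-index and block-slug segments
+ * are optional), as well as any deeper `_`-suffixed children of that path.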
+ */ +export const buildDisabledFieldRegex = (path: string): RegExp => { + const parts = path.split('.') + + const patternParts = parts.map((part) => { + return `${part}(?:_\\d+)?(?:_[^_]+)?` + }) + + const pattern = `^${patternParts.join('_')}(?:_.*)?$` + return new RegExp(pattern) +} diff --git a/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts b/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts new file mode 100644 index 0000000000..dafeae456c --- /dev/null +++ b/packages/plugin-import-export/src/utilities/collectDisabledFieldPaths.ts @@ -0,0 +1,82 @@ +import type { Field } from 'payload' + +import { traverseFields } from 'payload' +import { fieldAffectsData } from 'payload/shared' + +/** + * Recursively traverses a Payload field schema to collect all field paths + * that are explicitly disabled for the import/export plugin via: + * field.custom['plugin-import-export'].disabled + * + * Handles nested fields including named tabs, groups, arrays, blocks, etc. + * Tracks each field’s path by storing it in `ref.path` and manually propagating + * it through named tab layers via a temporary `__manualRef` marker. + * + * @param fields - The top-level array of Payload field definitions + * @returns An array of dot-notated field paths that are marked as disabled + */ +export const collectDisabledFieldPaths = (fields: Field[]): string[] => { + const disabledPaths: string[] = [] + + traverseFields({ + callback: ({ field, next, parentRef, ref }) => { + // Handle named tabs + if (field.type === 'tabs' && Array.isArray(field.tabs)) { + for (const tab of field.tabs) { + if ('name' in tab && typeof tab.name === 'string') { + // Build the path prefix for this tab + const parentPath = + parentRef && typeof (parentRef as { path?: unknown }).path === 'string' + ? (parentRef as { path: string }).path + : '' + const tabPath = parentPath ? `${parentPath}.${tab.name}` : tab.name + + // Prepare a ref for this named tab's children to inherit the path + const refObj = ref as Record + const tabRef = refObj[tab.name] ?? {} + tabRef.path = tabPath + tabRef.__manualRef = true // flag this as a manually constructed parentRef + refObj[tab.name] = tabRef + } + } + + // Skip further processing of the tab container itself + return + } + + // Skip unnamed fields (e.g. rows/collapsibles) + if (!('name' in field) || typeof field.name !== 'string') { + return + } + + // Determine the path to the current field + let parentPath: string | undefined + + if ( + parentRef && + typeof parentRef === 'object' && + 'path' in parentRef && + typeof (parentRef as { path?: unknown }).path === 'string' + ) { + parentPath = (parentRef as { path: string }).path + } else if ((ref as any)?.__manualRef && typeof (ref as any)?.path === 'string') { + // Fallback: if current ref is a manual tabRef, use its path + parentPath = (ref as any).path + } + + const fullPath = parentPath ? 
`${parentPath}.${field.name}` : field.name + + // Store current path for any nested children to use + ;(ref as any).path = fullPath + + // If field is a data-affecting field and disabled via plugin config, collect its path + if (fieldAffectsData(field) && field.custom?.['plugin-import-export']?.disabled) { + disabledPaths.push(fullPath) + return next?.() + } + }, + fields, + }) + + return disabledPaths +} diff --git a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts index 5ba649c13e..f124208dc9 100644 --- a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts +++ b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts @@ -34,12 +34,15 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix keys.push(...subKeys) break } - case 'blocks': + case 'blocks': { field.blocks.forEach((block) => { - const blockKeys = getFlattenedFieldKeys(block.fields as FlattenedField[], `${name}_0`) - keys.push(...blockKeys) + const blockPrefix = `${name}_0_${block.slug}` + keys.push(`${blockPrefix}_blockType`) + keys.push(`${blockPrefix}_id`) + keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix)) }) break + } case 'collapsible': case 'group': case 'row': diff --git a/test/database/up-down-migration/migrations/20250714_201659.ts b/test/database/up-down-migration/migrations/20250714_201659.ts index 098ecd2a0d..b473da2504 100644 --- a/test/database/up-down-migration/migrations/20250714_201659.ts +++ b/test/database/up-down-migration/migrations/20250714_201659.ts @@ -1,4 +1,4 @@ -import type { MigrateDownArgs, MigrateUpArgs} from '@payloadcms/db-postgres'; +import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/db-postgres' import { sql } from '@payloadcms/db-postgres' diff --git a/test/database/up-down-migration/migrations/index.ts b/test/database/up-down-migration/migrations/index.ts index fea58e46c2..8fbc100ef2 100644 --- a/test/database/up-down-migration/migrations/index.ts +++ b/test/database/up-down-migration/migrations/index.ts @@ -1,9 +1,9 @@ -import * as migration_20250714_201659 from './20250714_201659.js'; +import * as migration_20250714_201659 from './20250714_201659.js' export const migrations = [ { up: migration_20250714_201659.up, down: migration_20250714_201659.down, - name: '20250714_201659' + name: '20250714_201659', }, -]; +] diff --git a/test/plugin-import-export/int.spec.ts b/test/plugin-import-export/int.spec.ts index afc6ecb854..64d2516de0 100644 --- a/test/plugin-import-export/int.spec.ts +++ b/test/plugin-import-export/int.spec.ts @@ -364,8 +364,8 @@ describe('@payloadcms/plugin-import-export', () => { const expectedPath = path.join(dirname, './uploads', doc.filename as string) const data = await readCSV(expectedPath) - expect(data[0].blocks_0_blockType).toStrictEqual('hero') - expect(data[0].blocks_1_blockType).toStrictEqual('content') + expect(data[0].blocks_0_hero_blockType).toStrictEqual('hero') + expect(data[0].blocks_1_content_blockType).toStrictEqual('content') }) it('should create a csv of all fields when fields is empty', async () => { @@ -629,8 +629,8 @@ describe('@payloadcms/plugin-import-export', () => { const expectedPath = path.join(dirname, './uploads', doc.filename as string) const data = await readCSV(expectedPath) - expect(data[0].blocks_0_blockType).toStrictEqual('hero') - expect(data[0].blocks_1_blockType).toStrictEqual('content') + 
expect(data[0].blocks_0_hero_blockType).toStrictEqual('hero') + expect(data[0].blocks_1_content_blockType).toStrictEqual('content') }) }) }) From a3361356b2f85ef7052dd4e94026bc343ae3464c Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Thu, 17 Jul 2025 14:45:59 -0400 Subject: [PATCH 048/143] chore(release): v3.48.0 [skip ci] --- package.json | 2 +- packages/admin-bar/package.json | 2 +- packages/create-payload-app/package.json | 2 +- packages/db-mongodb/package.json | 2 +- packages/db-postgres/package.json | 2 +- packages/db-sqlite/package.json | 2 +- packages/db-vercel-postgres/package.json | 2 +- packages/drizzle/package.json | 2 +- packages/email-nodemailer/package.json | 2 +- packages/email-resend/package.json | 2 +- packages/graphql/package.json | 2 +- packages/live-preview-react/package.json | 2 +- packages/live-preview-vue/package.json | 2 +- packages/live-preview/package.json | 2 +- packages/next/package.json | 2 +- packages/payload-cloud/package.json | 2 +- packages/payload/package.json | 2 +- packages/plugin-cloud-storage/package.json | 2 +- packages/plugin-form-builder/package.json | 2 +- packages/plugin-import-export/package.json | 2 +- packages/plugin-multi-tenant/package.json | 2 +- packages/plugin-nested-docs/package.json | 2 +- packages/plugin-redirects/package.json | 2 +- packages/plugin-search/package.json | 2 +- packages/plugin-sentry/package.json | 2 +- packages/plugin-seo/package.json | 2 +- packages/plugin-stripe/package.json | 2 +- packages/richtext-lexical/package.json | 2 +- packages/richtext-slate/package.json | 2 +- packages/storage-azure/package.json | 2 +- packages/storage-gcs/package.json | 2 +- packages/storage-s3/package.json | 2 +- packages/storage-uploadthing/package.json | 2 +- packages/storage-vercel-blob/package.json | 2 +- packages/translations/package.json | 2 +- packages/ui/package.json | 2 +- 36 files changed, 36 insertions(+), 36 deletions(-) diff --git a/package.json b/package.json index 7bd509f311..eefed55b4f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "payload-monorepo", - "version": "3.47.0", + "version": "3.48.0", "private": true, "type": "module", "workspaces": [ diff --git a/packages/admin-bar/package.json b/packages/admin-bar/package.json index 57b1c7a870..0a6f6a0105 100644 --- a/packages/admin-bar/package.json +++ b/packages/admin-bar/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/admin-bar", - "version": "3.47.0", + "version": "3.48.0", "description": "An admin bar for React apps using Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/create-payload-app/package.json b/packages/create-payload-app/package.json index 5b091b3e62..341570e772 100644 --- a/packages/create-payload-app/package.json +++ b/packages/create-payload-app/package.json @@ -1,6 +1,6 @@ { "name": "create-payload-app", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/db-mongodb/package.json b/packages/db-mongodb/package.json index 4fd777f07a..ce2fde62fe 100644 --- a/packages/db-mongodb/package.json +++ b/packages/db-mongodb/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-mongodb", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported MongoDB database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-postgres/package.json b/packages/db-postgres/package.json index 6030f85c7f..c37cad5380 100644 --- a/packages/db-postgres/package.json 
+++ b/packages/db-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-postgres", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Postgres database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-sqlite/package.json b/packages/db-sqlite/package.json index 05c2e49058..c76214ab56 100644 --- a/packages/db-sqlite/package.json +++ b/packages/db-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-sqlite", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported SQLite database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-vercel-postgres/package.json b/packages/db-vercel-postgres/package.json index 71b13d98c4..bad20b8c60 100644 --- a/packages/db-vercel-postgres/package.json +++ b/packages/db-vercel-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-vercel-postgres", - "version": "3.47.0", + "version": "3.48.0", "description": "Vercel Postgres adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/drizzle/package.json b/packages/drizzle/package.json index 56fb038fd8..b2777a10f8 100644 --- a/packages/drizzle/package.json +++ b/packages/drizzle/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/drizzle", - "version": "3.47.0", + "version": "3.48.0", "description": "A library of shared functions used by different payload database adapters", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-nodemailer/package.json b/packages/email-nodemailer/package.json index fed7571620..c1a4307614 100644 --- a/packages/email-nodemailer/package.json +++ b/packages/email-nodemailer/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-nodemailer", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload Nodemailer Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-resend/package.json b/packages/email-resend/package.json index bc9d5f61ed..270bb6fda4 100644 --- a/packages/email-resend/package.json +++ b/packages/email-resend/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-resend", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload Resend Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/graphql/package.json b/packages/graphql/package.json index 5e50af1df0..0f3b87400a 100644 --- a/packages/graphql/package.json +++ b/packages/graphql/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/graphql", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/live-preview-react/package.json b/packages/live-preview-react/package.json index d8a5915db2..bbc1f6d2eb 100644 --- a/packages/live-preview-react/package.json +++ b/packages/live-preview-react/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-react", - "version": "3.47.0", + "version": "3.48.0", "description": "The official React SDK for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview-vue/package.json b/packages/live-preview-vue/package.json index 5f826d54f0..73674817ba 100644 --- a/packages/live-preview-vue/package.json +++ b/packages/live-preview-vue/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-vue", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Vue SDK 
for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview/package.json b/packages/live-preview/package.json index d7b9fe1e82..4e695dd2c1 100644 --- a/packages/live-preview/package.json +++ b/packages/live-preview/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview", - "version": "3.47.0", + "version": "3.48.0", "description": "The official live preview JavaScript SDK for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/next/package.json b/packages/next/package.json index f833f30232..2751f2d441 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/next", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/payload-cloud/package.json b/packages/payload-cloud/package.json index 29087ac235..9679a69c08 100644 --- a/packages/payload-cloud/package.json +++ b/packages/payload-cloud/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/payload-cloud", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Payload Cloud plugin", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/payload/package.json b/packages/payload/package.json index 9b4edd5b5d..3437d8d148 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -1,6 +1,6 @@ { "name": "payload", - "version": "3.47.0", + "version": "3.48.0", "description": "Node, React, Headless CMS and Application Framework built on Next.js", "keywords": [ "admin panel", diff --git a/packages/plugin-cloud-storage/package.json b/packages/plugin-cloud-storage/package.json index 98b0ea98bd..c992d92ac7 100644 --- a/packages/plugin-cloud-storage/package.json +++ b/packages/plugin-cloud-storage/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-cloud-storage", - "version": "3.47.0", + "version": "3.48.0", "description": "The official cloud storage plugin for Payload CMS", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-form-builder/package.json b/packages/plugin-form-builder/package.json index fdaa894696..3847b3980e 100644 --- a/packages/plugin-form-builder/package.json +++ b/packages/plugin-form-builder/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-form-builder", - "version": "3.47.0", + "version": "3.48.0", "description": "Form builder plugin for Payload CMS", "keywords": [ "payload", diff --git a/packages/plugin-import-export/package.json b/packages/plugin-import-export/package.json index bf893f2ec4..6ec831fcf2 100644 --- a/packages/plugin-import-export/package.json +++ b/packages/plugin-import-export/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-import-export", - "version": "3.47.0", + "version": "3.48.0", "description": "Import-Export plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-multi-tenant/package.json b/packages/plugin-multi-tenant/package.json index 6c8ecac5f1..03398f95b7 100644 --- a/packages/plugin-multi-tenant/package.json +++ b/packages/plugin-multi-tenant/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-multi-tenant", - "version": "3.47.0", + "version": "3.48.0", "description": "Multi Tenant plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-nested-docs/package.json b/packages/plugin-nested-docs/package.json index 6e96917d68..416c2335cd 100644 --- a/packages/plugin-nested-docs/package.json +++ 
b/packages/plugin-nested-docs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-nested-docs", - "version": "3.47.0", + "version": "3.48.0", "description": "The official Nested Docs plugin for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-redirects/package.json b/packages/plugin-redirects/package.json index 6ef9ceb11e..2ad67ff908 100644 --- a/packages/plugin-redirects/package.json +++ b/packages/plugin-redirects/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-redirects", - "version": "3.47.0", + "version": "3.48.0", "description": "Redirects plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-search/package.json b/packages/plugin-search/package.json index e4a5086360..e0c482656b 100644 --- a/packages/plugin-search/package.json +++ b/packages/plugin-search/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-search", - "version": "3.47.0", + "version": "3.48.0", "description": "Search plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-sentry/package.json b/packages/plugin-sentry/package.json index 9cfae24d58..3ce1a34195 100644 --- a/packages/plugin-sentry/package.json +++ b/packages/plugin-sentry/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-sentry", - "version": "3.47.0", + "version": "3.48.0", "description": "Sentry plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-seo/package.json b/packages/plugin-seo/package.json index e970f1518a..24ba866c55 100644 --- a/packages/plugin-seo/package.json +++ b/packages/plugin-seo/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-seo", - "version": "3.47.0", + "version": "3.48.0", "description": "SEO plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-stripe/package.json b/packages/plugin-stripe/package.json index d792db9a12..0adb38a9aa 100644 --- a/packages/plugin-stripe/package.json +++ b/packages/plugin-stripe/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-stripe", - "version": "3.47.0", + "version": "3.48.0", "description": "Stripe plugin for Payload", "keywords": [ "payload", diff --git a/packages/richtext-lexical/package.json b/packages/richtext-lexical/package.json index 46f318440b..f1afb8d806 100644 --- a/packages/richtext-lexical/package.json +++ b/packages/richtext-lexical/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-lexical", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Lexical richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/richtext-slate/package.json b/packages/richtext-slate/package.json index 4bd5c023bf..76d636e0b7 100644 --- a/packages/richtext-slate/package.json +++ b/packages/richtext-slate/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-slate", - "version": "3.47.0", + "version": "3.48.0", "description": "The officially supported Slate richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-azure/package.json b/packages/storage-azure/package.json index dcd0d6d3ff..46ba50ce2d 100644 --- a/packages/storage-azure/package.json +++ b/packages/storage-azure/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-azure", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Azure Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-gcs/package.json 
b/packages/storage-gcs/package.json index 7535814e6b..c99f9baa16 100644 --- a/packages/storage-gcs/package.json +++ b/packages/storage-gcs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-gcs", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Google Cloud Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-s3/package.json b/packages/storage-s3/package.json index e667db97e2..00b61727bc 100644 --- a/packages/storage-s3/package.json +++ b/packages/storage-s3/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-s3", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Amazon S3", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-uploadthing/package.json b/packages/storage-uploadthing/package.json index aeaeecd7fc..6b5e65f06f 100644 --- a/packages/storage-uploadthing/package.json +++ b/packages/storage-uploadthing/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-uploadthing", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for uploadthing", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-vercel-blob/package.json b/packages/storage-vercel-blob/package.json index 652d56acc3..4948f19bd0 100644 --- a/packages/storage-vercel-blob/package.json +++ b/packages/storage-vercel-blob/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-vercel-blob", - "version": "3.47.0", + "version": "3.48.0", "description": "Payload storage adapter for Vercel Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/translations/package.json b/packages/translations/package.json index 8ebecc5dfe..42c7da0a58 100644 --- a/packages/translations/package.json +++ b/packages/translations/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/translations", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/ui/package.json b/packages/ui/package.json index 72537a0bb2..66a34c2f31 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/ui", - "version": "3.47.0", + "version": "3.48.0", "homepage": "https://payloadcms.com", "repository": { "type": "git", From 4ae503d70050a2746f12d974ca3958e68d9fa812 Mon Sep 17 00:00:00 2001 From: Jake Fell Date: Thu, 17 Jul 2025 20:33:49 +0100 Subject: [PATCH 049/143] fix: exit payload jobs:run process after completion (#13211) ### What? Exit the process after running jobs. ### Why? When running the `payload jobs:run` bin script with a postgres database the process hangs forever. ### How? Execute `process.exit(0)` after running all jobs. 
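Roughly, the relevant branch of the bin script now ends like this (simplified sketch, not the full file):

```ts
// jobs:run handling inside packages/payload/src/bin/index.ts (simplified)
await payload.jobs.run({ limit, queue })

// close database connections after running jobs so process can exit cleanly
await payload.destroy()

// previously this branch simply returned, which could leave the process
// hanging (observed with Postgres), so exit explicitly instead
process.exit(0)
```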
--- packages/payload/src/bin/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/payload/src/bin/index.ts b/packages/payload/src/bin/index.ts index b1b973d0ef..6bd604a373 100755 --- a/packages/payload/src/bin/index.ts +++ b/packages/payload/src/bin/index.ts @@ -133,7 +133,7 @@ export const bin = async () => { await payload.destroy() // close database connections after running jobs so process can exit cleanly - return + process.exit(0) } } From c08b2aea8971863369f23440a7069e2cea05e646 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Fri, 18 Jul 2025 03:48:27 -0700 Subject: [PATCH 050/143] feat: scheduling jobs (#12863) Adds a new `schedule` property to workflow and task configs that can be used to have Payload automatically _queue_ jobs following a certain _schedule_. Docs: https://payloadcms.com/docs/dynamic/jobs-queue/schedules?branch=feat/schedule-jobs ## API Example ```ts export default buildConfig({ // ... jobs: { // ... scheduler: 'manual', // Or `cron` if you're not using serverless. If `manual` is used, then user needs to set up running /api/payload-jobs/handleSchedules or payload.jobs.handleSchedules in regular intervals tasks: [ { schedule: [ { cron: '* * * * * *', queue: 'autorunSecond', // Hooks are optional hooks: { // Not an array, as providing and calling `defaultBeforeSchedule` would be more error-prone if this was an array beforeSchedule: async (args) => { // Handles verifying that there are no jobs already scheduled or processing. // You can override this behavior by not calling defaultBeforeSchedule, e.g. if you wanted // to allow a maximum of 3 scheduled jobs in the queue instead of 1, or add any additional conditions const result = await args.defaultBeforeSchedule(args) return { ...result, input: { message: 'This task runs every second', }, } }, afterSchedule: async (args) => { await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global args.req.payload.logger.info( 'EverySecond task scheduled: ' + (args.status === 'success' ? 
args.job.id : 'skipped or failed to schedule'), ) }, }, }, ], slug: 'EverySecond', inputSchema: [ { name: 'message', type: 'text', required: true, }, ], handler: ({ input, req }) => { req.payload.logger.info(input.message) return { output: {}, } }, } ] } }) ``` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210495300843759 --- docs/jobs-queue/schedules.mdx | 156 +++++++ .../db-mongodb/src/utilities/transform.ts | 4 + packages/payload/package.json | 2 +- packages/payload/src/config/sanitize.ts | 28 +- .../payload/src/database/defaultUpdateJobs.ts | 2 +- packages/payload/src/database/types.ts | 3 + packages/payload/src/index.ts | 48 +- .../queues/config/{index.ts => collection.ts} | 33 +- packages/payload/src/queues/config/global.ts | 45 ++ .../payload/src/queues/config/types/index.ts | 139 +++++- .../src/queues/config/types/taskTypes.ts | 8 + .../src/queues/config/types/workflowTypes.ts | 15 + .../src/queues/endpoints/handleSchedules.ts | 66 +++ packages/payload/src/queues/endpoints/run.ts | 118 +++++ .../errors/calculateBackoffWaitUntil.ts | 12 +- .../src/queues/errors/handleTaskError.ts | 42 +- .../src/queues/errors/handleWorkflowError.ts | 24 +- packages/payload/src/queues/localAPI.ts | 56 ++- .../countRunnableOrActiveJobsForQueue.ts | 74 ++++ .../handleSchedules/defaultAfterSchedule.ts | 64 +++ .../handleSchedules/defaultBeforeSchedule.ts | 20 + .../handleSchedules/getQueuesWithSchedules.ts | 50 +++ .../operations/handleSchedules/index.ts | 223 ++++++++++ .../src/queues/operations/runJobs/index.ts | 44 +- .../operations/runJobs/runJSONJob/index.ts | 15 +- .../runJobs/runJob/getRunTaskFunction.ts | 5 +- .../queues/operations/runJobs/runJob/index.ts | 16 +- .../payload/src/queues/restEndpointRun.ts | 91 ---- .../src/queues/utilities/getCurrentDate.ts | 21 + .../payload/src/queues/utilities/updateJob.ts | 2 +- .../versions/deleteScheduledPublishJobs.ts | 2 +- pnpm-lock.yaml | 10 +- test/helpers/initPayloadInt.ts | 7 +- test/initDevAndTest.ts | 3 +- test/queues/config.schedules-autocron.ts | 22 + test/queues/config.schedules.ts | 22 + test/queues/config.ts | 416 +----------------- test/queues/getConfig.ts | 176 ++++++++ test/queues/int.spec.ts | 148 ++++--- test/queues/payload-types.ts | 72 ++- test/queues/schedules-autocron.int.spec.ts | 105 +++++ test/queues/schedules.int.spec.ts | 341 ++++++++++++++ test/queues/tasks/CreateSimpleRetries0Task.ts | 41 ++ .../tasks/CreateSimpleRetriesUndefinedTask.ts | 40 ++ test/queues/tasks/CreateSimpleTask.ts | 41 ++ .../CreateSimpleWithDuplicateMessageTask.ts | 42 ++ test/queues/tasks/EverySecondMax2Task.ts | 67 +++ test/queues/tasks/EverySecondTask.ts | 54 +++ test/queues/tasks/ExternalTask.ts | 26 ++ test/queues/tasks/ReturnCustomErrorTask.ts | 20 + test/queues/tasks/ReturnErrorTask.ts | 13 + test/queues/tasks/ThrowErrorTask.ts | 11 + test/queues/tasks/UpdatePostStep2Task.ts | 23 + test/queues/tasks/UpdatePostTask.ts | 31 ++ test/queues/utilities.ts | 62 +++ test/runInit.ts | 3 +- 56 files changed, 2579 insertions(+), 645 deletions(-) create mode 100644 docs/jobs-queue/schedules.mdx rename packages/payload/src/queues/config/{index.ts => collection.ts} (84%) create mode 100644 packages/payload/src/queues/config/global.ts create mode 100644 packages/payload/src/queues/endpoints/handleSchedules.ts create mode 100644 packages/payload/src/queues/endpoints/run.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts create mode 
100644 packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts create mode 100644 packages/payload/src/queues/operations/handleSchedules/index.ts delete mode 100644 packages/payload/src/queues/restEndpointRun.ts create mode 100644 packages/payload/src/queues/utilities/getCurrentDate.ts create mode 100644 test/queues/config.schedules-autocron.ts create mode 100644 test/queues/config.schedules.ts create mode 100644 test/queues/getConfig.ts create mode 100644 test/queues/schedules-autocron.int.spec.ts create mode 100644 test/queues/schedules.int.spec.ts create mode 100644 test/queues/tasks/CreateSimpleRetries0Task.ts create mode 100644 test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts create mode 100644 test/queues/tasks/CreateSimpleTask.ts create mode 100644 test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts create mode 100644 test/queues/tasks/EverySecondMax2Task.ts create mode 100644 test/queues/tasks/EverySecondTask.ts create mode 100644 test/queues/tasks/ExternalTask.ts create mode 100644 test/queues/tasks/ReturnCustomErrorTask.ts create mode 100644 test/queues/tasks/ReturnErrorTask.ts create mode 100644 test/queues/tasks/ThrowErrorTask.ts create mode 100644 test/queues/tasks/UpdatePostStep2Task.ts create mode 100644 test/queues/tasks/UpdatePostTask.ts create mode 100644 test/queues/utilities.ts diff --git a/docs/jobs-queue/schedules.mdx b/docs/jobs-queue/schedules.mdx new file mode 100644 index 0000000000..a2b7aa3311 --- /dev/null +++ b/docs/jobs-queue/schedules.mdx @@ -0,0 +1,156 @@ +--- +title: Job Schedules +label: Schedules +order: 60 +desc: Payload allows you to schedule jobs to run periodically +keywords: jobs queue, application framework, typescript, node, react, nextjs, scheduling, cron, schedule +--- + +Payload's `schedule` property lets you enqueue Jobs regularly according to a cron schedule - daily, weekly, hourly, or any custom interval. This is ideal for tasks or workflows that must repeat automatically and without manual intervention. + +Scheduling Jobs differs significantly from running them: + +- **Queueing**: Scheduling only creates (enqueues) the Job according to your cron expression. It does not immediately execute any business logic. +- **Running**: Execution happens separately through your Jobs runner - such as autorun, or manual invocation using `payload.jobs.run()` or the `payload-jobs/run` endpoint. + +Use the `schedule` property specifically when you have recurring tasks or workflows. To enqueue a single Job to run once in the future, use the `waitUntil` property instead. + +## Example use cases + +**Regular emails or notifications** + +Send nightly digests, weekly newsletters, or hourly updates. + +**Batch processing during off-hours** + +Process analytics data or rebuild static sites during low-traffic times. + +**Periodic data synchronization** + +Regularly push or pull updates to or from external APIs. + +## Handling schedules + +Something needs to actually trigger the scheduling of jobs (execute the scheduling lifecycle seen below). By default, the `jobs.autorun` configuration, as well as the `/api/payload-jobs/run` will also handle scheduling for the queue specified in the `autorun` configuration. 
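+
+For example (a minimal sketch, assuming a task or workflow whose `schedule` targets a `nightly` queue), an autorun entry for that queue will evaluate its schedules on each tick before running jobs:
+
+```ts
+import { buildConfig } from 'payload'
+
+export default buildConfig({
+  jobs: {
+    autoRun: [
+      {
+        cron: '*/5 * * * *', // check the queue every 5 minutes
+        queue: 'nightly', // schedules targeting this queue are evaluated here too
+        // disableScheduling: true, // uncomment to opt out and trigger scheduling yourself
+      },
+    ],
+    tasks: [
+      /* tasks that define `schedule: [{ cron: '...', queue: 'nightly' }]` */
+    ],
+  },
+})
+```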
+ +You can disable this behavior by setting `disableScheduling: true` in your `autorun` configuration, or by passing `disableScheduling=true` to the `/api/payload-jobs/run` endpoint. This is useful if you want to handle scheduling manually, for example, by using a cron job or a serverless function that calls the `/api/payload-jobs/handle-schedules` endpoint or the `payload.jobs.handleSchedules()` local API method. + +## Defining schedules on Tasks or Workflows + +Schedules are defined using the `schedule` property: + +```ts +export type ScheduleConfig = { + cron: string // required, supports seconds precision + queue: string // required, the queue to push Jobs onto + hooks?: { + // Optional hooks to customize scheduling behavior + beforeSchedule?: BeforeScheduleFn + afterSchedule?: AfterScheduleFn + } +} +``` + +### Example schedule + +The following example demonstrates scheduling a Job to enqueue every day at midnight: + +```ts +import type { TaskConfig } from 'payload' + +export const SendDigestEmail: TaskConfig<'SendDigestEmail'> = { + slug: 'SendDigestEmail', + schedule: [ + { + cron: '0 0 * * *', // Every day at midnight + queue: 'nightly', + }, + ], + handler: async () => { + await sendDigestToAllUsers() + }, +} +``` + +This configuration only queues the Job - it does not execute it immediately. To actually run the queued Job, you configure autorun in your Payload config (note that autorun should **not** be used on serverless platforms): + +```ts +export default buildConfig({ + jobs: { + scheduler: 'cron', + autoRun: [ + { + cron: '* * * * *', // Runs every minute + queue: 'nightly', + }, + ], + tasks: [SendDigestEmail], + }, +}) +``` + +That way, Payload's scheduler will automatically enqueue the job into the `nightly` queue every day at midnight. The autorun configuration will check the `nightly` queue every minute and execute any Jobs that are due to run. + +## Scheduling lifecycle + +Here's how the scheduling process operates in detail: + +1. **Cron evaluation**: Payload (or your external trigger in `manual` mode) identifies which schedules are due to run. To do that, it will + read the `payload-jobs-stats` global which contains information about the last time each scheduled task or workflow was run. +2. **BeforeSchedule hook**: + - The default beforeSchedule hook checks how many active or runnable jobs of the same type that have been queued by the scheduling system currently exist. + If such a job exists, it will skip scheduling a new one. + - You can provide your own `beforeSchedule` hook to customize this behavior. For example, you might want to allow multiple overlapping Jobs or dynamically set the Job input data. +3. **Enqueue Job**: Payload queues up a new job. This job will have `waitUntil` set to the next scheduled time based on the cron expression. +4. **AfterSchedule hook**: + - The default afterSchedule hook updates the `payload-jobs-stats` global metadata with the last scheduled time for the Job. + - You can provide your own afterSchedule hook to it for custom logging, metrics, or other post-scheduling actions. + +## Customizing concurrency and input (Advanced) + +You may want more control over concurrency or dynamically set Job inputs at scheduling time. 
For instance, allowing multiple overlapping Jobs to be scheduled, even if a previously scheduled job has not completed yet, or preparing dynamic data to pass to your Job handler: + +```ts +import { countRunnableOrActiveJobsForQueue } from 'payload' + +schedule: [ + { + cron: '* * * * *', // every minute + queue: 'reports', + hooks: { + beforeSchedule: async ({ queueable, req }) => { + const runnableOrActiveJobsForQueue = + await countRunnableOrActiveJobsForQueue({ + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug, + workflowSlug: queueable.workflowConfig?.slug, + onlyScheduled: true, + }) + + // Allow up to 3 simultaneous scheduled jobs and set dynamic input + return { + shouldSchedule: runnableOrActiveJobsForQueue < 3, + input: { text: 'Hi there' }, + } + }, + }, + }, +] +``` + +This allows fine-grained control over how many Jobs can run simultaneously and provides dynamically computed input values each time a Job is scheduled. + +## Scheduling in serverless environments + +On serverless platforms, scheduling must be triggered externally since Payload does not automatically run cron schedules in ephemeral environments. You have two main ways to trigger scheduling manually: + +- **Invoke via Payload's API:** `payload.jobs.handleSchedules()` +- **Use the REST API endpoint:** `/api/payload-jobs/handle-schedules` +- **Use the run endpoint, which also handles scheduling by default:** `GET /api/payload-jobs/run` + +For example, on Vercel, you can set up a Vercel Cron to regularly trigger scheduling: + +- **Vercel Cron Job:** Configure Vercel Cron to periodically call `GET /api/payload-jobs/handle-schedules`. If you would like to auto-run your scheduled jobs as well, you can use the `GET /api/payload-jobs/run` endpoint. + +Once Jobs are queued, their execution depends entirely on your configured runner setup (e.g., autorun, or manual invocation). 
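+
+As a rough sketch (assuming a Next.js route handler and the usual `@payload-config` import alias), an external cron could trigger scheduling through the Local API like this:
+
+```ts
+import config from '@payload-config'
+import { getPayload } from 'payload'
+
+// Hit by your platform's cron (e.g. a Vercel Cron pointed at this route)
+export const GET = async () => {
+  const payload = await getPayload({ config })
+
+  // Evaluate due schedules and enqueue the corresponding jobs
+  await payload.jobs.handleSchedules()
+
+  // Optionally also run whatever is now queued in the same invocation:
+  // await payload.jobs.run({ queue: 'nightly' })
+
+  return Response.json({ ok: true })
+}
+```
+
+In a real deployment you would likely protect such a route (for example with a shared secret header), since anyone who can reach it can enqueue work.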
diff --git a/packages/db-mongodb/src/utilities/transform.ts b/packages/db-mongodb/src/utilities/transform.ts index 24113806ae..35a271877c 100644 --- a/packages/db-mongodb/src/utilities/transform.ts +++ b/packages/db-mongodb/src/utilities/transform.ts @@ -406,6 +406,10 @@ export const transform = ({ parentIsLocalized = false, validateRelationships = true, }: Args) => { + if (!data) { + return null + } + if (Array.isArray(data)) { for (const item of data) { transform({ $inc, adapter, data: item, fields, globalSlug, operation, validateRelationships }) diff --git a/packages/payload/package.json b/packages/payload/package.json index 3437d8d148..ed757a0d27 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -92,7 +92,7 @@ "busboy": "^1.6.0", "ci-info": "^4.1.0", "console-table-printer": "2.12.1", - "croner": "9.0.0", + "croner": "9.1.0", "dataloader": "2.2.3", "deepmerge": "4.3.1", "file-type": "19.3.0", diff --git a/packages/payload/src/config/sanitize.ts b/packages/payload/src/config/sanitize.ts index c90ee9703b..043bb34d3d 100644 --- a/packages/payload/src/config/sanitize.ts +++ b/packages/payload/src/config/sanitize.ts @@ -29,7 +29,8 @@ import { } from '../locked-documents/config.js' import { getPreferencesCollection, preferencesCollectionSlug } from '../preferences/config.js' import { getQueryPresetsConfig, queryPresetsCollectionSlug } from '../query-presets/config.js' -import { getDefaultJobsCollection, jobsCollectionSlug } from '../queues/config/index.js' +import { getDefaultJobsCollection, jobsCollectionSlug } from '../queues/config/collection.js' +import { getJobStatsGlobal } from '../queues/config/global.js' import { flattenBlock } from '../utilities/flattenAllFields.js' import { getSchedulePublishTask } from '../versions/schedule/job.js' import { addDefaultsToConfig } from './defaults.js' @@ -313,7 +314,28 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise task.schedule)) || + (config?.jobs?.workflows?.length && + config.jobs.workflows.some((workflow) => workflow.schedule)) + + if (hasScheduleProperty) { + config.jobs.scheduling = true + // Add payload-jobs-stats global for tracking when a job of a specific slug was last run + ;(config.globals ??= []).push( + await sanitizeGlobal( + config as unknown as Config, + getJobStatsGlobal(config as unknown as Config), + richTextSanitizationPromises, + validRelationships, + ), + ) + + config.jobs.stats = true + } + + let defaultJobsCollection = getDefaultJobsCollection(config.jobs) if (typeof config.jobs.jobsCollectionOverrides === 'function') { defaultJobsCollection = config.jobs.jobsCollectionOverrides({ @@ -342,7 +364,7 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise { - const job = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => { + const jobAutorunCron = new Cron(cronConfig.cron ?? DEFAULT_CRON, async () => { + if ( + _internal_jobSystemGlobals.shouldAutoSchedule && + !cronConfig.disableScheduling && + this.config.jobs.scheduling + ) { + await this.jobs.handleSchedules({ + queue: cronConfig.queue, + }) + } + + if (!_internal_jobSystemGlobals.shouldAutoRun) { + return + } + if (typeof this.config.jobs.shouldAutoRun === 'function') { const shouldAutoRun = await this.config.jobs.shouldAutoRun(this) if (!shouldAutoRun) { - job.stop() - - return false + jobAutorunCron.stop() + return } } await this.jobs.run({ limit: cronConfig.limit ?? 
DEFAULT_LIMIT, queue: cronConfig.queue, + silent: cronConfig.silent, }) }) - this.crons.push(job) + this.crons.push(jobAutorunCron) }), ) } @@ -931,8 +946,10 @@ export const reload = async ( payload: Payload, skipImportMapGeneration?: boolean, ): Promise => { - await payload.destroy() - + if (typeof payload.db.destroy === 'function') { + // Only destroy db, as we then later only call payload.db.init and not payload.init + await payload.db.destroy() + } payload.config = config payload.collections = config.collections.reduce( @@ -1176,6 +1193,7 @@ export type { export type { CompoundIndex } from './collections/config/types.js' export type { SanitizedCompoundIndex } from './collections/config/types.js' + export { createDataloaderCacheKey, getDataLoader } from './collections/dataloader.js' export { countOperation } from './collections/operations/count.js' export { createOperation } from './collections/operations/create.js' @@ -1321,6 +1339,7 @@ export { export type { ValidationFieldError } from './errors/index.js' export { baseBlockFields } from './fields/baseFields/baseBlockFields.js' + export { baseIDField } from './fields/baseFields/baseIDField.js' export { @@ -1444,6 +1463,7 @@ export type { export { getDefaultValue } from './fields/getDefaultValue.js' export { traverseFields as afterChangeTraverseFields } from './fields/hooks/afterChange/traverseFields.js' + export { promise as afterReadPromise } from './fields/hooks/afterRead/promise.js' export { traverseFields as afterReadTraverseFields } from './fields/hooks/afterRead/traverseFields.js' export { traverseFields as beforeChangeTraverseFields } from './fields/hooks/beforeChange/traverseFields.js' @@ -1451,6 +1471,7 @@ export { traverseFields as beforeValidateTraverseFields } from './fields/hooks/b export { sortableFieldTypes } from './fields/sortableFieldTypes.js' export { validations } from './fields/validations.js' + export type { ArrayFieldValidation, BlocksFieldValidation, @@ -1505,6 +1526,7 @@ export type { export { docAccessOperation as docAccessOperationGlobal } from './globals/operations/docAccess.js' export { findOneOperation } from './globals/operations/findOne.js' + export { findVersionByIDOperation as findVersionByIDOperationGlobal } from './globals/operations/findVersionByID.js' export { findVersionsOperation as findVersionsOperationGlobal } from './globals/operations/findVersions.js' export { restoreVersionOperation as restoreVersionOperationGlobal } from './globals/operations/restoreVersion.js' @@ -1525,8 +1547,7 @@ export type { TabsPreferences, } from './preferences/types.js' export type { QueryPreset } from './query-presets/types.js' -export { jobAfterRead } from './queues/config/index.js' - +export { jobAfterRead } from './queues/config/collection.js' export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js' export type { RunInlineTaskFunction, @@ -1541,6 +1562,7 @@ export type { TaskOutput, TaskType, } from './queues/config/types/taskTypes.js' + export type { BaseJob, JobLog, @@ -1551,8 +1573,14 @@ export type { WorkflowHandler, WorkflowTypes, } from './queues/config/types/workflowTypes.js' - +export { countRunnableOrActiveJobsForQueue } from './queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.js' export { importHandlerPath } from './queues/operations/runJobs/runJob/importHandlerPath.js' + +export { + _internal_jobSystemGlobals, + _internal_resetJobSystemGlobals, + getCurrentDate, +} from './queues/utilities/getCurrentDate.js' export { getLocalI18n } from 
'./translations/getLocalI18n.js' export * from './types/index.js' export { getFileByPath } from './uploads/getFileByPath.js' diff --git a/packages/payload/src/queues/config/index.ts b/packages/payload/src/queues/config/collection.ts similarity index 84% rename from packages/payload/src/queues/config/index.ts rename to packages/payload/src/queues/config/collection.ts index 9628a29bff..e9e925d804 100644 --- a/packages/payload/src/queues/config/index.ts +++ b/packages/payload/src/queues/config/collection.ts @@ -1,25 +1,28 @@ import type { CollectionConfig } from '../../collections/config/types.js' -import type { Config, SanitizedConfig } from '../../config/types.js' +import type { SanitizedConfig } from '../../config/types.js' import type { Field } from '../../fields/config/types.js' import type { Job } from '../../index.js' -import { runJobsEndpoint } from '../restEndpointRun.js' +import { handleSchedulesJobsEndpoint } from '../endpoints/handleSchedules.js' +import { runJobsEndpoint } from '../endpoints/run.js' import { getJobTaskStatus } from '../utilities/getJobTaskStatus.js' export const jobsCollectionSlug = 'payload-jobs' -export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (config) => { +export const getDefaultJobsCollection: (jobsConfig: SanitizedConfig['jobs']) => CollectionConfig = ( + jobsConfig, +) => { const workflowSlugs: Set = new Set() const taskSlugs: Set = new Set(['inline']) - if (config.jobs?.workflows?.length) { - config.jobs?.workflows.forEach((workflow) => { + if (jobsConfig.workflows?.length) { + jobsConfig.workflows.forEach((workflow) => { workflowSlugs.add(workflow.slug) }) } - if (config.jobs?.tasks?.length) { - config.jobs.tasks.forEach((task) => { + if (jobsConfig.tasks?.length) { + jobsConfig.tasks.forEach((task) => { if (workflowSlugs.has(task.slug)) { throw new Error( `Task slug "${task.slug}" is already used by a workflow. No tasks are allowed to have the same slug as a workflow.`, @@ -78,7 +81,7 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c }, ] - if (config?.jobs?.addParentToTaskLog) { + if (jobsConfig.addParentToTaskLog) { logFields.push({ name: 'parent', type: 'group', @@ -102,7 +105,7 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c group: 'System', hidden: true, }, - endpoints: [runJobsEndpoint], + endpoints: [runJobsEndpoint, handleSchedulesJobsEndpoint], fields: [ { name: 'input', @@ -198,6 +201,9 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c { name: 'waitUntil', type: 'date', + admin: { + date: { pickerAppearance: 'dayAndTime' }, + }, index: true, }, { @@ -237,6 +243,15 @@ export const getDefaultJobsCollection: (config: Config) => CollectionConfig = (c lockDocuments: false, } + if (jobsConfig.stats) { + // TODO: In 4.0, this should be added by default. + // The meta field can be used to store arbitrary data about the job. The scheduling system uses this to store + // `scheduled: true` to indicate that the job was queued by the scheduling system. 
+ jobsCollection.fields.push({ + name: 'meta', + type: 'json', + }) + } return jobsCollection } diff --git a/packages/payload/src/queues/config/global.ts b/packages/payload/src/queues/config/global.ts new file mode 100644 index 0000000000..55c87d247f --- /dev/null +++ b/packages/payload/src/queues/config/global.ts @@ -0,0 +1,45 @@ +import type { Config } from '../../config/types.js' +import type { GlobalConfig } from '../../globals/config/types.js' +import type { TaskType } from './types/taskTypes.js' +import type { WorkflowTypes } from './types/workflowTypes.js' + +export const jobStatsGlobalSlug = 'payload-jobs-stats' + +/** + * Type for data stored in the payload-jobs-stats global. + */ +export type JobStats = { + stats?: { + scheduledRuns?: { + queues?: { + [queueSlug: string]: { + tasks?: { + [taskSlug: TaskType]: { + lastScheduledRun: string + } + } + workflows?: { + [workflowSlug: WorkflowTypes]: { + lastScheduledRun: string + } + } + } + } + } + } +} + +/** + * Global config for job statistics. + */ +export const getJobStatsGlobal: (config: Config) => GlobalConfig = (config) => { + return { + slug: jobStatsGlobalSlug, + fields: [ + { + name: 'stats', + type: 'json', + }, + ], + } +} diff --git a/packages/payload/src/queues/config/types/index.ts b/packages/payload/src/queues/config/types/index.ts index 6bf730f44f..9ea4ff2233 100644 --- a/packages/payload/src/queues/config/types/index.ts +++ b/packages/payload/src/queues/config/types/index.ts @@ -1,10 +1,12 @@ -import type { CollectionConfig } from '../../../index.js' +import type { CollectionConfig, Job } from '../../../index.js' import type { Payload, PayloadRequest, Sort } from '../../../types/index.js' +import type { RunJobsSilent } from '../../localAPI.js' import type { RunJobsArgs } from '../../operations/runJobs/index.js' +import type { JobStats } from '../global.js' import type { TaskConfig } from './taskTypes.js' import type { WorkflowConfig } from './workflowTypes.js' -export type CronConfig = { +export type AutorunCronConfig = { /** * The cron schedule for the job. * @default '* * * * *' (every minute). @@ -26,6 +28,15 @@ export type CronConfig = { * - '* * * * * *' every second */ cron?: string + /** + * By default, the autorun will attempt to schedule jobs for tasks and workflows that have a `schedule` property, given + * the queue name is the same. + * + * Set this to `true` to disable the scheduling of jobs automatically. + * + * @default false + */ + disableScheduling?: boolean /** * The limit for the job. This can be overridden by the user. Defaults to 10. */ @@ -34,6 +45,15 @@ export type CronConfig = { * The queue name for the job. */ queue?: string + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent } export type RunJobAccessArgs = { @@ -48,6 +68,16 @@ export type SanitizedJobsConfig = { * This property is automatically set during sanitization. */ enabled?: boolean + /** + * If set to `true`, at least one task or workflow has scheduling enabled. + * This property is automatically set during sanitization. + */ + scheduling?: boolean + /** + * If set to `true`, a payload-job-stats global exists. + * This property is automatically set during sanitization. 
+ */ + stats?: boolean } & JobsConfig export type JobsConfig = { /** @@ -73,7 +103,9 @@ export type JobsConfig = { * * @remark this property should not be used on serverless platforms like Vercel */ - autoRun?: ((payload: Payload) => CronConfig[] | Promise) | CronConfig[] + autoRun?: + | ((payload: Payload) => AutorunCronConfig[] | Promise) + | AutorunCronConfig[] /** * Determine whether or not to delete a job after it has successfully completed. */ @@ -135,3 +167,104 @@ export type JobsConfig = { */ workflows?: WorkflowConfig[] } + +export type Queueable = { + scheduleConfig: ScheduleConfig + taskConfig?: TaskConfig + // If not set, queue it immediately + waitUntil?: Date + workflowConfig?: WorkflowConfig +} + +type OptionalPromise = Promise | T + +export type BeforeScheduleFn = (args: { + defaultBeforeSchedule: BeforeScheduleFn + /** + * payload-job-stats global data + */ + jobStats: JobStats + queueable: Queueable + req: PayloadRequest +}) => OptionalPromise<{ + input?: object + shouldSchedule: boolean + waitUntil?: Date +}> + +export type AfterScheduleFn = ( + args: { + defaultAfterSchedule: AfterScheduleFn + /** + * payload-job-stats global data. If the global does not exist, it will be null. + */ + jobStats: JobStats | null + queueable: Queueable + req: PayloadRequest + } & ( + | { + error: Error + job?: never + status: 'error' + } + | { + error?: never + job: Job + status: 'success' + } + | { + error?: never + job?: never + /** + * If the beforeSchedule hook returned `shouldSchedule: false`, this will be called with status `skipped`. + */ + status: 'skipped' + } + ), +) => OptionalPromise + +export type ScheduleConfig = { + /** + * The cron for scheduling the job. + * + * @example + * ┌───────────── (optional) second (0 - 59) + * │ ┌───────────── minute (0 - 59) + * │ │ ┌───────────── hour (0 - 23) + * │ │ │ ┌───────────── day of the month (1 - 31) + * │ │ │ │ ┌───────────── month (1 - 12) + * │ │ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday) + * │ │ │ │ │ │ + * │ │ │ │ │ │ + * - '* 0 * * * *' every hour at minute 0 + * - '* 0 0 * * *' daily at midnight + * - '* 0 0 * * 0' weekly at midnight on Sundays + * - '* 0 0 1 * *' monthly at midnight on the 1st day of the month + * - '* 0/5 * * * *' every 5 minutes + * - '* * * * * *' every second + */ + cron: string + hooks?: { + /** + * Functions that will be executed after the job has been successfully scheduled. + * + * @default By default, global update?? Unless global update should happen before + */ + afterSchedule?: AfterScheduleFn + /** + * Functions that will be executed before the job is scheduled. + * You can use this to control whether or not the job should be scheduled, or what input + * data should be passed to the job. + * + * @default By default, this has one function that returns { shouldSchedule: true } if the following conditions are met: + * - There currently is no job of the same type in the specified queue that is currently running + * - There currently is no job of the same type in the specified queue that is scheduled to run in the future + * - There currently is no job of the same type in the specified queue that failed previously but can be retried + */ + beforeSchedule?: BeforeScheduleFn + } + /** + * Queue to which the scheduled job will be added. 
+ */ + queue: string +} diff --git a/packages/payload/src/queues/config/types/taskTypes.ts b/packages/payload/src/queues/config/types/taskTypes.ts index fe9107b8f8..48edff6881 100644 --- a/packages/payload/src/queues/config/types/taskTypes.ts +++ b/packages/payload/src/queues/config/types/taskTypes.ts @@ -1,4 +1,5 @@ import type { Field, Job, PayloadRequest, StringKeyOf, TypedJobs } from '../../../index.js' +import type { ScheduleConfig } from './index.js' import type { SingleTaskStatus } from './workflowTypes.js' export type TaskInputOutput = { @@ -54,6 +55,9 @@ export type TaskHandler< args: TaskHandlerArgs, ) => Promise> | TaskHandlerResult +/** + * @todo rename to TaskSlug in 4.0, similar to CollectionSlug + */ export type TaskType = StringKeyOf // Extracts the type of `input` corresponding to each task @@ -233,6 +237,10 @@ export type TaskConfig< * @default By default, tasks are not retried and `retries` is `undefined`. */ retries?: number | RetryConfig | undefined + /** + * Allows automatically scheduling this task to run regularly at a specified interval. + */ + schedule?: ScheduleConfig[] /** * Define a slug-based name for this job. This slug needs to be unique among both tasks and workflows. */ diff --git a/packages/payload/src/queues/config/types/workflowTypes.ts b/packages/payload/src/queues/config/types/workflowTypes.ts index 6a4adc011b..8c16825102 100644 --- a/packages/payload/src/queues/config/types/workflowTypes.ts +++ b/packages/payload/src/queues/config/types/workflowTypes.ts @@ -7,6 +7,7 @@ import type { TypedJobs, } from '../../../index.js' import type { TaskParent } from '../../operations/runJobs/runJob/getRunTaskFunction.js' +import type { ScheduleConfig } from './index.js' import type { RetryConfig, RunInlineTaskFunction, @@ -53,6 +54,13 @@ export type BaseJob< ? TypedJobs['workflows'][TWorkflowSlugOrInput]['input'] : TWorkflowSlugOrInput log?: JobLog[] + meta?: { + [key: string]: unknown + /** + * If true, this job was queued by the scheduling system. + */ + scheduled?: boolean + } processing?: boolean queue?: string taskSlug?: null | TaskType @@ -63,6 +71,9 @@ export type BaseJob< workflowSlug?: null | WorkflowTypes } +/** + * @todo rename to WorkflowSlug in 4.0, similar to CollectionSlug + */ export type WorkflowTypes = StringKeyOf /** @@ -155,6 +166,10 @@ export type WorkflowConfig< * @default undefined. By default, workflows retries are defined by their tasks */ retries?: number | RetryConfig | undefined + /** + * Allows automatically scheduling this workflow to run regularly at a specified interval. + */ + schedule?: ScheduleConfig[] /** * Define a slug-based name for this job. */ diff --git a/packages/payload/src/queues/endpoints/handleSchedules.ts b/packages/payload/src/queues/endpoints/handleSchedules.ts new file mode 100644 index 0000000000..385cb496e9 --- /dev/null +++ b/packages/payload/src/queues/endpoints/handleSchedules.ts @@ -0,0 +1,66 @@ +import type { Endpoint } from '../../config/types.js' + +import { handleSchedules } from '../operations/handleSchedules/index.js' +import { configHasJobs } from './run.js' + +/** + * GET /api/payload-jobs/handle-schedules endpoint + * + * This endpoint is GET instead of POST to allow it to be used in a Vercel Cron. 
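+ * + * Illustrative request, assuming the default `/api` prefix and a hypothetical `nightly` queue (omit `queue` to schedule every queue): + * + * GET /api/payload-jobs/handle-schedules?queue=nightly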
+ */ +export const handleSchedulesJobsEndpoint: Endpoint = { + handler: async (req) => { + const jobsConfig = req.payload.config.jobs + + if (!configHasJobs(jobsConfig)) { + return Response.json( + { + message: 'No jobs to schedule.', + }, + { status: 200 }, + ) + } + + const accessFn = jobsConfig.access?.run ?? (() => true) + + const hasAccess = await accessFn({ req }) + + if (!hasAccess) { + return Response.json( + { + message: req.i18n.t('error:unauthorized'), + }, + { status: 401 }, + ) + } + + if (!jobsConfig.scheduling) { + // There is no reason to call the handleSchedules endpoint if the stats global is not enabled (= no schedules defined) + return Response.json( + { + message: + 'Cannot handle schedules because no tasks or workflows with schedules are defined.', + }, + { status: 500 }, + ) + } + + const { queue } = req.query as { + queue?: string + } + + const { errored, queued, skipped } = await handleSchedules({ queue, req }) + + return Response.json( + { + errored, + message: req.i18n.t('general:success'), + queued, + skipped, + }, + { status: 200 }, + ) + }, + method: 'get', + path: '/handle-schedules', +} diff --git a/packages/payload/src/queues/endpoints/run.ts b/packages/payload/src/queues/endpoints/run.ts new file mode 100644 index 0000000000..a362a7d2cc --- /dev/null +++ b/packages/payload/src/queues/endpoints/run.ts @@ -0,0 +1,118 @@ +import type { Endpoint } from '../../config/types.js' +import type { SanitizedJobsConfig } from '../config/types/index.js' + +import { runJobs, type RunJobsArgs } from '../operations/runJobs/index.js' + +/** + * /api/payload-jobs/run endpoint + * + * This endpoint is GET instead of POST to allow it to be used in a Vercel Cron. + */ +export const runJobsEndpoint: Endpoint = { + handler: async (req) => { + const jobsConfig = req.payload.config.jobs + + if (!configHasJobs(jobsConfig)) { + return Response.json( + { + message: 'No jobs to run.', + }, + { status: 200 }, + ) + } + + const accessFn = jobsConfig.access?.run ?? (() => true) + + const hasAccess = await accessFn({ req }) + + if (!hasAccess) { + return Response.json( + { + message: req.i18n.t('error:unauthorized'), + }, + { status: 401 }, + ) + } + + const { + allQueues, + disableScheduling: disableSchedulingParam, + limit, + queue, + silent: silentParam, + } = req.query as { + allQueues?: 'false' | 'true' + disableScheduling?: 'false' | 'true' + limit?: number + queue?: string + silent?: string + } + + const silent = silentParam === 'true' + + const shouldHandleSchedules = disableSchedulingParam !== 'true' + + const runAllQueues = allQueues && !(typeof allQueues === 'string' && allQueues === 'false') + + if (shouldHandleSchedules && jobsConfig.scheduling) { + // If should handle schedules and schedules are defined + await req.payload.jobs.handleSchedules({ queue: runAllQueues ? 
undefined : queue, req }) + } + + const runJobsArgs: RunJobsArgs = { + queue, + req, + // Access is validated above, so it's safe to override here + allQueues: runAllQueues, + overrideAccess: true, + silent, + } + + if (typeof queue === 'string') { + runJobsArgs.queue = queue + } + + const parsedLimit = Number(limit) + if (!isNaN(parsedLimit)) { + runJobsArgs.limit = parsedLimit + } + + let noJobsRemaining = false + let remainingJobsFromQueried = 0 + try { + const result = await runJobs(runJobsArgs) + noJobsRemaining = !!result.noJobsRemaining + remainingJobsFromQueried = result.remainingJobsFromQueried + } catch (err) { + req.payload.logger.error({ + err, + msg: 'There was an error running jobs:', + queue: runJobsArgs.queue, + }) + + return Response.json( + { + message: req.i18n.t('error:unknown'), + noJobsRemaining: true, + remainingJobsFromQueried, + }, + { status: 500 }, + ) + } + + return Response.json( + { + message: req.i18n.t('general:success'), + noJobsRemaining, + remainingJobsFromQueried, + }, + { status: 200 }, + ) + }, + method: 'get', + path: '/run', +} + +export const configHasJobs = (jobsConfig: SanitizedJobsConfig): boolean => { + return Boolean(jobsConfig.tasks?.length || jobsConfig.workflows?.length) +} diff --git a/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts b/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts index e8ff239e8e..0214ecd141 100644 --- a/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts +++ b/packages/payload/src/queues/errors/calculateBackoffWaitUntil.ts @@ -1,5 +1,7 @@ import type { RetryConfig } from '../config/types/taskTypes.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' + export function calculateBackoffWaitUntil({ retriesConfig, totalTried, @@ -7,23 +9,23 @@ export function calculateBackoffWaitUntil({ retriesConfig: number | RetryConfig totalTried: number }): Date { - let waitUntil: Date = new Date() + let waitUntil: Date = getCurrentDate() if (typeof retriesConfig === 'object') { if (retriesConfig.backoff) { if (retriesConfig.backoff.type === 'fixed') { waitUntil = retriesConfig.backoff.delay - ? new Date(new Date().getTime() + retriesConfig.backoff.delay) - : new Date() + ? new Date(getCurrentDate().getTime() + retriesConfig.backoff.delay) + : getCurrentDate() } else if (retriesConfig.backoff.type === 'exponential') { // 2 ^ (attempts - 1) * delay (current attempt is not included in totalTried, thus no need for -1) const delay = retriesConfig.backoff.delay ? 
retriesConfig.backoff.delay : 0 - waitUntil = new Date(new Date().getTime() + Math.pow(2, totalTried) * delay) + waitUntil = new Date(getCurrentDate().getTime() + Math.pow(2, totalTried) * delay) } } } /* - const differenceInMSBetweenNowAndWaitUntil = waitUntil.getTime() - new Date().getTime() + const differenceInMSBetweenNowAndWaitUntil = waitUntil.getTime() - getCurrentDate().getTime() const differenceInSBetweenNowAndWaitUntil = differenceInMSBetweenNowAndWaitUntil / 1000 console.log('Calculated backoff', { diff --git a/packages/payload/src/queues/errors/handleTaskError.ts b/packages/payload/src/queues/errors/handleTaskError.ts index 3d6b491c95..3366b0a15b 100644 --- a/packages/payload/src/queues/errors/handleTaskError.ts +++ b/packages/payload/src/queues/errors/handleTaskError.ts @@ -1,9 +1,11 @@ import ObjectIdImport from 'bson-objectid' import type { PayloadRequest } from '../../index.js' +import type { RunJobsSilent } from '../localAPI.js' import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js' import type { TaskError } from './index.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' import { calculateBackoffWaitUntil } from './calculateBackoffWaitUntil.js' import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' @@ -13,10 +15,20 @@ const ObjectId = (ObjectIdImport.default || export async function handleTaskError({ error, req, + silent = false, updateJob, }: { error: TaskError req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction }): Promise<{ hasFinalError: boolean @@ -46,7 +58,7 @@ export async function handleTaskError({ stack: error.stack, } - const currentDate = new Date() + const currentDate = getCurrentDate() ;(job.log ??= []).push({ id: new ObjectId().toHexString(), @@ -102,12 +114,14 @@ export async function handleTaskError({ waitUntil: job.waitUntil, }) - req.payload.logger.error({ - err: error, - job, - msg: `Error running task ${taskID}. Attempt ${job.totalTried} - max retries reached`, - taskSlug, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + job, + msg: `Error running task ${taskID}. Attempt ${job.totalTried} - max retries reached`, + taskSlug, + }) + } return { hasFinalError: true, } @@ -135,12 +149,14 @@ export async function handleTaskError({ retriesConfig: workflowConfig.retries, }) - req.payload.logger.error({ - err: error, - job, - msg: `Error running task ${taskID}. Attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, - taskSlug, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + job, + msg: `Error running task ${taskID}. Attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? 
'/' + (maxWorkflowRetries + 1) : ''}`, + taskSlug, + }) + } // Update job's waitUntil only if this waitUntil is later than the current one if (waitUntil && (!job.waitUntil || waitUntil > new Date(job.waitUntil))) { diff --git a/packages/payload/src/queues/errors/handleWorkflowError.ts b/packages/payload/src/queues/errors/handleWorkflowError.ts index 6c5fed8da2..2716aebdec 100644 --- a/packages/payload/src/queues/errors/handleWorkflowError.ts +++ b/packages/payload/src/queues/errors/handleWorkflowError.ts @@ -1,7 +1,9 @@ import type { PayloadRequest } from '../../index.js' +import type { RunJobsSilent } from '../localAPI.js' import type { UpdateJobFunction } from '../operations/runJobs/runJob/getUpdateJobFunction.js' import type { WorkflowError } from './index.js' +import { getCurrentDate } from '../utilities/getCurrentDate.js' import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' /** @@ -15,10 +17,20 @@ import { getWorkflowRetryBehavior } from './getWorkflowRetryBehavior.js' export async function handleWorkflowError({ error, req, + silent = false, updateJob, }: { error: WorkflowError req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction }): Promise<{ hasFinalError: boolean @@ -41,7 +53,7 @@ export async function handleWorkflowError({ if (job.waitUntil) { // Check if waitUntil is in the past const waitUntil = new Date(job.waitUntil) - if (waitUntil < new Date()) { + if (waitUntil < getCurrentDate()) { // Outdated waitUntil, remove it delete job.waitUntil } @@ -55,10 +67,12 @@ export async function handleWorkflowError({ const jobLabel = job.workflowSlug || `Task: ${job.taskSlug}` - req.payload.logger.error({ - err: error, - msg: `Error running job ${jobLabel} id: ${job.id} attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + req.payload.logger.error({ + err: error, + msg: `Error running job ${jobLabel} id: ${job.id} attempt ${job.totalTried + 1}${maxWorkflowRetries !== undefined ? '/' + (maxWorkflowRetries + 1) : ''}`, + }) + } // Tasks update the job if they error - but in case there is an unhandled error (e.g. 
in the workflow itself, not in a task) // we need to ensure the job is updated to reflect the error diff --git a/packages/payload/src/queues/localAPI.ts b/packages/payload/src/queues/localAPI.ts index f1449c5664..c38a64868f 100644 --- a/packages/payload/src/queues/localAPI.ts +++ b/packages/payload/src/queues/localAPI.ts @@ -1,4 +1,4 @@ -import type { RunningJobFromTask } from './config/types/workflowTypes.js' +import type { BaseJob, RunningJobFromTask } from './config/types/workflowTypes.js' import { createLocalReq, @@ -9,11 +9,37 @@ import { type TypedJobs, type Where, } from '../index.js' -import { jobAfterRead, jobsCollectionSlug } from './config/index.js' +import { jobAfterRead, jobsCollectionSlug } from './config/collection.js' +import { handleSchedules, type HandleSchedulesResult } from './operations/handleSchedules/index.js' import { runJobs } from './operations/runJobs/index.js' import { updateJob, updateJobs } from './utilities/updateJob.js' +export type RunJobsSilent = + | { + error?: boolean + info?: boolean + } + | boolean export const getJobsLocalAPI = (payload: Payload) => ({ + handleSchedules: async (args?: { + // By default, schedule all queues - only scheduling jobs scheduled to be added to the `default` queue would not make sense + // here, as you'd usually specify a different queue than `default` here, especially if this is used in combination with autorun. + // The `queue` property for setting up schedules is required, and not optional. + /** + * If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name. + * + * @default all jobs for all queues will be scheduled. + */ + queue?: string + req?: PayloadRequest + }): Promise => { + const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload)) + + return await handleSchedules({ + queue: args?.queue, + req: newReq, + }) + }, queue: async < // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents TTaskOrWorkflowSlug extends keyof TypedJobs['tasks'] | keyof TypedJobs['workflows'], @@ -21,6 +47,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ args: | { input: TypedJobs['tasks'][TTaskOrWorkflowSlug]['input'] + meta?: BaseJob['meta'] queue?: string req?: PayloadRequest // TTaskOrWorkflowlug with keyof TypedJobs['workflows'] removed: @@ -30,6 +57,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ } | { input: TypedJobs['workflows'][TTaskOrWorkflowSlug]['input'] + meta?: BaseJob['meta'] queue?: string req?: PayloadRequest task?: never @@ -74,6 +102,10 @@ export const getJobsLocalAPI = (payload: Payload) => ({ data.taskSlug = args.task as string } + if (args.meta) { + data.meta = args.meta + } + type ReturnType = TTaskOrWorkflowSlug extends keyof TypedJobs['workflows'] ? Job : RunningJobFromTask // Type assertion is still needed here @@ -130,6 +162,15 @@ export const getJobsLocalAPI = (payload: Payload) => ({ * If you want to run them in sequence, set this to true. */ sequential?: boolean + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent where?: Where }): Promise> => { const newReq: PayloadRequest = args?.req ?? 
(await createLocalReq({}, payload)) @@ -142,6 +183,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ queue: args?.queue, req: newReq, sequential: args?.sequential, + silent: args?.silent, where: args?.where, }) }, @@ -150,6 +192,15 @@ export const getJobsLocalAPI = (payload: Payload) => ({ id: number | string overrideAccess?: boolean req?: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent }): Promise> => { const newReq: PayloadRequest = args.req ?? (await createLocalReq({}, payload)) @@ -157,6 +208,7 @@ export const getJobsLocalAPI = (payload: Payload) => ({ id: args.id, overrideAccess: args.overrideAccess !== false, req: newReq, + silent: args.silent, }) }, diff --git a/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts b/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts new file mode 100644 index 0000000000..713cfa2048 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/countRunnableOrActiveJobsForQueue.ts @@ -0,0 +1,74 @@ +import type { PayloadRequest, Where } from '../../../types/index.js' +import type { TaskType } from '../../config/types/taskTypes.js' +import type { WorkflowTypes } from '../../config/types/workflowTypes.js' + +/** + * Gets all queued jobs that can be run. This means they either: + * - failed but do not have a definitive error => can be retried + * - are currently processing + * - have not been started yet + */ +export async function countRunnableOrActiveJobsForQueue({ + onlyScheduled = false, + queue, + req, + taskSlug, + workflowSlug, +}: { + /** + * If true, this counts only jobs that have been created through the scheduling system. 
+ * + * @default false + */ + onlyScheduled?: boolean + queue: string + req: PayloadRequest + taskSlug?: TaskType + workflowSlug?: WorkflowTypes +}): Promise { + const and: Where[] = [ + { + queue: { + equals: queue, + }, + }, + + { + completedAt: { exists: false }, + }, + { + error: { exists: false }, + }, + ] + + if (taskSlug) { + and.push({ + taskSlug: { + equals: taskSlug, + }, + }) + } else if (workflowSlug) { + and.push({ + workflowSlug: { + equals: workflowSlug, + }, + }) + } + if (onlyScheduled) { + and.push({ + 'meta.scheduled': { + equals: true, + }, + }) + } + + const runnableOrActiveJobsForQueue = await req.payload.db.count({ + collection: 'payload-jobs', + req, + where: { + and, + }, + }) + + return runnableOrActiveJobsForQueue.totalDocs +} diff --git a/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts b/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts new file mode 100644 index 0000000000..4627c407c6 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/defaultAfterSchedule.ts @@ -0,0 +1,64 @@ +import type { AfterScheduleFn } from '../../config/types/index.js' + +import { type JobStats, jobStatsGlobalSlug } from '../../config/global.js' +import { getCurrentDate } from '../../utilities/getCurrentDate.js' + +type JobStatsScheduledRuns = NonNullable< + NonNullable['scheduledRuns']>['queues'] +>[string] + +export const defaultAfterSchedule: AfterScheduleFn = async ({ jobStats, queueable, req }) => { + const existingQueuesConfig = + jobStats?.stats?.scheduledRuns?.queues?.[queueable.scheduleConfig.queue] || {} + + const queueConfig: JobStatsScheduledRuns = { + ...existingQueuesConfig, + } + if (queueable.taskConfig) { + ;(queueConfig.tasks ??= {})[queueable.taskConfig.slug] = { + lastScheduledRun: getCurrentDate().toISOString(), + } + } else if (queueable.workflowConfig) { + ;(queueConfig.workflows ??= {})[queueable.workflowConfig.slug] = { + lastScheduledRun: getCurrentDate().toISOString(), + } + } + + // Add to payload-jobs-stats global regardless of the status + if (jobStats) { + await req.payload.db.updateGlobal({ + slug: jobStatsGlobalSlug, + data: { + ...(jobStats || {}), + stats: { + ...(jobStats?.stats || {}), + scheduledRuns: { + ...(jobStats?.stats?.scheduledRuns || {}), + queues: { + ...(jobStats?.stats?.scheduledRuns?.queues || {}), + [queueable.scheduleConfig.queue]: queueConfig, + }, + }, + }, + } as JobStats, + req, + returning: false, + }) + } else { + await req.payload.db.createGlobal({ + slug: jobStatsGlobalSlug, + data: { + createdAt: getCurrentDate().toISOString(), + stats: { + scheduledRuns: { + queues: { + [queueable.scheduleConfig.queue]: queueConfig, + }, + }, + }, + } as JobStats, + req, + returning: false, + }) + } +} diff --git a/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts b/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts new file mode 100644 index 0000000000..96b8092258 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/defaultBeforeSchedule.ts @@ -0,0 +1,20 @@ +import type { BeforeScheduleFn } from '../../config/types/index.js' + +import { countRunnableOrActiveJobsForQueue } from './countRunnableOrActiveJobsForQueue.js' + +export const defaultBeforeSchedule: BeforeScheduleFn = async ({ queueable, req }) => { + // All tasks in that queue that are either currently processing or can be run + const runnableOrActiveJobsForQueue = await countRunnableOrActiveJobsForQueue({ + onlyScheduled: 
true, + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug, + workflowSlug: queueable.workflowConfig?.slug, + }) + + return { + input: {}, + shouldSchedule: runnableOrActiveJobsForQueue === 0, + waitUntil: queueable.waitUntil, + } +} diff --git a/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts b/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts new file mode 100644 index 0000000000..817de244f6 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/getQueuesWithSchedules.ts @@ -0,0 +1,50 @@ +import type { SanitizedJobsConfig, ScheduleConfig } from '../../config/types/index.js' +import type { TaskConfig } from '../../config/types/taskTypes.js' +import type { WorkflowConfig } from '../../config/types/workflowTypes.js' + +type QueuesWithSchedules = { + [queue: string]: { + schedules: { + scheduleConfig: ScheduleConfig + taskConfig?: TaskConfig + workflowConfig?: WorkflowConfig + }[] + } +} + +export const getQueuesWithSchedules = ({ + jobsConfig, +}: { + jobsConfig: SanitizedJobsConfig +}): QueuesWithSchedules => { + const tasksWithSchedules = + jobsConfig.tasks?.filter((task) => { + return task.schedule?.length + }) ?? [] + + const workflowsWithSchedules = + jobsConfig.workflows?.filter((workflow) => { + return workflow.schedule?.length + }) ?? [] + + const queuesWithSchedules: QueuesWithSchedules = {} + + for (const task of tasksWithSchedules) { + for (const schedule of task.schedule ?? []) { + ;(queuesWithSchedules[schedule.queue] ??= { schedules: [] }).schedules.push({ + scheduleConfig: schedule, + taskConfig: task, + }) + } + } + for (const workflow of workflowsWithSchedules) { + for (const schedule of workflow.schedule ?? []) { + ;(queuesWithSchedules[schedule.queue] ??= { schedules: [] }).schedules.push({ + scheduleConfig: schedule, + workflowConfig: workflow, + }) + } + } + + return queuesWithSchedules +} diff --git a/packages/payload/src/queues/operations/handleSchedules/index.ts b/packages/payload/src/queues/operations/handleSchedules/index.ts new file mode 100644 index 0000000000..b5daefccb9 --- /dev/null +++ b/packages/payload/src/queues/operations/handleSchedules/index.ts @@ -0,0 +1,223 @@ +import { Cron } from 'croner' + +import type { Job, TaskConfig, WorkflowConfig } from '../../../index.js' +import type { PayloadRequest } from '../../../types/index.js' +import type { BeforeScheduleFn, Queueable, ScheduleConfig } from '../../config/types/index.js' + +import { type JobStats, jobStatsGlobalSlug } from '../../config/global.js' +import { defaultAfterSchedule } from './defaultAfterSchedule.js' +import { defaultBeforeSchedule } from './defaultBeforeSchedule.js' +import { getQueuesWithSchedules } from './getQueuesWithSchedules.js' + +export type HandleSchedulesResult = { + errored: Queueable[] + queued: Queueable[] + skipped: Queueable[] +} + +/** + * On vercel, we cannot auto-schedule jobs using a Cron - instead, we'll use this same endpoint that can + * also be called from Vercel Cron for auto-running jobs. + * + * The benefit of doing it like this instead of a separate endpoint is that we can run jobs immediately + * after they are scheduled + */ +export async function handleSchedules({ + queue, + req, +}: { + /** + * If you want to only schedule jobs that are set to schedule in a specific queue, set this to the queue name. + * + * @default all jobs for all queues will be scheduled. 
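+ * + * A minimal sketch using the local API wrapper (the queue name is an assumption): + * + * await payload.jobs.handleSchedules({ queue: 'nightly' })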
+ */ + queue?: string + req: PayloadRequest +}): Promise { + const jobsConfig = req.payload.config.jobs + const queuesWithSchedules = getQueuesWithSchedules({ + jobsConfig, + }) + + const stats: JobStats = await req.payload.db.findGlobal({ + slug: jobStatsGlobalSlug, + req, + }) + + /** + * Almost last step! Tasks and Workflows added here just need to be constraint-checked (e.g max. 1 running task etc.), + * before we can queue them + */ + const queueables: Queueable[] = [] + + // Need to know when that particular job was last scheduled in that particular queue + + for (const [queueName, { schedules }] of Object.entries(queuesWithSchedules)) { + if (queue && queueName !== queue) { + // If a queue is specified, only schedule jobs for that queue + continue + } + for (const schedulable of schedules) { + const queuable = checkQueueableTimeConstraints({ + queue: queueName, + scheduleConfig: schedulable.scheduleConfig, + stats, + taskConfig: schedulable.taskConfig, + workflowConfig: schedulable.workflowConfig, + }) + if (queuable) { + queueables.push(queuable) + } + } + } + + const queued: Queueable[] = [] + const skipped: Queueable[] = [] + const errored: Queueable[] = [] + + /** + * Now queue, but check for constraints (= beforeSchedule) first. + * Default constraint (= defaultBeforeSchedule): max. 1 running / scheduled task or workflow per queue + */ + for (const queueable of queueables) { + const { status } = await scheduleQueueable({ + queueable, + req, + stats, + }) + switch (status) { + case 'error': + errored.push(queueable) + break + case 'skipped': + skipped.push(queueable) + break + case 'success': + queued.push(queueable) + break + } + } + return { + errored, + queued, + skipped, + } +} + +export function checkQueueableTimeConstraints({ + queue, + scheduleConfig, + stats, + taskConfig, + workflowConfig, +}: { + queue: string + scheduleConfig: ScheduleConfig + stats: JobStats + taskConfig?: TaskConfig + workflowConfig?: WorkflowConfig +}): false | Queueable { + const queueScheduleStats = stats?.stats?.scheduledRuns?.queues?.[queue] + + const lastScheduledRun = taskConfig + ? queueScheduleStats?.tasks?.[taskConfig.slug]?.lastScheduledRun + : queueScheduleStats?.workflows?.[workflowConfig?.slug ?? '']?.lastScheduledRun + + const nextRun = new Cron(scheduleConfig.cron).nextRun(lastScheduledRun ?? undefined) + + if (!nextRun) { + return false + } + return { + scheduleConfig, + taskConfig, + waitUntil: nextRun, + workflowConfig, + } +} + +export async function scheduleQueueable({ + queueable, + req, + stats, +}: { + queueable: Queueable + req: PayloadRequest + stats: JobStats +}): Promise<{ + job?: Job + status: 'error' | 'skipped' | 'success' +}> { + if (!queueable.taskConfig && !queueable.workflowConfig) { + return { + status: 'error', + } + } + + const beforeScheduleFn = queueable.scheduleConfig.hooks?.beforeSchedule + const afterScheduleFN = queueable.scheduleConfig.hooks?.afterSchedule + + try { + const beforeScheduleResult: Awaited> = await ( + beforeScheduleFn ?? defaultBeforeSchedule + )({ + // @ts-expect-error we know defaultBeforeSchedule will never call itself => pass null + defaultBeforeSchedule: beforeScheduleFn ? defaultBeforeSchedule : null, + jobStats: stats, + queueable, + req, + }) + + if (!beforeScheduleResult.shouldSchedule) { + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? 
defaultAfterSchedule : null, + jobStats: stats, + queueable, + req, + status: 'skipped', + }) + return { + status: 'skipped', + } + } + + const job = (await req.payload.jobs.queue({ + input: beforeScheduleResult.input ?? {}, + meta: { + scheduled: true, + }, + queue: queueable.scheduleConfig.queue, + req, + task: queueable?.taskConfig?.slug, + waitUntil: beforeScheduleResult.waitUntil, + workflow: queueable.workflowConfig?.slug, + } as Parameters[0])) as unknown as Job + + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? defaultAfterSchedule : null, + job, + jobStats: stats, + queueable, + req, + status: 'success', + }) + return { + status: 'success', + } + } catch (error) { + await (afterScheduleFN ?? defaultAfterSchedule)({ + // @ts-expect-error we know defaultAfterchedule will never call itself => pass null + defaultAfterSchedule: afterScheduleFN ? defaultAfterSchedule : null, + error: error as Error, + jobStats: stats, + queueable, + req, + status: 'error', + }) + return { + status: 'error', + } + } +} diff --git a/packages/payload/src/queues/operations/runJobs/index.ts b/packages/payload/src/queues/operations/runJobs/index.ts index c626103467..9530594788 100644 --- a/packages/payload/src/queues/operations/runJobs/index.ts +++ b/packages/payload/src/queues/operations/runJobs/index.ts @@ -2,12 +2,14 @@ import type { Job } from '../../../index.js' import type { PayloadRequest, Sort, Where } from '../../../types/index.js' import type { WorkflowJSON } from '../../config/types/workflowJSONTypes.js' import type { WorkflowConfig, WorkflowHandler } from '../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../localAPI.js' import type { RunJobResult } from './runJob/index.js' import { Forbidden } from '../../../errors/Forbidden.js' import { isolateObjectProperty } from '../../../utilities/isolateObjectProperty.js' -import { jobsCollectionSlug } from '../../config/index.js' +import { jobsCollectionSlug } from '../../config/collection.js' import { JobCancelledError } from '../../errors/index.js' +import { getCurrentDate } from '../../utilities/getCurrentDate.js' import { updateJob, updateJobs } from '../../utilities/updateJob.js' import { getUpdateJobFunction } from './runJob/getUpdateJobFunction.js' import { importHandlerPath } from './runJob/importHandlerPath.js' @@ -53,6 +55,15 @@ export type RunJobsArgs = { * If you want to run them in sequence, set this to true. */ sequential?: boolean + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). 
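+ * + * Illustrative values: `silent: true` hides all job-system logs, while an object such as `silent: { info: true }` would hide only the info logs and keep error logs.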
+ * + * @default false + */ + silent?: RunJobsSilent where?: Where } @@ -84,6 +95,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }, }, sequential, + silent = false, where: whereFromProps, } = args @@ -119,7 +131,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }, { waitUntil: { - less_than: new Date().toISOString(), + less_than: getCurrentDate().toISOString(), }, }, ], @@ -219,11 +231,13 @@ export const runJobs = async (args: RunJobsArgs): Promise => { } } - payload.logger.info({ - msg: `Running ${jobs.length} jobs.`, - new: newJobs?.length, - retrying: existingJobs?.length, - }) + if (!silent || (typeof silent === 'object' && !silent.info)) { + payload.logger.info({ + msg: `Running ${jobs.length} jobs.`, + new: newJobs?.length, + retrying: existingJobs?.length, + }) + } const successfullyCompletedJobs: (number | string)[] = [] @@ -277,7 +291,9 @@ export const runJobs = async (args: RunJobsArgs): Promise => { if (!workflowHandler) { const jobLabel = job.workflowSlug || `Task: ${job.taskSlug}` const errorMessage = `Can't find runner while importing with the path ${workflowConfig.handler} in job type ${jobLabel}.` - payload.logger.error(errorMessage) + if (!silent || (typeof silent === 'object' && !silent.error)) { + payload.logger.error(errorMessage) + } await updateJob({ error: { @@ -300,6 +316,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { const result = await runJob({ job, req: jobReq, + silent, updateJob, workflowConfig, workflowHandler, @@ -314,6 +331,7 @@ export const runJobs = async (args: RunJobsArgs): Promise => { const result = await runJSONJob({ job, req: jobReq, + silent, updateJob, workflowConfig, workflowHandler, @@ -370,10 +388,12 @@ export const runJobs = async (args: RunJobsArgs): Promise => { }) } } catch (err) { - payload.logger.error({ - err, - msg: `Failed to delete jobs ${successfullyCompletedJobs.join(', ')} on complete`, - }) + if (!silent || (typeof silent === 'object' && !silent.error)) { + payload.logger.error({ + err, + msg: `Failed to delete jobs ${successfullyCompletedJobs.join(', ')} on complete`, + }) + } } } diff --git a/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts index 66b0156724..87f5995904 100644 --- a/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts +++ b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts @@ -2,16 +2,27 @@ import type { Job } from '../../../../index.js' import type { PayloadRequest } from '../../../../types/index.js' import type { WorkflowJSON, WorkflowStep } from '../../../config/types/workflowJSONTypes.js' import type { WorkflowConfig } from '../../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../../localAPI.js' import type { UpdateJobFunction } from '../runJob/getUpdateJobFunction.js' import type { JobRunStatus } from '../runJob/index.js' import { handleWorkflowError } from '../../../errors/handleWorkflowError.js' import { WorkflowError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getRunTaskFunction } from '../runJob/getRunTaskFunction.js' type Args = { job: Job req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. 
if you call `console.log` or `payload.logger.info` in your job code). + * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction workflowConfig: WorkflowConfig workflowHandler: WorkflowJSON @@ -24,6 +35,7 @@ export type RunJSONJobResult = { export const runJSONJob = async ({ job, req, + silent = false, updateJob, workflowConfig, workflowHandler, @@ -79,6 +91,7 @@ export const runJSONJob = async ({ : 'An unhandled error occurred', workflowConfig, }), + silent, req, updateJob, @@ -111,7 +124,7 @@ export const runJSONJob = async ({ if (workflowCompleted) { await updateJob({ - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), processing: false, totalTried: (job.totalTried ?? 0) + 1, }) diff --git a/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts index 868ac5602d..aa9f171567 100644 --- a/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts +++ b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts @@ -20,6 +20,7 @@ import type { import type { UpdateJobFunction } from './getUpdateJobFunction.js' import { TaskError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getTaskHandlerFromConfig } from './importHandlerPath.js' const ObjectId = (ObjectIdImport.default || @@ -54,7 +55,7 @@ export const getRunTaskFunction = ( task, }: Parameters[1] & Parameters>[1], ) => { - const executedAt = new Date() + const executedAt = getCurrentDate() let taskConfig: TaskConfig | undefined if (!isInline) { @@ -186,7 +187,7 @@ export const getRunTaskFunction = ( ;(job.log ??= []).push({ id: new ObjectId().toHexString(), - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), executedAt: executedAt.toISOString(), input, output, diff --git a/packages/payload/src/queues/operations/runJobs/runJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJob/index.ts index fe8f6256ef..c92599e298 100644 --- a/packages/payload/src/queues/operations/runJobs/runJob/index.ts +++ b/packages/payload/src/queues/operations/runJobs/runJob/index.ts @@ -1,16 +1,27 @@ import type { Job } from '../../../../index.js' import type { PayloadRequest } from '../../../../types/index.js' import type { WorkflowConfig, WorkflowHandler } from '../../../config/types/workflowTypes.js' +import type { RunJobsSilent } from '../../../localAPI.js' import type { UpdateJobFunction } from './getUpdateJobFunction.js' import { handleTaskError } from '../../../errors/handleTaskError.js' import { handleWorkflowError } from '../../../errors/handleWorkflowError.js' import { JobCancelledError, TaskError, WorkflowError } from '../../../errors/index.js' +import { getCurrentDate } from '../../../utilities/getCurrentDate.js' import { getRunTaskFunction } from './getRunTaskFunction.js' type Args = { job: Job req: PayloadRequest + /** + * If set to true, the job system will not log any output to the console (for both info and error logs). + * Can be an option for more granular control over logging. + * + * This will not automatically affect user-configured logs (e.g. if you call `console.log` or `payload.logger.info` in your job code). 
+ * + * @default false + */ + silent?: RunJobsSilent updateJob: UpdateJobFunction workflowConfig: WorkflowConfig workflowHandler: WorkflowHandler @@ -25,6 +36,7 @@ export type RunJobResult = { export const runJob = async ({ job, req, + silent, updateJob, workflowConfig, workflowHandler, @@ -45,6 +57,7 @@ export const runJob = async ({ const { hasFinalError } = await handleTaskError({ error, req, + silent, updateJob, }) @@ -66,6 +79,7 @@ export const runJob = async ({ workflowConfig, }), req, + silent, updateJob, }) @@ -76,7 +90,7 @@ export const runJob = async ({ // Workflow has completed successfully await updateJob({ - completedAt: new Date().toISOString(), + completedAt: getCurrentDate().toISOString(), log: job.log, processing: false, totalTried: (job.totalTried ?? 0) + 1, diff --git a/packages/payload/src/queues/restEndpointRun.ts b/packages/payload/src/queues/restEndpointRun.ts deleted file mode 100644 index 14c425a940..0000000000 --- a/packages/payload/src/queues/restEndpointRun.ts +++ /dev/null @@ -1,91 +0,0 @@ -import type { Endpoint, SanitizedConfig } from '../config/types.js' - -import { runJobs, type RunJobsArgs } from './operations/runJobs/index.js' - -const configHasJobs = (config: SanitizedConfig): boolean => { - return Boolean(config.jobs?.tasks?.length || config.jobs?.workflows?.length) -} - -/** - * /api/payload-jobs/run endpoint - */ -export const runJobsEndpoint: Endpoint = { - handler: async (req) => { - if (!configHasJobs(req.payload.config)) { - return Response.json( - { - message: 'No jobs to run.', - }, - { status: 200 }, - ) - } - - const accessFn = req.payload.config.jobs?.access?.run ?? (() => true) - - const hasAccess = await accessFn({ req }) - - if (!hasAccess) { - return Response.json( - { - message: req.i18n.t('error:unauthorized'), - }, - { status: 401 }, - ) - } - - const { allQueues, limit, queue } = req.query as { - allQueues?: boolean - limit?: number - queue?: string - } - - const runJobsArgs: RunJobsArgs = { - queue, - req, - // We are checking access above, so we can override it here - overrideAccess: true, - } - - if (typeof limit !== 'undefined') { - runJobsArgs.limit = Number(limit) - } - - if (allQueues && !(typeof allQueues === 'string' && allQueues === 'false')) { - runJobsArgs.allQueues = true - } - - let noJobsRemaining = false - let remainingJobsFromQueried = 0 - try { - const result = await runJobs(runJobsArgs) - noJobsRemaining = !!result.noJobsRemaining - remainingJobsFromQueried = result.remainingJobsFromQueried - } catch (err) { - req.payload.logger.error({ - err, - msg: 'There was an error running jobs:', - queue: runJobsArgs.queue, - }) - - return Response.json( - { - message: req.i18n.t('error:unknown'), - noJobsRemaining: true, - remainingJobsFromQueried, - }, - { status: 500 }, - ) - } - - return Response.json( - { - message: req.i18n.t('general:success'), - noJobsRemaining, - remainingJobsFromQueried, - }, - { status: 200 }, - ) - }, - method: 'get', - path: '/run', -} diff --git a/packages/payload/src/queues/utilities/getCurrentDate.ts b/packages/payload/src/queues/utilities/getCurrentDate.ts new file mode 100644 index 0000000000..6e0d67af3b --- /dev/null +++ b/packages/payload/src/queues/utilities/getCurrentDate.ts @@ -0,0 +1,21 @@ +/** + * Globals that are used by our integration tests to modify the behavior of the job system during runtime. + * This is useful to avoid having to wait for the cron jobs to run, or to pause auto-running jobs. 
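+ * + * Sketch of how a test might use these globals (the date value is an arbitrary example): + * + * _internal_jobSystemGlobals.shouldAutoRun = false + * _internal_jobSystemGlobals.getCurrentDate = () => new Date('2030-01-01T00:00:00.000Z') + * // ...run assertions, then restore via _internal_resetJobSystemGlobals()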
+ */ +export const _internal_jobSystemGlobals = { + getCurrentDate: () => { + return new Date() + }, + shouldAutoRun: true, + shouldAutoSchedule: true, +} + +export function _internal_resetJobSystemGlobals() { + _internal_jobSystemGlobals.getCurrentDate = () => new Date() + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true +} + +export const getCurrentDate: () => Date = () => { + return _internal_jobSystemGlobals.getCurrentDate() +} diff --git a/packages/payload/src/queues/utilities/updateJob.ts b/packages/payload/src/queues/utilities/updateJob.ts index 6ce4479eaa..a8a4ff69ee 100644 --- a/packages/payload/src/queues/utilities/updateJob.ts +++ b/packages/payload/src/queues/utilities/updateJob.ts @@ -3,7 +3,7 @@ import type { UpdateJobsArgs } from '../../database/types.js' import type { Job } from '../../index.js' import type { PayloadRequest, Sort, Where } from '../../types/index.js' -import { jobAfterRead, jobsCollectionSlug } from '../config/index.js' +import { jobAfterRead, jobsCollectionSlug } from '../config/collection.js' type BaseArgs = { data: Partial diff --git a/packages/payload/src/versions/deleteScheduledPublishJobs.ts b/packages/payload/src/versions/deleteScheduledPublishJobs.ts index 4020ad4fd6..6ce4199f8c 100644 --- a/packages/payload/src/versions/deleteScheduledPublishJobs.ts +++ b/packages/payload/src/versions/deleteScheduledPublishJobs.ts @@ -1,7 +1,7 @@ import type { PayloadRequest } from '../types/index.js' import { type Payload } from '../index.js' -import { jobsCollectionSlug } from '../queues/config/index.js' +import { jobsCollectionSlug } from '../queues/config/collection.js' type Args = { id?: number | string diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9f7e558f35..8012d5e446 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -823,8 +823,8 @@ importers: specifier: 2.12.1 version: 2.12.1 croner: - specifier: 9.0.0 - version: 9.0.0 + specifier: 9.1.0 + version: 9.1.0 dataloader: specifier: 2.2.3 version: 2.2.3 @@ -7401,8 +7401,8 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true - croner@9.0.0: - resolution: {integrity: sha512-onMB0OkDjkXunhdW9htFjEhqrD54+M94i6ackoUkjHKbRnXdyEyKRelp4nJ1kAz32+s27jP1FsebpJCVl0BsvA==} + croner@9.1.0: + resolution: {integrity: sha512-p9nwwR4qyT5W996vBZhdvBCnMhicY5ytZkR4D1Xj0wuTDEiMnjwR57Q3RXYY/s0EpX6Ay3vgIcfaR+ewGHsi+g==} engines: {node: '>=18.0'} cross-env@7.0.3: @@ -18799,7 +18799,7 @@ snapshots: - supports-color - ts-node - croner@9.0.0: {} + croner@9.1.0: {} cross-env@7.0.3: dependencies: diff --git a/test/helpers/initPayloadInt.ts b/test/helpers/initPayloadInt.ts index 2801b5985c..ffd2584ef3 100644 --- a/test/helpers/initPayloadInt.ts +++ b/test/helpers/initPayloadInt.ts @@ -13,15 +13,16 @@ export async function initPayloadInt { const testSuiteName = testSuiteNameOverride ?? path.basename(dirname) - await runInit(testSuiteName, false, true) - console.log('importing config', path.resolve(dirname, 'config.ts')) - const { default: config } = await import(path.resolve(dirname, 'config.ts')) + await runInit(testSuiteName, false, true, configFile) + console.log('importing config', path.resolve(dirname, configFile ?? 'config.ts')) + const { default: config } = await import(path.resolve(dirname, configFile ?? 
'config.ts')) if (initializePayload === false) { return { config: await config } as any diff --git a/test/initDevAndTest.ts b/test/initDevAndTest.ts index 791fea9ef8..2d10397287 100644 --- a/test/initDevAndTest.ts +++ b/test/initDevAndTest.ts @@ -17,6 +17,7 @@ export async function initDevAndTest( testSuiteArg: string, writeDBAdapter: string, skipGenImportMap: string, + configFile?: string, ): Promise { const importMapPath: string = path.resolve( getNextRootDir(testSuiteArg).rootDir, @@ -44,7 +45,7 @@ export async function initDevAndTest( const testDir = path.resolve(dirname, testSuiteArg) console.log('Generating import map for config:', testDir) - const configUrl = pathToFileURL(path.resolve(testDir, 'config.ts')).href + const configUrl = pathToFileURL(path.resolve(testDir, configFile ?? 'config.ts')).href const config: SanitizedConfig = await (await import(configUrl)).default process.env.ROOT_DIR = getNextRootDir(testSuiteArg).rootDir diff --git a/test/queues/config.schedules-autocron.ts b/test/queues/config.schedules-autocron.ts new file mode 100644 index 0000000000..7c5a063264 --- /dev/null +++ b/test/queues/config.schedules-autocron.ts @@ -0,0 +1,22 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' +import { EverySecondMax2Task } from './tasks/EverySecondMax2Task.js' +import { EverySecondTask } from './tasks/EverySecondTask.js' + +const config = getConfig() + +export default buildConfigWithDefaults({ + ...config, + jobs: { + ...config.jobs, + tasks: [...(config?.jobs?.tasks || []), EverySecondTask, EverySecondMax2Task], + autoRun: [ + { + // @ts-expect-error not undefined + ...config.jobs.autoRun[0], + disableScheduling: false, + }, + ], + }, +}) diff --git a/test/queues/config.schedules.ts b/test/queues/config.schedules.ts new file mode 100644 index 0000000000..a5435e9957 --- /dev/null +++ b/test/queues/config.schedules.ts @@ -0,0 +1,22 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' +import { EverySecondMax2Task } from './tasks/EverySecondMax2Task.js' +import { EverySecondTask } from './tasks/EverySecondTask.js' + +const config = getConfig() + +export default buildConfigWithDefaults({ + ...config, + jobs: { + ...config.jobs, + tasks: [...(config?.jobs?.tasks || []), EverySecondTask, EverySecondMax2Task], + autoRun: [ + { + // @ts-expect-error not undefined + ...config.jobs.autoRun[0], + disableScheduling: true, + }, + ], + }, +}) diff --git a/test/queues/config.ts b/test/queues/config.ts index 331d89fd1e..6c16a4bd2b 100644 --- a/test/queues/config.ts +++ b/test/queues/config.ts @@ -1,416 +1,4 @@ -import type { TaskConfig } from 'payload' - -import { lexicalEditor } from '@payloadcms/richtext-lexical' -import { fileURLToPath } from 'node:url' -import path from 'path' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { devUser } from '../credentials.js' -import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js' -import { seed } from './seed.js' -import { externalWorkflow } from './workflows/externalWorkflow.js' -import { failsImmediatelyWorkflow } from './workflows/failsImmediately.js' -import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js' -import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js' -import { longRunningWorkflow } from './workflows/longRunning.js' 
-import { noRetriesSetWorkflow } from './workflows/noRetriesSet.js' -import { parallelTaskWorkflow } from './workflows/parallelTaskWorkflow.js' -import { retries0Workflow } from './workflows/retries0.js' -import { retriesBackoffTestWorkflow } from './workflows/retriesBackoffTest.js' -import { retriesRollbackTestWorkflow } from './workflows/retriesRollbackTest.js' -import { retriesTestWorkflow } from './workflows/retriesTest.js' -import { retriesWorkflowLevelTestWorkflow } from './workflows/retriesWorkflowLevelTest.js' -import { subTaskWorkflow } from './workflows/subTask.js' -import { subTaskFailsWorkflow } from './workflows/subTaskFails.js' -import { updatePostWorkflow } from './workflows/updatePost.js' -import { updatePostJSONWorkflow } from './workflows/updatePostJSON.js' -import { workflowAndTasksRetriesUndefinedWorkflow } from './workflows/workflowAndTasksRetriesUndefined.js' -import { workflowRetries2TasksRetries0Workflow } from './workflows/workflowRetries2TasksRetries0.js' -import { workflowRetries2TasksRetriesUndefinedWorkflow } from './workflows/workflowRetries2TasksRetriesUndefined.js' +import { getConfig } from './getConfig.js' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) - -// eslint-disable-next-line no-restricted-exports -export default buildConfigWithDefaults({ - collections: [ - { - slug: 'posts', - admin: { - useAsTitle: 'title', - }, - hooks: { - afterChange: [ - async ({ req, doc, context }) => { - await req.payload.jobs.queue({ - workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost', - input: { - post: doc.id, - message: 'hello', - }, - req, - }) - }, - ], - }, - fields: [ - { - name: 'title', - type: 'text', - required: true, - }, - { - name: 'content', - type: 'richText', - }, - { - name: 'jobStep1Ran', - type: 'text', - }, - { - name: 'jobStep2Ran', - type: 'text', - }, - ], - }, - { - slug: 'simple', - admin: { - useAsTitle: 'title', - }, - fields: [ - { - name: 'title', - type: 'text', - required: true, - }, - ], - }, - ], - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - autoLogin: { - prefillOnly: true, - email: devUser.email, - password: devUser.password, - }, - }, - jobs: { - autoRun: [ - { - // Every second - cron: '* * * * * *', - limit: 100, - queue: 'autorunSecond', // name of the queue - }, - // add as many cron jobs as you want - ], - shouldAutoRun: () => true, - jobsCollectionOverrides: ({ defaultJobsCollection }) => { - return { - ...defaultJobsCollection, - admin: { - ...(defaultJobsCollection?.admin || {}), - hidden: false, - }, - } - }, - processingOrder: { - queues: { - lifo: '-createdAt', - }, - }, - tasks: [ - { - retries: 2, - slug: 'UpdatePost', - interfaceName: 'MyUpdatePostType', - inputSchema: [ - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - maxDepth: 0, - required: true, - }, - { - name: 'message', - type: 'text', - required: true, - }, - ], - outputSchema: [ - { - name: 'messageTwice', - type: 'text', - required: true, - }, - ], - handler: updatePostStep1, - } as TaskConfig<'UpdatePost'>, - { - retries: 2, - slug: 'UpdatePostStep2', - inputSchema: [ - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - maxDepth: 0, - required: true, - }, - { - name: 'messageTwice', - type: 'text', - required: true, - }, - ], - handler: updatePostStep2, - } as TaskConfig<'UpdatePostStep2'>, - { - retries: 3, - slug: 'CreateSimple', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - 
type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimple'>, - { - slug: 'CreateSimpleRetriesUndefined', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleRetriesUndefined'>, - { - slug: 'CreateSimpleRetries0', - retries: 0, - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleRetries0'>, - { - retries: 2, - slug: 'CreateSimpleWithDuplicateMessage', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - { - name: 'shouldFail', - type: 'checkbox', - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: async ({ input, req }) => { - if (input.shouldFail) { - throw new Error('Failed on purpose') - } - const newSimple = await req.payload.create({ - collection: 'simple', - req, - data: { - title: input.message + input.message, - }, - }) - return { - output: { - simpleID: newSimple.id, - }, - } - }, - } as TaskConfig<'CreateSimpleWithDuplicateMessage'>, - { - retries: 2, - slug: 'ExternalTask', - inputSchema: [ - { - name: 'message', - type: 'text', - required: true, - }, - ], - outputSchema: [ - { - name: 'simpleID', - type: 'text', - required: true, - }, - ], - handler: path.resolve(dirname, 'runners/externalTask.ts') + '#externalTaskHandler', - } as TaskConfig<'ExternalTask'>, - { - retries: 0, - slug: 'ThrowError', - inputSchema: [], - outputSchema: [], - handler: () => { - throw new Error('failed') - }, - } as TaskConfig<'ThrowError'>, - { - retries: 0, - slug: 'ReturnError', - inputSchema: [], - outputSchema: [], - handler: () => { - return { - state: 'failed', - } - }, - } as TaskConfig<'ReturnError'>, - { - retries: 0, - slug: 'ReturnCustomError', - inputSchema: [ - { - name: 'errorMessage', - type: 'text', - required: true, - }, - ], - outputSchema: [], - handler: ({ input }) => { - return { - state: 'failed', - errorMessage: input.errorMessage, - } - }, - } as TaskConfig<'ReturnCustomError'>, - ], - workflows: [ - updatePostWorkflow, - updatePostJSONWorkflow, - retriesTestWorkflow, - retriesRollbackTestWorkflow, - retriesWorkflowLevelTestWorkflow, - noRetriesSetWorkflow, - retries0Workflow, - workflowAndTasksRetriesUndefinedWorkflow, - 
workflowRetries2TasksRetriesUndefinedWorkflow,
-      workflowRetries2TasksRetries0Workflow,
-      inlineTaskTestWorkflow,
-      failsImmediatelyWorkflow,
-      inlineTaskTestDelayedWorkflow,
-      externalWorkflow,
-      retriesBackoffTestWorkflow,
-      subTaskWorkflow,
-      subTaskFailsWorkflow,
-      longRunningWorkflow,
-      parallelTaskWorkflow,
-    ],
-  },
-  editor: lexicalEditor(),
-  onInit: async (payload) => {
-    if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
-      await seed(payload)
-    }
-  },
-  typescript: {
-    outputFile: path.resolve(dirname, 'payload-types.ts'),
-  },
-})
+export default buildConfigWithDefaults(getConfig())
diff --git a/test/queues/getConfig.ts b/test/queues/getConfig.ts
new file mode 100644
index 0000000000..f1c8126e36
--- /dev/null
+++ b/test/queues/getConfig.ts
@@ -0,0 +1,176 @@
+import type { Config } from 'payload'
+
+import { lexicalEditor } from '@payloadcms/richtext-lexical'
+import { fileURLToPath } from 'node:url'
+import path from 'path'
+
+import { devUser } from '../credentials.js'
+import { seed } from './seed.js'
+import { CreateSimpleRetries0Task } from './tasks/CreateSimpleRetries0Task.js'
+import { CreateSimpleRetriesUndefinedTask } from './tasks/CreateSimpleRetriesUndefinedTask.js'
+import { CreateSimpleTask } from './tasks/CreateSimpleTask.js'
+import { CreateSimpleWithDuplicateMessageTask } from './tasks/CreateSimpleWithDuplicateMessageTask.js'
+import { ExternalTask } from './tasks/ExternalTask.js'
+import { ReturnCustomErrorTask } from './tasks/ReturnCustomErrorTask.js'
+import { ReturnErrorTask } from './tasks/ReturnErrorTask.js'
+import { ThrowErrorTask } from './tasks/ThrowErrorTask.js'
+import { UpdatePostStep2Task } from './tasks/UpdatePostStep2Task.js'
+import { UpdatePostTask } from './tasks/UpdatePostTask.js'
+import { externalWorkflow } from './workflows/externalWorkflow.js'
+import { failsImmediatelyWorkflow } from './workflows/failsImmediately.js'
+import { inlineTaskTestWorkflow } from './workflows/inlineTaskTest.js'
+import { inlineTaskTestDelayedWorkflow } from './workflows/inlineTaskTestDelayed.js'
+import { longRunningWorkflow } from './workflows/longRunning.js'
+import { noRetriesSetWorkflow } from './workflows/noRetriesSet.js'
+import { parallelTaskWorkflow } from './workflows/parallelTaskWorkflow.js'
+import { retries0Workflow } from './workflows/retries0.js'
+import { retriesBackoffTestWorkflow } from './workflows/retriesBackoffTest.js'
+import { retriesRollbackTestWorkflow } from './workflows/retriesRollbackTest.js'
+import { retriesTestWorkflow } from './workflows/retriesTest.js'
+import { retriesWorkflowLevelTestWorkflow } from './workflows/retriesWorkflowLevelTest.js'
+import { subTaskWorkflow } from './workflows/subTask.js'
+import { subTaskFailsWorkflow } from './workflows/subTaskFails.js'
+import { updatePostWorkflow } from './workflows/updatePost.js'
+import { updatePostJSONWorkflow } from './workflows/updatePostJSON.js'
+import { workflowAndTasksRetriesUndefinedWorkflow } from './workflows/workflowAndTasksRetriesUndefined.js'
+import { workflowRetries2TasksRetries0Workflow } from './workflows/workflowRetries2TasksRetries0.js'
+import { workflowRetries2TasksRetriesUndefinedWorkflow } from './workflows/workflowRetries2TasksRetriesUndefined.js'
+
+const dirname = path.dirname(fileURLToPath(import.meta.url))
+
+// Needs to be a function to prevent object reference issues due to duplicative configs
+export const getConfig: () => Partial<Config> = () => ({
+  collections: [
+    {
+      slug: 'posts',
+      admin: {
+        useAsTitle: 'title',
+      },
+      hooks: {
+        afterChange: [
async ({ req, doc, context }) => { + await req.payload.jobs.queue({ + workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost', + input: { + post: doc.id, + message: 'hello', + }, + req, + }) + }, + ], + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + { + name: 'content', + type: 'richText', + }, + { + name: 'jobStep1Ran', + type: 'text', + }, + { + name: 'jobStep2Ran', + type: 'text', + }, + ], + }, + { + slug: 'simple', + admin: { + useAsTitle: 'title', + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + }, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + autoLogin: { + prefillOnly: true, + email: devUser.email, + password: devUser.password, + }, + }, + jobs: { + autoRun: [ + { + silent: true, + // Every second + cron: '* * * * * *', + limit: 100, + queue: 'autorunSecond', + }, + // add as many cron jobs as you want + ], + shouldAutoRun: () => true, + jobsCollectionOverrides: ({ defaultJobsCollection }) => { + return { + ...defaultJobsCollection, + admin: { + ...(defaultJobsCollection?.admin || {}), + hidden: false, + }, + } + }, + processingOrder: { + queues: { + lifo: '-createdAt', + }, + }, + tasks: [ + UpdatePostTask, + UpdatePostStep2Task, + CreateSimpleTask, + CreateSimpleRetriesUndefinedTask, + CreateSimpleRetries0Task, + CreateSimpleWithDuplicateMessageTask, + ExternalTask, + ThrowErrorTask, + ReturnErrorTask, + ReturnCustomErrorTask, + ], + workflows: [ + updatePostWorkflow, + updatePostJSONWorkflow, + retriesTestWorkflow, + retriesRollbackTestWorkflow, + retriesWorkflowLevelTestWorkflow, + noRetriesSetWorkflow, + retries0Workflow, + workflowAndTasksRetriesUndefinedWorkflow, + workflowRetries2TasksRetriesUndefinedWorkflow, + workflowRetries2TasksRetries0Workflow, + inlineTaskTestWorkflow, + failsImmediatelyWorkflow, + inlineTaskTestDelayedWorkflow, + externalWorkflow, + retriesBackoffTestWorkflow, + subTaskWorkflow, + subTaskFailsWorkflow, + longRunningWorkflow, + parallelTaskWorkflow, + ], + }, + editor: lexicalEditor(), + onInit: async (payload) => { + if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { + await seed(payload) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/queues/int.spec.ts b/test/queues/int.spec.ts index 0bb23d4748..e5b55963b3 100644 --- a/test/queues/int.spec.ts +++ b/test/queues/int.spec.ts @@ -1,7 +1,13 @@ -import type { JobTaskStatus, Payload, SanitizedConfig } from 'payload' - import path from 'path' +import { + _internal_jobSystemGlobals, + _internal_resetJobSystemGlobals, + type JobTaskStatus, + type Payload, + type SanitizedConfig, +} from 'payload' import { migrateCLI } from 'payload' +import { wait } from 'payload/shared' import { fileURLToPath } from 'url' import type { NextRESTClient } from '../helpers/NextRESTClient.js' @@ -9,6 +15,7 @@ import type { NextRESTClient } from '../helpers/NextRESTClient.js' import { devUser } from '../credentials.js' import { initPayloadInt } from '../helpers/initPayloadInt.js' import { clearAndSeedEverything } from './seed.js' +import { waitUntilAutorunIsDone } from './utilities.js' let payload: Payload let restClient: NextRESTClient @@ -25,10 +32,25 @@ describe('Queues', () => { }) afterAll(async () => { + // Ensure no new crons are scheduled + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + // Wait 3 seconds to ensure all currently-running crons are done. 
If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() }) beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false await clearAndSeedEverything(payload) const data = await restClient .POST('/users/login', { @@ -43,10 +65,12 @@ describe('Queues', () => { token = data.token } payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true }) it('will run access control on jobs runner', async () => { - const response = await restClient.GET('/payload-jobs/run', { + const response = await restClient.GET('/payload-jobs/run?silent=true', { headers: { // Authorization: `JWT ${token}`, }, @@ -55,7 +79,7 @@ describe('Queues', () => { }) it('will return 200 from jobs runner', async () => { - const response = await restClient.GET('/payload-jobs/run', { + const response = await restClient.GET('/payload-jobs/run?silent=true', { headers: { Authorization: `JWT ${token}`, }, @@ -109,7 +133,7 @@ describe('Queues', () => { expect(retrievedPost.jobStep1Ran).toBeFalsy() expect(retrievedPost.jobStep2Ran).toBeFalsy() - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const postAfterJobs = await payload.findByID({ collection: 'posts', @@ -139,7 +163,7 @@ describe('Queues', () => { expect(retrievedPost.jobStep1Ran).toBeFalsy() expect(retrievedPost.jobStep2Ran).toBeFalsy() - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const postAfterJobs = await payload.findByID({ collection: 'posts', @@ -163,7 +187,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -198,7 +222,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -221,7 +245,7 @@ describe('Queues', () => { expect(jobAfterRun.input.amountRetried).toBe(2) }) - it('ensure workflows dont limit retries if no retries property is sett', async () => { + it('ensure workflows dont limit retries if no retries property is set', async () => { payload.config.jobs.deleteJobOnComplete = false const job = await payload.jobs.queue({ workflow: 'workflowNoRetriesSet', @@ -233,7 +257,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -268,7 +292,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -303,7 +327,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() 
+ const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -338,7 +362,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -373,7 +397,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -409,7 +433,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({silent: true}) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -453,7 +477,7 @@ describe('Queues', () => { !firstGotNoJobs || new Date().getTime() - firstGotNoJobs.getTime() < 3000 ) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { if (hasJobsRemaining) { @@ -537,6 +561,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, }) const allSimples = await payload.find({ @@ -569,6 +594,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, processingOrder: '-createdAt', }) @@ -604,6 +630,7 @@ describe('Queues', () => { await payload.jobs.run({ sequential: true, + silent: true, queue: 'lifo', }) @@ -626,7 +653,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -637,29 +664,6 @@ describe('Queues', () => { expect(allSimples.docs[0]?.title).toBe('hello!') }) - it('can create and autorun jobs', async () => { - await payload.jobs.queue({ - workflow: 'inlineTaskTest', - queue: 'autorunSecond', - input: { - message: 'hello!', - }, - }) - - // Do not call payload.jobs.run() - - // Autorun runs every second - so should definitely be done if we wait 2 seconds - await new Promise((resolve) => setTimeout(resolve, 2000)) - - const allSimples = await payload.find({ - collection: 'simple', - limit: 100, - }) - - expect(allSimples.totalDocs).toBe(1) - expect(allSimples?.docs?.[0]?.title).toBe('hello!') - }) - it('should respect deleteJobOnComplete true default configuration', async () => { const { id } = await payload.jobs.queue({ workflow: 'inlineTaskTest', @@ -671,7 +675,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after).toBeNull() @@ -686,7 +690,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after?.id).toBe(id) @@ -704,7 +708,7 @@ describe('Queues', () => { const before = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(before?.id).toBe(id) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const 
after = await payload.findByID({ collection: 'payload-jobs', id, disableErrors: true }) expect(after?.id).toBe(id) @@ -718,7 +722,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -739,7 +743,7 @@ describe('Queues', () => { }, }) - await restClient.GET('/payload-jobs/run', { + await restClient.GET('/payload-jobs/run?silent=true', { headers: { Authorization: `JWT ${token}`, }, @@ -877,7 +881,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -903,6 +907,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, limit: numberOfTasks, }) @@ -926,7 +931,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -950,6 +955,7 @@ describe('Queues', () => { await payload.jobs.run({ limit: 42, + silent: true, }) const allSimples = await payload.find({ @@ -985,7 +991,7 @@ describe('Queues', () => { }) } - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1017,7 +1023,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1036,7 +1042,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1066,6 +1072,7 @@ describe('Queues', () => { await payload.jobs.runByID({ id: lastJobID, + silent: true, }) const allSimples = await payload.find({ @@ -1108,6 +1115,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, where: { id: { equals: lastJobID, @@ -1150,6 +1158,7 @@ describe('Queues', () => { } await payload.jobs.run({ + silent: true, where: { 'input.message': { equals: 'from single task 2', @@ -1188,7 +1197,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const allSimples = await payload.find({ collection: 'simple', @@ -1229,7 +1238,7 @@ describe('Queues', () => { let hasJobsRemaining = true while (hasJobsRemaining) { - const response = await payload.jobs.run() + const response = await payload.jobs.run({ silent: true }) if (response.noJobsRemaining) { hasJobsRemaining = false @@ -1262,7 +1271,7 @@ describe('Queues', () => { workflow: 'longRunning', input: {}, }) - void payload.jobs.run().catch((_ignored) => {}) + void payload.jobs.run({ silent: true }).catch((_ignored) => {}) await new Promise((resolve) => setTimeout(resolve, 1000)) // Should be in processing - cancel job @@ -1296,7 +1305,7 @@ describe('Queues', () => { workflow: 'longRunning', input: {}, }) - void payload.jobs.run().catch((_ignored) => {}) + void payload.jobs.run({ silent: true }).catch((_ignored) => {}) await new Promise((resolve) => setTimeout(resolve, 1000)) // Cancel all jobs @@ -1335,7 +1344,7 @@ describe('Queues', () => { input: {}, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1356,7 +1365,7 @@ describe('Queues', () => { input: {}, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 
'payload-jobs', @@ -1379,7 +1388,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1408,7 +1417,7 @@ describe('Queues', () => { }, }) - await payload.jobs.run() + await payload.jobs.run({ silent: true }) const jobAfterRun = await payload.findByID({ collection: 'payload-jobs', @@ -1434,6 +1443,29 @@ describe('Queues', () => { expect((logEntry?.output as any)?.simpleID).toBe(simpleDoc?.id) } }) + + it('can create and autorun jobs', async () => { + await payload.jobs.queue({ + workflow: 'inlineTaskTest', + queue: 'autorunSecond', + input: { + message: 'hello!', + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('hello!') + }) }) describe('Queues - CLI', () => { diff --git a/test/queues/payload-types.ts b/test/queues/payload-types.ts index 54945be89c..19e3d8782a 100644 --- a/test/queues/payload-types.ts +++ b/test/queues/payload-types.ts @@ -88,14 +88,20 @@ export interface Config { db: { defaultIDType: string; }; - globals: {}; - globalsSelect: {}; + globals: { + 'payload-jobs-stats': PayloadJobsStat; + }; + globalsSelect: { + 'payload-jobs-stats': PayloadJobsStatsSelect | PayloadJobsStatsSelect; + }; locale: null; user: User & { collection: 'users'; }; jobs: { tasks: { + EverySecond: TaskEverySecond; + EverySecondMax2: TaskEverySecondMax2; UpdatePost: MyUpdatePostType; UpdatePostStep2: TaskUpdatePostStep2; CreateSimple: TaskCreateSimple; @@ -260,6 +266,8 @@ export interface PayloadJob { completedAt: string; taskSlug: | 'inline' + | 'EverySecond' + | 'EverySecondMax2' | 'UpdatePost' | 'UpdatePostStep2' | 'CreateSimple' @@ -328,6 +336,8 @@ export interface PayloadJob { taskSlug?: | ( | 'inline' + | 'EverySecond' + | 'EverySecondMax2' | 'UpdatePost' | 'UpdatePostStep2' | 'CreateSimple' @@ -343,6 +353,15 @@ export interface PayloadJob { queue?: string | null; waitUntil?: string | null; processing?: boolean | null; + meta?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; updatedAt: string; createdAt: string; } @@ -476,6 +495,7 @@ export interface PayloadJobsSelect { queue?: T; waitUntil?: T; processing?: T; + meta?: T; updatedAt?: T; createdAt?: T; } @@ -511,6 +531,54 @@ export interface PayloadMigrationsSelect { updatedAt?: T; createdAt?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-jobs-stats". + */ +export interface PayloadJobsStat { + id: string; + stats?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + updatedAt?: string | null; + createdAt?: string | null; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-jobs-stats_select". + */ +export interface PayloadJobsStatsSelect { + stats?: T; + updatedAt?: T; + createdAt?: T; + globalType?: T; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskEverySecond". + */ +export interface TaskEverySecond { + input: { + message: string; + }; + output?: unknown; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskEverySecondMax2". 
+ */ +export interface TaskEverySecondMax2 { + input: { + message: string; + }; + output?: unknown; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "MyUpdatePostType". diff --git a/test/queues/schedules-autocron.int.spec.ts b/test/queues/schedules-autocron.int.spec.ts new file mode 100644 index 0000000000..1e9773fe11 --- /dev/null +++ b/test/queues/schedules-autocron.int.spec.ts @@ -0,0 +1,105 @@ +import path from 'path' +import { _internal_jobSystemGlobals, _internal_resetJobSystemGlobals, type Payload } from 'payload' +import { wait } from 'payload/shared' +import { fileURLToPath } from 'url' + +import type { NextRESTClient } from '../helpers/NextRESTClient.js' + +import { devUser } from '../credentials.js' +import { initPayloadInt } from '../helpers/initPayloadInt.js' +import { clearAndSeedEverything } from './seed.js' + +let payload: Payload +let restClient: NextRESTClient +let token: string + +const { email, password } = devUser +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +describe('Queues - scheduling, with automatic scheduling handling', () => { + beforeAll(async () => { + process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit + ;({ payload, restClient } = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.schedules-autocron.ts', + )) + }) + + afterAll(async () => { + // Ensure no new crons are scheduled + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + // Wait 3 seconds to ensure all currently-running crons are done. If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance + await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() + }) + + beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + + await clearAndSeedEverything(payload) + const data = await restClient + .POST('/users/login', { + body: JSON.stringify({ + email, + password, + }), + }) + .then((res) => res.json()) + + if (data.token) { + token = data.token + } + payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true + }) + + it('can auto-schedule through automatic crons and autorun jobs', async () => { + // Do not call payload.jobs.run() or payload.jobs.handleSchedules() - payload should automatically schedule crons for auto-scheduling + + // Autorun and Autoschedule runs every second - so should have autorun at least twice after 3.5 seconds. 
Case with the fewest jobs completed,
+    // if autoschedule runs after the first autorun:
+    // Second 1: Autorun runs => no jobs
+    // Second 1: Autoschedule runs => schedules 1 job
+    // Second 2: Autorun runs => runs 1 job => 1
+    // Second 2: Autoschedule runs => schedules 1 job
+    // Second 3: Autorun runs => runs 1 job => 2
+    // Second 3: Autoschedule runs => schedules 1 job
+    // Status after 3.5 seconds: 2 jobs running, 1 job scheduled
+
+    // Best case - most jobs completed:
+    // Second 1: Autoschedule runs => schedules 1 job
+    // Second 1: Autorun runs => runs 1 job => 1
+    // Second 2: Autoschedule runs => schedules 1 job
+    // Second 2: Autorun runs => runs 1 job => 2
+    // Second 3: Autoschedule runs => schedules 1 job
+    // Second 3: Autorun runs => runs 1 job => 3
+    // Status after 3.5 seconds: 3 jobs running, no jobs scheduled
+    const minJobsCompleted = 2
+    const maxJobsCompleted = 3
+
+    await new Promise((resolve) => setTimeout(resolve, 3500)) // 3 seconds + 0.5 seconds to ensure the last job has been completed
+
+    const allSimples = await payload.find({
+      collection: 'simple',
+      limit: 100,
+    })
+
+    expect(allSimples.totalDocs).toBeGreaterThanOrEqual(minJobsCompleted)
+    expect(allSimples.totalDocs).toBeLessThanOrEqual(maxJobsCompleted)
+    expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second')
+  })
+})
diff --git a/test/queues/schedules.int.spec.ts b/test/queues/schedules.int.spec.ts
new file mode 100644
index 0000000000..f4fa8a3ed8
--- /dev/null
+++ b/test/queues/schedules.int.spec.ts
@@ -0,0 +1,341 @@
+import path from 'path'
+import { _internal_jobSystemGlobals, _internal_resetJobSystemGlobals, type Payload } from 'payload'
+import { wait } from 'payload/shared'
+import { fileURLToPath } from 'url'
+
+import type { NextRESTClient } from '../helpers/NextRESTClient.js'
+
+import { devUser } from '../credentials.js'
+import { initPayloadInt } from '../helpers/initPayloadInt.js'
+import { clearAndSeedEverything } from './seed.js'
+import { timeFreeze, timeTravel, waitUntilAutorunIsDone, withoutAutoRun } from './utilities.js'
+
+let payload: Payload
+let restClient: NextRESTClient
+let token: string
+
+const { email, password } = devUser
+const filename = fileURLToPath(import.meta.url)
+const dirname = path.dirname(filename)
+
+describe('Queues - scheduling, without automatic scheduling handling', () => {
+  beforeAll(async () => {
+    process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
+    ;({ payload, restClient } = await initPayloadInt(
+      dirname,
+      undefined,
+      undefined,
+      'config.schedules.ts',
+    ))
+  })
+
+  afterAll(async () => {
+    // Ensure no new crons are scheduled
+    _internal_jobSystemGlobals.shouldAutoRun = false
+    _internal_jobSystemGlobals.shouldAutoSchedule = false
+    // Wait 3 seconds to ensure all currently-running crons are done.
If we shut down the db while a function is running, it can cause issues + // Cron function runs may persist after a test has finished + await wait(3000) + // Now we can destroy the payload instance + await payload.destroy() + _internal_resetJobSystemGlobals() + }) + + afterEach(() => { + _internal_resetJobSystemGlobals() + }) + + beforeEach(async () => { + // Set autorun to false during seed process to ensure no crons are scheduled, which may affect the tests + _internal_jobSystemGlobals.shouldAutoRun = false + _internal_jobSystemGlobals.shouldAutoSchedule = false + await clearAndSeedEverything(payload) + const data = await restClient + .POST('/users/login', { + body: JSON.stringify({ + email, + password, + }), + }) + .then((res) => res.json()) + + if (data.token) { + token = data.token + } + payload.config.jobs.deleteJobOnComplete = true + _internal_jobSystemGlobals.shouldAutoRun = true + _internal_jobSystemGlobals.shouldAutoSchedule = true + }) + + it('can auto-schedule through local API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await payload.jobs.handleSchedules() + + // Do not call payload.jobs.run{silent: true}) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('can auto-schedule through handleSchedules REST API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/handle-schedules', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + // Do not call payload.jobs.run({silent: true}) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('can auto-schedule through run REST API and autorun jobs', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/run?silent=true', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('do not auto-schedule through run REST API when passing disableScheduling=true', async () => { + // Do not call payload.jobs.queue() - the `EverySecond` task should be scheduled here + await restClient.GET('/payload-jobs/run?silent=true&disableScheduling=true', { + headers: { + Authorization: `JWT ${token}`, + }, + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(0) + }) + + it('ensure scheduler does not schedule more jobs than needed if executed sequentially', async () => { + await withoutAutoRun(async () => { + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules() + } + }) + 
+ await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure scheduler max-one-job condition, by default, ignores jobs not scheduled by scheduler', async () => { + await withoutAutoRun(async () => { + for (let i = 0; i < 2; i++) { + await payload.jobs.queue({ + task: 'EverySecond', + queue: 'autorunSecond', + input: { + message: 'This task runs every second', + }, + }) + } + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules() + } + }) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(3) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure scheduler max-one-job condition, respects jobs not scheduled by scheduler due to task setting onlyScheduled: false', async () => { + timeFreeze() + await withoutAutoRun(async () => { + for (let i = 0; i < 2; i++) { + await payload.jobs.queue({ + task: 'EverySecondMax2', + input: { + message: 'This task runs every second - max 2 per second', + }, + }) + } + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules({ queue: 'default' }) + } + }) + + timeTravel(20) // Advance time to satisfy the waitUntil of newly scheduled jobs + + await payload.jobs.run({ + limit: 100, + silent: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(2) // Would be 4 by default, if only scheduled jobs were respected in handleSchedules condition + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('ensure scheduler does not schedule more jobs than needed if executed sequentially - max. 2 jobs configured', async () => { + timeFreeze() + for (let i = 0; i < 3; i++) { + await payload.jobs.handleSchedules({ queue: 'default' }) + } + + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + // default queue is not scheduled to autorun + await payload.jobs.run({ + silent: true, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(2) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('ensure job is scheduled every second', async () => { + timeFreeze() + for (let i = 0; i < 3; i++) { + await withoutAutoRun(async () => { + // Call it twice to test that it only schedules one + await payload.jobs.handleSchedules() + await payload.jobs.handleSchedules() + }) + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + await waitUntilAutorunIsDone({ + payload, + queue: 'autorunSecond', + onlyScheduled: true, + }) + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(3) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second') + }) + + it('ensure job is scheduled every second - max. 
2 jobs configured', async () => { + timeFreeze() + + for (let i = 0; i < 3; i++) { + await withoutAutoRun(async () => { + // Call it 3x to test that it only schedules two + await payload.jobs.handleSchedules({ queue: 'default' }) + await payload.jobs.handleSchedules({ queue: 'default' }) + await payload.jobs.handleSchedules({ queue: 'default' }) + }) + + // Advance time to satisfy the waitUntil of newly scheduled jobs + timeTravel(20) + + // default queue is not scheduled to autorun => run manually + await payload.jobs.run({ + silent: true, + }) + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + where: { + title: { + equals: 'This task runs every second - max 2 per second', + }, + }, + }) + + expect(allSimples.totalDocs).toBe(6) + expect(allSimples?.docs?.[0]?.title).toBe('This task runs every second - max 2 per second') + }) + + it('should not auto-schedule through automatic crons if scheduler set to manual', async () => { + // Autorun runs every second - so should definitely be done if we wait 2 seconds + await new Promise((resolve) => setTimeout(resolve, 2000)) // Should not flake, as we are expecting nothing to happen + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(0) + }) +}) diff --git a/test/queues/tasks/CreateSimpleRetries0Task.ts b/test/queues/tasks/CreateSimpleRetries0Task.ts new file mode 100644 index 0000000000..cc85b26cf6 --- /dev/null +++ b/test/queues/tasks/CreateSimpleRetries0Task.ts @@ -0,0 +1,41 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleRetries0Task: TaskConfig<'CreateSimpleRetries0'> = { + slug: 'CreateSimpleRetries0', + retries: 0, + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts b/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts new file mode 100644 index 0000000000..267150005c --- /dev/null +++ b/test/queues/tasks/CreateSimpleRetriesUndefinedTask.ts @@ -0,0 +1,40 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleRetriesUndefinedTask: TaskConfig<'CreateSimpleRetriesUndefined'> = { + slug: 'CreateSimpleRetriesUndefined', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleTask.ts b/test/queues/tasks/CreateSimpleTask.ts new file mode 100644 index 0000000000..8279aa1fdd --- /dev/null +++ b/test/queues/tasks/CreateSimpleTask.ts @@ -0,0 +1,41 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleTask: TaskConfig<'CreateSimple'> = { + retries: 3, + slug: 
'CreateSimple', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, +} diff --git a/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts b/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts new file mode 100644 index 0000000000..887a9060fe --- /dev/null +++ b/test/queues/tasks/CreateSimpleWithDuplicateMessageTask.ts @@ -0,0 +1,42 @@ +import type { TaskConfig } from 'payload' + +export const CreateSimpleWithDuplicateMessageTask: TaskConfig<'CreateSimpleWithDuplicateMessage'> = + { + retries: 2, + slug: 'CreateSimpleWithDuplicateMessage', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message + input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + } diff --git a/test/queues/tasks/EverySecondMax2Task.ts b/test/queues/tasks/EverySecondMax2Task.ts new file mode 100644 index 0000000000..b307f5dfd7 --- /dev/null +++ b/test/queues/tasks/EverySecondMax2Task.ts @@ -0,0 +1,67 @@ +import { + countRunnableOrActiveJobsForQueue, + type TaskConfig, + type TaskType, + type WorkflowTypes, +} from 'payload' + +export const EverySecondMax2Task: TaskConfig<'EverySecondMax2'> = { + schedule: [ + { + cron: '* * * * * *', + queue: 'default', + hooks: { + beforeSchedule: async ({ queueable, req }) => { + const runnableOrActiveJobsForQueue = await countRunnableOrActiveJobsForQueue({ + queue: queueable.scheduleConfig.queue, + req, + taskSlug: queueable.taskConfig?.slug as TaskType, + workflowSlug: queueable.workflowConfig?.slug as WorkflowTypes, + onlyScheduled: false, // Set to false, used to test it + }) + + return { + input: { + message: 'This task runs every second - max 2 per second', + }, + shouldSchedule: runnableOrActiveJobsForQueue <= 1, + waitUntil: queueable.waitUntil, + } + }, + afterSchedule: async (args) => { + await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global + args.req.payload.logger.info( + 'EverySecondMax2 task scheduled: ' + + (args.status === 'success' + ? String(args.job.id) + : args.status === 'skipped' + ? 
'skipped' + : 'error'), + ) + }, + }, + }, + ], + slug: 'EverySecondMax2', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + req.payload.logger.info(input.message) + + await req.payload.create({ + collection: 'simple', + data: { + title: input.message, + }, + req, + }) + return { + output: {}, + } + }, +} diff --git a/test/queues/tasks/EverySecondTask.ts b/test/queues/tasks/EverySecondTask.ts new file mode 100644 index 0000000000..d79a3ee963 --- /dev/null +++ b/test/queues/tasks/EverySecondTask.ts @@ -0,0 +1,54 @@ +import type { TaskConfig } from 'payload' + +export const EverySecondTask: TaskConfig<'EverySecond'> = { + schedule: [ + { + cron: '* * * * * *', + queue: 'autorunSecond', + hooks: { + beforeSchedule: async (args) => { + const result = await args.defaultBeforeSchedule(args) // Handles verifying that there are no jobs already scheduled or processing + return { + ...result, + input: { + message: 'This task runs every second', + }, + } + }, + afterSchedule: async (args) => { + await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global + args.req.payload.logger.info( + 'EverySecond task scheduled: ' + + (args.status === 'success' + ? String(args.job.id) + : args.status === 'skipped' + ? 'skipped' + : 'error'), + ) + }, + }, + }, + ], + slug: 'EverySecond', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + req.payload.logger.info(input.message) + + await req.payload.create({ + collection: 'simple', + data: { + title: input.message, + }, + req, + }) + return { + output: {}, + } + }, +} diff --git a/test/queues/tasks/ExternalTask.ts b/test/queues/tasks/ExternalTask.ts new file mode 100644 index 0000000000..9eda7dc0dd --- /dev/null +++ b/test/queues/tasks/ExternalTask.ts @@ -0,0 +1,26 @@ +import type { TaskConfig } from 'payload' + +import path from 'path' +import { fileURLToPath } from 'url' + +const dirname = path.dirname(fileURLToPath(import.meta.url)) + +export const ExternalTask: TaskConfig<'ExternalTask'> = { + retries: 2, + slug: 'ExternalTask', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: path.resolve(dirname, '../runners/externalTask.ts') + '#externalTaskHandler', +} diff --git a/test/queues/tasks/ReturnCustomErrorTask.ts b/test/queues/tasks/ReturnCustomErrorTask.ts new file mode 100644 index 0000000000..0c7253d26f --- /dev/null +++ b/test/queues/tasks/ReturnCustomErrorTask.ts @@ -0,0 +1,20 @@ +import type { TaskConfig } from 'payload' + +export const ReturnCustomErrorTask: TaskConfig<'ReturnCustomError'> = { + retries: 0, + slug: 'ReturnCustomError', + inputSchema: [ + { + name: 'errorMessage', + type: 'text', + required: true, + }, + ], + outputSchema: [], + handler: ({ input }) => { + return { + state: 'failed', + errorMessage: input.errorMessage, + } + }, +} diff --git a/test/queues/tasks/ReturnErrorTask.ts b/test/queues/tasks/ReturnErrorTask.ts new file mode 100644 index 0000000000..661551ddd4 --- /dev/null +++ b/test/queues/tasks/ReturnErrorTask.ts @@ -0,0 +1,13 @@ +import type { TaskConfig } from 'payload' + +export const ReturnErrorTask: TaskConfig<'ReturnError'> = { + retries: 0, + slug: 'ReturnError', + inputSchema: [], + outputSchema: [], + handler: () => { + return { + state: 'failed', + } + }, +} diff --git a/test/queues/tasks/ThrowErrorTask.ts 
b/test/queues/tasks/ThrowErrorTask.ts new file mode 100644 index 0000000000..fa6f9ea303 --- /dev/null +++ b/test/queues/tasks/ThrowErrorTask.ts @@ -0,0 +1,11 @@ +import type { TaskConfig } from 'payload' + +export const ThrowErrorTask: TaskConfig<'ThrowError'> = { + retries: 0, + slug: 'ThrowError', + inputSchema: [], + outputSchema: [], + handler: () => { + throw new Error('failed') + }, +} diff --git a/test/queues/tasks/UpdatePostStep2Task.ts b/test/queues/tasks/UpdatePostStep2Task.ts new file mode 100644 index 0000000000..de7b310459 --- /dev/null +++ b/test/queues/tasks/UpdatePostStep2Task.ts @@ -0,0 +1,23 @@ +import type { TaskConfig } from 'payload' + +import { updatePostStep2 } from '../runners/updatePost.js' + +export const UpdatePostStep2Task: TaskConfig<'UpdatePostStep2'> = { + retries: 2, + slug: 'UpdatePostStep2', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep2, +} diff --git a/test/queues/tasks/UpdatePostTask.ts b/test/queues/tasks/UpdatePostTask.ts new file mode 100644 index 0000000000..b8cdfd52a4 --- /dev/null +++ b/test/queues/tasks/UpdatePostTask.ts @@ -0,0 +1,31 @@ +import type { TaskConfig } from 'payload' + +import { updatePostStep1 } from '../runners/updatePost.js' + +export const UpdatePostTask: TaskConfig<'UpdatePost'> = { + retries: 2, + slug: 'UpdatePost', + interfaceName: 'MyUpdatePostType', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep1, +} diff --git a/test/queues/utilities.ts b/test/queues/utilities.ts new file mode 100644 index 0000000000..bda1e0ac83 --- /dev/null +++ b/test/queues/utilities.ts @@ -0,0 +1,62 @@ +import { + _internal_jobSystemGlobals, + countRunnableOrActiveJobsForQueue, + createLocalReq, + type Payload, +} from 'payload' + +export async function waitUntilAutorunIsDone({ + payload, + queue, + onlyScheduled = false, +}: { + onlyScheduled?: boolean + payload: Payload + queue: string +}): Promise { + const req = await createLocalReq({}, payload) + + return new Promise((resolve) => { + const interval = setInterval(async () => { + const count = await countRunnableOrActiveJobsForQueue({ + queue, + req, + onlyScheduled, + }) + if (count === 0) { + clearInterval(interval) + resolve() + } + }, 200) + }) +} + +export function timeFreeze() { + const curDate = new Date() + _internal_jobSystemGlobals.getCurrentDate = () => curDate +} + +export function timeTravel(seconds: number) { + const curDate = _internal_jobSystemGlobals.getCurrentDate() + _internal_jobSystemGlobals.getCurrentDate = () => new Date(curDate.getTime() + seconds * 1000) +} + +export async function withoutAutoRun(fn: () => Promise): Promise { + const originalValue = _internal_jobSystemGlobals.shouldAutoRun + _internal_jobSystemGlobals.shouldAutoRun = false + try { + return await fn() + } finally { + _internal_jobSystemGlobals.shouldAutoRun = originalValue + } +} + +export async function withoutAutoSchedule(fn: () => Promise): Promise { + const originalValue = _internal_jobSystemGlobals.shouldAutoSchedule + _internal_jobSystemGlobals.shouldAutoSchedule = false + try { + return await fn() + } finally { + _internal_jobSystemGlobals.shouldAutoSchedule = originalValue + } 
+} diff --git a/test/runInit.ts b/test/runInit.ts index 46b38b2872..3ff6c7b738 100644 --- a/test/runInit.ts +++ b/test/runInit.ts @@ -4,6 +4,7 @@ export async function runInit( testSuiteArg: string, writeDBAdapter: boolean, skipGenImportMap: boolean = false, + configFile?: string, ): Promise { - await initDevAndTest(testSuiteArg, String(writeDBAdapter), String(skipGenImportMap)) + await initDevAndTest(testSuiteArg, String(writeDBAdapter), String(skipGenImportMap), configFile) } From 46d8a26b0d57d5ecf17c69a1ee8d5a4491d6da7e Mon Sep 17 00:00:00 2001 From: iamacup Date: Fri, 18 Jul 2025 13:34:54 +0100 Subject: [PATCH 051/143] fix: handle undefined values in afterChange hooks when read:false and create:true on the field level access for parents and siblings (#12664) ### What? Fixes a bug where `afterChange` hooks would attempt to access values for fields that are `read: false` but `create: true`, resulting in `undefined` values and unexpected behavior. ### Why? In scenarios where access control allows field creation (`create: true`) but disallows reading it (`read: false`), hooks like `afterChange` would still attempt to operate on `undefined` values from `siblingDoc` or `previousDoc`, potentially causing errors or skipped logic. ### How? Adds safe optional chaining and fallback object initialization in `promise.ts` for: - `previousDoc[field.name]` - `siblingDoc[field.name]` - Group, Array, and Block field traversals This ensures that these values are treated as empty objects or arrays where appropriate to prevent runtime errors during traversal or hook execution. Fixes https://github.com/payloadcms/payload/issues/12660 --------- Co-authored-by: Niall Bambury --- .../src/fields/hooks/afterChange/promise.ts | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/payload/src/fields/hooks/afterChange/promise.ts b/packages/payload/src/fields/hooks/afterChange/promise.ts index 23010c6116..ecbc930870 100644 --- a/packages/payload/src/fields/hooks/afterChange/promise.ts +++ b/packages/payload/src/fields/hooks/afterChange/promise.ts @@ -88,12 +88,12 @@ export const promise = async ({ path: pathSegments, previousDoc, previousSiblingDoc, - previousValue: previousDoc[field.name!], + previousValue: previousDoc?.[field.name!], req, schemaPath: schemaPathSegments, siblingData, siblingFields: siblingFields!, - value: siblingDoc[field.name!], + value: siblingDoc?.[field.name!], }) if (hookedValue !== undefined) { @@ -226,10 +226,10 @@ export const promise = async ({ parentPath: path, parentSchemaPath: schemaPath, previousDoc, - previousSiblingDoc: previousDoc[field.name] as JsonObject, + previousSiblingDoc: (previousDoc?.[field.name] as JsonObject) || {}, req, siblingData: (siblingData?.[field.name] as JsonObject) || {}, - siblingDoc: siblingDoc[field.name] as JsonObject, + siblingDoc: (siblingDoc?.[field.name] as JsonObject) || {}, }) } else { await traverseFields({ @@ -282,11 +282,11 @@ export const promise = async ({ path: pathSegments, previousDoc, previousSiblingDoc, - previousValue: previousDoc[field.name], + previousValue: previousDoc?.[field.name], req, schemaPath: schemaPathSegments, siblingData, - value: siblingDoc[field.name], + value: siblingDoc?.[field.name], }) if (hookedValue !== undefined) { @@ -305,9 +305,9 @@ export const promise = async ({ const isNamedTab = tabHasName(field) if (isNamedTab) { - tabSiblingData = (siblingData[field.name] as JsonObject) ?? {} - tabSiblingDoc = (siblingDoc[field.name] as JsonObject) ?? 
{}
-      tabPreviousSiblingDoc = (previousDoc[field.name] as JsonObject) ?? {}
+      tabSiblingData = (siblingData?.[field.name] ?? {}) as JsonObject
+      tabSiblingDoc = (siblingDoc?.[field.name] ?? {}) as JsonObject
+      tabPreviousSiblingDoc = (previousDoc?.[field.name] ?? {}) as JsonObject
     }
 
     await traverseFields({

From d7a3faa4e9d74a953d12316c835cd295e46028d1 Mon Sep 17 00:00:00 2001
From: Jacob Fletcher
Date: Fri, 18 Jul 2025 09:29:26 -0400
Subject: [PATCH 052/143] fix(ui): properly sync search params to user preferences (#13200)

Some search params within the list view do not properly sync to user preferences, and vice versa. For example, when selecting a query preset, the `?preset=123` param is injected into the URL and saved to preferences, but when reloading the page without the param, that preset is not reactivated as expected.

### Problem

The reason this wasn't working before is that omitting this param would also reset prefs. It was designed this way in order to support client-side resets, e.g. clicking the query presets "x" button. This pattern would never work, however, because this means that every time the user navigates to the list view directly, their preference is cleared, as no param would exist in the query.

Note: this is not an issue with _all_ params, as not all are handled in the same way.

### Solution

The fix is to use empty values instead, e.g. `?preset=`. When the server receives this, it knows to clear the pref. If it doesn't exist at all, it knows to load from prefs. And if it has a value, it saves to prefs. On the client, we sanitize those empty values back out so they don't appear in the URL in the end.

This PR also refactors much of the list query context and its respective provider to be significantly more predictable and easier to work with, namely:

- The `ListQuery` type now fully aligns with what Payload APIs expect, e.g.
`page` is a number, not a string - The provider now receives a single `query` prop which matches the underlying context 1:1 - Propagating the query from the server to the URL is significantly more predictable - Any new props that may be supported in the future will automatically work - No more reconciling `columns` and `listPreferences.columns`, its just `query.columns` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210827129744922 --- packages/next/src/views/List/index.tsx | 81 ++++++------- packages/next/src/views/Versions/index.tsx | 6 +- packages/payload/src/admin/functions/index.ts | 4 +- .../utilities/transformColumnPreferences.ts | 4 +- .../src/utilities/validateWhereQuery.ts | 7 +- .../elements/ListControls/useQueryPresets.tsx | 8 +- .../src/elements/RelationshipTable/index.tsx | 17 ++- packages/ui/src/providers/ListQuery/index.tsx | 107 ++++++---------- .../ui/src/providers/ListQuery/mergeQuery.ts | 36 ++++++ .../src/providers/ListQuery/sanitizeQuery.ts | 38 ++++++ packages/ui/src/providers/ListQuery/types.ts | 6 +- .../TableColumns/buildColumnState/index.tsx | 12 +- .../buildColumnState/isColumnActive.ts | 10 +- packages/ui/src/utilities/buildTableState.ts | 5 +- packages/ui/src/utilities/renderTable.tsx | 4 - .../ui/src/utilities/upsertPreferences.ts | 39 +++--- test/eslint.config.js | 1 + test/fields/payload-types.ts | 19 +++ test/query-presets/e2e.spec.ts | 43 +++++-- test/query-presets/helpers/assertURLParams.ts | 10 +- test/query-presets/payload-types.ts | 14 +++ tsconfig.base.json | 114 +++++++++++++----- 22 files changed, 370 insertions(+), 215 deletions(-) create mode 100644 packages/ui/src/providers/ListQuery/mergeQuery.ts create mode 100644 packages/ui/src/providers/ListQuery/sanitizeQuery.ts diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index a2af698ea1..1a019e48a4 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -2,13 +2,11 @@ import type { AdminViewServerProps, CollectionPreferences, ColumnPreference, - DefaultDocumentIDType, ListQuery, ListViewClientProps, ListViewServerPropsOnly, QueryPreset, SanitizedCollectionPermission, - Where, } from 'payload' import { DefaultListView, HydrateAuthProvider, ListQueryProvider } from '@payloadcms/ui' @@ -20,6 +18,7 @@ import { isNumber, mergeListSearchAndWhere, transformColumnsToPreferences, + transformColumnsToSearchParams, } from 'payload/shared' import React, { Fragment } from 'react' @@ -87,28 +86,33 @@ export const renderListView = async ( throw new Error('not-found') } - const query = queryFromArgs || queryFromReq + const query: ListQuery = queryFromArgs || queryFromReq - const columns: ColumnPreference[] = transformColumnsToPreferences( - query?.columns as ColumnPreference[] | string, - ) + const columnsFromQuery: ColumnPreference[] = transformColumnsToPreferences(query?.columns) - /** - * @todo: find a pattern to avoid setting preferences on hard navigation, i.e. direct links, page refresh, etc. - * This will ensure that prefs are only updated when explicitly set by the user - * This could potentially be done by injecting a `sessionID` into the params and comparing it against a session cookie - */ const collectionPreferences = await upsertPreferences({ key: `collection-${collectionSlug}`, req, value: { - columns, + columns: columnsFromQuery, limit: isNumber(query?.limit) ? 
Number(query.limit) : undefined, - preset: (query?.preset as DefaultDocumentIDType) || null, + preset: query?.preset, sort: query?.sort as string, }, }) + query.preset = collectionPreferences?.preset + + query.page = isNumber(query?.page) ? Number(query.page) : 0 + + query.limit = collectionPreferences?.limit || collectionConfig.admin.pagination.defaultLimit + + query.sort = + collectionPreferences?.sort || + (typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : undefined) + + query.columns = transformColumnsToSearchParams(collectionPreferences?.columns || []) + const { routes: { admin: adminRoute }, } = config @@ -118,35 +122,27 @@ export const renderListView = async ( throw new Error('not-found') } - const page = isNumber(query?.page) ? Number(query.page) : 0 - - const limit = collectionPreferences?.limit || collectionConfig.admin.pagination.defaultLimit - - const sort = - collectionPreferences?.sort || - (typeof collectionConfig.defaultSort === 'string' ? collectionConfig.defaultSort : undefined) - - let where = mergeListSearchAndWhere({ - collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: (query?.where as Where) || undefined, - }) - if (typeof collectionConfig.admin?.baseListFilter === 'function') { const baseListFilter = await collectionConfig.admin.baseListFilter({ - limit, - page, + limit: query.limit, + page: query.page, req, - sort, + sort: query.sort, }) if (baseListFilter) { - where = { - and: [where, baseListFilter].filter(Boolean), + query.where = { + and: [query.where, baseListFilter].filter(Boolean), } } } + const whereWithMergedSearch = mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: query?.where, + }) + let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined @@ -179,14 +175,14 @@ export const renderListView = async ( draft: true, fallbackLocale: false, includeLockStatus: true, - limit, + limit: query.limit, locale, overrideAccess: false, - page, + page: query.page, req, - sort, + sort: query.sort, user, - where: where || {}, + where: whereWithMergedSearch, }) const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) @@ -194,8 +190,7 @@ export const renderListView = async ( const { columnState, Table } = renderTable({ clientCollectionConfig, collectionConfig, - columnPreferences: collectionPreferences?.columns, - columns, + columns: collectionPreferences?.columns, customCellProps, docs: data.docs, drawerSlug, @@ -232,7 +227,7 @@ export const renderListView = async ( collectionConfig, data, i18n, - limit, + limit: query.limit, listPreferences: collectionPreferences, listSearchableFields: collectionConfig.admin.listSearchableFields, locale: fullLocale, @@ -258,19 +253,19 @@ export const renderListView = async ( const isInDrawer = Boolean(drawerSlug) + // Needed to prevent: Only plain objects can be passed to Client Components from Server Components. Objects with toJSON methods are not supported. Convert it manually to a simple value before passing it to props. + query.where = query?.where ? 
JSON.parse(JSON.stringify(query?.where || {})) : undefined + return { List: ( {RenderServerComponent({ clientProps: { diff --git a/packages/next/src/views/Versions/index.tsx b/packages/next/src/views/Versions/index.tsx index c4a564cf44..65eb60913c 100644 --- a/packages/next/src/views/Versions/index.tsx +++ b/packages/next/src/views/Versions/index.tsx @@ -148,10 +148,12 @@ export async function VersionsView(props: DocumentViewServerProps) { { let columnsToTransform = columns @@ -44,5 +44,5 @@ export const transformColumnsToPreferences = ( export const transformColumnsToSearchParams = ( columns: Column[] | ColumnPreference[], ): ColumnsFromURL => { - return columns.map((col) => (col.active ? col.accessor : `-${col.accessor}`)) + return columns?.map((col) => (col.active ? col.accessor : `-${col.accessor}`)) } diff --git a/packages/payload/src/utilities/validateWhereQuery.ts b/packages/payload/src/utilities/validateWhereQuery.ts index cb920db1a9..720aa66a11 100644 --- a/packages/payload/src/utilities/validateWhereQuery.ts +++ b/packages/payload/src/utilities/validateWhereQuery.ts @@ -13,9 +13,10 @@ import { validOperatorSet } from '../types/constants.js' export const validateWhereQuery = (whereQuery: Where): whereQuery is Where => { if ( whereQuery?.or && - whereQuery?.or?.length > 0 && - whereQuery?.or?.[0]?.and && - whereQuery?.or?.[0]?.and?.length > 0 + (whereQuery?.or?.length === 0 || + (whereQuery?.or?.length > 0 && + whereQuery?.or?.[0]?.and && + whereQuery?.or?.[0]?.and?.length > 0)) ) { // At this point we know that the whereQuery has 'or' and 'and' fields, // now let's check the structure and content of these fields. diff --git a/packages/ui/src/elements/ListControls/useQueryPresets.tsx b/packages/ui/src/elements/ListControls/useQueryPresets.tsx index 694d6f68d9..587eac6784 100644 --- a/packages/ui/src/elements/ListControls/useQueryPresets.tsx +++ b/packages/ui/src/elements/ListControls/useQueryPresets.tsx @@ -3,7 +3,7 @@ import type { CollectionSlug, QueryPreset, SanitizedCollectionPermission } from import { useModal } from '@faceless-ui/modal' import { getTranslation } from '@payloadcms/translations' import { transformColumnsToPreferences, transformColumnsToSearchParams } from 'payload/shared' -import React, { Fragment, useCallback, useMemo } from 'react' +import React, { useCallback, useMemo } from 'react' import { toast } from 'sonner' import { useConfig } from '../../providers/Config/index.js' @@ -103,9 +103,9 @@ export const useQueryPresets = ({ const resetQueryPreset = useCallback(async () => { await refineListData( { - columns: undefined, - preset: undefined, - where: undefined, + columns: [], + preset: '', + where: {}, }, false, ) diff --git a/packages/ui/src/elements/RelationshipTable/index.tsx b/packages/ui/src/elements/RelationshipTable/index.tsx index 80fdad7d93..a8080fb72a 100644 --- a/packages/ui/src/elements/RelationshipTable/index.tsx +++ b/packages/ui/src/elements/RelationshipTable/index.tsx @@ -114,7 +114,7 @@ export const RelationshipTable: React.FC = (pro const renderTable = useCallback( async (docs?: PaginatedDocs['docs']) => { const newQuery: ListQuery = { - limit: String(field?.defaultLimit || collectionConfig?.admin?.pagination?.defaultLimit), + limit: field?.defaultLimit || collectionConfig?.admin?.pagination?.defaultLimit, sort: field.defaultSort || collectionConfig?.defaultSort, ...(query || {}), where: { ...(query?.where || {}) }, @@ -240,6 +240,15 @@ export const RelationshipTable: React.FC = (pro // eslint-disable-next-line react-hooks/exhaustive-deps }, 
[isDrawerOpen]) + const memoizedQuery = React.useMemo( + () => ({ + columns: transformColumnsToPreferences(columnState)?.map(({ accessor }) => accessor), + limit: field.defaultLimit ?? collectionConfig?.admin?.pagination?.defaultLimit, + sort: field.defaultSort ?? collectionConfig?.defaultSort, + }), + [field, columnState, collectionConfig], + ) + return (
    @@ -306,12 +315,7 @@ export const RelationshipTable: React.FC = (pro {data?.docs && data.docs.length > 0 && ( = (pro ? undefined : `_${field.collection}_${fieldPath.replaceAll('.', '_')}_order` } + query={memoizedQuery} > = ({ children, collectionSlug, - columns, data, - defaultLimit, - defaultSort, - listPreferences, modifySearchParams, onQueryChange: onQueryChangeFromProps, orderableFieldName, + query: queryFromProps, }) => { // TODO: Investigate if this is still needed - // eslint-disable-next-line react-compiler/react-compiler + 'use no memo' const router = useRouter() const rawSearchParams = useSearchParams() @@ -36,7 +34,7 @@ export const ListQueryProvider: React.FC = ({ const [modified, setModified] = useState(false) const searchParams = useMemo( - () => parseSearchParams(rawSearchParams), + () => sanitizeQuery(parseSearchParams(rawSearchParams)), [rawSearchParams], ) @@ -51,37 +49,12 @@ export const ListQueryProvider: React.FC = ({ return searchParams } else { return { - limit: String(defaultLimit), - sort: defaultSort, + limit: queryFromProps.limit, + sort: queryFromProps.sort, } } }) - const mergeQuery = useCallback( - (newQuery: ListQuery = {}): ListQuery => { - let page = 'page' in newQuery ? newQuery.page : currentQuery?.page - - if ('where' in newQuery || 'search' in newQuery) { - page = '1' - } - - const mergedQuery: ListQuery = { - ...currentQuery, - ...newQuery, - columns: 'columns' in newQuery ? newQuery.columns : currentQuery.columns, - limit: 'limit' in newQuery ? newQuery.limit : (currentQuery?.limit ?? String(defaultLimit)), - page, - preset: 'preset' in newQuery ? newQuery.preset : currentQuery?.preset, - search: 'search' in newQuery ? newQuery.search : currentQuery?.search, - sort: 'sort' in newQuery ? newQuery.sort : ((currentQuery?.sort as string) ?? defaultSort), - where: 'where' in newQuery ? 
newQuery.where : currentQuery?.where, - } - - return mergedQuery - }, - [currentQuery, defaultLimit, defaultSort], - ) - const refineListData = useCallback( // eslint-disable-next-line @typescript-eslint/require-await async (incomingQuery: ListQuery, modified?: boolean) => { @@ -91,12 +64,23 @@ export const ListQueryProvider: React.FC = ({ setModified(true) } - const newQuery = mergeQuery(incomingQuery) + const newQuery = mergeQuery(currentQuery, incomingQuery, { + defaults: { + limit: queryFromProps.limit, + sort: queryFromProps.sort, + }, + }) if (modifySearchParams) { startRouteTransition(() => router.replace( - `${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) }, { addQueryPrefix: true })}`, + `${qs.stringify( + { + ...newQuery, + columns: JSON.stringify(newQuery.columns), + }, + { addQueryPrefix: true }, + )}`, ), ) } else if ( @@ -110,7 +94,9 @@ export const ListQueryProvider: React.FC = ({ setCurrentQuery(newQuery) }, [ - mergeQuery, + currentQuery, + queryFromProps.limit, + queryFromProps.sort, modifySearchParams, onQueryChange, onQueryChangeFromProps, @@ -121,14 +107,14 @@ export const ListQueryProvider: React.FC = ({ const handlePageChange = useCallback( async (arg: number) => { - await refineListData({ page: String(arg) }) + await refineListData({ page: arg }) }, [refineListData], ) const handlePerPageChange = React.useCallback( async (arg: number) => { - await refineListData({ limit: String(arg), page: '1' }) + await refineListData({ limit: arg, page: 1 }) }, [refineListData], ) @@ -155,47 +141,26 @@ export const ListQueryProvider: React.FC = ({ [refineListData], ) - const syncQuery = useEffectEvent(() => { - let shouldUpdateQueryString = false - const newQuery = { ...(currentQuery || {}) } + const mergeQueryFromPropsAndSyncToURL = useEffectEvent(() => { + const newQuery = sanitizeQuery({ ...(currentQuery || {}), ...(queryFromProps || {}) }) - // Allow the URL to override the default limit - if (isNumber(defaultLimit) && !('limit' in currentQuery)) { - newQuery.limit = String(defaultLimit) - shouldUpdateQueryString = true - } + const search = `?${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) })}` - // Allow the URL to override the default sort - if (defaultSort && !('sort' in currentQuery)) { - newQuery.sort = defaultSort - shouldUpdateQueryString = true - } - - // Only modify columns if they originated from preferences - // We can assume they did if `listPreferences.columns` is defined - if (columns && listPreferences?.columns && !('columns' in currentQuery)) { - newQuery.columns = transformColumnsToSearchParams(columns) - shouldUpdateQueryString = true - } - - if (shouldUpdateQueryString) { + if (window.location.search !== search) { setCurrentQuery(newQuery) - // Do not use router.replace here to avoid re-rendering on initial load - window.history.replaceState( - null, - '', - `?${qs.stringify({ ...newQuery, columns: JSON.stringify(newQuery.columns) })}`, - ) + + // Important: do not use router.replace here to avoid re-rendering on initial load + window.history.replaceState(null, '', search) } }) - // If `defaultLimit` or `defaultSort` are updated externally, update the query - // I.e. when HMR runs, these properties may be different + // If `query` is updated externally, update the local state + // E.g. 
when HMR runs, these properties may be different useEffect(() => { if (modifySearchParams) { - syncQuery() + mergeQueryFromPropsAndSyncToURL() } - }, [defaultSort, defaultLimit, modifySearchParams, columns]) + }, [modifySearchParams, queryFromProps]) return ( { + let page = 'page' in newQuery ? newQuery.page : currentQuery?.page + + if ('where' in newQuery || 'search' in newQuery) { + page = 1 + } + + const mergedQuery: ListQuery = { + ...currentQuery, + ...newQuery, + columns: 'columns' in newQuery ? newQuery.columns : currentQuery.columns, + groupBy: + 'groupBy' in newQuery + ? newQuery.groupBy + : (currentQuery?.groupBy ?? options?.defaults?.groupBy), + limit: 'limit' in newQuery ? newQuery.limit : (currentQuery?.limit ?? options?.defaults?.limit), + page, + preset: 'preset' in newQuery ? newQuery.preset : currentQuery?.preset, + search: 'search' in newQuery ? newQuery.search : currentQuery?.search, + sort: + 'sort' in newQuery + ? newQuery.sort + : ((currentQuery?.sort as string) ?? options?.defaults?.sort), + where: 'where' in newQuery ? newQuery.where : currentQuery?.where, + } + + return mergedQuery +} diff --git a/packages/ui/src/providers/ListQuery/sanitizeQuery.ts b/packages/ui/src/providers/ListQuery/sanitizeQuery.ts new file mode 100644 index 0000000000..551ddf459e --- /dev/null +++ b/packages/ui/src/providers/ListQuery/sanitizeQuery.ts @@ -0,0 +1,38 @@ +import type { ListQuery, Where } from 'payload' + +/** + * Sanitize empty strings from the query, e.g. `?preset=` + * This is how we determine whether to clear user preferences for certain params + * Once cleared, they are no longer needed in the URL + */ +export const sanitizeQuery = (toSanitize: ListQuery): ListQuery => { + const sanitized = { ...toSanitize } + + Object.entries(sanitized).forEach(([key, value]) => { + if ( + key === 'columns' && + (value === '[]' || (Array.isArray(sanitized[key]) && sanitized[key].length === 0)) + ) { + delete sanitized[key] + } + + if (key === 'where' && typeof value === 'object' && !Object.keys(value as Where).length) { + delete sanitized[key] + } + + if ((key === 'limit' || key === 'page') && typeof value === 'string') { + const parsed = parseInt(value, 10) + sanitized[key] = Number.isNaN(parsed) ? 
undefined : parsed + } + + if (key === 'page' && value === 0) { + delete sanitized[key] + } + + if (value === '') { + delete sanitized[key] + } + }) + + return sanitized +} diff --git a/packages/ui/src/providers/ListQuery/types.ts b/packages/ui/src/providers/ListQuery/types.ts index ec91923027..ea009b2a53 100644 --- a/packages/ui/src/providers/ListQuery/types.ts +++ b/packages/ui/src/providers/ListQuery/types.ts @@ -1,6 +1,5 @@ import type { ClientCollectionConfig, - CollectionPreferences, ColumnPreference, ListQuery, PaginatedDocs, @@ -21,11 +20,7 @@ export type OnListQueryChange = (query: ListQuery) => void export type ListQueryProps = { readonly children: React.ReactNode readonly collectionSlug?: ClientCollectionConfig['slug'] - readonly columns?: ColumnPreference[] readonly data: PaginatedDocs - readonly defaultLimit?: number - readonly defaultSort?: Sort - readonly listPreferences?: CollectionPreferences readonly modifySearchParams?: boolean readonly onQueryChange?: OnListQueryChange readonly orderableFieldName?: string @@ -33,6 +28,7 @@ export type ListQueryProps = { * @deprecated */ readonly preferenceKey?: string + query?: ListQuery } export type IListQueryContext = { diff --git a/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx b/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx index 48c39b0e98..bf23d0c5d0 100644 --- a/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx +++ b/packages/ui/src/providers/TableColumns/buildColumnState/index.tsx @@ -38,7 +38,6 @@ import { sortFieldMap } from './sortFieldMap.js' export type BuildColumnStateArgs = { beforeRows?: Column[] clientFields: ClientField[] - columnPreferences: CollectionPreferences['columns'] columns?: CollectionPreferences['columns'] customCellProps: DefaultCellComponentProps['customCellProps'] enableLinkedCell?: boolean @@ -70,7 +69,6 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { beforeRows, clientFields, collectionSlug, - columnPreferences, columns, customCellProps, dataType, @@ -99,7 +97,7 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { // place the `ID` field first, if it exists // do the same for the `useAsTitle` field with precedence over the `ID` field - // then sort the rest of the fields based on the `defaultColumns` or `columnPreferences` + // then sort the rest of the fields based on the `defaultColumns` or `columns` const idFieldIndex = sortedFieldMap?.findIndex((field) => fieldIsID(field)) if (idFieldIndex > -1) { @@ -116,10 +114,10 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { sortedFieldMap.unshift(useAsTitleField) } - const sortTo = columnPreferences || columns + const sortTo = columns if (sortTo) { - // sort the fields to the order of `defaultColumns` or `columnPreferences` + // sort the fields to the order of `defaultColumns` or `columns` sortedFieldMap = sortFieldMap(sortedFieldMap, sortTo) _sortedFieldMap = sortFieldMap(_sortedFieldMap, sortTo) // TODO: think of a way to avoid this additional sort } @@ -150,14 +148,14 @@ export const buildColumnState = (args: BuildColumnStateArgs): Column[] => { return acc // skip any group without a custom cell } - const columnPreference = columnPreferences?.find( + const columnPref = columns?.find( (preference) => clientField && 'name' in clientField && preference.accessor === accessor, ) const isActive = isColumnActive({ accessor, activeColumnsIndices, - columnPreference, + column: columnPref, columns, }) diff --git 
a/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts b/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts index 517fbb5c2f..52561546da 100644 --- a/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts +++ b/packages/ui/src/providers/TableColumns/buildColumnState/isColumnActive.ts @@ -3,18 +3,18 @@ import type { ColumnPreference } from 'payload' export function isColumnActive({ accessor, activeColumnsIndices, - columnPreference, + column, columns, }: { accessor: string activeColumnsIndices: number[] - columnPreference: ColumnPreference + column: ColumnPreference columns: ColumnPreference[] }) { - if (columnPreference) { - return columnPreference.active + if (column) { + return column.active } else if (columns && Array.isArray(columns) && columns.length > 0) { - return Boolean(columns.find((column) => column.accessor === accessor)?.active) + return Boolean(columns.find((col) => col.accessor === accessor)?.active) } else if (activeColumnsIndices.length < 4) { return true } diff --git a/packages/ui/src/utilities/buildTableState.ts b/packages/ui/src/utilities/buildTableState.ts index e2b436f9c6..c76fb6e326 100644 --- a/packages/ui/src/utilities/buildTableState.ts +++ b/packages/ui/src/utilities/buildTableState.ts @@ -215,10 +215,10 @@ const buildTableState = async ( collection: collectionSlug, depth: 0, draft: true, - limit: query?.limit ? parseInt(query.limit, 10) : undefined, + limit: query?.limit, locale: req.locale, overrideAccess: false, - page: query?.page ? parseInt(query.page, 10) : undefined, + page: query?.page, sort: query?.sort, user: req.user, where: query?.where, @@ -232,7 +232,6 @@ const buildTableState = async ( clientConfig, collectionConfig, collections: Array.isArray(collectionSlug) ? collectionSlug : undefined, - columnPreferences: Array.isArray(collectionSlug) ? 
collectionPreferences?.columns : undefined, // TODO, might not be neededcolumns, columns, docs, enableRowSelections, diff --git a/packages/ui/src/utilities/renderTable.tsx b/packages/ui/src/utilities/renderTable.tsx index d0b0216119..9a3fe5716f 100644 --- a/packages/ui/src/utilities/renderTable.tsx +++ b/packages/ui/src/utilities/renderTable.tsx @@ -64,7 +64,6 @@ export const renderTable = ({ clientConfig, collectionConfig, collections, - columnPreferences, columns: columnsFromArgs, customCellProps, docs, @@ -80,7 +79,6 @@ export const renderTable = ({ clientConfig?: ClientConfig collectionConfig?: SanitizedCollectionConfig collections?: string[] - columnPreferences: CollectionPreferences['columns'] columns?: CollectionPreferences['columns'] customCellProps?: Record docs: PaginatedDocs['docs'] @@ -154,7 +152,6 @@ export const renderTable = ({ const sharedArgs: Pick< BuildColumnStateArgs, | 'clientFields' - | 'columnPreferences' | 'columns' | 'customCellProps' | 'enableRowSelections' @@ -164,7 +161,6 @@ export const renderTable = ({ | 'useAsTitle' > = { clientFields, - columnPreferences, columns, enableRowSelections, i18n, diff --git a/packages/ui/src/utilities/upsertPreferences.ts b/packages/ui/src/utilities/upsertPreferences.ts index 26e51ac814..477d98a041 100644 --- a/packages/ui/src/utilities/upsertPreferences.ts +++ b/packages/ui/src/utilities/upsertPreferences.ts @@ -5,13 +5,26 @@ import { cache } from 'react' import { removeUndefined } from './removeUndefined.js' +type PreferenceDoc = { + id: DefaultDocumentIDType | undefined + value?: T | undefined +} + +type DefaultMerge = (existingValue: T, incomingValue: T | undefined) => T + +const defaultMerge: DefaultMerge = (existingValue: T, incomingValue: T | undefined) => + ({ + ...(typeof existingValue === 'object' ? existingValue : {}), // Shallow merge existing prefs to acquire any missing keys from incoming value + ...removeUndefined(incomingValue || {}), + }) as T + export const getPreferences = cache( async ( key: string, payload: Payload, userID: DefaultDocumentIDType, userSlug: string, - ): Promise<{ id: DefaultDocumentIDType; value: T }> => { + ): Promise> => { const result = (await payload .find({ collection: 'payload-preferences', @@ -58,21 +71,14 @@ export const upsertPreferences = async | stri req, value: incomingValue, }: { + customMerge?: (existingValue: T, incomingValue: T, defaultMerge: DefaultMerge) => T key: string req: PayloadRequest -} & ( - | { - customMerge: (existingValue: T) => T - value?: never - } - | { - customMerge?: never - value: T - } -)): Promise => { - const existingPrefs: { id?: DefaultDocumentIDType; value?: T } = req.user + value: T +}): Promise => { + const existingPrefs: PreferenceDoc = req.user ? await getPreferences(key, req.payload, req.user.id, req.user.collection) - : {} + : ({} as PreferenceDoc) let newPrefs = existingPrefs?.value @@ -95,15 +101,12 @@ export const upsertPreferences = async | stri let mergedPrefs: T if (typeof customMerge === 'function') { - mergedPrefs = customMerge(existingPrefs.value) + mergedPrefs = customMerge(existingPrefs.value, incomingValue, defaultMerge) } else { // Strings are valid JSON, i.e. `locale` saved as a string to the locale preferences mergedPrefs = typeof incomingValue === 'object' - ? ({ - ...(typeof existingPrefs.value === 'object' ? existingPrefs?.value : {}), // Shallow merge existing prefs to acquire any missing keys from incoming value - ...removeUndefined(incomingValue || {}), - } as T) + ? 
defaultMerge(existingPrefs.value, incomingValue) : incomingValue } diff --git a/test/eslint.config.js b/test/eslint.config.js index 2621eb0a66..8cdafaa8f4 100644 --- a/test/eslint.config.js +++ b/test/eslint.config.js @@ -74,6 +74,7 @@ export const testEslintConfig = [ 'expectNoResultsAndCreateFolderButton', 'createFolder', 'createFolderFromDoc', + 'assertURLParams', ], }, ], diff --git a/test/fields/payload-types.ts b/test/fields/payload-types.ts index 352cb40db4..8ee74af84f 100644 --- a/test/fields/payload-types.ts +++ b/test/fields/payload-types.ts @@ -549,6 +549,14 @@ export interface BlockField { } )[] | null; + readOnly?: + | { + title?: string | null; + id?: string | null; + blockName?: string | null; + blockType: 'readOnlyBlock'; + }[] + | null; updatedAt: string; createdAt: string; } @@ -2222,6 +2230,17 @@ export interface BlockFieldsSelect { blockName?: T; }; }; + readOnly?: + | T + | { + readOnlyBlock?: + | T + | { + title?: T; + id?: T; + blockName?: T; + }; + }; updatedAt?: T; createdAt?: T; } diff --git a/test/query-presets/e2e.spec.ts b/test/query-presets/e2e.spec.ts index 14ded55fca..317c96a2d6 100644 --- a/test/query-presets/e2e.spec.ts +++ b/test/query-presets/e2e.spec.ts @@ -4,6 +4,7 @@ import { expect, test } from '@playwright/test' import { devUser } from 'credentials.js' import { openListColumns } from 'helpers/e2e/openListColumns.js' import { toggleColumn } from 'helpers/e2e/toggleColumn.js' +import { openNav } from 'helpers/e2e/toggleNav.js' import * as path from 'path' import { fileURLToPath } from 'url' @@ -152,23 +153,38 @@ describe('Query Presets', () => { test('should select preset and apply filters', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) await assertURLParams({ page, columns: seededData.everyone.columns, - where: seededData.everyone.where, - presetID: everyoneID, + preset: everyoneID, }) - - expect(true).toBe(true) }) test('should clear selected preset and reset filters', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) + await clearSelectedPreset({ page }) - expect(true).toBe(true) + + // ensure that the preset was cleared from preferences by navigating without the `?preset=` param + // e.g. 
do not do `page.reload()` + await page.goto(pagesUrl.list) + + // poll url to ensure that `?preset=` param is not present + // this is first set to an empty string to clear from the user's preferences + // it is then removed entirely after it is processed on the server + const regex = /preset=/ + await page.waitForURL((url) => !regex.test(url.search), { timeout: TEST_TIMEOUT_LONG }) + + await expect( + page.locator('button#select-preset', { + hasText: exactText('Select Preset'), + }), + ).toBeVisible() }) test('should delete a preset, clear selection, and reset changes', async () => { @@ -205,18 +221,29 @@ describe('Query Presets', () => { test('should save last used preset to preferences and load on initial render', async () => { await page.goto(pagesUrl.list) + await selectPreset({ page, presetTitle: seededData.everyone.title }) - await page.reload() + await page.goto(pagesUrl.list) await assertURLParams({ page, columns: seededData.everyone.columns, where: seededData.everyone.where, - // presetID: everyoneID, + preset: everyoneID, }) - expect(true).toBe(true) + // for good measure, also soft navigate away and back + await page.goto(pagesUrl.admin) + await openNav(page) + await page.click(`a[href="/admin/collections/${pagesSlug}"]`) + + await assertURLParams({ + page, + columns: seededData.everyone.columns, + where: seededData.everyone.where, + preset: everyoneID, + }) }) test('should only show "edit" and "delete" controls when there is an active preset', async () => { diff --git a/test/query-presets/helpers/assertURLParams.ts b/test/query-presets/helpers/assertURLParams.ts index 2e9bf1b166..36e6653d9a 100644 --- a/test/query-presets/helpers/assertURLParams.ts +++ b/test/query-presets/helpers/assertURLParams.ts @@ -10,12 +10,12 @@ export async function assertURLParams({ page, columns, where, - presetID, + preset, }: { columns?: ColumnPreference[] page: Page - presetID?: string | undefined - where: Where + preset?: string | undefined + where?: Where }) { if (where) { // TODO: can't get columns to encode correctly @@ -32,8 +32,8 @@ export async function assertURLParams({ await page.waitForURL(columnsRegex) } - if (presetID) { - const presetRegex = new RegExp(`preset=${presetID}`) + if (preset) { + const presetRegex = new RegExp(`preset=${preset}`) await page.waitForURL(presetRegex) } } diff --git a/test/query-presets/payload-types.ts b/test/query-presets/payload-types.ts index b1c23df8e6..b81b0e04ec 100644 --- a/test/query-presets/payload-types.ts +++ b/test/query-presets/payload-types.ts @@ -154,6 +154,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -301,6 +308,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema diff --git a/tsconfig.base.json b/tsconfig.base.json index 0898ad390f..153abb8a5f 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,8 +21,15 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": ["DOM", "DOM.Iterable", "ES2022"], - "types": ["node", "jest"], + "lib": [ + "DOM", + "DOM.Iterable", + "ES2022" + ], + "types": [ + "node", + "jest" + ], "incremental": true, "isolatedModules": true, "plugins": [ @@ -31,36 +38,72 @@ } ], "paths": { - "@payload-config": 
["./test/_community/config.ts"], - "@payloadcms/admin-bar": ["./packages/admin-bar/src"], - "@payloadcms/live-preview": ["./packages/live-preview/src"], - "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], - "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], - "@payloadcms/ui": ["./packages/ui/src/exports/client/index.ts"], - "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], - "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], - "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], - "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], - "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], + "@payload-config": [ + "./test/fields/config.ts" + ], + "@payloadcms/admin-bar": [ + "./packages/admin-bar/src" + ], + "@payloadcms/live-preview": [ + "./packages/live-preview/src" + ], + "@payloadcms/live-preview-react": [ + "./packages/live-preview-react/src/index.ts" + ], + "@payloadcms/live-preview-vue": [ + "./packages/live-preview-vue/src/index.ts" + ], + "@payloadcms/ui": [ + "./packages/ui/src/exports/client/index.ts" + ], + "@payloadcms/ui/shared": [ + "./packages/ui/src/exports/shared/index.ts" + ], + "@payloadcms/ui/rsc": [ + "./packages/ui/src/exports/rsc/index.ts" + ], + "@payloadcms/ui/scss": [ + "./packages/ui/src/scss.scss" + ], + "@payloadcms/ui/scss/app.scss": [ + "./packages/ui/src/scss/app.scss" + ], + "@payloadcms/next/*": [ + "./packages/next/src/exports/*.ts" + ], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], - "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], + "@payloadcms/richtext-lexical/rsc": [ + "./packages/richtext-lexical/src/exports/server/rsc.ts" + ], + "@payloadcms/richtext-slate/rsc": [ + "./packages/richtext-slate/src/exports/server/rsc.ts" + ], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], - "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], - "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], - "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], + "@payloadcms/plugin-seo/client": [ + "./packages/plugin-seo/src/exports/client.ts" + ], + "@payloadcms/plugin-sentry/client": [ + "./packages/plugin-sentry/src/exports/client.ts" + ], + "@payloadcms/plugin-stripe/client": [ + "./packages/plugin-stripe/src/exports/client.ts" + ], + "@payloadcms/plugin-search/client": [ + "./packages/plugin-search/src/exports/client.ts" + ], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], + "@payloadcms/plugin-multi-tenant/rsc": [ + "./packages/plugin-multi-tenant/src/exports/rsc.ts" + ], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -70,25 +113,42 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": 
["./packages/plugin-multi-tenant/src/index.ts"], + "@payloadcms/plugin-multi-tenant": [ + "./packages/plugin-multi-tenant/src/index.ts" + ], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": ["./packages/next/src/exports/*"], - "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], - "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], + "@payloadcms/next": [ + "./packages/next/src/exports/*" + ], + "@payloadcms/storage-azure/client": [ + "./packages/storage-azure/src/exports/client.ts" + ], + "@payloadcms/storage-s3/client": [ + "./packages/storage-s3/src/exports/client.ts" + ], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], + "@payloadcms/storage-gcs/client": [ + "./packages/storage-gcs/src/exports/client.ts" + ], "@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": ["${configDir}/src"], - "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] + "include": [ + "${configDir}/src" + ], + "exclude": [ + "${configDir}/dist", + "${configDir}/build", + "${configDir}/temp", + "**/*.spec.ts" + ] } From d6e21adaf0fdc2eaaf3f8212a226c7436afc9cf9 Mon Sep 17 00:00:00 2001 From: German Jablonski <43938777+GermanJablo@users.noreply.github.com> Date: Fri, 18 Jul 2025 15:28:44 +0100 Subject: [PATCH 053/143] docs: shorten line length in code snippet comments to avoid horizontal scrolling (#13217) prettier doesn't seem to cover that, and horizontal scrolling in the browser is even more annoying than in the IDE. 
Regex used in the search engine: `^[ \t]*\* ` --- docs/plugins/multi-tenant.mdx | 57 ++++++++++++++++++---------- docs/rich-text/custom-features.mdx | 57 +++++++++++++++++++--------- docs/rich-text/official-features.mdx | 21 +++++++--- docs/upload/storage-adapters.mdx | 3 +- 4 files changed, 95 insertions(+), 43 deletions(-) diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index 3b507aae8b..f9c16f3e3a 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -54,7 +54,8 @@ The plugin accepts an object with the following properties: ```ts type MultiTenantPluginConfig = { /** - * After a tenant is deleted, the plugin will attempt to clean up related documents + * After a tenant is deleted, the plugin will attempt + * to clean up related documents * - removing documents with the tenant ID * - removing the tenant from users * @@ -67,19 +68,22 @@ type MultiTenantPluginConfig = { collections: { [key in CollectionSlug]?: { /** - * Set to `true` if you want the collection to behave as a global + * Set to `true` if you want the collection to + * behave as a global * * @default false */ isGlobal?: boolean /** - * Set to `false` if you want to manually apply the baseListFilter + * Set to `false` if you want to manually apply + * the baseListFilter * * @default true */ useBaseListFilter?: boolean /** - * Set to `false` if you want to handle collection access manually without the multi-tenant constraints applied + * Set to `false` if you want to handle collection access + * manually without the multi-tenant constraints applied * * @default true */ @@ -88,7 +92,8 @@ type MultiTenantPluginConfig = { } /** * Enables debug mode - * - Makes the tenant field visible in the admin UI within applicable collections + * - Makes the tenant field visible in the + * admin UI within applicable collections * * @default false */ @@ -100,22 +105,27 @@ type MultiTenantPluginConfig = { */ enabled?: boolean /** - * Field configuration for the field added to all tenant enabled collections + * Field configuration for the field added + * to all tenant enabled collections */ tenantField?: { access?: RelationshipField['access'] /** - * The name of the field added to all tenant enabled collections + * The name of the field added to all tenant + * enabled collections * * @default 'tenant' */ name?: string } /** - * Field configuration for the field added to the users collection + * Field configuration for the field added + * to the users collection * - * If `includeDefaultField` is `false`, you must include the field on your users collection manually - * This is useful if you want to customize the field or place the field in a specific location + * If `includeDefaultField` is `false`, you must + * include the field on your users collection manually + * This is useful if you want to customize the field + * or place the field in a specific location */ tenantsArrayField?: | { @@ -136,7 +146,8 @@ type MultiTenantPluginConfig = { */ arrayTenantFieldName?: string /** - * When `includeDefaultField` is `true`, the field will be added to the users collection automatically + * When `includeDefaultField` is `true`, the field will + * be added to the users collection automatically */ includeDefaultField?: true /** @@ -153,7 +164,8 @@ type MultiTenantPluginConfig = { arrayFieldName?: string arrayTenantFieldName?: string /** - * When `includeDefaultField` is `false`, you must include the field on your users collection manually + * When `includeDefaultField` is `false`, you must + * include the 
field on your users collection manually */ includeDefaultField?: false rowFields?: never @@ -162,7 +174,8 @@ type MultiTenantPluginConfig = { /** * Customize tenant selector label * - * Either a string or an object where the keys are i18n codes and the values are the string labels + * Either a string or an object where the keys are i18n + * codes and the values are the string labels */ tenantSelectorLabel?: | Partial<{ @@ -176,7 +189,8 @@ type MultiTenantPluginConfig = { */ tenantsSlug?: string /** - * Function that determines if a user has access to _all_ tenants + * Function that determines if a user has access + * to _all_ tenants * * Useful for super-admin type users */ @@ -184,15 +198,18 @@ type MultiTenantPluginConfig = { user: ConfigTypes extends { user: unknown } ? ConfigTypes['user'] : User, ) => boolean /** - * Opt out of adding access constraints to the tenants collection + * Opt out of adding access constraints to + * the tenants collection */ useTenantsCollectionAccess?: boolean /** - * Opt out including the baseListFilter to filter tenants by selected tenant + * Opt out including the baseListFilter to filter + * tenants by selected tenant */ useTenantsListFilter?: boolean /** - * Opt out including the baseListFilter to filter users by selected tenant + * Opt out including the baseListFilter to filter + * users by selected tenant */ useUsersTenantFilter?: boolean } @@ -327,14 +344,16 @@ type ContextType = { /** * Prevents a refresh when the tenant is changed * - * If not switching tenants while viewing a "global", set to true + * If not switching tenants while viewing a "global", + * set to true */ setPreventRefreshOnChange: React.Dispatch> /** * Sets the selected tenant ID * * @param args.id - The ID of the tenant to select - * @param args.refresh - Whether to refresh the page after changing the tenant + * @param args.refresh - Whether to refresh the page + * after changing the tenant */ setTenant: (args: { id: number | string | undefined diff --git a/docs/rich-text/custom-features.mdx b/docs/rich-text/custom-features.mdx index c29935dd83..217819ec03 100644 --- a/docs/rich-text/custom-features.mdx +++ b/docs/rich-text/custom-features.mdx @@ -474,11 +474,15 @@ const MyNodeComponent = React.lazy(() => ) /** - * This node is a DecoratorNode. DecoratorNodes allow you to render React components in the editor. + * This node is a DecoratorNode. DecoratorNodes allow + * you to render React components in the editor. * - * They need both createDom and decorate functions. createDom => outside of the html. decorate => React Component inside of the html. + * They need both createDom and decorate functions. + * createDom => outside of the html. + * decorate => React Component inside of the html. 
* - * If we used DecoratorBlockNode instead, we would only need a decorate method + * If we used DecoratorBlockNode instead, + * we would only need a decorate method */ export class MyNode extends DecoratorNode { static clone(node: MyNode): MyNode { @@ -490,9 +494,11 @@ export class MyNode extends DecoratorNode { } /** - * Defines what happens if you copy a div element from another page and paste it into the lexical editor + * Defines what happens if you copy a div element + * from another page and paste it into the lexical editor * - * This also determines the behavior of lexical's internal HTML -> Lexical converter + * This also determines the behavior of lexical's + * internal HTML -> Lexical converter */ static importDOM(): DOMConversionMap | null { return { @@ -504,14 +510,18 @@ export class MyNode extends DecoratorNode { } /** - * The data for this node is stored serialized as JSON. This is the "load function" of that node: it takes the saved data and converts it into a node. + * The data for this node is stored serialized as JSON. + * This is the "load function" of that node: it takes + * the saved data and converts it into a node. */ static importJSON(serializedNode: SerializedMyNode): MyNode { return $createMyNode() } /** - * Determines how the hr element is rendered in the lexical editor. This is only the "initial" / "outer" HTML element. + * Determines how the hr element is rendered in the + * lexical editor. This is only the "initial" / "outer" + * HTML element. */ createDOM(config: EditorConfig): HTMLElement { const element = document.createElement('div') @@ -519,22 +529,28 @@ export class MyNode extends DecoratorNode { } /** - * Allows you to render a React component within whatever createDOM returns. + * Allows you to render a React component within + * whatever createDOM returns. */ decorate(): React.ReactElement { return } /** - * Opposite of importDOM, this function defines what happens when you copy a div element from the lexical editor and paste it into another page. + * Opposite of importDOM, this function defines what + * happens when you copy a div element from the lexical + * editor and paste it into another page. * - * This also determines the behavior of lexical's internal Lexical -> HTML converter + * This also determines the behavior of lexical's + * internal Lexical -> HTML converter */ exportDOM(): DOMExportOutput { return { element: document.createElement('div') } } /** - * Opposite of importJSON. This determines what data is saved in the database / in the lexical editor state. + * Opposite of importJSON. This determines what + * data is saved in the database / in the lexical + * editor state. */ exportJSON(): SerializedLexicalNode { return { @@ -556,18 +572,23 @@ export class MyNode extends DecoratorNode { } } -// This is used in the importDOM method. Totally optional if you do not want your node to be created automatically when copy & pasting certain dom elements -// into your editor. +// This is used in the importDOM method. Totally optional +// if you do not want your node to be created automatically +// when copy & pasting certain dom elements into your editor. function $yourConversionMethod(): DOMConversionOutput { return { node: $createMyNode() } } -// This is a utility method to create a new MyNode. Utility methods prefixed with $ make it explicit that this should only be used within lexical +// This is a utility method to create a new MyNode. 
+// Utility methods prefixed with $ make it explicit +// that this should only be used within lexical export function $createMyNode(): MyNode { return $applyNodeReplacement(new MyNode()) } -// This is just a utility method you can use to check if a node is a MyNode. This also ensures correct typing. +// This is just a utility method you can use +// to check if a node is a MyNode. This also +// ensures correct typing. export function $isMyNode( node: LexicalNode | null | undefined, ): node is MyNode { @@ -626,10 +647,12 @@ export const INSERT_MYNODE_COMMAND: LexicalCommand = createCommand( ) /** - * Plugin which registers a lexical command to insert a new MyNode into the editor + * Plugin which registers a lexical command to + * insert a new MyNode into the editor */ export const MyNodePlugin: PluginComponent = () => { - // The useLexicalComposerContext hook can be used to access the lexical editor instance + // The useLexicalComposerContext hook can be used + // to access the lexical editor instance const [editor] = useLexicalComposerContext() useEffect(() => { diff --git a/docs/rich-text/official-features.mdx b/docs/rich-text/official-features.mdx index cdf1efcb18..bffa4c0b2e 100644 --- a/docs/rich-text/official-features.mdx +++ b/docs/rich-text/official-features.mdx @@ -124,12 +124,15 @@ HeadingFeature({ ```ts type IndentFeatureProps = { /** - * The nodes that should not be indented. "type" property of the nodes you don't want to be indented. - * These can be: "paragraph", "heading", "listitem", "quote" or other indentable nodes if they exist. + * The nodes that should not be indented. "type" + * property of the nodes you don't want to be indented. + * These can be: "paragraph", "heading", "listitem", + * "quote" or other indentable nodes if they exist. */ disabledNodes?: string[] /** - * If true, pressing Tab in the middle of a block such as a paragraph or heading will not insert a tabNode. + * If true, pressing Tab in the middle of a block such + * as a paragraph or heading will not insert a tabNode. * Instead, Tab will only be used for block-level indentation. * @default false */ @@ -180,7 +183,8 @@ type LinkFeatureServerProps = { */ disableAutoLinks?: 'creationOnly' | true /** - * A function or array defining additional fields for the link feature. + * A function or array defining additional + * fields for the link feature. * These will be displayed in the link editor drawer. */ fields?: @@ -235,7 +239,9 @@ LinkFeature({ ```ts type RelationshipFeatureProps = { /** - * Sets a maximum population depth for this relationship, regardless of the remaining depth when the respective field is reached. + * Sets a maximum population depth for this relationship, + * regardless of the remaining depth when the respective + * field is reached. */ maxDepth?: number } & ExclusiveRelationshipFeatureProps @@ -274,7 +280,10 @@ type UploadFeatureProps = { } } /** - * Sets a maximum population depth for this upload (not the fields for this upload), regardless of the remaining depth when the respective field is reached. + * Sets a maximum population depth for this upload + * (not the fields for this upload), regardless of + * the remaining depth when the respective field is + * reached. 
*/ maxDepth?: number } diff --git a/docs/upload/storage-adapters.mdx b/docs/upload/storage-adapters.mdx index fa25571939..de6420d07b 100644 --- a/docs/upload/storage-adapters.mdx +++ b/docs/upload/storage-adapters.mdx @@ -292,7 +292,8 @@ Reference any of the existing storage adapters for guidance on how this should b ```ts export interface GeneratedAdapter { /** - * Additional fields to be injected into the base collection and image sizes + * Additional fields to be injected into the base + * collection and image sizes */ fields?: Field[] /** From 7f9de6d10146739895b12c40d4046aab7c547c5b Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Mon, 21 Jul 2025 08:39:18 -0400 Subject: [PATCH 054/143] fix: empty folderType arrays break relational dbs (#13219) Relational databases were broken with folders because it was querying on: ```ts { folderType: { equals: [] } } ``` Which does not work since the select hasMany stores values in a separate table. --- .../ui/src/elements/FolderView/FolderTypeField/index.tsx | 4 ++-- .../ui/src/utilities/getFolderResultsComponentAndData.tsx | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx index 2592eff529..3ac8125945 100644 --- a/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx +++ b/packages/ui/src/elements/FolderView/FolderTypeField/index.tsx @@ -80,10 +80,10 @@ export const FolderTypeField = ({ if (!readOnly || disabled) { let newValue: string | string[] = null if (selectedOption && hasMany) { - if (Array.isArray(selectedOption)) { + if (Array.isArray(selectedOption) && selectedOption.length > 0) { newValue = selectedOption.map((option) => option.value) } else { - newValue = [] + newValue = null } } else if (selectedOption && !Array.isArray(selectedOption)) { newValue = selectedOption.value diff --git a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx index 69378293b6..e2a58f3507 100644 --- a/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx +++ b/packages/ui/src/utilities/getFolderResultsComponentAndData.tsx @@ -120,11 +120,6 @@ export const getFolderResultsComponentAndData = async ({ exists: false, }, }, - { - folderType: { - equals: [], - }, - }, { folderType: { equals: null, From dce898d7ca8e59a23eb4091e4c74d3414b2ea49a Mon Sep 17 00:00:00 2001 From: Jessica Rynkar <67977755+jessrynkar@users.noreply.github.com> Date: Mon, 21 Jul 2025 14:19:51 +0100 Subject: [PATCH 055/143] fix(ui): ensure publishSpecificLocale works during create operation (#13129) ### What? This PR ensures that when a document is created using the `Publish in __` button, it is saved to the correct locale. ### Why? During document creation, the buttons `Publish` or `Publish in [locale]` have the same effect. As a result, we overlooked the case where a user may specifically click `Publish in [locale]` for the first save. In this scenario, the create operation does not respect the `publishSpecificLocale` value, so the document was always saved in the default locale regardless of the intended one. ### How? Passes the `publishSpecificLocale` value to the create operation, ensuring the document and version is saved to the correct locale. 
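For reference, a rough sketch of what this enables over the REST API once the fix is in — the collection slug, locale code, and response handling below are illustrative assumptions (a config with `es` registered as a locale and drafts enabled on the collection), not part of this PR:

```ts
// Hypothetical client-side call: create a brand-new document while publishing
// only the `es` locale. The `publishSpecificLocale` query param is what the
// create endpoint now forwards to the create operation. Auth is omitted here.
const res = await fetch('/api/posts-with-drafts?draft=false&publishSpecificLocale=es', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ title: 'Created In Spanish' }),
})

const { doc } = await res.json()
// With this change, `doc.title` is stored under the `es` locale;
// reading the document back in the default locale returns an empty title,
// which is what the new e2e assertion below checks.
```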
**Fixes:** #13117 --- .../Version/VersionPillLabel/VersionPillLabel.tsx | 2 +- .../payload/src/collections/endpoints/create.ts | 2 ++ .../payload/src/collections/operations/create.ts | 7 +++++++ test/helpers.ts | 7 ++++++- test/localization/e2e.spec.ts | 15 +++++++++++++++ 5 files changed, 31 insertions(+), 2 deletions(-) diff --git a/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx b/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx index 1ca29a7f43..45a5cde220 100644 --- a/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx +++ b/packages/next/src/views/Version/VersionPillLabel/VersionPillLabel.tsx @@ -116,7 +116,7 @@ export const VersionPillLabel: React.FC<{ )} )} - {localeLabel && {localeLabel}} + {localeLabel && {localeLabel}}
    ) } diff --git a/packages/payload/src/collections/endpoints/create.ts b/packages/payload/src/collections/endpoints/create.ts index 9398eedd72..24641fab8b 100644 --- a/packages/payload/src/collections/endpoints/create.ts +++ b/packages/payload/src/collections/endpoints/create.ts @@ -16,6 +16,7 @@ export const createHandler: PayloadHandler = async (req) => { const autosave = searchParams.get('autosave') === 'true' const draft = searchParams.get('draft') === 'true' const depth = searchParams.get('depth') + const publishSpecificLocale = req.query.publishSpecificLocale as string | undefined const doc = await createOperation({ autosave, @@ -24,6 +25,7 @@ export const createHandler: PayloadHandler = async (req) => { depth: isNumber(depth) ? depth : undefined, draft, populate: sanitizePopulateParam(req.query.populate), + publishSpecificLocale, req, select: sanitizeSelectParam(req.query.select), }) diff --git a/packages/payload/src/collections/operations/create.ts b/packages/payload/src/collections/operations/create.ts index cc09c1ad6a..9c0bfc071a 100644 --- a/packages/payload/src/collections/operations/create.ts +++ b/packages/payload/src/collections/operations/create.ts @@ -47,6 +47,7 @@ export type Arguments = { overrideAccess?: boolean overwriteExistingFiles?: boolean populate?: PopulateType + publishSpecificLocale?: string req: PayloadRequest select?: SelectType showHiddenFields?: boolean @@ -88,6 +89,10 @@ export const createOperation = async < } } + if (args.publishSpecificLocale) { + args.req.locale = args.publishSpecificLocale + } + const { autosave = false, collection: { config: collectionConfig }, @@ -99,6 +104,7 @@ export const createOperation = async < overrideAccess, overwriteExistingFiles = false, populate, + publishSpecificLocale, req: { fallbackLocale, locale, @@ -286,6 +292,7 @@ export const createOperation = async < collection: collectionConfig, docWithLocales: result, payload, + publishSpecificLocale, req, }) } diff --git a/test/helpers.ts b/test/helpers.ts index 83339774bd..ed3e73f487 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -279,7 +279,12 @@ export async function saveDocHotkeyAndAssert(page: Page): Promise { export async function saveDocAndAssert( page: Page, - selector: '#action-publish' | '#action-save' | '#action-save-draft' | string = '#action-save', + selector: + | '#action-publish' + | '#action-save' + | '#action-save-draft' + | '#publish-locale' + | string = '#action-save', expectation: 'error' | 'success' = 'success', ): Promise { await wait(500) // TODO: Fix this diff --git a/test/localization/e2e.spec.ts b/test/localization/e2e.spec.ts index ddb0dbf175..4866ef8d4d 100644 --- a/test/localization/e2e.spec.ts +++ b/test/localization/e2e.spec.ts @@ -618,6 +618,21 @@ describe('Localization', () => { await expect(searchInput).toBeVisible() await expect(searchInput).toHaveAttribute('placeholder', 'Search by Full title') }) + + describe('publish specific locale', () => { + test('should create post in correct locale with publishSpecificLocale', async () => { + await page.goto(urlPostsWithDrafts.create) + await changeLocale(page, 'es') + await fillValues({ title: 'Created In Spanish' }) + const chevronButton = page.locator('.form-submit .popup__trigger-wrap > .popup-button') + await chevronButton.click() + await saveDocAndAssert(page, '#publish-locale') + + await expect(page.locator('#field-title')).toHaveValue('Created In Spanish') + await changeLocale(page, defaultLocale) + await expect(page.locator('#field-title')).toBeEmpty() + }) + }) }) async function 
fillValues(data: Partial) { From af2ddff203185d51dcf0411814a931ebaf9f2c86 Mon Sep 17 00:00:00 2001 From: Chandler Gonzales Date: Mon, 21 Jul 2025 06:23:44 -0700 Subject: [PATCH 056/143] fix: text field validation for minLength: 1, required: false (#13124) Fixes #13113 ### How? Does not rely on JS falseyness, instead explicitly checking for null & undefined I'm not actually certain this is the approach we want to take. Some people might interpret "required" as not null, not-undefined and min length > 1 in the case of strings. If they do, this change to the behavior in the not-required case will break their expectations --- packages/payload/src/fields/validations.spec.ts | 5 +++++ packages/payload/src/fields/validations.ts | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/payload/src/fields/validations.spec.ts b/packages/payload/src/fields/validations.spec.ts index b3b822771d..4d3d3dd116 100644 --- a/packages/payload/src/fields/validations.spec.ts +++ b/packages/payload/src/fields/validations.spec.ts @@ -61,6 +61,11 @@ describe('Field Validations', () => { const result = text(val, { ...options, minLength: 10 }) expect(result).toBe(true) }) + it('should validate minLength with empty string', () => { + const val = '' + const result = text(val, { ...options, required: false, minLength: 1 }) + expect(result).toBe('validation:longerThanMin') + }) it('should validate an array of texts', async () => { const val = ['test'] const result = text(val, { ...options, hasMany: true }) diff --git a/packages/payload/src/fields/validations.ts b/packages/payload/src/fields/validations.ts index bd89a162ba..7dc86df952 100644 --- a/packages/payload/src/fields/validations.ts +++ b/packages/payload/src/fields/validations.ts @@ -61,7 +61,7 @@ export const text: TextFieldValidation = ( let maxLength!: number if (!required) { - if (!value) { + if (value === undefined || value === null) { return true } } From 0eb8f759461769532c149daee49e83e637337669 Mon Sep 17 00:00:00 2001 From: fgrsource <107464125+fgrsource@users.noreply.github.com> Date: Mon, 21 Jul 2025 17:18:40 +0200 Subject: [PATCH 057/143] docs: fix typo, example was not valid JSON (#13224) ### What? A comma is missing in the example code. This results in not valid JSON. ### Why? I stumbled upon it, while setting up a Tenant-based Payload for the first time. ### How? Adding a comma results in valid JSON. Fixes # Added a comma. ;) --- docs/plugins/multi-tenant.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index f9c16f3e3a..39eab63b8b 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -230,7 +230,7 @@ const config = buildConfig({ slug: 'tenants', admin: { useAsTitle: 'name' - } + }, fields: [ // remember, you own these fields // these are merely suggestions/examples From c1cfceb7dcb92319393fb4f2152f2cb3d2b4a8c7 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Tue, 22 Jul 2025 19:53:25 +0300 Subject: [PATCH 058/143] fix(db-mongodb): handle duplicate unique index error for DocumentDB (#13239) Currently, with DocumentDB instead of a friendly error like "Value must be unique" we see a generic "Something went wrong" message. This PR fixes that by adding a fallback to parse the message instead of using `error.keyValue` which doesn't exist for responses from DocumentDB. 
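For context, here is the fallback in isolation — a minimal sketch assuming a typical MongoDB-style E11000 message (the exact wording varies by engine, so treat the string as illustrative only). The regex mirrors the new `extractFieldFromMessage` helper in the diff:

```ts
// Illustrative duplicate-key message; DocumentDB omits `keyValue` on the error
// object, so the field name has to be recovered from the message text itself.
const message =
  'E11000 duplicate key error collection: payload.users index: email_1 dup key: { email: "dev@payloadcms.com" }'

// Pull the field name out of the index name, e.g. "email_1" -> "email".
const match = message.match(/index:\s*(.*?)_/)
const path = match?.[1] ?? ''

console.log(path) // "email"
// `path` is then used to raise a ValidationError with the translated
// "Value must be unique" message instead of a generic server error.
```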
--- .../db-mongodb/src/utilities/handleError.ts | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/packages/db-mongodb/src/utilities/handleError.ts b/packages/db-mongodb/src/utilities/handleError.ts index d7a44656ef..172548ff6d 100644 --- a/packages/db-mongodb/src/utilities/handleError.ts +++ b/packages/db-mongodb/src/utilities/handleError.ts @@ -2,6 +2,15 @@ import type { PayloadRequest } from 'payload' import { ValidationError } from 'payload' +function extractFieldFromMessage(message: string) { + // eslint-disable-next-line regexp/no-super-linear-backtracking + const match = message.match(/index:\s*(.*?)_/) + if (match && match[1]) { + return match[1] // e.g., returns "email" from "index: email_1" + } + return null +} + export const handleError = ({ collection, error, @@ -18,20 +27,22 @@ export const handleError = ({ } // Handle uniqueness error from MongoDB - if ( - 'code' in error && - error.code === 11000 && - 'keyValue' in error && - error.keyValue && - typeof error.keyValue === 'object' - ) { + if ('code' in error && error.code === 11000) { + let path: null | string = null + + if ('keyValue' in error && error.keyValue && typeof error.keyValue === 'object') { + path = Object.keys(error.keyValue)[0] ?? '' + } else if ('message' in error && typeof error.message === 'string') { + path = extractFieldFromMessage(error.message) + } + throw new ValidationError( { collection, errors: [ { message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: Object.keys(error.keyValue)[0] ?? '', + path: path ?? '', }, ], global, From 77f279e7680d860adb845f3fc3ac90ad48d20897 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 13:12:20 -0400 Subject: [PATCH 059/143] docs: remove payload cloud (#13240) Remove Payload Cloud from docs --- docs/cloud/configuration.mdx | 62 -------------- docs/cloud/creating-a-project.mdx | 53 ------------ docs/cloud/projects.mdx | 137 ------------------------------ docs/cloud/teams.mdx | 35 -------- docs/production/deployment.mdx | 10 --- 5 files changed, 297 deletions(-) delete mode 100644 docs/cloud/configuration.mdx delete mode 100644 docs/cloud/creating-a-project.mdx delete mode 100644 docs/cloud/projects.mdx delete mode 100644 docs/cloud/teams.mdx diff --git a/docs/cloud/configuration.mdx b/docs/cloud/configuration.mdx deleted file mode 100644 index 6bb352ef96..0000000000 --- a/docs/cloud/configuration.mdx +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Project Configuration -label: Configuration -order: 20 -desc: Quickly configure and deploy your Payload Cloud project in a few simple steps. -keywords: configuration, config, settings, project, cloud, payload cloud, deploy, deployment ---- - -## Select your plan - -Once you have created a project, you will need to select your plan. This will determine the resources that are allocated to your project and the features that are available to you. - - - Note: All Payload Cloud teams that deploy a project require a card on file. - This helps us prevent fraud and abuse on our platform. If you select a plan - with a free trial, you will not be charged until your trial period is over. - We’ll remind you 7 days before your trial ends and you can cancel anytime. 
- - -## Project Details - -| Option | Description | -| ---------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Region** | Select the region closest to your audience. This will ensure the fastest communication between your data and your client. | -| **Project Name** | A name for your project. You can change this at any time. | -| **Project Slug** | Choose a unique slug to identify your project. This needs to be unique for your team and you can change it any time. | -| **Team** | Select the team you want to create the project under. If this is your first project, a personal team will be created for you automatically. You can modify your team settings and invite new members at any time from the Team Settings page. | - -## Build Settings - -If you are deploying a new project from a template, the following settings will be automatically configured for you. If you are using your own repository, you need to make sure your build settings are accurate for your project to deploy correctly. - -| Option | Description | -| -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Root Directory** | The folder where your `package.json` file lives. | -| **Install Command** | The command used to install your modules, for example: `yarn install` or `npm install` | -| **Build Command** | The command used to build your application, for example: `yarn build` or `npm run build` | -| **Serve Command** | The command used to serve your application, for example: `yarn serve` or `npm run serve` | -| **Branch to Deploy** | Select the branch of your repository that you want to deploy from. This is the branch that will be used to build your project when you commit new changes. | -| **Default Domain** | Set a default domain for your project. This must be unique and you will not able to change it. You can always add a custom domain later in your project settings. | - -## Environment Variables - -Any of the features in Payload Cloud that require environment variables will automatically be provided to your application. If your app requires any custom environment variables, you can set them here. - - - Note: For security reasons, any variables you wish to provide to the [Admin - Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`.  Learn more - [here](../configuration/environment-vars). - - -## Payment - -Payment methods can be set per project and can be updated any time. You can use team’s default payment method, or add a new one. Modify your payment methods in your Project settings / Team settings. - - - **Note:** All Payload Cloud teams that deploy a project require a card on - file. This helps us prevent fraud and abuse on our platform. If you select a - plan with a free trial, you will not be charged until your trial period is - over. We’ll remind you 7 days before your trial ends and you can cancel - anytime. 
- diff --git a/docs/cloud/creating-a-project.mdx b/docs/cloud/creating-a-project.mdx deleted file mode 100644 index cabda09025..0000000000 --- a/docs/cloud/creating-a-project.mdx +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: Getting Started -label: Getting Started -order: 10 -desc: Get started with Payload Cloud, a deployment solution specifically designed for Node + MongoDB applications. -keywords: cloud, hosted, database, storage, email, deployment, serverless, node, mongodb, s3, aws, cloudflare, atlas, resend, payload, cms ---- - -A deployment solution specifically designed for Node.js + MongoDB applications, offering seamless deployment of your entire stack in one place. You can get started in minutes with a one-click template or bring your own codebase with you. - -Payload Cloud offers various plans tailored to meet your specific needs, including a MongoDB Atlas database, S3 file storage, and email delivery powered by [Resend](https://resend.com). To see a full breakdown of features and plans, see our [Cloud Pricing page](https://payloadcms.com/cloud-pricing). - -To get started, you first need to create an account. Head over to [the login screen](https://payloadcms.com/login) and **Register for Free**. - - - To create your first project, you can either select [a - template](#starting-from-a-template) or [import an existing - project](#importing-from-an-existing-codebase) from GitHub. - - -## Starting from a Template - -Templates come preconfigured and provide a one-click solution to quickly deploy a new application. - -![Screen for creating a new project from a template](https://payloadcms.com/images/docs/cloud/create-from-template.jpg) -_Creating a new project from a template._ - -After creating an account, select your desired template from the Projects page. At this point, you need to connect to authorize the Payload Cloud application with your GitHub account. Click Continue with GitHub and follow the prompts to authorize the app. - -Next, select your `GitHub Scope`. If you belong to multiple organizations, they will show up here. If you do not see the organization you are looking for, you may need to adjust your GitHub app permissions. - -After selecting your scope, create a unique `repository name` and select whether you want your repository to be public or private on GitHub. - - - **Note:** Public repositories can be accessed by anyone online, while private - repositories grant access only to you and anyone you explicitly authorize. - - -Once you are ready, click **Create Project**. This will clone the selected template to a new repository in your GitHub account, and take you to the configuration page to set up your project for deployment. - -## Importing from an Existing Codebase - -Payload Cloud works for any Node.js + MongoDB app. From the New Project page, select **import an existing Git codebase**. Choose the organization and select the repository you want to import. From here, you will be taken to the configuration page to set up your project for deployment. - -![Screen for creating a new project from an existing repository](https://payloadcms.com/images/docs/cloud/create-from-existing.jpg) -_Creating a new project from an existing repository._ - - - **Note:** In order to make use of the features of Payload Cloud in your own - codebase, you will need to add the [Cloud - Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) - to your Payload app. 
- diff --git a/docs/cloud/projects.mdx b/docs/cloud/projects.mdx deleted file mode 100644 index 79df6a69bb..0000000000 --- a/docs/cloud/projects.mdx +++ /dev/null @@ -1,137 +0,0 @@ ---- -title: Cloud Projects -label: Projects -order: 40 -desc: Manage your Payload Cloud projects. -keywords: cloud, payload cloud, projects, project, overview, database, file storage, build settings, environment variables, custom domains, email, developing locally ---- - -## Overview - - - The overview tab shows your most recent deployment, along with build and - deployment logs. From here, you can see your live URL, deployment details like - timestamps and commit hash, as well as the status of your deployment. You can - also trigger a redeployment manually, which will rebuild your project using - the current configuration. - - -![Payload Cloud Overview Page](https://payloadcms.com/images/docs/cloud/overview-page.jpg) -_A screenshot of the Overview page for a Cloud project._ - -## Database - -Your Payload Cloud project comes with a MongoDB serverless Atlas DB instance or a Dedicated Atlas cluster, depending on your plan. To interact with your cloud database, you will be provided with a MongoDB connection string. This can be found under the **Database** tab of your project. - -`mongodb+srv://your_connection_string` - -## File Storage - -Payload Cloud gives you S3 file storage backed by Cloudflare as a CDN, and this plugin extends Payload so that all of your media will be stored in S3 rather than locally. - -AWS Cognito is used for authentication to your S3 bucket. The [Payload Cloud Plugin](https://github.com/payloadcms/payload/tree/main/packages/payload-cloud) will automatically pick up these values. These values are only if you'd like to access your files directly, outside of Payload Cloud. - -### Accessing Files Outside of Payload Cloud - -If you'd like to access your files outside of Payload Cloud, you'll need to retrieve some values from your project's settings and put them into your environment variables. In Payload Cloud, navigate to the File Storage tab and copy the values using the copy button. Put these values in your .env file. Also copy the Cognito Password value separately and put into your .env file as well. - -When you are done, you should have the following values in your .env file: - -```env -PAYLOAD_CLOUD=true -PAYLOAD_CLOUD_ENVIRONMENT=prod -PAYLOAD_CLOUD_COGNITO_USER_POOL_CLIENT_ID= -PAYLOAD_CLOUD_COGNITO_USER_POOL_ID= -PAYLOAD_CLOUD_COGNITO_IDENTITY_POOL_ID= -PAYLOAD_CLOUD_PROJECT_ID= -PAYLOAD_CLOUD_BUCKET= -PAYLOAD_CLOUD_BUCKET_REGION= -PAYLOAD_CLOUD_COGNITO_PASSWORD= -``` - -The plugin will pick up these values and use them to access your files. - -## Build Settings - -You can update settings from your Project’s Settings tab. Changes to your build settings will trigger a redeployment of your project. - -## Environment Variables - -From the Environment Variables page of the Settings tab, you can add, update and delete variables for use in your project. Like build settings, these changes will trigger a redeployment of your project. - - - Note: For security reasons, any variables you wish to provide to the [Admin - Panel](../admin/overview) must be prefixed with `NEXT_PUBLIC_`. [More - details](../configuration/environment-vars). - - -## Custom Domains - -With Payload Cloud, you can add custom domain names to your project. To do so, first go to the Domains page of the Settings tab of your project. Here you can see your default domain. 
To add a new domain, type in the domain name you wish to use. - - - Note: do not include the protocol (http:// or https://) or any paths (/page). - Only include the domain name and extension, and optionally a subdomain. - - your-domain.com - backend.your-domain.com - - -Once you click save, a DNS record will be generated for your domain name to point to your live project. Add this record into your DNS provider’s records, and once the records are resolving properly (this can take 1hr to 48hrs in some cases), your domain will now to point to your live project. - -You will also need to configure your Payload project to use your specified domain. In your `payload.config.ts` file, specify your `serverURL` with your domain: - -```ts -export default buildConfig({ - serverURL: 'https://example.com', - // the rest of your config, -}) -``` - -## Email - -Powered by [Resend](https://resend.com), Payload Cloud comes with integrated email support out of the box. No configuration is needed, and you can use `payload.sendEmail()` to send email right from your Payload app. To learn more about sending email with Payload, checkout the [Email Configuration](../email/overview) overview. - -If you are on the Pro or Enterprise plan, you can add your own custom Email domain name. From the Email page of your project’s Settings, add the domain you wish to use for email delivery. This will generate a set of DNS records. Add these records to your DNS provider and click verify to check that your records are resolving properly. Once verified, your emails will now be sent from your custom domain name. - -## Developing Locally - -To make changes to your project, you will need to clone the repository defined in your project settings to your local machine. In order to run your project locally, you will need configure your local environment first. Refer to your repository’s `README.md` file to see the steps needed for your specific template. - -From there, you are ready to make updates to your project. When you are ready to make your changes live, commit your changes to the branch you specified in your Project settings, and your application will automatically trigger a redeploy and build from your latest commit. - -## Cloud Plugin - -Projects generated from a template will come pre-configured with the official Cloud Plugin, but if you are using your own repository you will need to add this into your project. To do so, add the plugin to your Payload Config: - -`pnpm add @payloadcms/payload-cloud` - -```js -import { payloadCloudPlugin } from '@payloadcms/payload-cloud' -import { buildConfig } from 'payload' - -export default buildConfig({ - plugins: [payloadCloudPlugin()], - // rest of config -}) -``` - - - **Note:** If your Payload Config already has an email with transport, this - will take precedence over Payload Cloud's email service. - - - - Good to know: the Payload Cloud Plugin was previously named - `@payloadcms/plugin-cloud`. If you are using this plugin, you should update to - the new package name. - - -#### **Optional configuration** - -If you wish to opt-out of any Payload cloud features, the plugin also accepts options to do so. 
- -```js -payloadCloud({ - storage: false, // Disable file storage - email: false, // Disable email delivery -}) -``` diff --git a/docs/cloud/teams.mdx b/docs/cloud/teams.mdx deleted file mode 100644 index a7f5bd97db..0000000000 --- a/docs/cloud/teams.mdx +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Cloud Teams -label: Teams -order: 30 -desc: Manage your Payload Cloud team and billing settings. -keywords: team, teams, billing, subscription, payment, plan, plans, cloud, payload cloud ---- - - - Within Payload Cloud, the team management feature offers you the ability to - manage your organization, team members, billing, and subscription settings. - - -![Payload Cloud Team Settings](https://payloadcms.com/images/docs/cloud/team-settings.jpg) -_A screenshot of the Team Settings page._ - -## Members - -Each team has members that can interact with your projects. You can invite multiple people to your team and each individual can belong to more than one team. You can assign them either `owner` or `user` permissions. Owners are able to make admin-only changes, such as deleting projects, and editing billing information. - -## Adding Members - -To add a new member to your team, visit your Team’s Settings page, and click “Invite Teammate”. You can then add their email address, and assign their role. Press “Save” to send the invitations, which will send an email to the invited team member where they can create a new account. - -## Billing - -Users can update billing settings and subscriptions for any teams where they are designated as an `owner`. To make updates to the team’s payment methods, visit the Billing page under the Team Settings tab. You can add new cards, delete cards, and set a payment method as a default. The default payment method will be used in the event that another payment method fails. - -## Subscriptions - -From the Subscriptions page, a team owner can see all current plans for their team. From here, you can see the price of each plan, if there is an active trial, and when you will be billed next. - -## Invoices - -The Invoices page will you show you the invoices for your account, as well as the status on their payment. diff --git a/docs/production/deployment.mdx b/docs/production/deployment.mdx index e86898f004..1865133ece 100644 --- a/docs/production/deployment.mdx +++ b/docs/production/deployment.mdx @@ -24,16 +24,6 @@ Payload can be deployed _anywhere that Next.js can run_ - including Vercel, Netl But it's important to remember that most Payload projects will also need a database, file storage, an email provider, and a CDN. Make sure you have all of the requirements that your project needs, no matter what deployment platform you choose. -Often, the easiest and fastest way to deploy Payload is to use [Payload Cloud](https://payloadcms.com/new) — where you get everything you need out of the box, including: - -1. A MongoDB Atlas database -1. S3 file storage -1. Resend email service -1. Cloudflare CDN -1. Blue / green deployments -1. Logs -1. And more - ## Basics Payload runs fully in Next.js, so the [Next.js build process](https://nextjs.org/docs/app/building-your-application/deploying) is used for building Payload. 
If you've used `create-payload-app` to create your project, executing the `build` From e7a652f0a8aedf5a3aa148c599709de4625dfbdc Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 13:27:44 -0400 Subject: [PATCH 060/143] build: suppress pnpm update notification (#13241) Suppress pnpm update notification --- pnpm-workspace.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 054555c688..3c8d382cdc 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,9 +1,8 @@ packages: - # all packages in direct subdirs of packages/ - 'packages/*' - 'tools/*' - 'test' - 'templates/blank' - 'templates/website' - # exclude packages that are inside test directories - # - '!**/test/**' + +updateNotifier: false From 246a42b72781231472cbc4acf6f06fa7035c00d7 Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Tue, 22 Jul 2025 14:09:04 -0400 Subject: [PATCH 061/143] chore(plugin-import-export): use debug-level logging for createExport process (#13242) ### What? Replaces all `payload.logger.info` calls with `payload.logger.debug` in the `createExport` function. ### Why? info logs are too verbose. Using debug ensures detailed logs. ### How? - Updated all logger calls in `createExport` to use `debug` instead of `info`. --- .../src/export/createExport.ts | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 2b4b05bff2..40e1b954ff 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -64,7 +64,7 @@ export const createExport = async (args: CreateExportArgs) => { } = args if (debug) { - req.payload.logger.info({ + req.payload.logger.debug({ message: 'Starting export process with args:', collectionSlug, drafts, @@ -84,7 +84,7 @@ export const createExport = async (args: CreateExportArgs) => { const select = Array.isArray(fields) && fields.length > 0 ? 
getSelect(fields) : undefined if (debug) { - req.payload.logger.info({ message: 'Export configuration:', name, isCSV, locale }) + req.payload.logger.debug({ message: 'Export configuration:', name, isCSV, locale }) } const findArgs = { @@ -102,7 +102,7 @@ export const createExport = async (args: CreateExportArgs) => { } if (debug) { - req.payload.logger.info({ message: 'Find arguments:', findArgs }) + req.payload.logger.debug({ message: 'Find arguments:', findArgs }) } const toCSVFunctions = getCustomFieldFunctions({ @@ -129,7 +129,7 @@ export const createExport = async (args: CreateExportArgs) => { if (download) { if (debug) { - req.payload.logger.info('Pre-scanning all columns before streaming') + req.payload.logger.debug('Pre-scanning all columns before streaming') } const allColumnsSet = new Set() @@ -155,7 +155,7 @@ export const createExport = async (args: CreateExportArgs) => { } if (debug) { - req.payload.logger.info(`Discovered ${allColumns.length} columns`) + req.payload.logger.debug(`Discovered ${allColumns.length} columns`) } const encoder = new TextEncoder() @@ -167,7 +167,7 @@ export const createExport = async (args: CreateExportArgs) => { const result = await payload.find({ ...findArgs, page: streamPage }) if (debug) { - req.payload.logger.info(`Streaming batch ${streamPage} with ${result.docs.length} docs`) + req.payload.logger.debug(`Streaming batch ${streamPage} with ${result.docs.length} docs`) } if (result.docs.length === 0) { @@ -198,7 +198,7 @@ export const createExport = async (args: CreateExportArgs) => { if (!result.hasNextPage) { if (debug) { - req.payload.logger.info('Stream complete - no more pages') + req.payload.logger.debug('Stream complete - no more pages') } this.push(null) // End the stream } @@ -215,7 +215,7 @@ export const createExport = async (args: CreateExportArgs) => { // Non-download path (buffered export) if (debug) { - req.payload.logger.info('Starting file generation') + req.payload.logger.debug('Starting file generation') } const outputData: string[] = [] @@ -232,7 +232,7 @@ export const createExport = async (args: CreateExportArgs) => { }) if (debug) { - req.payload.logger.info( + req.payload.logger.debug( `Processing batch ${findArgs.page} with ${result.docs.length} documents`, ) } @@ -281,12 +281,12 @@ export const createExport = async (args: CreateExportArgs) => { const buffer = Buffer.from(format === 'json' ? 
`[${outputData.join(',')}]` : outputData.join('')) if (debug) { - req.payload.logger.info(`${format} file generation complete`) + req.payload.logger.debug(`${format} file generation complete`) } if (!id) { if (debug) { - req.payload.logger.info('Creating new export file') + req.payload.logger.debug('Creating new export file') } req.file = { name, @@ -296,7 +296,7 @@ export const createExport = async (args: CreateExportArgs) => { } } else { if (debug) { - req.payload.logger.info(`Updating existing export with id: ${id}`) + req.payload.logger.debug(`Updating existing export with id: ${id}`) } await req.payload.update({ id, @@ -312,6 +312,6 @@ export const createExport = async (args: CreateExportArgs) => { }) } if (debug) { - req.payload.logger.info('Export process completed successfully') + req.payload.logger.debug('Export process completed successfully') } } From 412bf4ff735c5dc317ecaa20913d4270bcd92b29 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Tue, 22 Jul 2025 15:23:02 -0400 Subject: [PATCH 062/143] fix(ui): select all should reset when params change, page, filter, etc (#12612) Fixes #11938 Fixes https://github.com/payloadcms/payload/issues/13154 When select-all is checked and you filter or change the page, the selected documents should reset. --- packages/ui/src/providers/Selection/index.tsx | 9 +++++-- test/admin/e2e/list-view/e2e.spec.ts | 27 +++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/packages/ui/src/providers/Selection/index.tsx b/packages/ui/src/providers/Selection/index.tsx index 921ae8afa2..079866061b 100644 --- a/packages/ui/src/providers/Selection/index.tsx +++ b/packages/ui/src/providers/Selection/index.tsx @@ -6,6 +6,7 @@ import * as qs from 'qs-esm' import React, { createContext, use, useCallback, useEffect, useRef, useState } from 'react' import { parseSearchParams } from '../../utilities/parseSearchParams.js' +import { useListQuery } from '../ListQuery/index.js' import { useLocale } from '../Locale/index.js' export enum SelectAllStatus { @@ -54,6 +55,7 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD const [selectAll, setSelectAll] = useState(SelectAllStatus.None) const [count, setCount] = useState(0) const searchParams = useSearchParams() + const { query } = useListQuery() const toggleAll = useCallback( (allAvailable = false) => { @@ -201,7 +203,11 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD setCount(newCount) }, [selectAll, selected, totalDocs]) - // eslint-disable-next-line react-compiler/react-compiler -- TODO: fix + useEffect(() => { + setSelectAll(SelectAllStatus.None) + setSelected(new Map()) + }, [query]) + contextRef.current = { count, getQueryParams, @@ -213,7 +219,6 @@ export const SelectionProvider: React.FC = ({ children, docs = [], totalD totalDocs, } - // eslint-disable-next-line react-compiler/react-compiler -- TODO: fix return {children} } diff --git a/test/admin/e2e/list-view/e2e.spec.ts b/test/admin/e2e/list-view/e2e.spec.ts index bff63dd6fd..2e8ec07ccf 100644 --- a/test/admin/e2e/list-view/e2e.spec.ts +++ b/test/admin/e2e/list-view/e2e.spec.ts @@ -1649,6 +1649,33 @@ describe('List View', () => { 'Custom placeholder', ) }) + + test('should reset list selection when query params change', async () => { + await deleteAllPosts() + await Promise.all(Array.from({ length: 12 }, (_, i) => createPost({ title: `post${i + 1}` }))) + await page.goto(postsUrl.list) + + const pageOneButton = 
page.locator('.paginator__page', { hasText: '1' }) + await expect(pageOneButton).toBeVisible() + await pageOneButton.click() + + await page.locator('.checkbox-input:has(#select-all)').locator('input').click() + await expect(page.locator('.checkbox-input:has(#select-all)').locator('input')).toBeChecked() + await expect(page.locator('.list-selection')).toContainText('5 selected') + + const pageTwoButton = page.locator('.paginator__page', { hasText: '2' }) + await expect(pageTwoButton).toBeVisible() + await pageTwoButton.click() + + await expect( + page.locator('.checkbox-input:has(#select-all) input:not([checked])'), + ).toBeVisible() + + await page.locator('.row-1 .cell-_select input').check() + await page.locator('.row-2 .cell-_select input').check() + + await expect(page.locator('.list-selection')).toContainText('2 selected') + }) }) async function createPost(overrides?: Partial): Promise { From 3f8fb6734cc5a8cff9dd529b5f4ba4c9252b8157 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 22 Jul 2025 16:44:56 -0400 Subject: [PATCH 063/143] ci: default audit-dependencies script to high severity (#13244) Default the audit-dependencies workflow to use high severity by default. --- .github/workflows/audit-dependencies.sh | 7 ++++--- .github/workflows/audit-dependencies.yml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/audit-dependencies.sh b/.github/workflows/audit-dependencies.sh index 107c2a34a4..5e16310078 100755 --- a/.github/workflows/audit-dependencies.sh +++ b/.github/workflows/audit-dependencies.sh @@ -1,14 +1,15 @@ #!/bin/bash -severity=${1:-"critical"} -audit_json=$(pnpm audit --prod --json) +severity=${1:-"high"} output_file="audit_output.json" echo "Auditing for ${severity} vulnerabilities..." +audit_json=$(pnpm audit --prod --json) + echo "${audit_json}" | jq --arg severity "${severity}" ' .advisories | to_entries | - map(select(.value.patched_versions != "<0.0.0" and .value.severity == $severity) | + map(select(.value.patched_versions != "<0.0.0" and (.value.severity == $severity or ($severity == "high" and .value.severity == "critical"))) | { package: .value.module_name, vulnerable: .value.vulnerable_versions, diff --git a/.github/workflows/audit-dependencies.yml b/.github/workflows/audit-dependencies.yml index 043ef633e9..df4056691b 100644 --- a/.github/workflows/audit-dependencies.yml +++ b/.github/workflows/audit-dependencies.yml @@ -9,7 +9,7 @@ on: audit-level: description: The level of audit to run (low, moderate, high, critical) required: false - default: critical + default: high debug: description: Enable debug logging required: false From 94f5e790f6ed0fa3727cb830d827e49fae71f399 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 23 Jul 2025 01:45:55 -0700 Subject: [PATCH 064/143] perf(drizzle): single-roundtrip db updates for simple collections (#13186) Currently, an optimized DB update (simple data => no delete-and-create-row) does the following: 1. sql UPDATE 2. sql SELECT This PR reduces this further to one single DB call for simple collections: 1. sql UPDATE with RETURNING() This only works for simple collections that do not have any fields that need to be fetched from other tables. If a collection has fields like relationship or blocks, we'll need that separate SELECT call to join in the other tables. 
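To make the before/after concrete, here is a minimal sketch using Drizzle's Postgres query builder. The `posts` table, pool setup, and `updateTitle` helper are illustrative stand-ins, not code from this patch; the real `upsertRow` change below additionally derives the selected columns from `buildFindManyArgs` and only takes the single-roundtrip path when no joined tables are involved.

```ts
import pg from 'pg'
import { eq } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'

// Hypothetical "simple" collection: every field lives on the base table,
// so no joins are needed to read the document back after an update.
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

const pool = new pg.Pool({ connectionString: process.env.POSTGRES_URL })
const db = drizzle(pool)

export async function updateTitle(id: number, title: string) {
  // Before: two round trips — an UPDATE, then a separate SELECT to return the doc.
  // await db.update(posts).set({ title }).where(eq(posts.id, id))
  // const [doc] = await db.select().from(posts).where(eq(posts.id, id))

  // After: a single round trip — the UPDATE itself returns the updated row.
  const [doc] = await db.update(posts).set({ title }).where(eq(posts.id, id)).returning()
  return doc
}
```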
In 4.0, we can remove all "complex" fields from the jobs collection and replace them with a JSON field to make use of this optimization --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210803039809814 --- .../drizzle/src/find/buildFindManyArgs.ts | 9 +- packages/drizzle/src/upsertRow/index.ts | 761 +++++++------- test/database/config.postgreslogs.ts | 19 + test/database/config.ts | 933 +---------------- test/database/getConfig.ts | 942 ++++++++++++++++++ test/database/payload-types.ts | 27 + test/database/postgres-logs.int.spec.ts | 91 ++ test/database/postgres-vector.int.spec.ts | 4 +- test/database/seed.ts | 9 - test/database/shared.ts | 15 - test/select/config.postgreslogs.ts | 19 + test/select/config.ts | 122 +-- test/select/getConfig.ts | 119 +++ test/select/postgreslogs.int.spec.ts | 179 ++++ 14 files changed, 1822 insertions(+), 1427 deletions(-) create mode 100644 test/database/config.postgreslogs.ts create mode 100644 test/database/getConfig.ts create mode 100644 test/database/postgres-logs.int.spec.ts create mode 100644 test/select/config.postgreslogs.ts create mode 100644 test/select/getConfig.ts create mode 100644 test/select/postgreslogs.int.spec.ts diff --git a/packages/drizzle/src/find/buildFindManyArgs.ts b/packages/drizzle/src/find/buildFindManyArgs.ts index 4febf335d1..c45bff699f 100644 --- a/packages/drizzle/src/find/buildFindManyArgs.ts +++ b/packages/drizzle/src/find/buildFindManyArgs.ts @@ -44,7 +44,7 @@ export const buildFindManyArgs = ({ select, tableName, versions, -}: BuildFindQueryArgs): Record => { +}: BuildFindQueryArgs): Result => { const result: Result = { extras: {}, with: {}, @@ -134,5 +134,12 @@ export const buildFindManyArgs = ({ result.with._locales = _locales } + // Delete properties that are empty + for (const key of Object.keys(result)) { + if (!Object.keys(result[key]).length) { + delete result[key] + } + } + return result } diff --git a/packages/drizzle/src/upsertRow/index.ts b/packages/drizzle/src/upsertRow/index.ts index 72f89435ec..52d686a55e 100644 --- a/packages/drizzle/src/upsertRow/index.ts +++ b/packages/drizzle/src/upsertRow/index.ts @@ -1,4 +1,5 @@ import type { LibSQLDatabase } from 'drizzle-orm/libsql' +import type { SelectedFields } from 'drizzle-orm/sqlite-core' import type { TypeWithID } from 'payload' import { eq } from 'drizzle-orm' @@ -53,434 +54,496 @@ export const upsertRow = async | TypeWithID>( const drizzle = db as LibSQLDatabase - await drizzle - .update(adapter.tables[tableName]) - .set(row) - // TODO: we can skip fetching idToUpdate here with using the incoming where - .where(eq(adapter.tables[tableName].id, id)) - } else { - // Split out the incoming data into the corresponding: - // base row, locales, relationships, blocks, and arrays - const rowToInsert = transformForWrite({ + if (ignoreResult) { + await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + return ignoreResult === 'idOnly' ? 
({ id } as T) : null + } + + const findManyArgs = buildFindManyArgs({ adapter, - data, - enableAtomicWrites: false, + depth: 0, fields, - path, + joinQuery: false, + select, tableName, }) - // First, we insert the main row - try { - if (operation === 'update') { - const target = upsertTarget || adapter.tables[tableName].id + const findManyKeysLength = Object.keys(findManyArgs).length + const hasOnlyColumns = Object.keys(findManyArgs.columns || {}).length > 0 - if (id) { - rowToInsert.row.id = id - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target }, - tableName, - values: rowToInsert.row, - }) - } else { - ;[insertedRow] = await adapter.insert({ - db, - onConflictDoUpdate: { set: rowToInsert.row, target, where }, - tableName, - values: rowToInsert.row, - }) - } - } else { - if (adapter.allowIDOnCreate && data.id) { - rowToInsert.row.id = data.id + if (findManyKeysLength === 0 || hasOnlyColumns) { + // Optimization - No need for joins => can simply use returning(). This is optimal for very simple collections + // without complex fields that live in separate tables like blocks, arrays, relationships, etc. + + const selectedFields: SelectedFields = {} + if (hasOnlyColumns) { + for (const [column, enabled] of Object.entries(findManyArgs.columns)) { + if (enabled) { + selectedFields[column] = adapter.tables[tableName][column] + } } + } + + const docs = await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + .returning(Object.keys(selectedFields).length ? selectedFields : undefined) + + return transform({ + adapter, + config: adapter.payload.config, + data: docs[0], + fields, + joinQuery: false, + tableName, + }) + } + + // DB Update that needs the result, potentially with joins => need to update first, then find. returning() does not work with joins. 
+ + await drizzle + .update(adapter.tables[tableName]) + .set(row) + .where(eq(adapter.tables[tableName].id, id)) + + findManyArgs.where = eq(adapter.tables[tableName].id, insertedRow.id) + + const doc = await db.query[tableName].findFirst(findManyArgs) + + return transform({ + adapter, + config: adapter.payload.config, + data: doc, + fields, + joinQuery: false, + tableName, + }) + } + // Split out the incoming data into the corresponding: + // base row, locales, relationships, blocks, and arrays + const rowToInsert = transformForWrite({ + adapter, + data, + enableAtomicWrites: false, + fields, + path, + tableName, + }) + + // First, we insert the main row + try { + if (operation === 'update') { + const target = upsertTarget || adapter.tables[tableName].id + + if (id) { + rowToInsert.row.id = id ;[insertedRow] = await adapter.insert({ db, + onConflictDoUpdate: { set: rowToInsert.row, target }, + tableName, + values: rowToInsert.row, + }) + } else { + ;[insertedRow] = await adapter.insert({ + db, + onConflictDoUpdate: { set: rowToInsert.row, target, where }, tableName, values: rowToInsert.row, }) } - - const localesToInsert: Record[] = [] - const relationsToInsert: Record[] = [] - const textsToInsert: Record[] = [] - const numbersToInsert: Record[] = [] - const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} - const selectsToInsert: { [selectTableName: string]: Record[] } = {} - - // If there are locale rows with data, add the parent and locale to each - if (Object.keys(rowToInsert.locales).length > 0) { - Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { - localeRow._parentID = insertedRow.id - localeRow._locale = locale - localesToInsert.push(localeRow) - }) + } else { + if (adapter.allowIDOnCreate && data.id) { + rowToInsert.row.id = data.id } + ;[insertedRow] = await adapter.insert({ + db, + tableName, + values: rowToInsert.row, + }) + } - // If there are relationships, add parent to each - if (rowToInsert.relationships.length > 0) { - rowToInsert.relationships.forEach((relation) => { - relation.parent = insertedRow.id - relationsToInsert.push(relation) - }) - } + const localesToInsert: Record[] = [] + const relationsToInsert: Record[] = [] + const textsToInsert: Record[] = [] + const numbersToInsert: Record[] = [] + const blocksToInsert: { [blockType: string]: BlockRowToInsert[] } = {} + const selectsToInsert: { [selectTableName: string]: Record[] } = {} - // If there are texts, add parent to each - if (rowToInsert.texts.length > 0) { - rowToInsert.texts.forEach((textRow) => { - textRow.parent = insertedRow.id - textsToInsert.push(textRow) - }) - } + // If there are locale rows with data, add the parent and locale to each + if (Object.keys(rowToInsert.locales).length > 0) { + Object.entries(rowToInsert.locales).forEach(([locale, localeRow]) => { + localeRow._parentID = insertedRow.id + localeRow._locale = locale + localesToInsert.push(localeRow) + }) + } - // If there are numbers, add parent to each - if (rowToInsert.numbers.length > 0) { - rowToInsert.numbers.forEach((numberRow) => { - numberRow.parent = insertedRow.id - numbersToInsert.push(numberRow) - }) - } + // If there are relationships, add parent to each + if (rowToInsert.relationships.length > 0) { + rowToInsert.relationships.forEach((relation) => { + relation.parent = insertedRow.id + relationsToInsert.push(relation) + }) + } - // If there are selects, add parent to each, and then - // store by table name and rows - if (Object.keys(rowToInsert.selects).length > 0) { - 
Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { - selectsToInsert[selectTableName] = [] + // If there are texts, add parent to each + if (rowToInsert.texts.length > 0) { + rowToInsert.texts.forEach((textRow) => { + textRow.parent = insertedRow.id + textsToInsert.push(textRow) + }) + } - selectRows.forEach((row) => { - if (typeof row.parent === 'undefined') { - row.parent = insertedRow.id - } + // If there are numbers, add parent to each + if (rowToInsert.numbers.length > 0) { + rowToInsert.numbers.forEach((numberRow) => { + numberRow.parent = insertedRow.id + numbersToInsert.push(numberRow) + }) + } - selectsToInsert[selectTableName].push(row) - }) - }) - } + // If there are selects, add parent to each, and then + // store by table name and rows + if (Object.keys(rowToInsert.selects).length > 0) { + Object.entries(rowToInsert.selects).forEach(([selectTableName, selectRows]) => { + selectsToInsert[selectTableName] = [] - // If there are blocks, add parent to each, and then - // store by table name and rows - Object.keys(rowToInsert.blocks).forEach((tableName) => { - rowToInsert.blocks[tableName].forEach((blockRow) => { - blockRow.row._parentID = insertedRow.id - if (!blocksToInsert[tableName]) { - blocksToInsert[tableName] = [] + selectRows.forEach((row) => { + if (typeof row.parent === 'undefined') { + row.parent = insertedRow.id } - if (blockRow.row.uuid) { - delete blockRow.row.uuid - } - blocksToInsert[tableName].push(blockRow) + + selectsToInsert[selectTableName].push(row) }) }) + } - // ////////////////////////////////// - // INSERT LOCALES - // ////////////////////////////////// - - if (localesToInsert.length > 0) { - const localeTableName = `${tableName}${adapter.localesSuffix}` - const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] - - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: localeTableName, - where: eq(localeTable._parentID, insertedRow.id), - }) + // If there are blocks, add parent to each, and then + // store by table name and rows + Object.keys(rowToInsert.blocks).forEach((tableName) => { + rowToInsert.blocks[tableName].forEach((blockRow) => { + blockRow.row._parentID = insertedRow.id + if (!blocksToInsert[tableName]) { + blocksToInsert[tableName] = [] } + if (blockRow.row.uuid) { + delete blockRow.row.uuid + } + blocksToInsert[tableName].push(blockRow) + }) + }) - await adapter.insert({ + // ////////////////////////////////// + // INSERT LOCALES + // ////////////////////////////////// + + if (localesToInsert.length > 0) { + const localeTableName = `${tableName}${adapter.localesSuffix}` + const localeTable = adapter.tables[`${tableName}${adapter.localesSuffix}`] + + if (operation === 'update') { + await adapter.deleteWhere({ db, tableName: localeTableName, - values: localesToInsert, + where: eq(localeTable._parentID, insertedRow.id), }) } - // ////////////////////////////////// - // INSERT RELATIONSHIPS - // ////////////////////////////////// + await adapter.insert({ + db, + tableName: localeTableName, + values: localesToInsert, + }) + } - const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` + // ////////////////////////////////// + // INSERT RELATIONSHIPS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], - 
tableName: relationshipsTableName, - }) - } + const relationshipsTableName = `${tableName}${adapter.relationshipsSuffix}` - if (relationsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: relationshipsTableName, - values: relationsToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...relationsToInsert, ...rowToInsert.relationshipsToDelete], + tableName: relationshipsTableName, + }) + } - // ////////////////////////////////// - // INSERT hasMany TEXTS - // ////////////////////////////////// + if (relationsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: relationshipsTableName, + values: relationsToInsert, + }) + } - const textsTableName = `${tableName}_texts` + // ////////////////////////////////// + // INSERT hasMany TEXTS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...textsToInsert, ...rowToInsert.textsToDelete], - tableName: textsTableName, - }) - } + const textsTableName = `${tableName}_texts` - if (textsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: textsTableName, - values: textsToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...textsToInsert, ...rowToInsert.textsToDelete], + tableName: textsTableName, + }) + } - // ////////////////////////////////// - // INSERT hasMany NUMBERS - // ////////////////////////////////// + if (textsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: textsTableName, + values: textsToInsert, + }) + } - const numbersTableName = `${tableName}_numbers` + // ////////////////////////////////// + // INSERT hasMany NUMBERS + // ////////////////////////////////// - if (operation === 'update') { - await deleteExistingRowsByPath({ - adapter, - db, - localeColumnName: 'locale', - parentColumnName: 'parent', - parentID: insertedRow.id, - pathColumnName: 'path', - rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], - tableName: numbersTableName, - }) - } + const numbersTableName = `${tableName}_numbers` - if (numbersToInsert.length > 0) { - await adapter.insert({ - db, - tableName: numbersTableName, - values: numbersToInsert, - }) - } + if (operation === 'update') { + await deleteExistingRowsByPath({ + adapter, + db, + localeColumnName: 'locale', + parentColumnName: 'parent', + parentID: insertedRow.id, + pathColumnName: 'path', + rows: [...numbersToInsert, ...rowToInsert.numbersToDelete], + tableName: numbersTableName, + }) + } - // ////////////////////////////////// - // INSERT BLOCKS - // ////////////////////////////////// + if (numbersToInsert.length > 0) { + await adapter.insert({ + db, + tableName: numbersTableName, + values: numbersToInsert, + }) + } - const insertedBlockRows: Record[]> = {} + // ////////////////////////////////// + // INSERT BLOCKS + // ////////////////////////////////// - if (operation === 'update') { - for (const tableName of rowToInsert.blocksToDelete) { - const blockTable = adapter.tables[tableName] - await adapter.deleteWhere({ - db, - tableName, - where: eq(blockTable._parentID, insertedRow.id), - }) - } - } + const 
insertedBlockRows: Record[]> = {} - // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. - const arraysBlocksUUIDMap: Record = {} - - for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { - insertedBlockRows[tableName] = await adapter.insert({ + if (operation === 'update') { + for (const tableName of rowToInsert.blocksToDelete) { + const blockTable = adapter.tables[tableName] + await adapter.deleteWhere({ db, tableName, - values: blockRows.map(({ row }) => row), - }) - - insertedBlockRows[tableName].forEach((row, i) => { - blockRows[i].row = row - if ( - typeof row._uuid === 'string' && - (typeof row.id === 'string' || typeof row.id === 'number') - ) { - arraysBlocksUUIDMap[row._uuid] = row.id - } - }) - - const blockLocaleIndexMap: number[] = [] - - const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { - if (Object.entries(blockRow.locales).length > 0) { - Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { - if (Object.keys(blockLocaleData).length > 0) { - blockLocaleData._parentID = blockRow.row.id - blockLocaleData._locale = blockLocale - acc.push(blockLocaleData) - blockLocaleIndexMap.push(i) - } - }) - } - - return acc - }, []) - - if (blockLocaleRowsToInsert.length > 0) { - await adapter.insert({ - db, - tableName: `${tableName}${adapter.localesSuffix}`, - values: blockLocaleRowsToInsert, - }) - } - - await insertArrays({ - adapter, - arrays: blockRows.map(({ arrays }) => arrays), - db, - parentRows: insertedBlockRows[tableName], - uuidMap: arraysBlocksUUIDMap, + where: eq(blockTable._parentID, insertedRow.id), }) } + } - // ////////////////////////////////// - // INSERT ARRAYS RECURSIVELY - // ////////////////////////////////// + // When versions are enabled, adapter is used to track mapping between blocks/arrays ObjectID to their numeric generated representation, then we use it for nested to arrays/blocks select hasMany in versions. 
+ const arraysBlocksUUIDMap: Record = {} - if (operation === 'update') { - for (const arrayTableName of Object.keys(rowToInsert.arrays)) { - await deleteExistingArrayRows({ - adapter, - db, - parentID: insertedRow.id, - tableName: arrayTableName, + for (const [tableName, blockRows] of Object.entries(blocksToInsert)) { + insertedBlockRows[tableName] = await adapter.insert({ + db, + tableName, + values: blockRows.map(({ row }) => row), + }) + + insertedBlockRows[tableName].forEach((row, i) => { + blockRows[i].row = row + if ( + typeof row._uuid === 'string' && + (typeof row.id === 'string' || typeof row.id === 'number') + ) { + arraysBlocksUUIDMap[row._uuid] = row.id + } + }) + + const blockLocaleIndexMap: number[] = [] + + const blockLocaleRowsToInsert = blockRows.reduce((acc, blockRow, i) => { + if (Object.entries(blockRow.locales).length > 0) { + Object.entries(blockRow.locales).forEach(([blockLocale, blockLocaleData]) => { + if (Object.keys(blockLocaleData).length > 0) { + blockLocaleData._parentID = blockRow.row.id + blockLocaleData._locale = blockLocale + acc.push(blockLocaleData) + blockLocaleIndexMap.push(i) + } }) } + + return acc + }, []) + + if (blockLocaleRowsToInsert.length > 0) { + await adapter.insert({ + db, + tableName: `${tableName}${adapter.localesSuffix}`, + values: blockLocaleRowsToInsert, + }) } await insertArrays({ adapter, - arrays: [rowToInsert.arrays], + arrays: blockRows.map(({ arrays }) => arrays), db, - parentRows: [insertedRow], + parentRows: insertedBlockRows[tableName], uuidMap: arraysBlocksUUIDMap, }) + } - // ////////////////////////////////// - // INSERT hasMany SELECTS - // ////////////////////////////////// + // ////////////////////////////////// + // INSERT ARRAYS RECURSIVELY + // ////////////////////////////////// - for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { - const selectTable = adapter.tables[selectTableName] - if (operation === 'update') { - await adapter.deleteWhere({ - db, - tableName: selectTableName, - where: eq(selectTable.parent, insertedRow.id), - }) - } + if (operation === 'update') { + for (const arrayTableName of Object.keys(rowToInsert.arrays)) { + await deleteExistingArrayRows({ + adapter, + db, + parentID: insertedRow.id, + tableName: arrayTableName, + }) + } + } - if (Object.keys(arraysBlocksUUIDMap).length > 0) { - tableRows.forEach((row: any) => { - if (row.parent in arraysBlocksUUIDMap) { - row.parent = arraysBlocksUUIDMap[row.parent] - } - }) - } + await insertArrays({ + adapter, + arrays: [rowToInsert.arrays], + db, + parentRows: [insertedRow], + uuidMap: arraysBlocksUUIDMap, + }) - if (tableRows.length) { - await adapter.insert({ - db, - tableName: selectTableName, - values: tableRows, - }) - } + // ////////////////////////////////// + // INSERT hasMany SELECTS + // ////////////////////////////////// + + for (const [selectTableName, tableRows] of Object.entries(selectsToInsert)) { + const selectTable = adapter.tables[selectTableName] + if (operation === 'update') { + await adapter.deleteWhere({ + db, + tableName: selectTableName, + where: eq(selectTable.parent, insertedRow.id), + }) } - // ////////////////////////////////// - // Error Handling - // ////////////////////////////////// - } catch (caughtError) { - // Unique constraint violation error - // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite - - let error = caughtError - if (typeof caughtError === 'object' && 'cause' in caughtError) { - error = caughtError.cause + if (Object.keys(arraysBlocksUUIDMap).length 
> 0) { + tableRows.forEach((row: any) => { + if (row.parent in arraysBlocksUUIDMap) { + row.parent = arraysBlocksUUIDMap[row.parent] + } + }) } - if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - let fieldName: null | string = null - // We need to try and find the right constraint for the field but if we can't we fallback to a generic message - if (error.code === '23505') { - // For PostgreSQL, we can try to extract the field name from the error constraint - if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { - fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] - } else { - const replacement = `${tableName}_` + if (tableRows.length) { + await adapter.insert({ + db, + tableName: selectTableName, + values: tableRows, + }) + } + } - if (error.constraint.includes(replacement)) { - const replacedConstraint = error.constraint.replace(replacement, '') + // ////////////////////////////////// + // Error Handling + // ////////////////////////////////// + } catch (caughtError) { + // Unique constraint violation error + // '23505' is the code for PostgreSQL, and 'SQLITE_CONSTRAINT_UNIQUE' is for SQLite - if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { - fieldName = adapter.fieldConstraints[tableName][replacedConstraint] - } + let error = caughtError + if (typeof caughtError === 'object' && 'cause' in caughtError) { + error = caughtError.cause + } + + if (error.code === '23505' || error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + let fieldName: null | string = null + // We need to try and find the right constraint for the field but if we can't we fallback to a generic message + if (error.code === '23505') { + // For PostgreSQL, we can try to extract the field name from the error constraint + if (adapter.fieldConstraints?.[tableName]?.[error.constraint]) { + fieldName = adapter.fieldConstraints[tableName]?.[error.constraint] + } else { + const replacement = `${tableName}_` + + if (error.constraint.includes(replacement)) { + const replacedConstraint = error.constraint.replace(replacement, '') + + if (replacedConstraint && adapter.fieldConstraints[tableName]?.[replacedConstraint]) { + fieldName = adapter.fieldConstraints[tableName][replacedConstraint] } } + } + + if (!fieldName) { + // Last case scenario we extract the key and value from the detail on the error + const detail = error.detail + const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ + const match: string[] = detail.match(regex) + + if (match && match[1]) { + const key = match[1] + + fieldName = key + } + } + } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { + /** + * For SQLite, we can try to extract the field name from the error message + * The message typically looks like: + * "UNIQUE constraint failed: table_name.field_name" + */ + const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ + const match: string[] = error.message.match(regex) + + if (match && match[2]) { + if (adapter.fieldConstraints[tableName]) { + fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] + } if (!fieldName) { - // Last case scenario we extract the key and value from the detail on the error - const detail = error.detail - const regex = /Key \(([^)]+)\)=\(([^)]+)\)/ - const match: string[] = detail.match(regex) - - if (match && match[1]) { - const key = match[1] - - fieldName = key - } - } - } else if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') { - /** - * For SQLite, we can try to extract the field name from the error message - * The message typically looks 
like: - * "UNIQUE constraint failed: table_name.field_name" - */ - const regex = /UNIQUE constraint failed: ([^.]+)\.([^.]+)/ - const match: string[] = error.message.match(regex) - - if (match && match[2]) { - if (adapter.fieldConstraints[tableName]) { - fieldName = adapter.fieldConstraints[tableName][`${match[2]}_idx`] - } - - if (!fieldName) { - fieldName = match[2] - } + fieldName = match[2] } } - - throw new ValidationError( - { - id, - errors: [ - { - message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', - path: fieldName, - }, - ], - req, - }, - req?.t, - ) - } else { - throw error } + + throw new ValidationError( + { + id, + errors: [ + { + message: req?.t ? req.t('error:valueMustBeUnique') : 'Value must be unique', + path: fieldName, + }, + ], + req, + }, + req?.t, + ) + } else { + throw error } } diff --git a/test/database/config.postgreslogs.ts b/test/database/config.postgreslogs.ts new file mode 100644 index 0000000000..d47ee88d83 --- /dev/null +++ b/test/database/config.postgreslogs.ts @@ -0,0 +1,19 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' + +const config = getConfig() + +import { postgresAdapter } from '@payloadcms/db-postgres' + +export const databaseAdapter = postgresAdapter({ + pool: { + connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests', + }, + logger: true, +}) + +export default buildConfigWithDefaults({ + ...config, + db: databaseAdapter, +}) diff --git a/test/database/config.ts b/test/database/config.ts index 1027491eae..6c16a4bd2b 100644 --- a/test/database/config.ts +++ b/test/database/config.ts @@ -1,933 +1,4 @@ -import { fileURLToPath } from 'node:url' -import path from 'path' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) -import type { TextField } from 'payload' - -import { randomUUID } from 'crypto' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { seed } from './seed.js' -import { - customIDsSlug, - customSchemaSlug, - defaultValuesSlug, - errorOnUnnamedFieldsSlug, - fakeCustomIDsSlug, - fieldsPersistanceSlug, - pgMigrationSlug, - placesSlug, - postsSlug, - relationASlug, - relationBSlug, - relationshipsMigrationSlug, -} from './shared.js' +import { getConfig } from './getConfig.js' -const defaultValueField: TextField = { - name: 'defaultValue', - type: 'text', - defaultValue: 'default value from database', -} - -export default buildConfigWithDefaults({ - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - }, - collections: [ - { - slug: 'categories', - versions: { drafts: true }, - fields: [ - { - type: 'text', - name: 'title', - }, - ], - }, - { - slug: 'categories-custom-id', - versions: { drafts: true }, - fields: [ - { - type: 'number', - name: 'id', - }, - ], - }, - { - slug: postsSlug, - fields: [ - { - name: 'title', - type: 'text', - required: true, - // access: { read: () => false }, - }, - { - type: 'relationship', - relationTo: 'categories', - name: 'category', - }, - { - type: 'relationship', - relationTo: 'categories-custom-id', - name: 'categoryCustomID', - }, - { - name: 'localized', - type: 'text', - localized: true, - }, - { - name: 'text', - type: 'text', - }, - { - name: 'number', - type: 'number', - }, - { - type: 'blocks', - name: 'blocks', - blocks: [ - { - slug: 'block-third', - fields: [ - { - type: 'blocks', - name: 'nested', - blocks: [ - { - slug: 'block-fourth', - 
fields: [ - { - type: 'blocks', - name: 'nested', - blocks: [], - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - type: 'tabs', - tabs: [ - { - name: 'D1', - fields: [ - { - name: 'D2', - type: 'group', - fields: [ - { - type: 'row', - fields: [ - { - type: 'collapsible', - fields: [ - { - type: 'tabs', - tabs: [ - { - fields: [ - { - name: 'D3', - type: 'group', - fields: [ - { - type: 'row', - fields: [ - { - type: 'collapsible', - fields: [ - { - name: 'D4', - type: 'text', - }, - ], - label: 'Collapsible2', - }, - ], - }, - ], - }, - ], - label: 'Tab1', - }, - ], - }, - ], - label: 'Collapsible2', - }, - ], - }, - ], - }, - ], - label: 'Tab1', - }, - ], - }, - { - name: 'hasTransaction', - type: 'checkbox', - hooks: { - beforeChange: [({ req }) => !!req.transactionID], - }, - admin: { - readOnly: true, - }, - }, - { - name: 'throwAfterChange', - type: 'checkbox', - defaultValue: false, - hooks: { - afterChange: [ - ({ value }) => { - if (value) { - throw new Error('throw after change') - } - }, - ], - }, - }, - { - name: 'arrayWithIDs', - type: 'array', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - name: 'blocksWithIDs', - type: 'blocks', - blocks: [ - { - slug: 'block-first', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - ], - }, - { - type: 'group', - name: 'group', - fields: [{ name: 'text', type: 'text' }], - }, - { - type: 'tabs', - tabs: [ - { - name: 'tab', - fields: [{ name: 'text', type: 'text' }], - }, - ], - }, - ], - hooks: { - beforeOperation: [ - ({ args, operation, req }) => { - if (operation === 'update') { - const defaultIDType = req.payload.db.defaultIDType - - if (defaultIDType === 'number' && typeof args.id === 'string') { - throw new Error('ID was not sanitized to a number properly') - } - } - - return args - }, - ], - }, - }, - { - slug: errorOnUnnamedFieldsSlug, - fields: [ - { - type: 'tabs', - tabs: [ - { - label: 'UnnamedTab', - fields: [ - { - name: 'groupWithinUnnamedTab', - type: 'group', - fields: [ - { - name: 'text', - type: 'text', - required: true, - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - slug: defaultValuesSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - defaultValueField, - { - name: 'array', - type: 'array', - // default array with one object to test subfield defaultValue properties for Mongoose - defaultValue: [{}], - fields: [defaultValueField], - }, - { - name: 'group', - type: 'group', - // we need to have to use as default in order to have subfield defaultValue properties directly for Mongoose - defaultValue: {}, - fields: [defaultValueField], - }, - { - name: 'select', - type: 'select', - defaultValue: 'default', - options: [ - { value: 'option0', label: 'Option 0' }, - { value: 'option1', label: 'Option 1' }, - { value: 'default', label: 'Default' }, - ], - }, - { - name: 'point', - type: 'point', - defaultValue: [10, 20], - }, - { - name: 'escape', - type: 'text', - defaultValue: "Thanks, we're excited for you to join us.", - }, - ], - }, - { - slug: relationASlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'richText', - type: 'richText', - }, - ], - labels: { - plural: 'Relation As', - singular: 'Relation A', - }, - }, - { - slug: relationBSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'relationship', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'richText', - type: 'richText', - }, - ], - labels: { - plural: 'Relation Bs', - singular: 'Relation B', - }, - }, - { - slug: pgMigrationSlug, - fields: [ - { - 
name: 'relation1', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'myArray', - type: 'array', - fields: [ - { - name: 'relation2', - type: 'relationship', - relationTo: 'relation-b', - }, - { - name: 'mySubArray', - type: 'array', - fields: [ - { - name: 'relation3', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - ], - }, - { - name: 'myGroup', - type: 'group', - fields: [ - { - name: 'relation4', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - { - name: 'myBlocks', - type: 'blocks', - blocks: [ - { - slug: 'myBlock', - fields: [ - { - name: 'relation5', - type: 'relationship', - relationTo: 'relation-a', - }, - { - name: 'relation6', - type: 'relationship', - localized: true, - relationTo: 'relation-b', - }, - ], - }, - ], - }, - ], - versions: true, - }, - { - slug: customSchemaSlug, - dbName: 'customs', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - { - name: 'relationship', - type: 'relationship', - hasMany: true, - relationTo: 'relation-a', - }, - { - name: 'select', - type: 'select', - dbName: ({ tableName }) => `${tableName}_customSelect`, - enumName: 'selectEnum', - hasMany: true, - options: ['a', 'b', 'c'], - }, - { - name: 'radio', - type: 'select', - enumName: 'radioEnum', - options: ['a', 'b', 'c'], - }, - { - name: 'array', - type: 'array', - dbName: 'customArrays', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - ], - }, - { - name: 'blocks', - type: 'blocks', - blocks: [ - { - slug: 'block-second', - dbName: 'customBlocks', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'localizedText', - type: 'text', - localized: true, - }, - ], - }, - ], - }, - ], - versions: { - drafts: true, - }, - }, - { - slug: placesSlug, - fields: [ - { - name: 'country', - type: 'text', - }, - { - name: 'city', - type: 'text', - }, - ], - }, - { - slug: 'virtual-relations', - admin: { useAsTitle: 'postTitle' }, - access: { read: () => true }, - fields: [ - { - name: 'postTitle', - type: 'text', - virtual: 'post.title', - }, - { - name: 'postTitleHidden', - type: 'text', - virtual: 'post.title', - hidden: true, - }, - { - name: 'postCategoryTitle', - type: 'text', - virtual: 'post.category.title', - }, - { - name: 'postCategoryID', - type: 'json', - virtual: 'post.category.id', - }, - { - name: 'postCategoryCustomID', - type: 'number', - virtual: 'post.categoryCustomID.id', - }, - { - name: 'postID', - type: 'json', - virtual: 'post.id', - }, - { - name: 'postLocalized', - type: 'text', - virtual: 'post.localized', - }, - { - name: 'post', - type: 'relationship', - relationTo: 'posts', - }, - { - name: 'customID', - type: 'relationship', - relationTo: 'custom-ids', - }, - { - name: 'customIDValue', - type: 'text', - virtual: 'customID.id', - }, - ], - versions: { drafts: true }, - }, - { - slug: fieldsPersistanceSlug, - fields: [ - { - name: 'text', - type: 'text', - virtual: true, - }, - { - name: 'textHooked', - type: 'text', - virtual: true, - hooks: { afterRead: [() => 'hooked'] }, - }, - { - name: 'array', - type: 'array', - virtual: true, - fields: [], - }, - { - type: 'row', - fields: [ - { - type: 'text', - name: 'textWithinRow', - virtual: true, - }, - ], - }, - { - type: 'collapsible', - fields: [ - { - type: 'text', - name: 'textWithinCollapsible', - virtual: true, - }, - ], - label: 'Colllapsible', - }, - { - type: 
'tabs', - tabs: [ - { - label: 'tab', - fields: [ - { - type: 'text', - name: 'textWithinTabs', - virtual: true, - }, - ], - }, - ], - }, - ], - }, - { - slug: customIDsSlug, - fields: [ - { - name: 'id', - type: 'text', - admin: { - readOnly: true, - }, - hooks: { - beforeChange: [ - ({ value, operation }) => { - if (operation === 'create') { - return randomUUID() - } - return value - }, - ], - }, - }, - { - name: 'title', - type: 'text', - }, - ], - versions: { drafts: true }, - }, - { - slug: fakeCustomIDsSlug, - fields: [ - { - name: 'title', - type: 'text', - }, - { - name: 'group', - type: 'group', - fields: [ - { - name: 'id', - type: 'text', - }, - ], - }, - { - type: 'tabs', - tabs: [ - { - name: 'myTab', - fields: [ - { - name: 'id', - type: 'text', - }, - ], - }, - ], - }, - ], - }, - { - slug: relationshipsMigrationSlug, - fields: [ - { - type: 'relationship', - relationTo: 'default-values', - name: 'relationship', - }, - { - type: 'relationship', - relationTo: ['default-values'], - name: 'relationship_2', - }, - ], - versions: true, - }, - { - slug: 'compound-indexes', - fields: [ - { - name: 'one', - type: 'text', - }, - { - name: 'two', - type: 'text', - }, - { - name: 'three', - type: 'text', - }, - { - name: 'group', - type: 'group', - fields: [ - { - name: 'four', - type: 'text', - }, - ], - }, - ], - indexes: [ - { - fields: ['one', 'two'], - unique: true, - }, - { - fields: ['three', 'group.four'], - unique: true, - }, - ], - }, - { - slug: 'aliases', - fields: [ - { - name: 'thisIsALongFieldNameThatCanCauseAPostgresErrorEvenThoughWeSetAShorterDBName', - dbName: 'shortname', - type: 'array', - fields: [ - { - name: 'nestedArray', - type: 'array', - dbName: 'short_nested_1', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - }, - ], - }, - { - slug: 'blocks-docs', - fields: [ - { - type: 'blocks', - localized: true, - blocks: [ - { - slug: 'cta', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - name: 'testBlocksLocalized', - }, - { - type: 'blocks', - blocks: [ - { - slug: 'cta', - fields: [ - { - type: 'text', - name: 'text', - }, - ], - }, - ], - name: 'testBlocks', - }, - ], - }, - { - slug: 'unique-fields', - fields: [ - { - name: 'slugField', - type: 'text', - unique: true, - }, - ], - }, - ], - globals: [ - { - slug: 'header', - fields: [ - { - name: 'itemsLvl1', - type: 'array', - dbName: 'header_items_lvl1', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl2', - type: 'array', - dbName: 'header_items_lvl2', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl3', - type: 'array', - dbName: 'header_items_lvl3', - fields: [ - { - name: 'label', - type: 'text', - }, - { - name: 'itemsLvl4', - type: 'array', - dbName: 'header_items_lvl4', - fields: [ - { - name: 'label', - type: 'text', - }, - ], - }, - ], - }, - ], - }, - ], - }, - ], - }, - { - slug: 'global', - dbName: 'customGlobal', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - versions: true, - }, - { - slug: 'global-2', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - slug: 'global-3', - fields: [ - { - name: 'text', - type: 'text', - }, - ], - }, - { - slug: 'virtual-relation-global', - fields: [ - { - type: 'text', - name: 'postTitle', - virtual: 'post.title', - }, - { - type: 'relationship', - name: 'post', - relationTo: 'posts', - }, - ], - }, - ], - localization: { - defaultLocale: 'en', - locales: ['en', 'es'], - }, - onInit: async (payload) => { - if 
(process.env.SEED_IN_CONFIG_ONINIT !== 'false') { - await seed(payload) - } - }, - typescript: { - outputFile: path.resolve(dirname, 'payload-types.ts'), - }, -}) - -export const postDoc = { - title: 'test post', -} +export default buildConfigWithDefaults(getConfig()) diff --git a/test/database/getConfig.ts b/test/database/getConfig.ts new file mode 100644 index 0000000000..b5ea622a4f --- /dev/null +++ b/test/database/getConfig.ts @@ -0,0 +1,942 @@ +import type { Config, TextField } from 'payload' + +import { randomUUID } from 'crypto' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { seed } from './seed.js' +import { + customIDsSlug, + customSchemaSlug, + defaultValuesSlug, + errorOnUnnamedFieldsSlug, + fakeCustomIDsSlug, + fieldsPersistanceSlug, + pgMigrationSlug, + placesSlug, + postsSlug, + relationASlug, + relationBSlug, + relationshipsMigrationSlug, +} from './shared.js' + +const defaultValueField: TextField = { + name: 'defaultValue', + type: 'text', + defaultValue: 'default value from database', +} + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export const getConfig: () => Partial = () => ({ + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + }, + collections: [ + { + slug: 'categories', + versions: { drafts: true }, + fields: [ + { + type: 'text', + name: 'title', + }, + ], + }, + { + slug: 'simple', + fields: [ + { + type: 'text', + name: 'text', + }, + { + type: 'number', + name: 'number', + }, + ], + }, + { + slug: 'categories-custom-id', + versions: { drafts: true }, + fields: [ + { + type: 'number', + name: 'id', + }, + ], + }, + { + slug: postsSlug, + fields: [ + { + name: 'title', + type: 'text', + required: true, + // access: { read: () => false }, + }, + { + type: 'relationship', + relationTo: 'categories', + name: 'category', + }, + { + type: 'relationship', + relationTo: 'categories-custom-id', + name: 'categoryCustomID', + }, + { + name: 'localized', + type: 'text', + localized: true, + }, + { + name: 'text', + type: 'text', + }, + { + name: 'number', + type: 'number', + }, + { + type: 'blocks', + name: 'blocks', + blocks: [ + { + slug: 'block-third', + fields: [ + { + type: 'blocks', + name: 'nested', + blocks: [ + { + slug: 'block-fourth', + fields: [ + { + type: 'blocks', + name: 'nested', + blocks: [], + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + type: 'tabs', + tabs: [ + { + name: 'D1', + fields: [ + { + name: 'D2', + type: 'group', + fields: [ + { + type: 'row', + fields: [ + { + type: 'collapsible', + fields: [ + { + type: 'tabs', + tabs: [ + { + fields: [ + { + name: 'D3', + type: 'group', + fields: [ + { + type: 'row', + fields: [ + { + type: 'collapsible', + fields: [ + { + name: 'D4', + type: 'text', + }, + ], + label: 'Collapsible2', + }, + ], + }, + ], + }, + ], + label: 'Tab1', + }, + ], + }, + ], + label: 'Collapsible2', + }, + ], + }, + ], + }, + ], + label: 'Tab1', + }, + ], + }, + { + name: 'hasTransaction', + type: 'checkbox', + hooks: { + beforeChange: [({ req }) => !!req.transactionID], + }, + admin: { + readOnly: true, + }, + }, + { + name: 'throwAfterChange', + type: 'checkbox', + defaultValue: false, + hooks: { + afterChange: [ + ({ value }) => { + if (value) { + throw new Error('throw after change') + } + }, + ], + }, + }, + { + name: 'arrayWithIDs', + type: 'array', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + name: 'blocksWithIDs', + type: 'blocks', + blocks: [ + { + slug: 'block-first', + fields: [ + { + name: 'text', + 
type: 'text', + }, + ], + }, + ], + }, + { + type: 'group', + name: 'group', + fields: [{ name: 'text', type: 'text' }], + }, + { + type: 'tabs', + tabs: [ + { + name: 'tab', + fields: [{ name: 'text', type: 'text' }], + }, + ], + }, + ], + hooks: { + beforeOperation: [ + ({ args, operation, req }) => { + if (operation === 'update') { + const defaultIDType = req.payload.db.defaultIDType + + if (defaultIDType === 'number' && typeof args.id === 'string') { + throw new Error('ID was not sanitized to a number properly') + } + } + + return args + }, + ], + }, + }, + { + slug: errorOnUnnamedFieldsSlug, + fields: [ + { + type: 'tabs', + tabs: [ + { + label: 'UnnamedTab', + fields: [ + { + name: 'groupWithinUnnamedTab', + type: 'group', + fields: [ + { + name: 'text', + type: 'text', + required: true, + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + slug: defaultValuesSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + defaultValueField, + { + name: 'array', + type: 'array', + // default array with one object to test subfield defaultValue properties for Mongoose + defaultValue: [{}], + fields: [defaultValueField], + }, + { + name: 'group', + type: 'group', + // we need to have to use as default in order to have subfield defaultValue properties directly for Mongoose + defaultValue: {}, + fields: [defaultValueField], + }, + { + name: 'select', + type: 'select', + defaultValue: 'default', + options: [ + { value: 'option0', label: 'Option 0' }, + { value: 'option1', label: 'Option 1' }, + { value: 'default', label: 'Default' }, + ], + }, + { + name: 'point', + type: 'point', + defaultValue: [10, 20], + }, + { + name: 'escape', + type: 'text', + defaultValue: "Thanks, we're excited for you to join us.", + }, + ], + }, + { + slug: relationASlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'richText', + type: 'richText', + }, + ], + labels: { + plural: 'Relation As', + singular: 'Relation A', + }, + }, + { + slug: relationBSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'relationship', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'richText', + type: 'richText', + }, + ], + labels: { + plural: 'Relation Bs', + singular: 'Relation B', + }, + }, + { + slug: pgMigrationSlug, + fields: [ + { + name: 'relation1', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'myArray', + type: 'array', + fields: [ + { + name: 'relation2', + type: 'relationship', + relationTo: 'relation-b', + }, + { + name: 'mySubArray', + type: 'array', + fields: [ + { + name: 'relation3', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + ], + }, + { + name: 'myGroup', + type: 'group', + fields: [ + { + name: 'relation4', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + { + name: 'myBlocks', + type: 'blocks', + blocks: [ + { + slug: 'myBlock', + fields: [ + { + name: 'relation5', + type: 'relationship', + relationTo: 'relation-a', + }, + { + name: 'relation6', + type: 'relationship', + localized: true, + relationTo: 'relation-b', + }, + ], + }, + ], + }, + ], + versions: true, + }, + { + slug: customSchemaSlug, + dbName: 'customs', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + { + name: 'relationship', + type: 'relationship', + hasMany: true, + relationTo: 'relation-a', + }, + { + name: 'select', + type: 'select', + dbName: ({ tableName }) => `${tableName}_customSelect`, + enumName: 
'selectEnum', + hasMany: true, + options: ['a', 'b', 'c'], + }, + { + name: 'radio', + type: 'select', + enumName: 'radioEnum', + options: ['a', 'b', 'c'], + }, + { + name: 'array', + type: 'array', + dbName: 'customArrays', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + ], + }, + { + name: 'blocks', + type: 'blocks', + blocks: [ + { + slug: 'block-second', + dbName: 'customBlocks', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'localizedText', + type: 'text', + localized: true, + }, + ], + }, + ], + }, + ], + versions: { + drafts: true, + }, + }, + { + slug: placesSlug, + fields: [ + { + name: 'country', + type: 'text', + }, + { + name: 'city', + type: 'text', + }, + ], + }, + { + slug: 'virtual-relations', + admin: { useAsTitle: 'postTitle' }, + access: { read: () => true }, + fields: [ + { + name: 'postTitle', + type: 'text', + virtual: 'post.title', + }, + { + name: 'postTitleHidden', + type: 'text', + virtual: 'post.title', + hidden: true, + }, + { + name: 'postCategoryTitle', + type: 'text', + virtual: 'post.category.title', + }, + { + name: 'postCategoryID', + type: 'json', + virtual: 'post.category.id', + }, + { + name: 'postCategoryCustomID', + type: 'number', + virtual: 'post.categoryCustomID.id', + }, + { + name: 'postID', + type: 'json', + virtual: 'post.id', + }, + { + name: 'postLocalized', + type: 'text', + virtual: 'post.localized', + }, + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + }, + { + name: 'customID', + type: 'relationship', + relationTo: 'custom-ids', + }, + { + name: 'customIDValue', + type: 'text', + virtual: 'customID.id', + }, + ], + versions: { drafts: true }, + }, + { + slug: fieldsPersistanceSlug, + fields: [ + { + name: 'text', + type: 'text', + virtual: true, + }, + { + name: 'textHooked', + type: 'text', + virtual: true, + hooks: { afterRead: [() => 'hooked'] }, + }, + { + name: 'array', + type: 'array', + virtual: true, + fields: [], + }, + { + type: 'row', + fields: [ + { + type: 'text', + name: 'textWithinRow', + virtual: true, + }, + ], + }, + { + type: 'collapsible', + fields: [ + { + type: 'text', + name: 'textWithinCollapsible', + virtual: true, + }, + ], + label: 'Colllapsible', + }, + { + type: 'tabs', + tabs: [ + { + label: 'tab', + fields: [ + { + type: 'text', + name: 'textWithinTabs', + virtual: true, + }, + ], + }, + ], + }, + ], + }, + { + slug: customIDsSlug, + fields: [ + { + name: 'id', + type: 'text', + admin: { + readOnly: true, + }, + hooks: { + beforeChange: [ + ({ value, operation }) => { + if (operation === 'create') { + return randomUUID() + } + return value + }, + ], + }, + }, + { + name: 'title', + type: 'text', + }, + ], + versions: { drafts: true }, + }, + { + slug: fakeCustomIDsSlug, + fields: [ + { + name: 'title', + type: 'text', + }, + { + name: 'group', + type: 'group', + fields: [ + { + name: 'id', + type: 'text', + }, + ], + }, + { + type: 'tabs', + tabs: [ + { + name: 'myTab', + fields: [ + { + name: 'id', + type: 'text', + }, + ], + }, + ], + }, + ], + }, + { + slug: relationshipsMigrationSlug, + fields: [ + { + type: 'relationship', + relationTo: 'default-values', + name: 'relationship', + }, + { + type: 'relationship', + relationTo: ['default-values'], + name: 'relationship_2', + }, + ], + versions: true, + }, + { + slug: 'compound-indexes', + fields: [ + { + name: 'one', + type: 'text', + }, + { + name: 'two', + type: 'text', + }, + { + name: 'three', + type: 'text', + }, + { + name: 'group', + 
type: 'group', + fields: [ + { + name: 'four', + type: 'text', + }, + ], + }, + ], + indexes: [ + { + fields: ['one', 'two'], + unique: true, + }, + { + fields: ['three', 'group.four'], + unique: true, + }, + ], + }, + { + slug: 'aliases', + fields: [ + { + name: 'thisIsALongFieldNameThatCanCauseAPostgresErrorEvenThoughWeSetAShorterDBName', + dbName: 'shortname', + type: 'array', + fields: [ + { + name: 'nestedArray', + type: 'array', + dbName: 'short_nested_1', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + }, + ], + }, + { + slug: 'blocks-docs', + fields: [ + { + type: 'blocks', + localized: true, + blocks: [ + { + slug: 'cta', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + name: 'testBlocksLocalized', + }, + { + type: 'blocks', + blocks: [ + { + slug: 'cta', + fields: [ + { + type: 'text', + name: 'text', + }, + ], + }, + ], + name: 'testBlocks', + }, + ], + }, + { + slug: 'unique-fields', + fields: [ + { + name: 'slugField', + type: 'text', + unique: true, + }, + ], + }, + ], + globals: [ + { + slug: 'header', + fields: [ + { + name: 'itemsLvl1', + type: 'array', + dbName: 'header_items_lvl1', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl2', + type: 'array', + dbName: 'header_items_lvl2', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl3', + type: 'array', + dbName: 'header_items_lvl3', + fields: [ + { + name: 'label', + type: 'text', + }, + { + name: 'itemsLvl4', + type: 'array', + dbName: 'header_items_lvl4', + fields: [ + { + name: 'label', + type: 'text', + }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + }, + { + slug: 'global', + dbName: 'customGlobal', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + versions: true, + }, + { + slug: 'global-2', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + slug: 'global-3', + fields: [ + { + name: 'text', + type: 'text', + }, + ], + }, + { + slug: 'virtual-relation-global', + fields: [ + { + type: 'text', + name: 'postTitle', + virtual: 'post.title', + }, + { + type: 'relationship', + name: 'post', + relationTo: 'posts', + }, + ], + }, + ], + localization: { + defaultLocale: 'en', + locales: ['en', 'es'], + }, + onInit: async (payload) => { + if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { + await seed(payload) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/database/payload-types.ts b/test/database/payload-types.ts index d1f52cb4a1..18b196b38e 100644 --- a/test/database/payload-types.ts +++ b/test/database/payload-types.ts @@ -68,6 +68,7 @@ export interface Config { blocks: {}; collections: { categories: Category; + simple: Simple; 'categories-custom-id': CategoriesCustomId; posts: Post; 'error-on-unnamed-fields': ErrorOnUnnamedField; @@ -94,6 +95,7 @@ export interface Config { collectionsJoins: {}; collectionsSelect: { categories: CategoriesSelect | CategoriesSelect; + simple: SimpleSelect | SimpleSelect; 'categories-custom-id': CategoriesCustomIdSelect | CategoriesCustomIdSelect; posts: PostsSelect | PostsSelect; 'error-on-unnamed-fields': ErrorOnUnnamedFieldsSelect | ErrorOnUnnamedFieldsSelect; @@ -172,6 +174,17 @@ export interface Category { createdAt: string; _status?: ('draft' | 'published') | null; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple". 
+ */ +export interface Simple { + id: string; + text?: string | null; + number?: number | null; + updatedAt: string; + createdAt: string; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "categories-custom-id". @@ -608,6 +621,10 @@ export interface PayloadLockedDocument { relationTo: 'categories'; value: string | Category; } | null) + | ({ + relationTo: 'simple'; + value: string | Simple; + } | null) | ({ relationTo: 'categories-custom-id'; value: number | CategoriesCustomId; @@ -736,6 +753,16 @@ export interface CategoriesSelect { createdAt?: T; _status?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple_select". + */ +export interface SimpleSelect { + text?: T; + number?: T; + updatedAt?: T; + createdAt?: T; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "categories-custom-id_select". diff --git a/test/database/postgres-logs.int.spec.ts b/test/database/postgres-logs.int.spec.ts new file mode 100644 index 0000000000..a179b64c56 --- /dev/null +++ b/test/database/postgres-logs.int.spec.ts @@ -0,0 +1,91 @@ +import type { Payload } from 'payload' + +/* eslint-disable jest/require-top-level-describe */ +import assert from 'assert' +import path from 'path' +import { fileURLToPath } from 'url' + +import { initPayloadInt } from '../helpers/initPayloadInt.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +const describePostgres = process.env.PAYLOAD_DATABASE?.startsWith('postgres') + ? describe + : describe.skip + +let payload: Payload + +describePostgres('database - postgres logs', () => { + beforeAll(async () => { + const initialized = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.postgreslogs.ts', + ) + assert(initialized.payload) + assert(initialized.restClient) + ;({ payload } = initialized) + }) + + afterAll(async () => { + await payload.destroy() + }) + + it('ensure simple update uses optimized upsertRow with returning()', async () => { + const doc = await payload.create({ + collection: 'simple', + data: { + text: 'Some title', + number: 5, + }, + }) + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const result: any = await payload.db.updateOne({ + collection: 'simple', + id: doc.id, + data: { + text: 'Updated Title', + number: 5, + }, + }) + + expect(result.text).toEqual('Updated Title') + expect(result.number).toEqual(5) // Ensure the update did not reset the number field + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + }) + + it('ensure simple update of complex collection uses optimized upsertRow without returning()', async () => { + const doc = await payload.create({ + collection: 'posts', + data: { + title: 'Some title', + number: 5, + }, + }) + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const result: any = await payload.db.updateOne({ + collection: 'posts', + id: doc.id, + data: { + title: 'Updated Title', + number: 5, + }, + }) + + expect(result.title).toEqual('Updated Title') + expect(result.number).toEqual(5) // Ensure the update did not reset the number field + + expect(consoleCount).toHaveBeenCalledTimes(2) // Should be 2 sql call if the optimization is used (update + find). 
If not, this would be 5 calls + consoleCount.mockRestore() + }) +}) diff --git a/test/database/postgres-vector.int.spec.ts b/test/database/postgres-vector.int.spec.ts index 81d374a108..58a10743fd 100644 --- a/test/database/postgres-vector.int.spec.ts +++ b/test/database/postgres-vector.int.spec.ts @@ -12,11 +12,11 @@ import { fileURLToPath } from 'url' const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) -const describeToUse = process.env.PAYLOAD_DATABASE?.startsWith('postgres') +const describePostgres = process.env.PAYLOAD_DATABASE?.startsWith('postgres') ? describe : describe.skip -describeToUse('postgres vector custom column', () => { +describePostgres('postgres vector custom column', () => { const vectorColumnQueryTest = async (vectorType: string) => { const { databaseAdapter, diff --git a/test/database/seed.ts b/test/database/seed.ts index 921273e4bb..48fd373021 100644 --- a/test/database/seed.ts +++ b/test/database/seed.ts @@ -1,15 +1,6 @@ import type { Payload } from 'payload' -import path from 'path' -import { getFileByPath } from 'payload' -import { fileURLToPath } from 'url' - import { devUser } from '../credentials.js' -import { seedDB } from '../helpers/seed.js' -import { collectionSlugs } from './shared.js' - -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) export const _seed = async (_payload: Payload) => { await _payload.create({ diff --git a/test/database/shared.ts b/test/database/shared.ts index 7600f66547..5932229581 100644 --- a/test/database/shared.ts +++ b/test/database/shared.ts @@ -20,18 +20,3 @@ export const customIDsSlug = 'custom-ids' export const fakeCustomIDsSlug = 'fake-custom-ids' export const relationshipsMigrationSlug = 'relationships-migration' - -export const collectionSlugs = [ - postsSlug, - errorOnUnnamedFieldsSlug, - defaultValuesSlug, - relationASlug, - relationBSlug, - pgMigrationSlug, - customSchemaSlug, - placesSlug, - fieldsPersistanceSlug, - customIDsSlug, - fakeCustomIDsSlug, - relationshipsMigrationSlug, -] diff --git a/test/select/config.postgreslogs.ts b/test/select/config.postgreslogs.ts new file mode 100644 index 0000000000..d47ee88d83 --- /dev/null +++ b/test/select/config.postgreslogs.ts @@ -0,0 +1,19 @@ +/* eslint-disable no-restricted-exports */ +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { getConfig } from './getConfig.js' + +const config = getConfig() + +import { postgresAdapter } from '@payloadcms/db-postgres' + +export const databaseAdapter = postgresAdapter({ + pool: { + connectionString: process.env.POSTGRES_URL || 'postgres://127.0.0.1:5432/payloadtests', + }, + logger: true, +}) + +export default buildConfigWithDefaults({ + ...config, + db: databaseAdapter, +}) diff --git a/test/select/config.ts b/test/select/config.ts index 280946aa51..6c16a4bd2b 100644 --- a/test/select/config.ts +++ b/test/select/config.ts @@ -1,122 +1,4 @@ -import type { GlobalConfig } from 'payload' - -import { lexicalEditor } from '@payloadcms/richtext-lexical' -import { fileURLToPath } from 'node:url' -import path from 'path' - -import type { Post } from './payload-types.js' - import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' -import { devUser } from '../credentials.js' -import { CustomID } from './collections/CustomID/index.js' -import { DeepPostsCollection } from './collections/DeepPosts/index.js' -import { ForceSelect } from './collections/ForceSelect/index.js' -import { LocalizedPostsCollection } from 
'./collections/LocalizedPosts/index.js' -import { Pages } from './collections/Pages/index.js' -import { Points } from './collections/Points/index.js' -import { PostsCollection } from './collections/Posts/index.js' -import { UsersCollection } from './collections/Users/index.js' -import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' +import { getConfig } from './getConfig.js' -const filename = fileURLToPath(import.meta.url) -const dirname = path.dirname(filename) - -export default buildConfigWithDefaults({ - // ...extend config here - collections: [ - PostsCollection, - LocalizedPostsCollection, - VersionedPostsCollection, - DeepPostsCollection, - Pages, - Points, - ForceSelect, - { - slug: 'upload', - fields: [], - upload: { - staticDir: path.resolve(dirname, 'media'), - }, - }, - { - slug: 'rels', - fields: [], - }, - CustomID, - UsersCollection, - ], - globals: [ - { - slug: 'global-post', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'number', - type: 'number', - }, - ], - }, - { - slug: 'force-select-global', - fields: [ - { - name: 'text', - type: 'text', - }, - { - name: 'forceSelected', - type: 'text', - }, - { - name: 'array', - type: 'array', - fields: [ - { - name: 'forceSelected', - type: 'text', - }, - ], - }, - ], - forceSelect: { array: { forceSelected: true }, forceSelected: true }, - } satisfies GlobalConfig<'force-select-global'>, - ], - admin: { - importMap: { - baseDir: path.resolve(dirname), - }, - }, - localization: { - locales: ['en', 'de'], - defaultLocale: 'en', - }, - editor: lexicalEditor({ - features: ({ defaultFeatures }) => [...defaultFeatures], - }), - cors: ['http://localhost:3000', 'http://localhost:3001'], - onInit: async (payload) => { - await payload.create({ - collection: 'users', - data: { - email: devUser.email, - password: devUser.password, - }, - }) - - // // Create image - // const imageFilePath = path.resolve(dirname, '../uploads/image.png') - // const imageFile = await getFileByPath(imageFilePath) - - // await payload.create({ - // collection: 'media', - // data: {}, - // file: imageFile, - // }) - }, - typescript: { - outputFile: path.resolve(dirname, 'payload-types.ts'), - }, -}) +export default buildConfigWithDefaults(getConfig()) diff --git a/test/select/getConfig.ts b/test/select/getConfig.ts new file mode 100644 index 0000000000..7712c3e82d --- /dev/null +++ b/test/select/getConfig.ts @@ -0,0 +1,119 @@ +import type { Config, GlobalConfig } from 'payload' + +import { lexicalEditor } from '@payloadcms/richtext-lexical' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { devUser } from '../credentials.js' +import { CustomID } from './collections/CustomID/index.js' +import { DeepPostsCollection } from './collections/DeepPosts/index.js' +import { ForceSelect } from './collections/ForceSelect/index.js' +import { LocalizedPostsCollection } from './collections/LocalizedPosts/index.js' +import { Pages } from './collections/Pages/index.js' +import { Points } from './collections/Points/index.js' +import { PostsCollection } from './collections/Posts/index.js' +import { UsersCollection } from './collections/Users/index.js' +import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export const getConfig: () => Partial = () => ({ + // ...extend config here + collections: [ + PostsCollection, + LocalizedPostsCollection, + VersionedPostsCollection, + DeepPostsCollection, 
+ Pages, + Points, + ForceSelect, + { + slug: 'upload', + fields: [], + upload: { + staticDir: path.resolve(dirname, 'media'), + }, + }, + { + slug: 'rels', + fields: [], + }, + CustomID, + UsersCollection, + ], + globals: [ + { + slug: 'global-post', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'number', + type: 'number', + }, + ], + }, + { + slug: 'force-select-global', + fields: [ + { + name: 'text', + type: 'text', + }, + { + name: 'forceSelected', + type: 'text', + }, + { + name: 'array', + type: 'array', + fields: [ + { + name: 'forceSelected', + type: 'text', + }, + ], + }, + ], + forceSelect: { array: { forceSelected: true }, forceSelected: true }, + } satisfies GlobalConfig<'force-select-global'>, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + }, + localization: { + locales: ['en', 'de'], + defaultLocale: 'en', + }, + editor: lexicalEditor({ + features: ({ defaultFeatures }) => [...defaultFeatures], + }), + cors: ['http://localhost:3000', 'http://localhost:3001'], + onInit: async (payload) => { + await payload.create({ + collection: 'users', + data: { + email: devUser.email, + password: devUser.password, + }, + }) + + // // Create image + // const imageFilePath = path.resolve(dirname, '../uploads/image.png') + // const imageFile = await getFileByPath(imageFilePath) + + // await payload.create({ + // collection: 'media', + // data: {}, + // file: imageFile, + // }) + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/select/postgreslogs.int.spec.ts b/test/select/postgreslogs.int.spec.ts new file mode 100644 index 0000000000..58517e196e --- /dev/null +++ b/test/select/postgreslogs.int.spec.ts @@ -0,0 +1,179 @@ +/* eslint-disable jest/require-top-level-describe */ +import type { Payload } from 'payload' + +import path from 'path' +import { assert } from 'ts-essentials' +import { fileURLToPath } from 'url' + +import type { Point, Post } from './payload-types.js' + +import { initPayloadInt } from '../helpers/initPayloadInt.js' + +let payload: Payload + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +const describePostgres = process.env.PAYLOAD_DATABASE === 'postgres' ? 
describe : describe.skip + +describePostgres('Select - with postgres logs', () => { + // --__--__--__--__--__--__--__--__--__ + // Boilerplate test setup/teardown + // --__--__--__--__--__--__--__--__--__ + beforeAll(async () => { + const initialized = await initPayloadInt( + dirname, + undefined, + undefined, + 'config.postgreslogs.ts', + ) + assert(initialized.payload) + assert(initialized.restClient) + ;({ payload } = initialized) + }) + + afterAll(async () => { + await payload.destroy() + }) + + describe('Local API - Base', () => { + let post: Post + let postId: number | string + + let point: Point + let pointId: number | string + + beforeEach(async () => { + post = await createPost() + postId = post.id + + point = await createPoint() + pointId = point.id + }) + + // Clean up to safely mutate in each test + afterEach(async () => { + await payload.delete({ id: postId, collection: 'posts' }) + await payload.delete({ id: pointId, collection: 'points' }) + }) + + describe('Local API - operations', () => { + it('ensure optimized db update is still used when using select', async () => { + const post = await createPost() + + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { + text: 'new text', + }, + select: { text: true, number: true }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.number).toEqual(1) + expect(res.text).toEqual('new text') + expect(res.id).toEqual(post.id) + expect(Object.keys(res)).toHaveLength(3) + }) + }) + }) +}) + +function removeEmptyAndUndefined(obj: any): any { + if (Array.isArray(obj)) { + const cleanedArray = obj + .map(removeEmptyAndUndefined) + .filter( + (item) => + item !== undefined && !(typeof item === 'object' && Object.keys(item).length === 0), + ) + + return cleanedArray.length > 0 ? cleanedArray : undefined + } + + if (obj !== null && typeof obj === 'object') { + const cleanedEntries = Object.entries(obj) + .map(([key, value]) => [key, removeEmptyAndUndefined(value)]) + .filter( + ([, value]) => + value !== undefined && + !( + typeof value === 'object' && + (Array.isArray(value) ? value.length === 0 : Object.keys(value).length === 0) + ), + ) + + return cleanedEntries.length > 0 ? 
Object.fromEntries(cleanedEntries) : undefined + } + + return obj +} +async function createPost() { + const upload = await payload.create({ + collection: 'upload', + data: {}, + filePath: path.resolve(dirname, 'image.jpg'), + }) + + const relation = await payload.create({ + depth: 0, + collection: 'rels', + data: {}, + }) + + return payload.create({ + collection: 'posts', + depth: 0, + data: { + number: 1, + text: 'text', + select: 'a', + selectMany: ['a'], + group: { + number: 1, + text: 'text', + }, + hasMany: [relation], + hasManyUpload: [upload], + hasOne: relation, + hasManyPoly: [{ relationTo: 'rels', value: relation }], + hasOnePoly: { relationTo: 'rels', value: relation }, + blocks: [ + { + blockType: 'cta', + ctaText: 'cta-text', + text: 'text', + }, + { + blockType: 'intro', + introText: 'intro-text', + text: 'text', + }, + ], + array: [ + { + text: 'text', + number: 1, + }, + ], + tab: { + text: 'text', + number: 1, + }, + unnamedTabNumber: 2, + unnamedTabText: 'text2', + }, + }) +} + +function createPoint() { + return payload.create({ collection: 'points', data: { text: 'some', point: [10, 20] } }) +} From 380ce04d5c2c6b22c41d9998952478796f0e4e01 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Wed, 23 Jul 2025 19:05:31 +0300 Subject: [PATCH 065/143] perf(db-postgres): avoid including `prettier` to the bundle (#13251) This PR optimizes bundle size with drizzle adapters by avoiding including `prettier` to the production bundle --- packages/drizzle/src/utilities/createSchemaGenerator.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/drizzle/src/utilities/createSchemaGenerator.ts b/packages/drizzle/src/utilities/createSchemaGenerator.ts index b979460b26..cc6c85656e 100644 --- a/packages/drizzle/src/utilities/createSchemaGenerator.ts +++ b/packages/drizzle/src/utilities/createSchemaGenerator.ts @@ -296,12 +296,13 @@ declare module '${this.packageName}' { if (prettify) { try { - const prettier = await import('prettier') + const prettier = await eval('import("prettier")') const configPath = await prettier.resolveConfigFile() const config = configPath ? await prettier.resolveConfig(configPath) : {} code = await prettier.format(code, { ...config, parser: 'typescript' }) - // eslint-disable-next-line no-empty - } catch {} + } catch { + /* empty */ + } } await writeFile(outputFile, code, 'utf-8') From 0eac58ed723917399895be8737078416bdfcbba0 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Wed, 23 Jul 2025 15:19:10 -0400 Subject: [PATCH 066/143] fix(next): prevent base list filters from being injected into the url (#13253) Prevents base list filters from being injected into the URL. This is a problem with the multi-tenant plugin, for example, where changing the tenant adds a `baseListFilter` to the query, but should never be exposed to the end user. Introduced in #13200. 
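
For illustration, a minimal sketch of the intended behavior: the base list filter is AND-ed into the where clause on the server instead of being written into the query string. The helper below is a simplified stand-in for the `combineWhereConstraints` utility used in the diff below — not the actual implementation — and the `tenant` filter is a hypothetical example of a `baseListFilter` returned by a plugin:

```ts
// Simplified stand-in for combineWhereConstraints — illustrative only.
type Where = Record<string, unknown>

const combineWhereConstraints = (constraints: Array<undefined | Where>): undefined | Where => {
  const defined = constraints.filter((c): c is Where => Boolean(c))
  if (defined.length === 0) {
    return undefined
  }
  return defined.length === 1 ? defined[0] : { and: defined }
}

// The where clause parsed from the URL (user-controlled, stays in the URL):
const queryWhere: Where = { title: { contains: 'hello' } }

// The base list filter resolved on the server (never exposed to the user):
const baseListFilter: Where = { tenant: { equals: 'tenant-1' } }

// Only the combined result is passed to the server-side list query:
const where = combineWhereConstraints([queryWhere, baseListFilter])
// => { and: [{ title: { contains: 'hello' } }, { tenant: { equals: 'tenant-1' } }] }
```

This keeps plugin-driven constraints applied to every list query while leaving the shareable URL free of filters the user should never see.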
--- packages/next/src/views/List/index.tsx | 23 ++--- tsconfig.base.json | 114 ++++++------------------- 2 files changed, 36 insertions(+), 101 deletions(-) diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index 1a019e48a4..59c0c3dfc5 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -14,6 +14,7 @@ import { RenderServerComponent } from '@payloadcms/ui/elements/RenderServerCompo import { renderFilters, renderTable, upsertPreferences } from '@payloadcms/ui/rsc' import { notFound } from 'next/navigation.js' import { + combineWhereConstraints, formatAdminURL, isNumber, mergeListSearchAndWhere, @@ -122,27 +123,17 @@ export const renderListView = async ( throw new Error('not-found') } + let baseListFilter = undefined + if (typeof collectionConfig.admin?.baseListFilter === 'function') { - const baseListFilter = await collectionConfig.admin.baseListFilter({ + baseListFilter = await collectionConfig.admin.baseListFilter({ limit: query.limit, page: query.page, req, sort: query.sort, }) - - if (baseListFilter) { - query.where = { - and: [query.where, baseListFilter].filter(Boolean), - } - } } - const whereWithMergedSearch = mergeListSearchAndWhere({ - collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: query?.where, - }) - let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined @@ -182,7 +173,11 @@ export const renderListView = async ( req, sort: query.sort, user, - where: whereWithMergedSearch, + where: mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: combineWhereConstraints([query?.where, baseListFilter]), + }), }) const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) diff --git a/tsconfig.base.json b/tsconfig.base.json index 153abb8a5f..0898ad390f 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,15 +21,8 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": [ - "DOM", - "DOM.Iterable", - "ES2022" - ], - "types": [ - "node", - "jest" - ], + "lib": ["DOM", "DOM.Iterable", "ES2022"], + "types": ["node", "jest"], "incremental": true, "isolatedModules": true, "plugins": [ @@ -38,72 +31,36 @@ } ], "paths": { - "@payload-config": [ - "./test/fields/config.ts" - ], - "@payloadcms/admin-bar": [ - "./packages/admin-bar/src" - ], - "@payloadcms/live-preview": [ - "./packages/live-preview/src" - ], - "@payloadcms/live-preview-react": [ - "./packages/live-preview-react/src/index.ts" - ], - "@payloadcms/live-preview-vue": [ - "./packages/live-preview-vue/src/index.ts" - ], - "@payloadcms/ui": [ - "./packages/ui/src/exports/client/index.ts" - ], - "@payloadcms/ui/shared": [ - "./packages/ui/src/exports/shared/index.ts" - ], - "@payloadcms/ui/rsc": [ - "./packages/ui/src/exports/rsc/index.ts" - ], - "@payloadcms/ui/scss": [ - "./packages/ui/src/scss.scss" - ], - "@payloadcms/ui/scss/app.scss": [ - "./packages/ui/src/scss/app.scss" - ], - "@payloadcms/next/*": [ - "./packages/next/src/exports/*.ts" - ], + "@payload-config": ["./test/_community/config.ts"], + "@payloadcms/admin-bar": ["./packages/admin-bar/src"], + "@payloadcms/live-preview": ["./packages/live-preview/src"], + "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], + "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], + "@payloadcms/ui": 
["./packages/ui/src/exports/client/index.ts"], + "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], + "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], + "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], + "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], + "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": [ - "./packages/richtext-lexical/src/exports/server/rsc.ts" - ], - "@payloadcms/richtext-slate/rsc": [ - "./packages/richtext-slate/src/exports/server/rsc.ts" - ], + "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], + "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": [ - "./packages/plugin-seo/src/exports/client.ts" - ], - "@payloadcms/plugin-sentry/client": [ - "./packages/plugin-sentry/src/exports/client.ts" - ], - "@payloadcms/plugin-stripe/client": [ - "./packages/plugin-stripe/src/exports/client.ts" - ], - "@payloadcms/plugin-search/client": [ - "./packages/plugin-search/src/exports/client.ts" - ], + "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], + "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], + "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], + "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": [ - "./packages/plugin-multi-tenant/src/exports/rsc.ts" - ], + "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -113,42 +70,25 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": [ - "./packages/plugin-multi-tenant/src/index.ts" - ], + "@payloadcms/plugin-multi-tenant": ["./packages/plugin-multi-tenant/src/index.ts"], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": [ - "./packages/next/src/exports/*" - ], - "@payloadcms/storage-azure/client": [ - "./packages/storage-azure/src/exports/client.ts" - ], - "@payloadcms/storage-s3/client": [ - "./packages/storage-s3/src/exports/client.ts" - ], + "@payloadcms/next": ["./packages/next/src/exports/*"], + "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], + "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": [ - "./packages/storage-gcs/src/exports/client.ts" - ], + "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], 
"@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": [ - "${configDir}/src" - ], - "exclude": [ - "${configDir}/dist", - "${configDir}/build", - "${configDir}/temp", - "**/*.spec.ts" - ] + "include": ["${configDir}/src"], + "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] } From 29fb9ee5b49f11151e88f62df086d284db0bee62 Mon Sep 17 00:00:00 2001 From: Jarrod Flesch <30633324+JarrodMFlesch@users.noreply.github.com> Date: Wed, 23 Jul 2025 16:31:05 -0400 Subject: [PATCH 067/143] fix(ui): monomorphic relationship fields should not show relationTo option labels (#13245) --- packages/ui/src/fields/Relationship/Input.tsx | 40 +++++++++++-------- packages/ui/src/fields/Relationship/index.tsx | 3 ++ packages/ui/src/fields/Relationship/types.ts | 1 + .../Lexical/e2e/blocks/e2e.spec.ts | 6 +-- 4 files changed, 31 insertions(+), 19 deletions(-) diff --git a/packages/ui/src/fields/Relationship/Input.tsx b/packages/ui/src/fields/Relationship/Input.tsx index 328bfc2e8e..0792911cd8 100644 --- a/packages/ui/src/fields/Relationship/Input.tsx +++ b/packages/ui/src/fields/Relationship/Input.tsx @@ -49,6 +49,7 @@ export const RelationshipInput: React.FC = (props) => { Description, Error, filterOptions, + formatDisplayedOptions, hasMany, initialValue, isSortable = true, @@ -100,9 +101,6 @@ export const RelationshipInput: React.FC = (props) => { const [options, dispatchOptions] = useReducer(optionsReducer, []) const valueRef = useRef(value) - // the line below seems odd - - valueRef.current = value const [DocumentDrawer, , { isDrawerOpen, openDrawer }] = useDocumentDrawer({ id: currentlyOpenRelationship.id, @@ -474,11 +472,7 @@ export const RelationshipInput: React.FC = (props) => { const docID = args.doc.id if (hasMany) { - const currentValue = valueRef.current - ? Array.isArray(valueRef.current) - ? valueRef.current - : [valueRef.current] - : [] + const currentValue = value ? (Array.isArray(value) ? value : [value]) : [] const valuesToSet = currentValue.map((option: ValueWithRelation) => { return { @@ -492,7 +486,7 @@ export const RelationshipInput: React.FC = (props) => { onChange({ relationTo: args.collectionConfig.slug, value: docID }) } }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const onDuplicate = useCallback( @@ -508,8 +502,8 @@ export const RelationshipInput: React.FC = (props) => { if (hasMany) { onChange( - valueRef.current - ? (valueRef.current as ValueWithRelation[]).concat({ + value + ? value.concat({ relationTo: args.collectionConfig.slug, value: args.doc.id, }) @@ -522,7 +516,7 @@ export const RelationshipInput: React.FC = (props) => { }) } }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const onDelete = useCallback( @@ -537,8 +531,8 @@ export const RelationshipInput: React.FC = (props) => { if (hasMany) { onChange( - valueRef.current - ? (valueRef.current as ValueWithRelation[]).filter((option) => { + value + ? 
value.filter((option) => { return option.value !== args.id }) : null, @@ -549,7 +543,7 @@ export const RelationshipInput: React.FC = (props) => { return }, - [i18n, config, hasMany, onChange], + [i18n, config, hasMany, onChange, value], ) const filterOption = useCallback((item: Option, searchFilter: string) => { @@ -671,6 +665,12 @@ export const RelationshipInput: React.FC = (props) => { } }, [openDrawer, currentlyOpenRelationship]) + useEffect(() => { + // needed to sync the ref value when other fields influence the value + // i.e. when a drawer is opened and the value is set + valueRef.current = value + }, [value]) + const valueToRender = findOptionsByValue({ allowEdit, options, value }) if (!Array.isArray(valueToRender) && valueToRender?.value === 'null') { @@ -742,14 +742,18 @@ export const RelationshipInput: React.FC = (props) => { ? (selected) => { if (hasMany) { if (selected === null) { + valueRef.current = [] onChange([]) } else { + valueRef.current = selected as ValueWithRelation[] onChange(selected as ValueWithRelation[]) } } else if (hasMany === false) { if (selected === null) { + valueRef.current = null onChange(null) } else { + valueRef.current = selected as ValueWithRelation onChange(selected as ValueWithRelation) } } @@ -822,7 +826,11 @@ export const RelationshipInput: React.FC = (props) => { }), }) }} - options={options} + options={ + typeof formatDisplayedOptions === 'function' + ? formatDisplayedOptions(options) + : options + } placeholder={placeholder} showError={showError} value={valueToRender ?? null} diff --git a/packages/ui/src/fields/Relationship/index.tsx b/packages/ui/src/fields/Relationship/index.tsx index a4b4f6dd04..d226727df8 100644 --- a/packages/ui/src/fields/Relationship/index.tsx +++ b/packages/ui/src/fields/Relationship/index.tsx @@ -196,6 +196,9 @@ const RelationshipFieldComponent: RelationshipFieldClientComponent = (props) => description={description} Error={Error} filterOptions={filterOptions} + formatDisplayedOptions={ + isPolymorphic ? 
undefined : (options) => options.map((opt) => opt.options).flat() + } isSortable={isSortable} Label={Label} label={label} diff --git a/packages/ui/src/fields/Relationship/types.ts b/packages/ui/src/fields/Relationship/types.ts index 98c7500cfb..cf48acfa75 100644 --- a/packages/ui/src/fields/Relationship/types.ts +++ b/packages/ui/src/fields/Relationship/types.ts @@ -100,6 +100,7 @@ export type RelationshipInputProps = { readonly description?: StaticDescription readonly Error?: React.ReactNode readonly filterOptions?: FilterOptionsResult + readonly formatDisplayedOptions?: (options: OptionGroup[]) => Option[] | OptionGroup[] readonly isSortable?: boolean readonly Label?: React.ReactNode readonly label?: StaticLabel diff --git a/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts b/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts index 772faf68b6..c12b4264aa 100644 --- a/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts +++ b/test/lexical/collections/Lexical/e2e/blocks/e2e.spec.ts @@ -240,7 +240,7 @@ describe('lexicalBlocks', () => { ) await dependsOnDocData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnDocData.locator('.rs__control').click() await dependsOnSiblingData.locator('.rs__control').click() @@ -281,7 +281,7 @@ describe('lexicalBlocks', () => { await dependsOnDocData.locator('.rs__control').click() await dependsOnSiblingData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnSiblingData.locator('.rs__control').click() await dependsOnBlockData.locator('.rs__control').click() @@ -322,7 +322,7 @@ describe('lexicalBlocks', () => { await dependsOnSiblingData.locator('.rs__control').click() await dependsOnBlockData.locator('.rs__control').click() - await expect(newBlock.locator('.rs__menu')).toHaveText('Text Fieldsinvalid') + await expect(newBlock.locator('.rs__menu')).toHaveText('invalid') await dependsOnBlockData.locator('.rs__control').click() await saveDocAndAssert(page) From aeee0704dd640e3a3a522f77b0dda16a4d16d608 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Wed, 23 Jul 2025 16:48:25 -0700 Subject: [PATCH 068/143] chore: add new int test verifying that select *improves* performance of new optimization (#13254) https://github.com/payloadcms/payload/pull/13186 actually made the select API _more powerful_, as it can reduce the amount of db calls even for complex collections with blocks down to 1. This PR adds a test that verifies this. 
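
A condensed sketch of the verification approach (the full test is added below): it assumes a Payload instance initialized against the `config.postgreslogs.ts` config, where the postgres adapter has `logger: true` so every SQL statement is written through `console.log` and can be counted with a jest spy. The collection slug and selected fields are illustrative.

```ts
// Sketch only — assumes `payload` is already initialized with logger: true.
it('runs a single SQL statement when only flat columns are selected', async () => {
  const doc = await payload.create({
    collection: 'pages',
    data: { slug: 'a', additional: 'b' },
  })

  const logSpy = jest.spyOn(console, 'log').mockImplementation(() => {})

  await payload.db.updateOne({
    collection: 'pages',
    id: doc.id,
    data: { slug: 'new-slug' },
    // Selecting only top-level columns lets upsertRow use UPDATE ... RETURNING
    // without a follow-up find for blocks/arrays.
    select: { slug: true, additional: true },
  })

  expect(logSpy).toHaveBeenCalledTimes(1)
  logSpy.mockRestore()
})
```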
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210871676349303 --- test/select/postgreslogs.int.spec.ts | 120 +++++++++++++++++++++------ 1 file changed, 96 insertions(+), 24 deletions(-) diff --git a/test/select/postgreslogs.int.spec.ts b/test/select/postgreslogs.int.spec.ts index 58517e196e..076d4758e2 100644 --- a/test/select/postgreslogs.int.spec.ts +++ b/test/select/postgreslogs.int.spec.ts @@ -57,31 +57,101 @@ describePostgres('Select - with postgres logs', () => { await payload.delete({ id: pointId, collection: 'points' }) }) - describe('Local API - operations', () => { - it('ensure optimized db update is still used when using select', async () => { - const post = await createPost() + it('ensure optimized db update is still used when using select', async () => { + const post = await createPost() - // Count every console log - const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) - const res = removeEmptyAndUndefined( - (await payload.db.updateOne({ - collection: 'posts', - id: post.id, - data: { - text: 'new text', + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'posts', + id: post.id, + data: { + text: 'new text', + }, + select: { text: true, number: true }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.number).toEqual(1) + expect(res.text).toEqual('new text') + expect(res.id).toEqual(post.id) + expect(Object.keys(res)).toHaveLength(3) + }) + + // This verifies that select actually improves performance of simple updates for complex collections. + // This is possible as no `with` is returned by buildFindManyArgs for the blocks field, only if we have a select that does not select that blocks field. + it('ensure simple update of complex collection uses optimized upsertRow with optimized returning() if only simple fields are selected', async () => { + const page = await payload.create({ + collection: 'pages', + data: { + slug: 'test-page', + additional: 'value', + blocks: [ + { + id: '123', + blockType: 'some', + other: 'value', + title: 'Test Block', }, - select: { text: true, number: true }, - })) as any, - ) + ], + }, + }) - expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. If not, this would be 2 calls - consoleCount.mockRestore() + // Count every console log + const consoleCount = jest.spyOn(console, 'log').mockImplementation(() => {}) - expect(res.number).toEqual(1) - expect(res.text).toEqual('new text') - expect(res.id).toEqual(post.id) - expect(Object.keys(res)).toHaveLength(3) + const res = removeEmptyAndUndefined( + (await payload.db.updateOne({ + collection: 'pages', + id: page.id, + select: { + slug: true, + additional: true, + }, + data: { + slug: 'new-slug', + }, + })) as any, + ) + + expect(consoleCount).toHaveBeenCalledTimes(1) // Should be 1 single sql call if the optimization is used. 
If not, this would be 2 calls + consoleCount.mockRestore() + + expect(res.slug).toEqual('new-slug') + expect(res.additional).toEqual('value') + expect(res.id).toEqual(page.id) + expect(Object.keys(res)).toHaveLength(3) + + // Do full find without select just to ensure that the update worked + const fullPage: any = await payload.findByID({ + collection: 'pages', + id: page.id, + }) + + delete fullPage.createdAt + delete fullPage.updatedAt + delete fullPage.array + delete fullPage.content + + expect(fullPage).toEqual({ + id: page.id, + slug: 'new-slug', + additional: 'value', + relatedPage: null, + blocks: [ + { + id: '123', + blockType: 'some', + blockName: null, + other: 'value', + title: 'Test Block', + }, + ], }) }) }) @@ -102,14 +172,16 @@ function removeEmptyAndUndefined(obj: any): any { if (obj !== null && typeof obj === 'object') { const cleanedEntries = Object.entries(obj) .map(([key, value]) => [key, removeEmptyAndUndefined(value)]) - .filter( - ([, value]) => + .filter(([, value]) => { + return ( value !== undefined && + value !== null && !( typeof value === 'object' && (Array.isArray(value) ? value.length === 0 : Object.keys(value).length === 0) - ), - ) + ) + ) + }) return cleanedEntries.length > 0 ? Object.fromEntries(cleanedEntries) : undefined } From 1ad7b55e057907c524c5200c843b7d2e56845211 Mon Sep 17 00:00:00 2001 From: Alessio Gravili Date: Thu, 24 Jul 2025 05:04:16 -0700 Subject: [PATCH 069/143] refactor(drizzle): use getTableName utility (#13257) ~~Sometimes, drizzle is adding the same join to the joins array twice (`addJoinTable`), despite the table being the same. This is due to a bug in `getNameFromDrizzleTable` where it would sometimes return a UUID instead of the table name.~~ ~~This PR changes it to read from the drizzle:BaseName symbol instead, which is correctly returning the table name in my testing. It falls back to `getTableName`, which uses drizzle:Name.~~ This for some reason fails the tests. Instead, this PR just uses the getTableName utility now instead of searching for the symbol manually. 
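For reference, a small sketch of what the utility gives us, assuming an ordinary pg-core table (the `posts` table here is illustrative, not from this repo): `getTableName` resolves drizzle's internal name symbol directly, instead of scanning `Object.getOwnPropertySymbols` for a symbol whose description contains `Name`, as the previous implementation did.

```ts
import { getTableName } from 'drizzle-orm'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'

// Illustrative table definition
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

// Resolves the actual table name rather than an unrelated symbol value
console.log(getTableName(posts)) // 'posts'
```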
--- packages/drizzle/src/utilities/getNameFromDrizzleTable.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts b/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts index 7395c46ab9..e8c4233f9a 100644 --- a/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts +++ b/packages/drizzle/src/utilities/getNameFromDrizzleTable.ts @@ -1,9 +1,7 @@ import type { Table } from 'drizzle-orm' -export const getNameFromDrizzleTable = (table: Table): string => { - const symbol = Object.getOwnPropertySymbols(table).find((symb) => - symb.description.includes('Name'), - ) +import { getTableName } from 'drizzle-orm' - return table[symbol] +export const getNameFromDrizzleTable = (table: Table): string => { + return getTableName(table) } From 7ae4f8c709c429ec8e20bef9eb65dc94b1fe1643 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 24 Jul 2025 16:29:53 +0300 Subject: [PATCH 070/143] docs: add `status` to forbidden field names when using Postgres and drafts are enabled (#13233) Fixes https://github.com/payloadcms/payload/issues/13144 --- docs/fields/overview.mdx | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/fields/overview.mdx b/docs/fields/overview.mdx index 7ecc88d5f2..be4ffe2238 100644 --- a/docs/fields/overview.mdx +++ b/docs/fields/overview.mdx @@ -157,6 +157,7 @@ The following field names are forbidden and cannot be used: - `salt` - `hash` - `file` +- `status` - with Postgres Adapter and when drafts are enabled ### Field-level Hooks From e48427e59a149711edbd76fe178eb348bf6123c4 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Thu, 24 Jul 2025 10:12:45 -0400 Subject: [PATCH 071/143] feat(ui): expose refresh method to list drawer context (#13173) --- .../src/elements/ListDrawer/DrawerContent.tsx | 61 +++++++--- .../ui/src/elements/ListDrawer/Provider.tsx | 12 +- packages/ui/src/elements/ListDrawer/index.tsx | 19 +++ .../CustomListDrawer/Component.tsx | 60 +++++++++ .../collections/CustomListDrawer/index.ts | 16 +++ test/admin/config.ts | 2 + test/admin/e2e/list-view/e2e.spec.ts | 36 ++++++ test/admin/payload-types.ts | 23 ++++ tsconfig.base.json | 114 +++++++++++++----- 9 files changed, 293 insertions(+), 50 deletions(-) create mode 100644 test/admin/collections/CustomListDrawer/Component.tsx create mode 100644 test/admin/collections/CustomListDrawer/index.ts diff --git a/packages/ui/src/elements/ListDrawer/DrawerContent.tsx b/packages/ui/src/elements/ListDrawer/DrawerContent.tsx index 19c7c26c30..37232b8b82 100644 --- a/packages/ui/src/elements/ListDrawer/DrawerContent.tsx +++ b/packages/ui/src/elements/ListDrawer/DrawerContent.tsx @@ -1,10 +1,11 @@ 'use client' -import type { ListQuery } from 'payload' +import type { CollectionSlug, ListQuery } from 'payload' import { useModal } from '@faceless-ui/modal' import { hoistQueryParamsToAnd } from 'payload/shared' import React, { useCallback, useEffect, useState } from 'react' +import type { ListDrawerContextProps, ListDrawerContextType } from '../ListDrawer/Provider.js' import type { ListDrawerProps } from './types.js' import { useDocumentDrawer } from '../../elements/DocumentDrawer/index.js' @@ -25,7 +26,7 @@ export const ListDrawerContent: React.FC = ({ onBulkSelect, onSelect, overrideEntityVisibility = true, - selectedCollection: selectedCollectionFromProps, + selectedCollection: collectionSlugFromProps, }) => { const { closeModal, isModalOpen } = useModal() @@ -45,7 +46,7 @@ export const ListDrawerContent: React.FC 
= ({ }) const [selectedOption, setSelectedOption] = useState>(() => { - const initialSelection = selectedCollectionFromProps || enabledCollections[0]?.slug + const initialSelection = collectionSlugFromProps || enabledCollections[0]?.slug const found = getEntityConfig({ collectionSlug: initialSelection }) return found @@ -61,20 +62,25 @@ export const ListDrawerContent: React.FC = ({ collectionSlug: selectedOption.value, }) - const updateSelectedOption = useEffectEvent((selectedCollectionFromProps: string) => { - if (selectedCollectionFromProps && selectedCollectionFromProps !== selectedOption?.value) { + const updateSelectedOption = useEffectEvent((collectionSlug: CollectionSlug) => { + if (collectionSlug && collectionSlug !== selectedOption?.value) { setSelectedOption({ - label: getEntityConfig({ collectionSlug: selectedCollectionFromProps })?.labels, - value: selectedCollectionFromProps, + label: getEntityConfig({ collectionSlug })?.labels, + value: collectionSlug, }) } }) useEffect(() => { - updateSelectedOption(selectedCollectionFromProps) - }, [selectedCollectionFromProps]) + updateSelectedOption(collectionSlugFromProps) + }, [collectionSlugFromProps]) - const renderList = useCallback( + /** + * This performs a full server round trip to get the list view for the selected collection. + * On the server, the data is freshly queried for the list view and all components are fully rendered. + * This work includes building column state, rendering custom components, etc. + */ + const refresh = useCallback( async ({ slug, query }: { query?: ListQuery; slug: string }) => { try { const newQuery: ListQuery = { ...(query || {}), where: { ...(query?.where || {}) } } @@ -129,9 +135,9 @@ export const ListDrawerContent: React.FC = ({ useEffect(() => { if (!ListView) { - void renderList({ slug: selectedOption?.value }) + void refresh({ slug: selectedOption?.value }) } - }, [renderList, ListView, selectedOption.value]) + }, [refresh, ListView, selectedOption.value]) const onCreateNew = useCallback( ({ doc }) => { @@ -149,19 +155,33 @@ export const ListDrawerContent: React.FC = ({ [closeModal, documentDrawerSlug, drawerSlug, onSelect, selectedOption.value], ) - const onQueryChange = useCallback( - (query: ListQuery) => { - void renderList({ slug: selectedOption?.value, query }) + const onQueryChange: ListDrawerContextProps['onQueryChange'] = useCallback( + (query) => { + void refresh({ slug: selectedOption?.value, query }) }, - [renderList, selectedOption.value], + [refresh, selectedOption.value], ) - const setMySelectedOption = useCallback( - (incomingSelection: Option) => { + const setMySelectedOption: ListDrawerContextProps['setSelectedOption'] = useCallback( + (incomingSelection) => { setSelectedOption(incomingSelection) - void renderList({ slug: incomingSelection?.value }) + void refresh({ slug: incomingSelection?.value }) }, - [renderList], + [refresh], + ) + + const refreshSelf: ListDrawerContextType['refresh'] = useCallback( + async (incomingCollectionSlug) => { + if (incomingCollectionSlug) { + setSelectedOption({ + label: getEntityConfig({ collectionSlug: incomingCollectionSlug })?.labels, + value: incomingCollectionSlug, + }) + } + + await refresh({ slug: selectedOption.value || incomingCollectionSlug }) + }, + [getEntityConfig, refresh, selectedOption.value], ) if (isLoading) { @@ -178,6 +198,7 @@ export const ListDrawerContent: React.FC = ({ onBulkSelect={onBulkSelect} onQueryChange={onQueryChange} onSelect={onSelect} + refresh={refreshSelf} selectedOption={selectedOption} 
setSelectedOption={setMySelectedOption} > diff --git a/packages/ui/src/elements/ListDrawer/Provider.tsx b/packages/ui/src/elements/ListDrawer/Provider.tsx index 8aeb4ed7ae..7a0156ed80 100644 --- a/packages/ui/src/elements/ListDrawer/Provider.tsx +++ b/packages/ui/src/elements/ListDrawer/Provider.tsx @@ -24,12 +24,17 @@ export type ListDrawerContextProps = { */ docID: string }) => void - readonly selectedOption?: Option - readonly setSelectedOption?: (option: Option) => void + readonly selectedOption?: Option + readonly setSelectedOption?: (option: Option) => void } export type ListDrawerContextType = { - isInDrawer: boolean + readonly isInDrawer: boolean + /** + * When called, will either refresh the list view with its currently selected collection. + * If an collection slug is provided, will use that instead of the currently selected one. + */ + readonly refresh: (collectionSlug?: CollectionSlug) => Promise } & ListDrawerContextProps export const ListDrawerContext = createContext({} as ListDrawerContextType) @@ -37,6 +42,7 @@ export const ListDrawerContext = createContext({} as ListDrawerContextType) export const ListDrawerContextProvider: React.FC< { children: React.ReactNode + refresh: ListDrawerContextType['refresh'] } & ListDrawerContextProps > = ({ children, ...rest }) => { return ( diff --git a/packages/ui/src/elements/ListDrawer/index.tsx b/packages/ui/src/elements/ListDrawer/index.tsx index c4eaae6188..342bdb5600 100644 --- a/packages/ui/src/elements/ListDrawer/index.tsx +++ b/packages/ui/src/elements/ListDrawer/index.tsx @@ -51,6 +51,25 @@ export const ListDrawer: React.FC = (props) => { ) } +/** + * Returns an array containing the ListDrawer component, the ListDrawerToggler component, and an object with state and methods for controlling the drawer. 
+ * @example + * import { useListDrawer } from '@payloadcms/ui' + * + * // inside a React component + * const [ListDrawer, ListDrawerToggler, { closeDrawer, openDrawer }] = useListDrawer({ + * collectionSlugs: ['users'], + * selectedCollection: 'users', + * }) + * + * // inside the return statement + * return ( + * <> + * + * Open List Drawer + * + * ) + */ export const useListDrawer: UseListDrawer = ({ collectionSlugs: collectionSlugsFromProps, filterOptions, diff --git a/test/admin/collections/CustomListDrawer/Component.tsx b/test/admin/collections/CustomListDrawer/Component.tsx new file mode 100644 index 0000000000..c3b6c09a23 --- /dev/null +++ b/test/admin/collections/CustomListDrawer/Component.tsx @@ -0,0 +1,60 @@ +'use client' +import { toast, useListDrawer, useListDrawerContext, useTranslation } from '@payloadcms/ui' +import React, { useCallback } from 'react' + +export const CustomListDrawer = () => { + const [isCreating, setIsCreating] = React.useState(false) + + // this is the _outer_ drawer context (if any), not the one for the list drawer below + const { refresh } = useListDrawerContext() + const { t } = useTranslation() + + const [ListDrawer, ListDrawerToggler] = useListDrawer({ + collectionSlugs: ['custom-list-drawer'], + }) + + const createDoc = useCallback(async () => { + if (isCreating) { + return + } + + setIsCreating(true) + + try { + await fetch('/api/custom-list-drawer', { + body: JSON.stringify({}), + credentials: 'include', + headers: { + 'Content-Type': 'application/json', + }, + method: 'POST', + }) + + setIsCreating(false) + + toast.success( + t('general:successfullyCreated', { + label: 'Custom List Drawer', + }), + ) + + // In the root document view, there is no outer drawer context, so this will be `undefined` + if (typeof refresh === 'function') { + await refresh() + } + } catch (_err) { + console.error('Error creating document:', _err) // eslint-disable-line no-console + setIsCreating(false) + } + }, [isCreating, refresh, t]) + + return ( +
    + + + Open list drawer +
    + ) +} diff --git a/test/admin/collections/CustomListDrawer/index.ts b/test/admin/collections/CustomListDrawer/index.ts new file mode 100644 index 0000000000..5a8caefc79 --- /dev/null +++ b/test/admin/collections/CustomListDrawer/index.ts @@ -0,0 +1,16 @@ +import type { CollectionConfig } from 'payload' + +export const CustomListDrawer: CollectionConfig = { + slug: 'custom-list-drawer', + fields: [ + { + name: 'customListDrawer', + type: 'ui', + admin: { + components: { + Field: '/collections/CustomListDrawer/Component.js#CustomListDrawer', + }, + }, + }, + ], +} diff --git a/test/admin/config.ts b/test/admin/config.ts index 52d1d84e4e..69ba64555d 100644 --- a/test/admin/config.ts +++ b/test/admin/config.ts @@ -5,6 +5,7 @@ import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' import { Array } from './collections/Array.js' import { BaseListFilter } from './collections/BaseListFilter.js' import { CustomFields } from './collections/CustomFields/index.js' +import { CustomListDrawer } from './collections/CustomListDrawer/index.js' import { CustomViews1 } from './collections/CustomViews1.js' import { CustomViews2 } from './collections/CustomViews2.js' import { DisableBulkEdit } from './collections/DisableBulkEdit.js' @@ -185,6 +186,7 @@ export default buildConfigWithDefaults({ Placeholder, UseAsTitleGroupField, DisableBulkEdit, + CustomListDrawer, ], globals: [ GlobalHidden, diff --git a/test/admin/e2e/list-view/e2e.spec.ts b/test/admin/e2e/list-view/e2e.spec.ts index 2e8ec07ccf..bdcc5ef83d 100644 --- a/test/admin/e2e/list-view/e2e.spec.ts +++ b/test/admin/e2e/list-view/e2e.spec.ts @@ -1676,6 +1676,42 @@ describe('List View', () => { await expect(page.locator('.list-selection')).toContainText('2 selected') }) + + test('should refresh custom list drawer using the refresh method from context', async () => { + const url = new AdminUrlUtil(serverURL, 'custom-list-drawer') + + await payload.delete({ + collection: 'custom-list-drawer', + where: { id: { exists: true } }, + }) + + const { id } = await payload.create({ + collection: 'custom-list-drawer', + data: {}, + }) + + await page.goto(url.list) + + await expect(page.locator('.table > table > tbody > tr')).toHaveCount(1) + + await page.goto(url.edit(id)) + + await page.locator('#open-custom-list-drawer').click() + const drawer = page.locator('[id^=list-drawer_1_]') + await expect(drawer).toBeVisible() + + await expect(drawer.locator('.table > table > tbody > tr')).toHaveCount(1) + + await drawer.locator('.list-header__create-new-button.doc-drawer__toggler').click() + const createNewDrawer = page.locator('[id^=doc-drawer_custom-list-drawer_1_]') + await createNewDrawer.locator('#create-custom-list-drawer-doc').click() + + await expect(page.locator('.payload-toast-container')).toContainText('successfully') + + await createNewDrawer.locator('.doc-drawer__header-close').click() + + await expect(drawer.locator('.table > table > tbody > tr')).toHaveCount(2) + }) }) async function createPost(overrides?: Partial): Promise { diff --git a/test/admin/payload-types.ts b/test/admin/payload-types.ts index 10e34bc1bd..0a739c4fa0 100644 --- a/test/admin/payload-types.ts +++ b/test/admin/payload-types.ts @@ -93,6 +93,7 @@ export interface Config { placeholder: Placeholder; 'use-as-title-group-field': UseAsTitleGroupField; 'disable-bulk-edit': DisableBulkEdit; + 'custom-list-drawer': CustomListDrawer; 'payload-locked-documents': PayloadLockedDocument; 'payload-preferences': PayloadPreference; 'payload-migrations': PayloadMigration; @@ -125,6 
+126,7 @@ export interface Config { placeholder: PlaceholderSelect | PlaceholderSelect; 'use-as-title-group-field': UseAsTitleGroupFieldSelect | UseAsTitleGroupFieldSelect; 'disable-bulk-edit': DisableBulkEditSelect | DisableBulkEditSelect; + 'custom-list-drawer': CustomListDrawerSelect | CustomListDrawerSelect; 'payload-locked-documents': PayloadLockedDocumentsSelect | PayloadLockedDocumentsSelect; 'payload-preferences': PayloadPreferencesSelect | PayloadPreferencesSelect; 'payload-migrations': PayloadMigrationsSelect | PayloadMigrationsSelect; @@ -565,6 +567,15 @@ export interface DisableBulkEdit { updatedAt: string; createdAt: string; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "custom-list-drawer". + */ +export interface CustomListDrawer { + id: string; + updatedAt: string; + createdAt: string; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "payload-locked-documents". @@ -675,6 +686,10 @@ export interface PayloadLockedDocument { | ({ relationTo: 'disable-bulk-edit'; value: string | DisableBulkEdit; + } | null) + | ({ + relationTo: 'custom-list-drawer'; + value: string | CustomListDrawer; } | null); globalSlug?: string | null; user: { @@ -1074,6 +1089,14 @@ export interface DisableBulkEditSelect { updatedAt?: T; createdAt?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "custom-list-drawer_select". + */ +export interface CustomListDrawerSelect { + updatedAt?: T; + createdAt?: T; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "payload-locked-documents_select". diff --git a/tsconfig.base.json b/tsconfig.base.json index 0898ad390f..5e4e343504 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -21,8 +21,15 @@ "skipLibCheck": true, "emitDeclarationOnly": true, "sourceMap": true, - "lib": ["DOM", "DOM.Iterable", "ES2022"], - "types": ["node", "jest"], + "lib": [ + "DOM", + "DOM.Iterable", + "ES2022" + ], + "types": [ + "node", + "jest" + ], "incremental": true, "isolatedModules": true, "plugins": [ @@ -31,36 +38,72 @@ } ], "paths": { - "@payload-config": ["./test/_community/config.ts"], - "@payloadcms/admin-bar": ["./packages/admin-bar/src"], - "@payloadcms/live-preview": ["./packages/live-preview/src"], - "@payloadcms/live-preview-react": ["./packages/live-preview-react/src/index.ts"], - "@payloadcms/live-preview-vue": ["./packages/live-preview-vue/src/index.ts"], - "@payloadcms/ui": ["./packages/ui/src/exports/client/index.ts"], - "@payloadcms/ui/shared": ["./packages/ui/src/exports/shared/index.ts"], - "@payloadcms/ui/rsc": ["./packages/ui/src/exports/rsc/index.ts"], - "@payloadcms/ui/scss": ["./packages/ui/src/scss.scss"], - "@payloadcms/ui/scss/app.scss": ["./packages/ui/src/scss/app.scss"], - "@payloadcms/next/*": ["./packages/next/src/exports/*.ts"], + "@payload-config": [ + "./test/admin/config.ts" + ], + "@payloadcms/admin-bar": [ + "./packages/admin-bar/src" + ], + "@payloadcms/live-preview": [ + "./packages/live-preview/src" + ], + "@payloadcms/live-preview-react": [ + "./packages/live-preview-react/src/index.ts" + ], + "@payloadcms/live-preview-vue": [ + "./packages/live-preview-vue/src/index.ts" + ], + "@payloadcms/ui": [ + "./packages/ui/src/exports/client/index.ts" + ], + "@payloadcms/ui/shared": [ + "./packages/ui/src/exports/shared/index.ts" + ], + "@payloadcms/ui/rsc": [ + "./packages/ui/src/exports/rsc/index.ts" + ], + "@payloadcms/ui/scss": [ + "./packages/ui/src/scss.scss" + ], + 
"@payloadcms/ui/scss/app.scss": [ + "./packages/ui/src/scss/app.scss" + ], + "@payloadcms/next/*": [ + "./packages/next/src/exports/*.ts" + ], "@payloadcms/richtext-lexical/client": [ "./packages/richtext-lexical/src/exports/client/index.ts" ], - "@payloadcms/richtext-lexical/rsc": ["./packages/richtext-lexical/src/exports/server/rsc.ts"], - "@payloadcms/richtext-slate/rsc": ["./packages/richtext-slate/src/exports/server/rsc.ts"], + "@payloadcms/richtext-lexical/rsc": [ + "./packages/richtext-lexical/src/exports/server/rsc.ts" + ], + "@payloadcms/richtext-slate/rsc": [ + "./packages/richtext-slate/src/exports/server/rsc.ts" + ], "@payloadcms/richtext-slate/client": [ "./packages/richtext-slate/src/exports/client/index.ts" ], - "@payloadcms/plugin-seo/client": ["./packages/plugin-seo/src/exports/client.ts"], - "@payloadcms/plugin-sentry/client": ["./packages/plugin-sentry/src/exports/client.ts"], - "@payloadcms/plugin-stripe/client": ["./packages/plugin-stripe/src/exports/client.ts"], - "@payloadcms/plugin-search/client": ["./packages/plugin-search/src/exports/client.ts"], + "@payloadcms/plugin-seo/client": [ + "./packages/plugin-seo/src/exports/client.ts" + ], + "@payloadcms/plugin-sentry/client": [ + "./packages/plugin-sentry/src/exports/client.ts" + ], + "@payloadcms/plugin-stripe/client": [ + "./packages/plugin-stripe/src/exports/client.ts" + ], + "@payloadcms/plugin-search/client": [ + "./packages/plugin-search/src/exports/client.ts" + ], "@payloadcms/plugin-form-builder/client": [ "./packages/plugin-form-builder/src/exports/client.ts" ], "@payloadcms/plugin-import-export/rsc": [ "./packages/plugin-import-export/src/exports/rsc.ts" ], - "@payloadcms/plugin-multi-tenant/rsc": ["./packages/plugin-multi-tenant/src/exports/rsc.ts"], + "@payloadcms/plugin-multi-tenant/rsc": [ + "./packages/plugin-multi-tenant/src/exports/rsc.ts" + ], "@payloadcms/plugin-multi-tenant/utilities": [ "./packages/plugin-multi-tenant/src/exports/utilities.ts" ], @@ -70,25 +113,42 @@ "@payloadcms/plugin-multi-tenant/client": [ "./packages/plugin-multi-tenant/src/exports/client.ts" ], - "@payloadcms/plugin-multi-tenant": ["./packages/plugin-multi-tenant/src/index.ts"], + "@payloadcms/plugin-multi-tenant": [ + "./packages/plugin-multi-tenant/src/index.ts" + ], "@payloadcms/plugin-multi-tenant/translations/languages/all": [ "./packages/plugin-multi-tenant/src/translations/index.ts" ], "@payloadcms/plugin-multi-tenant/translations/languages/*": [ "./packages/plugin-multi-tenant/src/translations/languages/*.ts" ], - "@payloadcms/next": ["./packages/next/src/exports/*"], - "@payloadcms/storage-azure/client": ["./packages/storage-azure/src/exports/client.ts"], - "@payloadcms/storage-s3/client": ["./packages/storage-s3/src/exports/client.ts"], + "@payloadcms/next": [ + "./packages/next/src/exports/*" + ], + "@payloadcms/storage-azure/client": [ + "./packages/storage-azure/src/exports/client.ts" + ], + "@payloadcms/storage-s3/client": [ + "./packages/storage-s3/src/exports/client.ts" + ], "@payloadcms/storage-vercel-blob/client": [ "./packages/storage-vercel-blob/src/exports/client.ts" ], - "@payloadcms/storage-gcs/client": ["./packages/storage-gcs/src/exports/client.ts"], + "@payloadcms/storage-gcs/client": [ + "./packages/storage-gcs/src/exports/client.ts" + ], "@payloadcms/storage-uploadthing/client": [ "./packages/storage-uploadthing/src/exports/client.ts" ] } }, - "include": ["${configDir}/src"], - "exclude": ["${configDir}/dist", "${configDir}/build", "${configDir}/temp", "**/*.spec.ts"] + "include": [ + 
"${configDir}/src" + ], + "exclude": [ + "${configDir}/dist", + "${configDir}/build", + "${configDir}/temp", + "**/*.spec.ts" + ] } From 8f85da893135e3b97cbb9d5b94783dd4390f019e Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 24 Jul 2025 11:36:46 -0400 Subject: [PATCH 072/143] fix(plugin-import-export): json preview and downloads preserve nesting and exclude disabled fields (#13210) ### What? Improves both the JSON preview and export functionality in the import-export plugin: - Preserves proper nesting of object and array fields (e.g., groups, tabs, arrays) - Excludes any fields explicitly marked as `disabled` via `custom.plugin-import-export` - Ensures downloaded files use proper JSON formatting when `format` is `json` (no CSV-style flattening) ### Why? Previously: - The JSON preview flattened all fields to a single level and included disabled fields. - Exported files with `format: json` were still CSV-style data encoded as `.json`, rather than real JSON. ### How? - Refactored `/preview-data` JSON handling to preserve original document shape. - Applied `removeDisabledFields` to clean nested fields using dot-notation paths. - Updated `createExport` to skip `flattenObject` for JSON formats, using a nested JSON filter instead. - Fixed streaming and buffered export paths to output valid JSON arrays when `format` is `json`. --- .../src/components/Preview/index.tsx | 11 +- .../src/export/createExport.ts | 124 ++++++++++++------ packages/plugin-import-export/src/index.ts | 67 +++++++--- .../src/utilities/getFlattenedFieldKeys.ts | 42 +++--- .../src/utilities/getvalueAtPath.ts | 59 +++++++++ .../src/utilities/removeDisabledFields.ts | 80 +++++++++++ .../src/utilities/setNestedValue.ts | 65 +++++++++ .../plugin-import-export/collections/Pages.ts | 20 +++ test/plugin-import-export/int.spec.ts | 23 ++++ test/plugin-import-export/payload-types.ts | 2 + 10 files changed, 413 insertions(+), 80 deletions(-) create mode 100644 packages/plugin-import-export/src/utilities/getvalueAtPath.ts create mode 100644 packages/plugin-import-export/src/utilities/removeDisabledFields.ts create mode 100644 packages/plugin-import-export/src/utilities/setNestedValue.ts diff --git a/packages/plugin-import-export/src/components/Preview/index.tsx b/packages/plugin-import-export/src/components/Preview/index.tsx index 4cafe1f4fe..3dbdd9a417 100644 --- a/packages/plugin-import-export/src/components/Preview/index.tsx +++ b/packages/plugin-import-export/src/components/Preview/index.tsx @@ -68,6 +68,7 @@ export const Preview = () => { collectionSlug, draft, fields, + format, limit, locale, sort, @@ -115,8 +116,13 @@ export const Preview = () => { const fieldKeys = Array.isArray(fields) && fields.length > 0 - ? selectedKeys // strictly only what was selected - : [...selectedKeys, ...defaultMetaFields.filter((key) => allKeys.includes(key))] + ? 
selectedKeys // strictly use selected fields only + : [ + ...selectedKeys, + ...defaultMetaFields.filter( + (key) => allKeys.includes(key) && !selectedKeys.includes(key), + ), + ] // Build columns based on flattened keys const newColumns: Column[] = fieldKeys.map((key) => ({ @@ -158,6 +164,7 @@ export const Preview = () => { disabledFieldRegexes, draft, fields, + format, i18n, limit, locale, diff --git a/packages/plugin-import-export/src/export/createExport.ts b/packages/plugin-import-export/src/export/createExport.ts index 40e1b954ff..fba5e11a0c 100644 --- a/packages/plugin-import-export/src/export/createExport.ts +++ b/packages/plugin-import-export/src/export/createExport.ts @@ -114,7 +114,7 @@ export const createExport = async (args: CreateExportArgs) => { const disabledRegexes: RegExp[] = disabledFields.map(buildDisabledFieldRegex) - const filterDisabled = (row: Record): Record => { + const filterDisabledCSV = (row: Record): Record => { const filtered: Record = {} for (const [key, value] of Object.entries(row)) { @@ -127,35 +127,62 @@ export const createExport = async (args: CreateExportArgs) => { return filtered } + const filterDisabledJSON = (doc: any, parentPath = ''): any => { + if (Array.isArray(doc)) { + return doc.map((item) => filterDisabledJSON(item, parentPath)) + } + + if (typeof doc !== 'object' || doc === null) { + return doc + } + + const filtered: Record = {} + for (const [key, value] of Object.entries(doc)) { + const currentPath = parentPath ? `${parentPath}.${key}` : key + + // Only remove if this exact path is disabled + const isDisabled = disabledFields.includes(currentPath) + + if (!isDisabled) { + filtered[key] = filterDisabledJSON(value, currentPath) + } + } + + return filtered + } + if (download) { if (debug) { req.payload.logger.debug('Pre-scanning all columns before streaming') } - const allColumnsSet = new Set() const allColumns: string[] = [] - let scanPage = 1 - let hasMore = true - while (hasMore) { - const result = await payload.find({ ...findArgs, page: scanPage }) + if (isCSV) { + const allColumnsSet = new Set() + let scanPage = 1 + let hasMore = true - result.docs.forEach((doc) => { - const flat = filterDisabled(flattenObject({ doc, fields, toCSVFunctions })) - Object.keys(flat).forEach((key) => { - if (!allColumnsSet.has(key)) { - allColumnsSet.add(key) - allColumns.push(key) - } + while (hasMore) { + const result = await payload.find({ ...findArgs, page: scanPage }) + + result.docs.forEach((doc) => { + const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })) + Object.keys(flat).forEach((key) => { + if (!allColumnsSet.has(key)) { + allColumnsSet.add(key) + allColumns.push(key) + } + }) }) - }) - hasMore = result.hasNextPage - scanPage += 1 - } + hasMore = result.hasNextPage + scanPage += 1 + } - if (debug) { - req.payload.logger.debug(`Discovered ${allColumns.length} columns`) + if (debug) { + req.payload.logger.debug(`Discovered ${allColumns.length} columns`) + } } const encoder = new TextEncoder() @@ -171,28 +198,48 @@ export const createExport = async (args: CreateExportArgs) => { } if (result.docs.length === 0) { + // Close JSON array properly if JSON + if (!isCSV) { + this.push(encoder.encode(']')) + } this.push(null) return } - const batchRows = result.docs.map((doc) => - filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), - ) + if (isCSV) { + // --- CSV Streaming --- + const batchRows = result.docs.map((doc) => + filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })), + ) - const paddedRows = 
batchRows.map((row) => { - const fullRow: Record = {} - for (const col of allColumns) { - fullRow[col] = row[col] ?? '' + const paddedRows = batchRows.map((row) => { + const fullRow: Record = {} + for (const col of allColumns) { + fullRow[col] = row[col] ?? '' + } + return fullRow + }) + + const csvString = stringify(paddedRows, { + header: isFirstBatch, + columns: allColumns, + }) + + this.push(encoder.encode(csvString)) + } else { + // --- JSON Streaming --- + const batchRows = result.docs.map((doc) => filterDisabledJSON(doc)) + + // Convert each filtered/flattened row into JSON string + const batchJSON = batchRows.map((row) => JSON.stringify(row)).join(',') + + if (isFirstBatch) { + this.push(encoder.encode('[' + batchJSON)) + } else { + this.push(encoder.encode(',' + batchJSON)) } - return fullRow - }) + } - const csvString = stringify(paddedRows, { - header: isFirstBatch, - columns: allColumns, - }) - - this.push(encoder.encode(csvString)) isFirstBatch = false streamPage += 1 @@ -200,6 +247,9 @@ export const createExport = async (args: CreateExportArgs) => { if (debug) { req.payload.logger.debug('Stream complete - no more pages') } + if (!isCSV) { + this.push(encoder.encode(']')) + } this.push(null) // End the stream } }, @@ -239,7 +289,7 @@ export const createExport = async (args: CreateExportArgs) => { if (isCSV) { const batchRows = result.docs.map((doc) => - filterDisabled(flattenObject({ doc, fields, toCSVFunctions })), + filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions })), ) // Track discovered column keys @@ -254,8 +304,8 @@ export const createExport = async (args: CreateExportArgs) => { rows.push(...batchRows) } else { - const jsonInput = result.docs.map((doc) => JSON.stringify(doc)) - outputData.push(jsonInput.join(',\n')) + const batchRows = result.docs.map((doc) => filterDisabledJSON(doc)) + outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n')) } hasNextPage = result.hasNextPage diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index a64e80bf15..366d444c54 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -13,6 +13,9 @@ import { getExportCollection } from './getExportCollection.js' import { translations } from './translations/index.js' import { collectDisabledFieldPaths } from './utilities/collectDisabledFieldPaths.js' import { getFlattenedFieldKeys } from './utilities/getFlattenedFieldKeys.js' +import { getValueAtPath } from './utilities/getvalueAtPath.js' +import { removeDisabledFields } from './utilities/removeDisabledFields.js' +import { setNestedValue } from './utilities/setNestedValue.js' export const importExportPlugin = (pluginConfig: ImportExportPluginConfig) => @@ -91,6 +94,7 @@ export const importExportPlugin = collectionSlug: string draft?: 'no' | 'yes' fields?: string[] + format?: 'csv' | 'json' limit?: number locale?: string sort?: any @@ -120,29 +124,58 @@ export const importExportPlugin = where, }) + const isCSV = req?.data?.format === 'csv' const docs = result.docs - const toCSVFunctions = getCustomFieldFunctions({ - fields: collection.config.fields as FlattenedField[], - }) + let transformed: Record[] = [] - const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[]) - - const transformed = docs.map((doc) => { - const row = flattenObject({ - doc, - fields, - toCSVFunctions, + if (isCSV) { + const toCSVFunctions = getCustomFieldFunctions({ + fields: collection.config.fields as 
FlattenedField[], }) - for (const key of possibleKeys) { - if (!(key in row)) { - row[key] = null - } - } + const possibleKeys = getFlattenedFieldKeys(collection.config.fields as FlattenedField[]) - return row - }) + transformed = docs.map((doc) => { + const row = flattenObject({ + doc, + fields, + toCSVFunctions, + }) + + for (const key of possibleKeys) { + if (!(key in row)) { + row[key] = null + } + } + + return row + }) + } else { + const disabledFields = + collection.config.admin.custom?.['plugin-import-export']?.disabledFields + + transformed = docs.map((doc) => { + let output: Record = { ...doc } + + // Remove disabled fields first + output = removeDisabledFields(output, disabledFields) + + // Then trim to selected fields only (if fields are provided) + if (Array.isArray(fields) && fields.length > 0) { + const trimmed: Record = {} + + for (const key of fields) { + const value = getValueAtPath(output, key) + setNestedValue(trimmed, key, value ?? null) + } + + output = trimmed + } + + return output + }) + } return Response.json({ docs: transformed, diff --git a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts index f124208dc9..db25206b8b 100644 --- a/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts +++ b/packages/plugin-import-export/src/utilities/getFlattenedFieldKeys.ts @@ -22,21 +22,18 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix 'plugin-import-export' in field.custom && field.custom['plugin-import-export']?.toCSV - if (!('name' in field) || typeof field.name !== 'string' || fieldHasToCSVFunction) { - return - } - - const name = prefix ? `${prefix}_${field.name}` : field.name + const name = 'name' in field && typeof field.name === 'string' ? field.name : undefined + const fullKey = name && prefix ? `${prefix}_${name}` : (name ?? prefix) switch (field.type) { case 'array': { - const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${name}_0`) + const subKeys = getFlattenedFieldKeys(field.fields as FlattenedField[], `${fullKey}_0`) keys.push(...subKeys) break } case 'blocks': { field.blocks.forEach((block) => { - const blockPrefix = `${name}_0_${block.slug}` + const blockPrefix = `${fullKey}_0_${block.slug}` keys.push(`${blockPrefix}_blockType`) keys.push(`${blockPrefix}_id`) keys.push(...getFlattenedFieldKeys(block.fields as FlattenedField[], blockPrefix)) @@ -46,45 +43,42 @@ export const getFlattenedFieldKeys = (fields: FieldWithPresentational[], prefix case 'collapsible': case 'group': case 'row': - keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], name)) + keys.push(...getFlattenedFieldKeys(field.fields as FlattenedField[], fullKey)) break case 'relationship': if (field.hasMany) { if (Array.isArray(field.relationTo)) { // hasMany polymorphic - keys.push(`${name}_0_relationTo`, `${name}_0_id`) + keys.push(`${fullKey}_0_relationTo`, `${fullKey}_0_id`) } else { // hasMany monomorphic - keys.push(`${name}_0`) + keys.push(`${fullKey}_0`) } } else { if (Array.isArray(field.relationTo)) { // hasOne polymorphic - keys.push(`${name}_relationTo`, `${name}_id`) + keys.push(`${fullKey}_relationTo`, `${fullKey}_id`) } else { // hasOne monomorphic - keys.push(name) + keys.push(fullKey) } } break case 'tabs': - if (field.tabs) { - field.tabs.forEach((tab) => { - if (tab.name) { - const tabPrefix = prefix ? 
`${prefix}_${tab.name}` : tab.name - keys.push(...getFlattenedFieldKeys(tab.fields, tabPrefix)) - } else { - keys.push(...getFlattenedFieldKeys(tab.fields, prefix)) - } - }) - } + field.tabs?.forEach((tab) => { + const tabPrefix = tab.name ? `${fullKey}_${tab.name}` : fullKey + keys.push(...getFlattenedFieldKeys(tab.fields || [], tabPrefix)) + }) break default: + if (!name || fieldHasToCSVFunction) { + break + } if ('hasMany' in field && field.hasMany) { // Push placeholder for first index - keys.push(`${name}_0`) + keys.push(`${fullKey}_0`) } else { - keys.push(name) + keys.push(fullKey) } break } diff --git a/packages/plugin-import-export/src/utilities/getvalueAtPath.ts b/packages/plugin-import-export/src/utilities/getvalueAtPath.ts new file mode 100644 index 0000000000..4173b51730 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/getvalueAtPath.ts @@ -0,0 +1,59 @@ +/** + * Safely retrieves a deeply nested value from an object using a dot-notation path. + * + * Supports: + * - Indexed array access (e.g., "array.0.field1") + * - Polymorphic blocks or keyed unions (e.g., "blocks.0.hero.title"), where the block key + * (e.g., "hero") maps to a nested object inside the block item. + * + * + * @param obj - The input object to traverse. + * @param path - A dot-separated string representing the path to retrieve. + * @returns The value at the specified path, or undefined if not found. + */ +export const getValueAtPath = (obj: unknown, path: string): unknown => { + if (!obj || typeof obj !== 'object') { + return undefined + } + + const parts = path.split('.') + let current: any = obj + + for (const part of parts) { + if (current == null) { + return undefined + } + + // If the path part is a number, treat it as an array index + if (!isNaN(Number(part))) { + current = current[Number(part)] + continue + } + + // Special case: if current is an array of blocks like [{ hero: { title: '...' } }] + // and the path is "blocks.0.hero.title", then `part` would be "hero" + if (Array.isArray(current)) { + const idx = Number(parts[parts.indexOf(part) - 1]) + const blockItem = current[idx] + + if (typeof blockItem === 'object') { + const keys = Object.keys(blockItem) + + // Find the key (e.g., "hero") that maps to an object + const matchingBlock = keys.find( + (key) => blockItem[key] && typeof blockItem[key] === 'object', + ) + + if (matchingBlock && part === matchingBlock) { + current = blockItem[matchingBlock] + continue + } + } + } + + // Fallback to plain object key access + current = current[part] + } + + return current +} diff --git a/packages/plugin-import-export/src/utilities/removeDisabledFields.ts b/packages/plugin-import-export/src/utilities/removeDisabledFields.ts new file mode 100644 index 0000000000..4f68799b42 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/removeDisabledFields.ts @@ -0,0 +1,80 @@ +/** + * Recursively removes fields from a deeply nested object based on dot-notation paths. + * + * This utility supports removing: + * - Nested fields in plain objects (e.g., "group.value") + * - Fields inside arrays of objects (e.g., "group.array.field1") + * + * It safely traverses both object and array structures and avoids mutating the original input. + * + * @param obj - The original object to clean. + * @param disabled - An array of dot-separated paths indicating which fields to remove. + * @returns A deep clone of the original object with specified fields removed. 
+ */ + +export const removeDisabledFields = ( + obj: Record, + disabled: string[] = [], +): Record => { + if (!disabled.length) { + return obj + } + + const clone = structuredClone(obj) + + // Process each disabled path independently + for (const path of disabled) { + const parts = path.split('.') + + /** + * Recursively walks the object tree according to the dot path, + * and deletes the field once the full path is reached. + * + * @param target - The current object or array being traversed + * @param i - The index of the current path part + */ + const removeRecursively = (target: any, i = 0): void => { + if (target == null) { + return + } + + const key = parts[i] + + // If at the final part of the path, perform the deletion + if (i === parts.length - 1) { + // If the current level is an array, delete the key from each item + if (Array.isArray(target)) { + for (const item of target) { + if (item && typeof item === 'object' && key !== undefined) { + delete item[key as keyof typeof item] + } + } + } else if (typeof target === 'object' && key !== undefined) { + delete target[key] + } + return + } + + if (key === undefined) { + return + } + + // Traverse to the next level in the path + const next = target[key] + + if (Array.isArray(next)) { + // If the next value is an array, recurse into each item + for (const item of next) { + removeRecursively(item, i + 1) + } + } else { + // Otherwise, continue down the object path + removeRecursively(next, i + 1) + } + } + + removeRecursively(clone) + } + + return clone +} diff --git a/packages/plugin-import-export/src/utilities/setNestedValue.ts b/packages/plugin-import-export/src/utilities/setNestedValue.ts new file mode 100644 index 0000000000..89e5487329 --- /dev/null +++ b/packages/plugin-import-export/src/utilities/setNestedValue.ts @@ -0,0 +1,65 @@ +/** + * Sets a value deeply into a nested object or array, based on a dot-notation path. + * + * This function: + * - Supports array indexing (e.g., "array.0.field1") + * - Creates intermediate arrays/objects as needed + * - Mutates the target object directly + * + * @example + * const obj = {} + * setNestedValue(obj, 'group.array.0.field1', 'hello') + * // Result: { group: { array: [ { field1: 'hello' } ] } } + * + * @param obj - The target object to mutate. + * @param path - A dot-separated string path indicating where to assign the value. + * @param value - The value to set at the specified path. 
+ */ + +export const setNestedValue = ( + obj: Record, + path: string, + value: unknown, +): void => { + const parts = path.split('.') + let current: any = obj + + for (let i = 0; i < parts.length; i++) { + const part = parts[i] + const isLast = i === parts.length - 1 + const isIndex = !Number.isNaN(Number(part)) + + if (isIndex) { + const index = Number(part) + + // Ensure the current target is an array + if (!Array.isArray(current)) { + current = [] + } + + // Ensure the array slot is initialized + if (!current[index]) { + current[index] = {} + } + + if (isLast) { + current[index] = value + } else { + current = current[index] as Record + } + } else { + // Ensure the object key exists + if (isLast) { + if (typeof part === 'string') { + current[part] = value + } + } else { + if (typeof current[part as string] !== 'object' || current[part as string] === null) { + current[part as string] = {} + } + + current = current[part as string] as Record + } + } + } +} diff --git a/test/plugin-import-export/collections/Pages.ts b/test/plugin-import-export/collections/Pages.ts index 35f38032fd..818978b15e 100644 --- a/test/plugin-import-export/collections/Pages.ts +++ b/test/plugin-import-export/collections/Pages.ts @@ -61,6 +61,11 @@ export const Pages: CollectionConfig = { name: 'value', type: 'text', defaultValue: 'group value', + // custom: { + // 'plugin-import-export': { + // disabled: true, + // }, + // }, }, { name: 'ignore', @@ -216,5 +221,20 @@ export const Pages: CollectionConfig = { relationTo: ['users', 'posts'], hasMany: true, }, + { + type: 'collapsible', + label: 'Collapsible Field', + fields: [ + { + name: 'textFieldInCollapsible', + type: 'text', + // custom: { + // 'plugin-import-export': { + // disabled: true, + // }, + // }, + }, + ], + }, ], } diff --git a/test/plugin-import-export/int.spec.ts b/test/plugin-import-export/int.spec.ts index 64d2516de0..caa57f5d2e 100644 --- a/test/plugin-import-export/int.spec.ts +++ b/test/plugin-import-export/int.spec.ts @@ -467,6 +467,29 @@ describe('@payloadcms/plugin-import-export', () => { expect(data[0].title).toStrictEqual('JSON 0') }) + it('should download an existing export JSON file', async () => { + const response = await restClient.POST('/exports/download', { + body: JSON.stringify({ + data: { + collectionSlug: 'pages', + fields: ['id', 'title'], + format: 'json', + sort: 'title', + }, + }), + headers: { 'Content-Type': 'application/json' }, + }) + + expect(response.status).toBe(200) + expect(response.headers.get('content-type')).toMatch(/application\/json/) + + const data = await response.json() + + expect(Array.isArray(data)).toBe(true) + expect(['string', 'number']).toContain(typeof data[0].id) + expect(typeof data[0].title).toBe('string') + }) + it('should create an export with every field when no fields are defined', async () => { let doc = await payload.create({ collection: 'exports', diff --git a/test/plugin-import-export/payload-types.ts b/test/plugin-import-export/payload-types.ts index 83ef2a640d..c598e3b25e 100644 --- a/test/plugin-import-export/payload-types.ts +++ b/test/plugin-import-export/payload-types.ts @@ -242,6 +242,7 @@ export interface Page { } )[] | null; + textFieldInCollapsible?: string | null; updatedAt: string; createdAt: string; _status?: ('draft' | 'published') | null; @@ -579,6 +580,7 @@ export interface PagesSelect { excerpt?: T; hasOnePolymorphic?: T; hasManyPolymorphic?: T; + textFieldInCollapsible?: T; updatedAt?: T; createdAt?: T; _status?: T; From a83ed5ebb5ce6a03ff691c41f24c4b582d51a5f1 Mon Sep 17 
00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Thu, 24 Jul 2025 18:42:17 +0300 Subject: [PATCH 073/143] fix(db-postgres): search is broken when `useAsTitle` is not specified (#13232) Fixes https://github.com/payloadcms/payload/issues/13171 --- packages/drizzle/src/queries/parseParams.ts | 5 ++++- test/database/int.spec.ts | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/drizzle/src/queries/parseParams.ts b/packages/drizzle/src/queries/parseParams.ts index b43dad70a4..9c12c69416 100644 --- a/packages/drizzle/src/queries/parseParams.ts +++ b/packages/drizzle/src/queries/parseParams.ts @@ -219,7 +219,10 @@ export function parseParams({ if ( operator === 'like' && - (field.type === 'number' || table[columnName].columnType === 'PgUUID') + (field.type === 'number' || + field.type === 'relationship' || + field.type === 'upload' || + table[columnName].columnType === 'PgUUID') ) { operator = 'equals' } diff --git a/test/database/int.spec.ts b/test/database/int.spec.ts index 8cc74b84f7..c4cbff4393 100644 --- a/test/database/int.spec.ts +++ b/test/database/int.spec.ts @@ -3001,6 +3001,21 @@ describe('database', () => { } }) + it('should allow to query like by ID with draft: true', async () => { + const category = await payload.create({ + collection: 'categories', + data: { title: 'category123' }, + }) + const res = await payload.find({ + collection: 'categories', + draft: true, + // eslint-disable-next-line jest/no-conditional-in-test + where: { id: { like: typeof category.id === 'number' ? `${category.id}` : category.id } }, + }) + expect(res.docs).toHaveLength(1) + expect(res.docs[0].id).toBe(category.id) + }) + it('should allow incremental number update', async () => { const post = await payload.create({ collection: 'posts', data: { number: 1, title: 'post' } }) From 7e81d30808bf6a9ea3256e060c05d0133ae31b1f Mon Sep 17 00:00:00 2001 From: Patrik <35232443+PatrikKozak@users.noreply.github.com> Date: Thu, 24 Jul 2025 12:18:49 -0400 Subject: [PATCH 074/143] fix(ui): ensure document unlocks when logging out from edit view of a locked document (#13142) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What? Refactors the `LeaveWithoutSaving` modal to be generic and delegates document unlock logic back to the `DefaultEditView` component via a callback. ### Why? Previously, `unlockDocument` was triggered in a cleanup `useEffect` in the edit view. When logging out from the edit view, the unlock request would often fail due to the session ending — leaving the document in a locked state. ### How? - Introduced `onConfirm` and `onPrevent` props for `LeaveWithoutSaving`. - Moved all document lock/unlock logic into `DefaultEditView`’s `handleLeaveConfirm`. - Captures the next navigation target via `onPrevent` and evaluates whether to unlock based on: - Locking being enabled. - Current user owning the lock. - Navigation not targeting internal admin views (`/preview`, `/api`, `/versions`). 
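The check itself boils down to something like the sketch below (illustrative only; the real logic is the `handleLeaveConfirm` callback in `packages/ui/src/views/Edit/index.tsx` further down, and `shouldUnlockOnLeave` is a hypothetical helper, not an export of `@payloadcms/ui`):

```ts
// Hypothetical distillation of the decision described above
const INTERNAL_VIEW_SEGMENTS = ['/preview', '/api', '/versions']

export const shouldUnlockOnLeave = (args: {
  currentUserID: number | string
  lockOwnerID?: null | number | string
  nextHref: null | string
}): boolean => {
  const { currentUserID, lockOwnerID, nextHref } = args

  // Only the user who currently owns the lock may release it
  if (lockOwnerID !== currentUserID) {
    return false
  }

  // Keep the lock when the navigation target is an internal view of the same document
  const nextPath = nextHref ? new URL(nextHref).pathname : ''
  return !INTERNAL_VIEW_SEGMENTS.some((segment) => nextPath.includes(segment))
}
```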
--------- Co-authored-by: Jarrod Flesch --- .../views/CreateFirstUser/index.client.tsx | 9 +- .../src/elements/LeaveWithoutSaving/index.tsx | 30 +- .../ui/src/elements/RenderTitle/index.tsx | 1 + .../ui/src/providers/DocumentInfo/index.tsx | 11 + .../ui/src/providers/DocumentInfo/types.ts | 5 + packages/ui/src/views/Edit/index.tsx | 121 +++-- test/auth/config.ts | 31 +- test/auth/e2e.spec.ts | 453 ++++++++++-------- test/auth/payload-types.ts | 36 +- test/auth/seed.ts | 34 ++ test/helpers.ts | 24 +- test/helpers/reInitEndpoint.ts | 5 +- test/locked-documents/payload-types.ts | 14 + 13 files changed, 473 insertions(+), 301 deletions(-) create mode 100644 test/auth/seed.ts diff --git a/packages/next/src/views/CreateFirstUser/index.client.tsx b/packages/next/src/views/CreateFirstUser/index.client.tsx index caaabdaa09..d1462cfae5 100644 --- a/packages/next/src/views/CreateFirstUser/index.client.tsx +++ b/packages/next/src/views/CreateFirstUser/index.client.tsx @@ -85,7 +85,14 @@ export const CreateFirstUserClient: React.FC<{ return (
    { +type LeaveWithoutSavingProps = { + onConfirm?: () => Promise | void + onPrevent?: (nextHref: null | string) => void +} + +export const LeaveWithoutSaving: React.FC = ({ onConfirm, onPrevent }) => { const { closeModal, openModal } = useModal() const modified = useFormModified() const { isValid } = useForm() @@ -22,23 +27,34 @@ export const LeaveWithoutSaving: React.FC = () => { const prevent = Boolean((modified || !isValid) && user) - const onPrevent = useCallback(() => { + const handlePrevent = useCallback(() => { + const activeHref = (document.activeElement as HTMLAnchorElement)?.href || null + if (onPrevent) { + onPrevent(activeHref) + } openModal(modalSlug) - }, [openModal]) + }, [openModal, onPrevent]) const handleAccept = useCallback(() => { closeModal(modalSlug) }, [closeModal]) - usePreventLeave({ hasAccepted, onAccept: handleAccept, onPrevent, prevent }) + usePreventLeave({ hasAccepted, onAccept: handleAccept, onPrevent: handlePrevent, prevent }) const onCancel: OnCancel = useCallback(() => { closeModal(modalSlug) }, [closeModal]) - const onConfirm = useCallback(() => { + const handleConfirm = useCallback(async () => { + if (onConfirm) { + try { + await onConfirm() + } catch (err) { + console.error('Error in LeaveWithoutSaving onConfirm:', err) + } + } setHasAccepted(true) - }, []) + }, [onConfirm]) return ( { heading={t('general:leaveWithoutSaving')} modalSlug={modalSlug} onCancel={onCancel} - onConfirm={onConfirm} + onConfirm={handleConfirm} /> ) } diff --git a/packages/ui/src/elements/RenderTitle/index.tsx b/packages/ui/src/elements/RenderTitle/index.tsx index c0e51e5c2d..cdd105aa0a 100644 --- a/packages/ui/src/elements/RenderTitle/index.tsx +++ b/packages/ui/src/elements/RenderTitle/index.tsx @@ -36,6 +36,7 @@ export const RenderTitle: React.FC = (props) => { className={[className, baseClass, idAsTitle && `${baseClass}--has-id`] .filter(Boolean) .join(' ')} + data-doc-id={id} title={title} > {isInitializing ? 
( diff --git a/packages/ui/src/providers/DocumentInfo/index.tsx b/packages/ui/src/providers/DocumentInfo/index.tsx index b98e4f944d..0fdb4d9324 100644 --- a/packages/ui/src/providers/DocumentInfo/index.tsx +++ b/packages/ui/src/providers/DocumentInfo/index.tsx @@ -113,6 +113,16 @@ const DocumentInfo: React.FC< 'idle', ) + const documentLockState = useRef<{ + hasShownLockedModal: boolean + isLocked: boolean + user: ClientUser | number | string + } | null>({ + hasShownLockedModal: false, + isLocked: false, + user: null, + }) + const updateUploadStatus = useCallback( (status: 'failed' | 'idle' | 'uploading') => { setUploadStatus(status) @@ -344,6 +354,7 @@ const DocumentInfo: React.FC< docConfig, docPermissions, documentIsLocked, + documentLockState, getDocPermissions, getDocPreferences, hasPublishedDoc, diff --git a/packages/ui/src/providers/DocumentInfo/types.ts b/packages/ui/src/providers/DocumentInfo/types.ts index 93ec9674e2..273df0aa7c 100644 --- a/packages/ui/src/providers/DocumentInfo/types.ts +++ b/packages/ui/src/providers/DocumentInfo/types.ts @@ -49,6 +49,11 @@ export type DocumentInfoContext = { currentEditor?: ClientUser | null | number | string docConfig?: ClientCollectionConfig | ClientGlobalConfig documentIsLocked?: boolean + documentLockState: React.RefObject<{ + hasShownLockedModal: boolean + isLocked: boolean + user: ClientUser | number | string + } | null> getDocPermissions: (data?: Data) => Promise getDocPreferences: () => Promise incrementVersionCount: () => void diff --git a/packages/ui/src/views/Edit/index.tsx b/packages/ui/src/views/Edit/index.tsx index 6de54ad36e..b4b5446861 100644 --- a/packages/ui/src/views/Edit/index.tsx +++ b/packages/ui/src/views/Edit/index.tsx @@ -70,6 +70,7 @@ export function DefaultEditView({ disableLeaveWithoutSaving, docPermissions, documentIsLocked, + documentLockState, getDocPermissions, getDocPreferences, globalSlug, @@ -164,16 +165,6 @@ export function DefaultEditView({ const isLockExpired = Date.now() > lockExpiryTime - const documentLockStateRef = useRef<{ - hasShownLockedModal: boolean - isLocked: boolean - user: ClientUser | number | string - } | null>({ - hasShownLockedModal: false, - isLocked: false, - user: null, - }) - const schemaPathSegments = useMemo(() => [entitySlug], [entitySlug]) const [validateBeforeSubmit, setValidateBeforeSubmit] = useState(() => { @@ -184,13 +175,15 @@ export function DefaultEditView({ return false }) + const nextHrefRef = React.useRef(null) + const handleDocumentLocking = useCallback( (lockedState: LockedState) => { setDocumentIsLocked(true) const previousOwnerID = - typeof documentLockStateRef.current?.user === 'object' - ? documentLockStateRef.current?.user?.id - : documentLockStateRef.current?.user + typeof documentLockState.current?.user === 'object' + ? documentLockState.current?.user?.id + : documentLockState.current?.user if (lockedState) { const lockedUserID = @@ -198,14 +191,14 @@ export function DefaultEditView({ ? 
lockedState.user : lockedState.user.id - if (!documentLockStateRef.current || lockedUserID !== previousOwnerID) { + if (!documentLockState.current || lockedUserID !== previousOwnerID) { if (previousOwnerID === user.id && lockedUserID !== user.id) { setShowTakeOverModal(true) - documentLockStateRef.current.hasShownLockedModal = true + documentLockState.current.hasShownLockedModal = true } - documentLockStateRef.current = { - hasShownLockedModal: documentLockStateRef.current?.hasShownLockedModal || false, + documentLockState.current = { + hasShownLockedModal: documentLockState.current?.hasShownLockedModal || false, isLocked: true, user: lockedState.user as ClientUser, } @@ -213,9 +206,52 @@ export function DefaultEditView({ } } }, - [setCurrentEditor, setDocumentIsLocked, user?.id], + [documentLockState, setCurrentEditor, setDocumentIsLocked, user?.id], ) + const handlePrevent = useCallback((nextHref: null | string) => { + nextHrefRef.current = nextHref + }, []) + + const handleLeaveConfirm = useCallback(async () => { + const lockUser = documentLockState.current?.user + + const isLockOwnedByCurrentUser = + typeof lockUser === 'object' ? lockUser?.id === user?.id : lockUser === user?.id + + if (isLockingEnabled && documentIsLocked && (id || globalSlug)) { + // Check where user is trying to go + const nextPath = nextHrefRef.current ? new URL(nextHrefRef.current).pathname : '' + const isInternalView = ['/preview', '/api', '/versions'].some((path) => + nextPath.includes(path), + ) + + // Only retain the lock if the user is still viewing the document + if (!isInternalView) { + if (isLockOwnedByCurrentUser) { + try { + await unlockDocument(id, collectionSlug ?? globalSlug) + setDocumentIsLocked(false) + setCurrentEditor(null) + } catch (err) { + console.error('Failed to unlock before leave', err) + } + } + } + } + }, [ + collectionSlug, + documentIsLocked, + documentLockState, + globalSlug, + id, + isLockingEnabled, + setCurrentEditor, + setDocumentIsLocked, + unlockDocument, + user?.id, + ]) + const onSave = useCallback( async (json): Promise => { const controller = handleAbortRef(abortOnSaveRef) @@ -342,7 +378,7 @@ export function DefaultEditView({ const docPreferences = await getDocPreferences() - const { lockedState, state } = await getFormState({ + const result = await getFormState({ id, collectionSlug, docPermissions, @@ -360,6 +396,12 @@ export function DefaultEditView({ updateLastEdited, }) + if (!result) { + return + } + + const { lockedState, state } = result + if (isLockingEnabled) { handleDocumentLocking(lockedState) } @@ -386,38 +428,9 @@ export function DefaultEditView({ // Clean up when the component unmounts or when the document is unlocked useEffect(() => { return () => { - if (isLockingEnabled && documentIsLocked && (id || globalSlug)) { - // Only retain the lock if the user is still viewing the document - const shouldUnlockDocument = !['preview', 'api', 'versions'].some((path) => - window.location.pathname.includes(path), - ) - if (shouldUnlockDocument) { - // Check if this user is still the current editor - if ( - typeof documentLockStateRef.current?.user === 'object' - ? documentLockStateRef.current?.user?.id === user?.id - : documentLockStateRef.current?.user === user?.id - ) { - void unlockDocument(id, collectionSlug ?? 
globalSlug) - setDocumentIsLocked(false) - setCurrentEditor(null) - } - } - } - setShowTakeOverModal(false) } - }, [ - collectionSlug, - globalSlug, - id, - unlockDocument, - user, - setCurrentEditor, - isLockingEnabled, - documentIsLocked, - setDocumentIsLocked, - ]) + }, []) useEffect(() => { const abortOnChange = abortOnChangeRef.current @@ -437,7 +450,7 @@ export function DefaultEditView({ : currentEditor !== user?.id) && !isReadOnlyForIncomingUser && !showTakeOverModal && - !documentLockStateRef.current?.hasShownLockedModal && + !documentLockState.current?.hasShownLockedModal && !isLockExpired const isFolderCollection = config.folders && collectionSlug === config.folders?.slug @@ -487,7 +500,7 @@ export function DefaultEditView({ false, updateDocumentEditor, setCurrentEditor, - documentLockStateRef, + documentLockState, isLockingEnabled, ) } @@ -505,7 +518,9 @@ export function DefaultEditView({ }} /> )} - {!isReadOnlyForIncomingUser && preventLeaveWithoutSaving && } + {!isReadOnlyForIncomingUser && preventLeaveWithoutSaving && ( + + )} {!isInDrawer && ( { - await payload.create({ - collection: 'users', - data: { - custom: 'Hello, world!', - email: devUser.email, - password: devUser.password, - roles: ['admin'], - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - }, + onInit: seed, typescript: { outputFile: path.resolve(dirname, 'payload-types.ts'), }, diff --git a/test/auth/e2e.spec.ts b/test/auth/e2e.spec.ts index 02253074d5..fd9ec90f0f 100644 --- a/test/auth/e2e.spec.ts +++ b/test/auth/e2e.spec.ts @@ -1,8 +1,8 @@ import type { BrowserContext, Page } from '@playwright/test' -import type { SanitizedConfig } from 'payload' import { expect, test } from '@playwright/test' import { devUser } from 'credentials.js' +import { openNav } from 'helpers/e2e/toggleNav.js' import path from 'path' import { fileURLToPath } from 'url' import { v4 as uuid } from 'uuid' @@ -15,6 +15,7 @@ import { exactText, getRoutes, initPageConsoleErrorCatch, + login, saveDocAndAssert, } from '../helpers.js' import { AdminUrlUtil } from '../helpers/adminUrlUtil.js' @@ -28,59 +29,12 @@ const dirname = path.dirname(filename) let payload: PayloadTestSDK -const { beforeAll, describe } = test +const { beforeAll, afterAll, describe } = test const headers = { 'Content-Type': 'application/json', } -const createFirstUser = async ({ - page, - serverURL, -}: { - customAdminRoutes?: SanitizedConfig['admin']['routes'] - customRoutes?: SanitizedConfig['routes'] - page: Page - serverURL: string -}) => { - const { - admin: { - routes: { createFirstUser: createFirstUserRoute }, - }, - routes: { admin: adminRoute }, - } = getRoutes({}) - - // wait for create first user route - await page.goto(serverURL + `${adminRoute}${createFirstUserRoute}`) - - // forget to fill out confirm password - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill(devUser.password) - await page.locator('.form-submit > button').click() - await expect(page.locator('.field-type.confirm-password .field-error')).toHaveText( - 'This field is required.', - ) - - // make them match, but does not pass password validation - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill('12') - await page.locator('#field-confirm-password').fill('12') - await page.locator('.form-submit > button').click() 
- await expect(page.locator('.field-type.password .field-error')).toHaveText( - 'This value must be longer than the minimum length of 3 characters.', - ) - - await page.locator('#field-email').fill(devUser.email) - await page.locator('#field-password').fill(devUser.password) - await page.locator('#field-confirm-password').fill(devUser.password) - await page.locator('#field-custom').fill('Hello, world!') - await page.locator('.form-submit > button').click() - - await expect - .poll(() => page.url(), { timeout: POLL_TOPASS_TIMEOUT }) - .not.toContain('create-first-user') -} - describe('Auth', () => { let page: Page let context: BrowserContext @@ -97,169 +51,288 @@ describe('Auth', () => { context = await browser.newContext() page = await context.newPage() initPageConsoleErrorCatch(page) - - await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) - - // Undo onInit seeding, as we need to test this without having a user created, or testing create-first-user - await reInitializeDB({ - serverURL, - snapshotKey: 'auth', - deleteOnly: true, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, - }, - }) - - await createFirstUser({ page, serverURL }) - - await ensureCompilationIsDone({ page, serverURL }) }) - - describe('passwords', () => { - beforeAll(() => { - url = new AdminUrlUtil(serverURL, slug) - }) - - test('should allow change password', async () => { - await page.goto(url.account) - const emailBeforeSave = await page.locator('#field-email').inputValue() - await page.locator('#change-password').click() - await page.locator('#field-password').fill('password') - // should fail to save without confirm password - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('This field is required.'), - }), - ).toBeVisible() - - // should fail to save with incorrect confirm password - await page.locator('#field-confirm-password').fill('wrong password') - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('Passwords do not match.'), - }), - ).toBeVisible() - - // should succeed with matching confirm password - await page.locator('#field-confirm-password').fill('password') - await saveDocAndAssert(page, '#action-save') - - // should still have the same email - await expect(page.locator('#field-email')).toHaveValue(emailBeforeSave) - }) - - test('should prevent new user creation without confirm password', async () => { - await page.goto(url.create) - await page.locator('#field-email').fill('dev2@payloadcms.com') - await page.locator('#field-password').fill('password') - // should fail to save without confirm password - await page.locator('#action-save').click() - await expect( - page.locator('.field-type.confirm-password .tooltip--show', { - hasText: exactText('This field is required.'), - }), - ).toBeVisible() - - // should succeed with matching confirm password - await page.locator('#field-confirm-password').fill('password') - await saveDocAndAssert(page, '#action-save') - }) - }) - - describe('authenticated users', () => { - beforeAll(() => { - url = new AdminUrlUtil(serverURL, slug) - }) - - test('should have up-to-date user in `useAuth` hook', async () => { - await page.goto(url.account) - await 
expect(page.locator('#users-api-result')).toHaveText('Hello, world!') - await expect(page.locator('#use-auth-result')).toHaveText('Hello, world!') - const field = page.locator('#field-custom') - await field.fill('Goodbye, world!') - await saveDocAndAssert(page) - await expect(page.locator('#users-api-result')).toHaveText('Goodbye, world!') - await expect(page.locator('#use-auth-result')).toHaveText('Goodbye, world!') - }) - }) - - describe('api-keys', () => { - let user - + describe('create first user', () => { beforeAll(async () => { - url = new AdminUrlUtil(serverURL, apiKeysSlug) + await reInitializeDB({ + serverURL, + snapshotKey: 'create-first-user', + deleteOnly: true, + }) - user = await payload.create({ - collection: apiKeysSlug, - data: { - apiKey: uuid(), - enableAPIKey: true, + await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) + + await payload.delete({ + collection: slug, + where: { + email: { + exists: true, + }, }, }) }) - test('should enable api key', async () => { - await page.goto(url.create) + async function waitForVisibleAuthFields() { + await expect(page.locator('#field-email')).toBeVisible() + await expect(page.locator('#field-password')).toBeVisible() + await expect(page.locator('#field-confirm-password')).toBeVisible() + } - // click enable api key checkbox - await page.locator('#field-enableAPIKey').click() + test('should create first user and redirect to admin', async () => { + const { + admin: { + routes: { createFirstUser: createFirstUserRoute }, + }, + routes: { admin: adminRoute }, + } = getRoutes({}) + + // wait for create first user route + await page.goto(serverURL + `${adminRoute}${createFirstUserRoute}`) + + await expect(page.locator('.create-first-user')).toBeVisible() + + await waitForVisibleAuthFields() + + // forget to fill out confirm password + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill(devUser.password) + + await page.locator('.form-submit > button').click() + await expect(page.locator('.field-type.confirm-password .field-error')).toHaveText( + 'This field is required.', + ) + + // make them match, but does not pass password validation + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill('12') + await page.locator('#field-confirm-password').fill('12') + + await page.locator('.form-submit > button').click() + await expect(page.locator('.field-type.password .field-error')).toHaveText( + 'This value must be longer than the minimum length of 3 characters.', + ) + + // should fill out all fields correctly + await page.locator('#field-email').fill(devUser.email) + await page.locator('#field-password').fill(devUser.password) + await page.locator('#field-confirm-password').fill(devUser.password) + await page.locator('#field-custom').fill('Hello, world!') + + await page.locator('.form-submit > button').click() - // assert that the value is set - const apiKeyLocator = page.locator('#apiKey') await expect - .poll(async () => await apiKeyLocator.inputValue(), { timeout: POLL_TOPASS_TIMEOUT }) - .toBeDefined() + .poll(() => page.url(), { timeout: POLL_TOPASS_TIMEOUT }) + .not.toContain('create-first-user') + }) + }) - const apiKey = await apiKeyLocator.inputValue() + describe('non create first user', () => { + beforeAll(async () => { + await reInitializeDB({ + serverURL, + snapshotKey: 'auth', + deleteOnly: false, + }) - await saveDocAndAssert(page) + await ensureCompilationIsDone({ page, serverURL, noAutoLogin: true }) - await 
expect(async () => { - const apiKeyAfterSave = await apiKeyLocator.inputValue() - expect(apiKey).toStrictEqual(apiKeyAfterSave) - }).toPass({ - timeout: POLL_TOPASS_TIMEOUT, + await login({ page, serverURL }) + }) + + describe('passwords', () => { + beforeAll(() => { + url = new AdminUrlUtil(serverURL, slug) + }) + + afterAll(async () => { + // reset password to original password + await page.goto(url.account) + await page.locator('#change-password').click() + await page.locator('#field-password').fill(devUser.password) + await page.locator('#field-confirm-password').fill(devUser.password) + await saveDocAndAssert(page, '#action-save') + }) + + test('should allow change password', async () => { + await page.goto(url.account) + const emailBeforeSave = await page.locator('#field-email').inputValue() + await page.locator('#change-password').click() + await page.locator('#field-password').fill('password') + // should fail to save without confirm password + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('This field is required.'), + }), + ).toBeVisible() + + // should fail to save with incorrect confirm password + await page.locator('#field-confirm-password').fill('wrong password') + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('Passwords do not match.'), + }), + ).toBeVisible() + + // should succeed with matching confirm password + await page.locator('#field-confirm-password').fill('password') + await saveDocAndAssert(page, '#action-save') + + // should still have the same email + await expect(page.locator('#field-email')).toHaveValue(emailBeforeSave) + }) + + test('should prevent new user creation without confirm password', async () => { + await page.goto(url.create) + await page.locator('#field-email').fill('dev2@payloadcms.com') + await page.locator('#field-password').fill('password') + // should fail to save without confirm password + await page.locator('#action-save').click() + await expect( + page.locator('.field-type.confirm-password .tooltip--show', { + hasText: exactText('This field is required.'), + }), + ).toBeVisible() + + // should succeed with matching confirm password + await page.locator('#field-confirm-password').fill('password') + await saveDocAndAssert(page, '#action-save') }) }) - test('should disable api key', async () => { - await page.goto(url.edit(user.id)) + describe('authenticated users', () => { + beforeAll(() => { + url = new AdminUrlUtil(serverURL, slug) + }) - // click enable api key checkbox - await page.locator('#field-enableAPIKey').click() + test('should have up-to-date user in `useAuth` hook', async () => { + await page.goto(url.account) + await expect(page.locator('#users-api-result')).toHaveText('Hello, world!') + await expect(page.locator('#use-auth-result')).toHaveText('Hello, world!') + const field = page.locator('#field-custom') + await field.fill('Goodbye, world!') + await saveDocAndAssert(page) + await expect(page.locator('#users-api-result')).toHaveText('Goodbye, world!') + await expect(page.locator('#use-auth-result')).toHaveText('Goodbye, world!') + }) - // assert that the apiKey field is hidden - await expect(page.locator('#apiKey')).toBeHidden() + // Need to test unlocking documents on logout here as this test suite does not auto login users + test('should unlock document on logout after editing without saving', async () => { + await page.goto(url.list) 
- await saveDocAndAssert(page) + await page.locator('.table .row-1 .cell-custom a').click() - // use the api key in a fetch to assert that it is disabled - await expect(async () => { - const response = await fetch(`${apiURL}/${apiKeysSlug}/me`, { - headers: { - ...headers, - Authorization: `${apiKeysSlug} API-Key ${user.apiKey}`, + const textInput = page.locator('#field-namedSaveToJWT') + await expect(textInput).toBeVisible() + const docID = (await page.locator('.render-title').getAttribute('data-doc-id')) as string + + const lockDocRequest = page.waitForResponse( + (response) => + response.request().method() === 'POST' && response.request().url() === url.edit(docID), + ) + await textInput.fill('some text') + await lockDocRequest + + const lockedDocs = await payload.find({ + collection: 'payload-locked-documents', + limit: 1, + pagination: false, + }) + + await expect.poll(() => lockedDocs.docs.length).toBe(1) + + await openNav(page) + + await page.locator('.nav .nav__controls a[href="/admin/logout"]').click() + + // Locate the modal container + const modalContainer = page.locator('.payload__modal-container') + await expect(modalContainer).toBeVisible() + + // Click the "Leave anyway" button + await page + .locator('#leave-without-saving .confirmation-modal__controls .btn--style-primary') + .click() + + await expect(page.locator('.login')).toBeVisible() + + const unlockedDocs = await payload.find({ + collection: 'payload-locked-documents', + limit: 1, + pagination: false, + }) + + await expect.poll(() => unlockedDocs.docs.length).toBe(0) + + // added so tests after this do not need to re-login + await login({ page, serverURL }) + }) + }) + + describe('api-keys', () => { + let user + + beforeAll(async () => { + url = new AdminUrlUtil(serverURL, apiKeysSlug) + + user = await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, }, - }).then((res) => res.json()) + }) + }) - expect(response.user).toBeNull() - }).toPass({ - timeout: POLL_TOPASS_TIMEOUT, + test('should enable api key', async () => { + await page.goto(url.create) + + // click enable api key checkbox + await page.locator('#field-enableAPIKey').click() + + // assert that the value is set + const apiKeyLocator = page.locator('#apiKey') + await expect + .poll(async () => await apiKeyLocator.inputValue(), { timeout: POLL_TOPASS_TIMEOUT }) + .toBeDefined() + + const apiKey = await apiKeyLocator.inputValue() + + await saveDocAndAssert(page) + + await expect(async () => { + const apiKeyAfterSave = await apiKeyLocator.inputValue() + expect(apiKey).toStrictEqual(apiKeyAfterSave) + }).toPass({ + timeout: POLL_TOPASS_TIMEOUT, + }) + }) + + test('should disable api key', async () => { + await page.goto(url.edit(user.id)) + + // click enable api key checkbox + await page.locator('#field-enableAPIKey').click() + + // assert that the apiKey field is hidden + await expect(page.locator('#apiKey')).toBeHidden() + + await saveDocAndAssert(page) + + // use the api key in a fetch to assert that it is disabled + await expect(async () => { + const response = await fetch(`${apiURL}/${apiKeysSlug}/me`, { + headers: { + ...headers, + Authorization: `${apiKeysSlug} API-Key ${user.apiKey}`, + }, + }).then((res) => res.json()) + + expect(response.user).toBeNull() + }).toPass({ + timeout: POLL_TOPASS_TIMEOUT, + }) }) }) }) diff --git a/test/auth/payload-types.ts b/test/auth/payload-types.ts index 176fae2020..f3630e3f5e 100644 --- a/test/auth/payload-types.ts +++ b/test/auth/payload-types.ts @@ -248,11 +248,13 @@ export 
interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -270,11 +272,13 @@ export interface PartialDisableLocalStrategy { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -316,11 +320,13 @@ export interface PublicUser { _verificationToken?: string | null; loginAttempts?: number | null; lockUntil?: string | null; - sessions: { - id: string; - createdAt?: string | null; - expiresAt: string; - }[]; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** diff --git a/test/auth/seed.ts b/test/auth/seed.ts new file mode 100644 index 0000000000..57f1930350 --- /dev/null +++ b/test/auth/seed.ts @@ -0,0 +1,34 @@ +import type { Config } from 'payload' + +import { v4 as uuid } from 'uuid' + +import { devUser } from '../credentials.js' +import { apiKeysSlug } from './shared.js' + +export const seed: Config['onInit'] = async (payload) => { + await payload.create({ + collection: 'users', + data: { + custom: 'Hello, world!', + email: devUser.email, + password: devUser.password, + roles: ['admin'], + }, + }) + + await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, + }, + }) + + await payload.create({ + collection: apiKeysSlug, + data: { + apiKey: uuid(), + enableAPIKey: true, + }, + }) +} diff --git a/test/helpers.ts b/test/helpers.ts index ed3e73f487..07127f43c9 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -98,10 +98,26 @@ export async function ensureCompilationIsDone({ await page.goto(adminURL) - await page.waitForURL( - readyURL ?? - (noAutoLogin ? `${adminURL + (adminURL.endsWith('/') ? '' : '/')}login` : adminURL), - ) + if (readyURL) { + await page.waitForURL(readyURL) + } else { + await expect + .poll( + () => { + if (noAutoLogin) { + const baseAdminURL = adminURL + (adminURL.endsWith('/') ? '' : '/') + return ( + page.url() === `${baseAdminURL}create-first-user` || + page.url() === `${baseAdminURL}login` + ) + } else { + return page.url() === adminURL + } + }, + { timeout: POLL_TOPASS_TIMEOUT }, + ) + .toBe(true) + } console.log('Successfully compiled') return diff --git a/test/helpers/reInitEndpoint.ts b/test/helpers/reInitEndpoint.ts index a6e98cc6ef..992f10de73 100644 --- a/test/helpers/reInitEndpoint.ts +++ b/test/helpers/reInitEndpoint.ts @@ -15,7 +15,7 @@ const handler: PayloadHandler = async (req) => { } const query: { - deleteOnly?: boolean + deleteOnly?: string snapshotKey?: string uploadsDir?: string | string[] } = qs.parse(req.url.split('?')[1] ?? 
'', { @@ -31,7 +31,8 @@ const handler: PayloadHandler = async (req) => { snapshotKey: String(query.snapshotKey), // uploadsDir can be string or stringlist uploadsDir: query.uploadsDir as string | string[], - deleteOnly: query.deleteOnly, + // query value will be a string of 'true' or 'false' + deleteOnly: query.deleteOnly === 'true', }) return Response.json( diff --git a/test/locked-documents/payload-types.ts b/test/locked-documents/payload-types.ts index 71e1e949cd..0960f2a653 100644 --- a/test/locked-documents/payload-types.ts +++ b/test/locked-documents/payload-types.ts @@ -174,6 +174,13 @@ export interface User { hash?: string | null; loginAttempts?: number | null; lockUntil?: string | null; + sessions?: + | { + id: string; + createdAt?: string | null; + expiresAt: string; + }[] + | null; password?: string | null; } /** @@ -288,6 +295,13 @@ export interface UsersSelect { hash?: T; loginAttempts?: T; lockUntil?: T; + sessions?: + | T + | { + id?: T; + createdAt?: T; + expiresAt?: T; + }; } /** * This interface was referenced by `Config`'s JSON-Schema From 14322a71bbe57d9c27b190779a9a126fc2416987 Mon Sep 17 00:00:00 2001 From: Dan Ribbens Date: Thu, 24 Jul 2025 13:03:21 -0400 Subject: [PATCH 075/143] docs(plugin-import-export): document plugin-import-export (#13243) Add documentation for @payloadcms/plugin-import-export. --- docs/plugins/form-builder.mdx | 2 +- docs/plugins/import-export.mdx | 155 +++++++++++++++++++++ docs/plugins/multi-tenant.mdx | 14 +- docs/plugins/nested-docs.mdx | 2 +- docs/plugins/overview.mdx | 1 + docs/plugins/redirects.mdx | 2 +- docs/plugins/search.mdx | 2 +- docs/plugins/sentry.mdx | 2 +- docs/plugins/seo.mdx | 2 +- docs/plugins/stripe.mdx | 2 +- packages/plugin-import-export/src/index.ts | 3 + 11 files changed, 173 insertions(+), 14 deletions(-) create mode 100644 docs/plugins/import-export.mdx diff --git a/docs/plugins/form-builder.mdx b/docs/plugins/form-builder.mdx index 2643188c5c..5872c887d0 100644 --- a/docs/plugins/form-builder.mdx +++ b/docs/plugins/form-builder.mdx @@ -1,7 +1,7 @@ --- title: Form Builder Plugin label: Form Builder -order: 40 +order: 30 desc: Easily build and manage forms from the Admin Panel. Send dynamic, personalized emails and even accept and process payments. keywords: plugins, plugin, form, forms, form builder --- diff --git a/docs/plugins/import-export.mdx b/docs/plugins/import-export.mdx new file mode 100644 index 0000000000..63c2cb2159 --- /dev/null +++ b/docs/plugins/import-export.mdx @@ -0,0 +1,155 @@ +--- +title: Import Export Plugin +label: Import Export +order: 40 +desc: Add Import and export functionality to create CSV and JSON data exports +keywords: plugins, plugin, import, export, csv, JSON, data, ETL, download +--- + +![https://www.npmjs.com/package/@payloadcms/plugin-import-export](https://img.shields.io/npm/v/@payloadcms/plugin-import-export) + + + **Note**: This plugin is in **beta** as some aspects of it may change on any + minor releases. It is under development and currently only supports exporting + of collection data. + + +This plugin adds features that give admin users the ability to download or create export data as an upload collection and import it back into a project. 
+ +## Core Features + +- Export data as CSV or JSON format via the admin UI +- Download the export directly through the browser +- Create a file upload of the export data +- Use the jobs queue for large exports +- (Coming soon) Import collection data + +## Installation + +Install the plugin using any JavaScript package manager like [pnpm](https://pnpm.io), [npm](https://npmjs.com), or [Yarn](https://yarnpkg.com): + +```bash +pnpm add @payloadcms/plugin-import-export +``` + +## Basic Usage + +In the `plugins` array of your [Payload Config](https://payloadcms.com/docs/configuration/overview), call the plugin with [options](#options): + +```ts +import { buildConfig } from 'payload' +import { importExportPlugin } from '@payloadcms/plugin-import-export' + +const config = buildConfig({ + collections: [Pages, Media], + plugins: [ + importExportPlugin({ + collections: ['users', 'pages'], + // see below for a list of available options + }), + ], +}) + +export default config +``` + +## Options + +| Property | Type | Description | +| -------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------ | +| `collections` | string[] | Collections to include Import/Export controls in. Defaults to all collections. | +| `debug` | boolean | If true, enables debug logging. | +| `disableDownload` | boolean | If true, disables the download button in the export preview UI. | +| `disableJobsQueue` | boolean | If true, forces the export to run synchronously. | +| `disableSave` | boolean | If true, disables the save button in the export preview UI. | +| `format` | string | Forces a specific export format (`csv` or `json`), hides the format dropdown, and prevents the user from choosing the export format. | +| `overrideExportCollection` | function | Function to override the default export collection; takes the default export collection and allows you to modify and return it. | + +## Field Options + +In addition to the above plugin configuration options, you can granularly set the following field level options using the `custom['plugin-import-export']` properties in any of your collections. + +| Property | Type | Description | +| ---------- | -------- | ----------------------------------------------------------------------------------------------------------------------------- | +| `disabled` | boolean | When `true` the field is completely excluded from the import-export plugin. | +| `toCSV` | function | Custom function used to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value. | + +### Customizing the output of CSV data + +To manipulate the data that a field exports you can add `toCSV` custom functions. This allows you to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value. + +The toCSV function argument is an object with the following properties: + +| Property | Type | Description | +| ------------ | ------- | ----------------------------------------------------------------- | +| `columnName` | string | The CSV column name given to the field. | +| `doc` | object | The top level document | +| `row` | object | The object data that can be manipulated to assign data to the CSV | +| `siblingDoc` | object | The document data at the level where it belongs | +| `value` | unknown | The data for the field. 
| + +Example function: + +```ts +const pages: CollectionConfig = { + slug: 'pages', + fields: [ + { + name: 'author', + type: 'relationship', + relationTo: 'users', + custom: { + 'plugin-import-export': { + toCSV: ({ value, columnName, row }) => { + // add both `author_id` and the `author_email` to the csv export + if ( + value && + typeof value === 'object' && + 'id' in value && + 'email' in value + ) { + row[`${columnName}_id`] = (value as { id: number | string }).id + row[`${columnName}_email`] = (value as { email: string }).email + } + }, + }, + }, + }, + ], +} +``` + +## Exporting Data + +There are four possible ways that the plugin allows for exporting documents, the first two are available in the admin UI from the list view of a collection: + +1. Direct download - Using a `POST` to `/api/exports/download` and streams the response as a file download +2. File storage - Goes to the `exports` collection as an uploads enabled collection +3. Local API - A create call to the uploads collection: `payload.create({ slug: 'uploads', ...parameters })` +4. Jobs Queue - `payload.jobs.queue({ task: 'createCollectionExport', input: parameters })` + +By default, a user can use the Export drawer to create a file download by choosing `Save` or stream a downloadable file directly without persisting it by using the `Download` button. Either option can be disabled to provide the export experience you desire for your use-case. + +The UI for creating exports provides options so that users can be selective about which documents to include and also which columns or fields to include. + +It is necessary to add access control to the uploads collection configuration using the `overrideExportCollection` function if you have enabled this plugin on collections with data that some authenticated users should not have access to. + + + **Note**: Users who have read access to the upload collection may be able to + download data that is normally not readable due to [access + control](../access-control/overview). + + +The following parameters are used by the export function to handle requests: + +| Property | Type | Description | +| ---------------- | -------- | ----------------------------------------------------------------------------------------------------------------- | +| `format` | text | Either `csv` or `json` to determine the shape of data exported | +| `limit` | number | The max number of documents to return | +| `sort` | select | The field to use for ordering documents | +| `locale` | string | The locale code to query documents or `all` | +| `draft` | string | Either `yes` or `no` to return documents with their newest drafts for drafts enabled collections | +| `fields` | string[] | Which collection fields are used to create the export, defaults to all | +| `collectionSlug` | string | The slug to query against | +| `where` | object | The WhereObject used to query documents to export. 
This is set by making selections or filters from the list view | +| `filename` | text | What to call the export being created | diff --git a/docs/plugins/multi-tenant.mdx b/docs/plugins/multi-tenant.mdx index 39eab63b8b..32dcfb86c6 100644 --- a/docs/plugins/multi-tenant.mdx +++ b/docs/plugins/multi-tenant.mdx @@ -1,7 +1,7 @@ --- title: Multi-Tenant Plugin label: Multi-Tenant -order: 40 +order: 50 desc: Scaffolds multi-tenancy for your Payload application keywords: plugins, multi-tenant, multi-tenancy, plugin, payload, cms, seo, indexing, search, search engine --- @@ -229,15 +229,15 @@ const config = buildConfig({ { slug: 'tenants', admin: { - useAsTitle: 'name' + useAsTitle: 'name', }, fields: [ // remember, you own these fields // these are merely suggestions/examples { - name: 'name', - type: 'text', - required: true, + name: 'name', + type: 'text', + required: true, }, { name: 'slug', @@ -248,7 +248,7 @@ const config = buildConfig({ name: 'domain', type: 'text', required: true, - } + }, ], }, ], @@ -258,7 +258,7 @@ const config = buildConfig({ pages: {}, navigation: { isGlobal: true, - } + }, }, }), ], diff --git a/docs/plugins/nested-docs.mdx b/docs/plugins/nested-docs.mdx index 5725bdabc2..60c3d43323 100644 --- a/docs/plugins/nested-docs.mdx +++ b/docs/plugins/nested-docs.mdx @@ -1,7 +1,7 @@ --- title: Nested Docs Plugin label: Nested Docs -order: 40 +order: 60 desc: Nested documents in a parent, child, and sibling relationship. keywords: plugins, nested, documents, parent, child, sibling, relationship --- diff --git a/docs/plugins/overview.mdx b/docs/plugins/overview.mdx index 96b2430896..d5bce425fd 100644 --- a/docs/plugins/overview.mdx +++ b/docs/plugins/overview.mdx @@ -55,6 +55,7 @@ Payload maintains a set of Official Plugins that solve for some of the common us - [Sentry](./sentry) - [SEO](./seo) - [Stripe](./stripe) +- [Import/Export](./import-export) You can also [build your own plugin](./build-your-own) to easily extend Payload's functionality in some other way. Once your plugin is ready, consider [sharing it with the community](#community-plugins). diff --git a/docs/plugins/redirects.mdx b/docs/plugins/redirects.mdx index 3fbc624d58..dae099a499 100644 --- a/docs/plugins/redirects.mdx +++ b/docs/plugins/redirects.mdx @@ -1,7 +1,7 @@ --- title: Redirects Plugin label: Redirects -order: 40 +order: 70 desc: Automatically create redirects for your Payload application keywords: plugins, redirects, redirect, plugin, payload, cms, seo, indexing, search, search engine --- diff --git a/docs/plugins/search.mdx b/docs/plugins/search.mdx index 868e87b5b7..8eee4073aa 100644 --- a/docs/plugins/search.mdx +++ b/docs/plugins/search.mdx @@ -1,7 +1,7 @@ --- title: Search Plugin label: Search -order: 40 +order: 80 desc: Generates records of your documents that are extremely fast to search on. 
keywords: plugins, search, search plugin, search engine, search index, search results, search bar, search box, search field, search form, search input --- diff --git a/docs/plugins/sentry.mdx b/docs/plugins/sentry.mdx index fc87f2e2de..ecd6826487 100644 --- a/docs/plugins/sentry.mdx +++ b/docs/plugins/sentry.mdx @@ -1,7 +1,7 @@ --- title: Sentry Plugin label: Sentry -order: 40 +order: 90 desc: Integrate Sentry error tracking into your Payload application keywords: plugins, sentry, error, tracking, monitoring, logging, bug, reporting, performance --- diff --git a/docs/plugins/seo.mdx b/docs/plugins/seo.mdx index c0fa06d0e0..b22e01c829 100644 --- a/docs/plugins/seo.mdx +++ b/docs/plugins/seo.mdx @@ -2,7 +2,7 @@ description: Manage SEO metadata from your Payload admin keywords: plugins, seo, meta, search, engine, ranking, google label: SEO -order: 30 +order: 100 title: SEO Plugin --- diff --git a/docs/plugins/stripe.mdx b/docs/plugins/stripe.mdx index 214267f0a2..79111274d7 100644 --- a/docs/plugins/stripe.mdx +++ b/docs/plugins/stripe.mdx @@ -1,7 +1,7 @@ --- title: Stripe Plugin label: Stripe -order: 40 +order: 110 desc: Easily accept payments with Stripe keywords: plugins, stripe, payments, ecommerce --- diff --git a/packages/plugin-import-export/src/index.ts b/packages/plugin-import-export/src/index.ts index 366d444c54..e3b4f99f96 100644 --- a/packages/plugin-import-export/src/index.ts +++ b/packages/plugin-import-export/src/index.ts @@ -216,6 +216,9 @@ declare module 'payload' { * @default false */ disabled?: boolean + /** + * Custom function used to modify the outgoing csv data by manipulating the data, siblingData or by returning the desired value + */ toCSV?: ToCSVFunction } } From bccf6ab16f3562bbf2d9e26dc0e2d34d4a3cf732 Mon Sep 17 00:00:00 2001 From: Jacob Fletcher Date: Thu, 24 Jul 2025 14:00:52 -0400 Subject: [PATCH 076/143] feat: group by (#13138) Supports grouping documents by specific fields within the list view. For example, imagine having a "posts" collection with a "categories" field. To report on each specific category, you'd traditionally filter for each category, one at a time. This can be quite inefficient, especially with large datasets. Now, you can interact with all categories simultaneously, grouped by distinct values. Here is a simple demonstration: https://github.com/user-attachments/assets/0dcd19d2-e983-47e6-9ea2-cfdd2424d8b5 Enable on any collection by setting the `admin.groupBy` property: ```ts import type { CollectionConfig } from 'payload' const MyCollection: CollectionConfig = { // ... admin: { groupBy: true } } ``` This is currently marked as beta to gather feedback while we reach full stability, and to leave room for API changes and other modifications. Use at your own risk. Note: when using `groupBy`, bulk editing is done group-by-group. In the future we may support cross-group bulk editing. Dependent on #13102 (merged). 
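For reference, this is roughly how the new list view handler resolves groups on the server: it first collects the distinct values of the group-by field via `payload.findDistinct` (introduced in #13102), then runs one paginated `payload.find` per distinct value and renders a separate table for each group. A minimal sketch, assuming a hypothetical `posts` collection with a `category` text field — simplified, not the exact `handleGroupBy` implementation:

```ts
import type { Payload } from 'payload'

// Simplified sketch of the group-by flow (hypothetical `posts` / `category` names)
export async function listPostsGroupedByCategory(payload: Payload) {
  // 1. Find the distinct values of the group-by field
  const distinct = await payload.findDistinct({
    collection: 'posts',
    field: 'category',
    sort: 'category',
  })

  const tables = []

  for (const row of distinct.values) {
    const groupValue = row['category']

    // 2. One query per distinct value; each result set becomes its own table in the list view
    const groupDocs = await payload.find({
      collection: 'posts',
      where: { category: { equals: groupValue } },
    })

    tables.push({ groupValue, docs: groupDocs.docs })
  }

  return tables
}
```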
--- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210774523852467 --------- Co-authored-by: Paul Popus --- .github/workflows/main.yml | 2 + docs/admin/overview.mdx | 2 +- docs/configuration/collections.mdx | 1 + packages/next/src/views/List/handleGroupBy.ts | 199 + packages/next/src/views/List/index.tsx | 101 +- packages/payload/src/admin/functions/index.ts | 10 + packages/payload/src/admin/views/list.ts | 2 +- .../payload/src/collections/config/types.ts | 7 + packages/payload/src/index.ts | 1 - packages/payload/src/preferences/types.ts | 1 + .../utilities/transformColumnPreferences.ts | 4 + packages/translations/src/clientKeys.ts | 2 + packages/translations/src/languages/ar.ts | 2 + packages/translations/src/languages/az.ts | 3 + packages/translations/src/languages/bg.ts | 2 + packages/translations/src/languages/bnBd.ts | 3 + packages/translations/src/languages/bnIn.ts | 2 + packages/translations/src/languages/ca.ts | 2 + packages/translations/src/languages/cs.ts | 2 + packages/translations/src/languages/da.ts | 2 + packages/translations/src/languages/de.ts | 3 + packages/translations/src/languages/en.ts | 2 + packages/translations/src/languages/es.ts | 2 + packages/translations/src/languages/et.ts | 2 + packages/translations/src/languages/fa.ts | 2 + packages/translations/src/languages/fr.ts | 2 + packages/translations/src/languages/he.ts | 3 + packages/translations/src/languages/hr.ts | 2 + packages/translations/src/languages/hu.ts | 2 + packages/translations/src/languages/hy.ts | 3 + packages/translations/src/languages/it.ts | 2 + packages/translations/src/languages/ja.ts | 2 + packages/translations/src/languages/ko.ts | 3 + packages/translations/src/languages/lt.ts | 2 + packages/translations/src/languages/lv.ts | 3 + packages/translations/src/languages/my.ts | 2 + packages/translations/src/languages/nb.ts | 2 + packages/translations/src/languages/nl.ts | 2 + packages/translations/src/languages/pl.ts | 2 + packages/translations/src/languages/pt.ts | 2 + packages/translations/src/languages/ro.ts | 2 + packages/translations/src/languages/rs.ts | 2 + .../translations/src/languages/rsLatin.ts | 2 + packages/translations/src/languages/ru.ts | 2 + packages/translations/src/languages/sk.ts | 2 + packages/translations/src/languages/sl.ts | 2 + packages/translations/src/languages/sv.ts | 2 + packages/translations/src/languages/th.ts | 3 + packages/translations/src/languages/tr.ts | 2 + packages/translations/src/languages/uk.ts | 2 + packages/translations/src/languages/vi.ts | 2 + packages/translations/src/languages/zh.ts | 2 + packages/translations/src/languages/zhTw.ts | 2 + .../ui/src/elements/ColumnSelector/index.tsx | 2 +- packages/ui/src/elements/DeleteMany/index.tsx | 50 +- .../src/elements/EditMany/DrawerContent.tsx | 11 +- packages/ui/src/elements/EditMany/index.tsx | 23 +- .../ui/src/elements/GroupByBuilder/index.scss | 39 + .../ui/src/elements/GroupByBuilder/index.tsx | 144 + .../ui/src/elements/ListControls/index.scss | 6 +- .../ui/src/elements/ListControls/index.tsx | 43 +- .../PageControls/GroupByPageControls.tsx | 62 + .../ui/src/elements/PageControls/index.scss | 40 + .../ui/src/elements/PageControls/index.tsx | 94 + .../Pagination/ClickableArrow/index.scss | 6 +- .../ui/src/elements/Pagination/index.scss | 8 +- packages/ui/src/elements/Pagination/index.tsx | 2 +- .../elements/PublishMany/DrawerContent.tsx | 13 +- .../ui/src/elements/PublishMany/index.tsx | 19 +- packages/ui/src/elements/ReactSelect/types.ts | 1 
+ .../src/elements/RelationshipTable/index.tsx | 26 +- .../ui/src/elements/StickyToolbar/index.scss | 27 + .../ui/src/elements/StickyToolbar/index.tsx | 9 + .../ui/src/elements/Table/OrderableTable.tsx | 4 + packages/ui/src/elements/Table/index.tsx | 6 +- .../elements/UnpublishMany/DrawerContent.tsx | 8 +- .../ui/src/elements/UnpublishMany/index.tsx | 18 +- .../ui/src/elements/WhereBuilder/index.tsx | 8 +- packages/ui/src/exports/client/index.ts | 2 + packages/ui/src/fields/Select/Input.tsx | 3 + packages/ui/src/providers/ListQuery/index.tsx | 37 +- .../ui/src/providers/ListQuery/mergeQuery.ts | 31 +- packages/ui/src/providers/ListQuery/types.ts | 12 +- packages/ui/src/providers/Selection/index.tsx | 63 +- packages/ui/src/utilities/buildTableState.ts | 12 +- .../reduceFieldsToOptions.tsx} | 22 +- packages/ui/src/utilities/renderTable.tsx | 81 +- .../CollectionFolder/ListSelection/index.tsx | 1 + .../src/views/List/GroupByHeader/index.scss | 17 + .../ui/src/views/List/GroupByHeader/index.tsx | 31 + .../ui/src/views/List/ListHeader/index.tsx | 5 +- .../ui/src/views/List/ListSelection/index.tsx | 32 +- packages/ui/src/views/List/index.scss | 47 +- packages/ui/src/views/List/index.tsx | 122 +- test/admin/e2e/list-view/e2e.spec.ts | 72 +- test/bulk-edit/e2e.spec.ts | 2 +- test/fields-relationship/e2e.spec.ts | 5 +- test/group-by/.gitignore | 2 + test/group-by/collections/Categories/index.ts | 16 + test/group-by/collections/Media/index.ts | 33 + test/group-by/collections/Posts/index.ts | 48 + test/group-by/config.ts | 30 + test/group-by/e2e.spec.ts | 607 +++ test/group-by/payload-types.ts | 428 ++ test/group-by/schema.graphql | 4271 +++++++++++++++++ test/group-by/seed.ts | 84 + test/group-by/tsconfig.eslint.json | 13 + test/group-by/tsconfig.json | 3 + test/group-by/types.d.ts | 9 + test/helpers.ts | 6 +- test/helpers/e2e/goToNextPage.ts | 49 + test/helpers/e2e/groupBy.ts | 90 + test/helpers/e2e/openListFilters.ts | 8 +- test/helpers/e2e/sortColumn.ts | 36 + test/helpers/e2e/toggleListDrawer.ts | 3 + test/joins/e2e.spec.ts | 3 + test/joins/payload-types.ts | 2 + test/lexical/collections/RichText/e2e.spec.ts | 16 +- test/lexical/payload-types.ts | 14 + test/locked-documents/e2e.spec.ts | 11 +- test/query-presets/e2e.spec.ts | 3 +- test/query-presets/helpers/togglePreset.ts | 3 +- test/sort/payload-types.ts | 38 +- tsconfig.base.json | 114 +- 124 files changed, 7181 insertions(+), 447 deletions(-) create mode 100644 packages/next/src/views/List/handleGroupBy.ts create mode 100644 packages/ui/src/elements/GroupByBuilder/index.scss create mode 100644 packages/ui/src/elements/GroupByBuilder/index.tsx create mode 100644 packages/ui/src/elements/PageControls/GroupByPageControls.tsx create mode 100644 packages/ui/src/elements/PageControls/index.scss create mode 100644 packages/ui/src/elements/PageControls/index.tsx create mode 100644 packages/ui/src/elements/StickyToolbar/index.scss create mode 100644 packages/ui/src/elements/StickyToolbar/index.tsx rename packages/ui/src/{elements/WhereBuilder/reduceFields.tsx => utilities/reduceFieldsToOptions.tsx} (90%) create mode 100644 packages/ui/src/views/List/GroupByHeader/index.scss create mode 100644 packages/ui/src/views/List/GroupByHeader/index.tsx create mode 100644 test/group-by/.gitignore create mode 100644 test/group-by/collections/Categories/index.ts create mode 100644 test/group-by/collections/Media/index.ts create mode 100644 test/group-by/collections/Posts/index.ts create mode 100644 test/group-by/config.ts create mode 100644 
test/group-by/e2e.spec.ts create mode 100644 test/group-by/payload-types.ts create mode 100644 test/group-by/schema.graphql create mode 100644 test/group-by/seed.ts create mode 100644 test/group-by/tsconfig.eslint.json create mode 100644 test/group-by/tsconfig.json create mode 100644 test/group-by/types.d.ts create mode 100644 test/helpers/e2e/goToNextPage.ts create mode 100644 test/helpers/e2e/groupBy.ts create mode 100644 test/helpers/e2e/sortColumn.ts diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e10abad457..17c907c446 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -284,6 +284,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - group-by - folders - hooks - lexical__collections__Lexical__e2e__main @@ -419,6 +420,7 @@ jobs: - fields__collections__Text - fields__collections__UI - fields__collections__Upload + - group-by - folders - hooks - lexical__collections__Lexical__e2e__main diff --git a/docs/admin/overview.mdx b/docs/admin/overview.mdx index 069357d585..30be428847 100644 --- a/docs/admin/overview.mdx +++ b/docs/admin/overview.mdx @@ -77,7 +77,7 @@ All auto-generated files will contain the following comments at the top of each ## Admin Options -All options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property: +All root-level options for the Admin Panel are defined in your [Payload Config](../configuration/overview) under the `admin` property: ```ts import { buildConfig } from 'payload' diff --git a/docs/configuration/collections.mdx b/docs/configuration/collections.mdx index f431c925f7..c6a6e1ebd4 100644 --- a/docs/configuration/collections.mdx +++ b/docs/configuration/collections.mdx @@ -130,6 +130,7 @@ The following options are available: | `description` | Text to display below the Collection label in the List View to give editors more information. Alternatively, you can use the `admin.components.Description` to render a React component. [More details](#custom-components). | | `defaultColumns` | Array of field names that correspond to which columns to show by default in this Collection's List View. | | `disableCopyToLocale` | Disables the "Copy to Locale" button while editing documents within this Collection. Only applicable when localization is enabled. | +| `groupBy` | Beta. Enable grouping by a field in the list view. | | `hideAPIURL` | Hides the "API URL" meta field while editing documents within this Collection. | | `enableRichTextLink` | The [Rich Text](../fields/rich-text) field features a `Link` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. | | `enableRichTextRelationship` | The [Rich Text](../fields/rich-text) field features a `Relationship` element which allows for users to automatically reference related documents within their rich text. Set to `true` by default. 
| diff --git a/packages/next/src/views/List/handleGroupBy.ts b/packages/next/src/views/List/handleGroupBy.ts new file mode 100644 index 0000000000..8d96e5d7e8 --- /dev/null +++ b/packages/next/src/views/List/handleGroupBy.ts @@ -0,0 +1,199 @@ +import type { + ClientConfig, + Column, + ListQuery, + PaginatedDocs, + PayloadRequest, + SanitizedCollectionConfig, + Where, +} from 'payload' + +import { renderTable } from '@payloadcms/ui/rsc' +import { formatDate } from '@payloadcms/ui/shared' +import { flattenAllFields } from 'payload' + +export const handleGroupBy = async ({ + clientConfig, + collectionConfig, + collectionSlug, + columns, + customCellProps, + drawerSlug, + enableRowSelections, + query, + req, + user, + where: whereWithMergedSearch, +}: { + clientConfig: ClientConfig + collectionConfig: SanitizedCollectionConfig + collectionSlug: string + columns: any[] + customCellProps?: Record + drawerSlug?: string + enableRowSelections?: boolean + query?: ListQuery + req: PayloadRequest + user: any + where: Where +}): Promise<{ + columnState: Column[] + data: PaginatedDocs + Table: null | React.ReactNode | React.ReactNode[] +}> => { + let Table: React.ReactNode | React.ReactNode[] = null + let columnState: Column[] + + const dataByGroup: Record = {} + const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) + + // NOTE: is there a faster/better way to do this? + const flattenedFields = flattenAllFields({ fields: collectionConfig.fields }) + + const groupByFieldPath = query.groupBy.replace(/^-/, '') + + const groupByField = flattenedFields.find((f) => f.name === groupByFieldPath) + + const relationshipConfig = + groupByField?.type === 'relationship' + ? clientConfig.collections.find((c) => c.slug === groupByField.relationTo) + : undefined + + let populate + + if (groupByField?.type === 'relationship' && groupByField.relationTo) { + const relationTo = + typeof groupByField.relationTo === 'string' + ? [groupByField.relationTo] + : groupByField.relationTo + + if (Array.isArray(relationTo)) { + relationTo.forEach((rel) => { + if (!populate) { + populate = {} + } + populate[rel] = { [relationshipConfig?.admin.useAsTitle || 'id']: true } + }) + } + } + + const distinct = await req.payload.findDistinct({ + collection: collectionSlug, + depth: 1, + field: groupByFieldPath, + limit: query?.limit ? Number(query.limit) : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.page ? Number(query.page) : undefined, + populate, + req, + sort: query?.groupBy, + where: whereWithMergedSearch, + }) + + const data = { + ...distinct, + docs: distinct.values?.map(() => ({})) || [], + values: undefined, + } + + await Promise.all( + distinct.values.map(async (distinctValue, i) => { + const potentiallyPopulatedRelationship = distinctValue[groupByFieldPath] + + const valueOrRelationshipID = + groupByField?.type === 'relationship' && + potentiallyPopulatedRelationship && + typeof potentiallyPopulatedRelationship === 'object' && + 'id' in potentiallyPopulatedRelationship + ? potentiallyPopulatedRelationship.id + : potentiallyPopulatedRelationship + + const groupData = await req.payload.find({ + collection: collectionSlug, + depth: 0, + draft: true, + fallbackLocale: false, + includeLockStatus: true, + limit: query?.queryByGroup?.[valueOrRelationshipID]?.limit + ? Number(query.queryByGroup[valueOrRelationshipID].limit) + : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.queryByGroup?.[valueOrRelationshipID]?.page + ? 
Number(query.queryByGroup[valueOrRelationshipID].page) + : undefined, + req, + // Note: if we wanted to enable table-by-table sorting, we could use this: + // sort: query?.queryByGroup?.[valueOrRelationshipID]?.sort, + sort: query?.sort, + user, + where: { + ...(whereWithMergedSearch || {}), + [groupByFieldPath]: { + equals: valueOrRelationshipID, + }, + }, + }) + + let heading = valueOrRelationshipID || req.i18n.t('general:noValue') + + if ( + groupByField?.type === 'relationship' && + typeof potentiallyPopulatedRelationship === 'object' + ) { + heading = + potentiallyPopulatedRelationship[relationshipConfig.admin.useAsTitle || 'id'] || + valueOrRelationshipID + } + + if (groupByField.type === 'date') { + heading = formatDate({ + date: String(heading), + i18n: req.i18n, + pattern: clientConfig.admin.dateFormat, + }) + } + + if (groupData.docs && groupData.docs.length > 0) { + const { columnState: newColumnState, Table: NewTable } = renderTable({ + clientCollectionConfig, + collectionConfig, + columns, + customCellProps, + data: groupData, + drawerSlug, + enableRowSelections, + groupByFieldPath, + groupByValue: valueOrRelationshipID, + heading, + i18n: req.i18n, + key: `table-${valueOrRelationshipID}`, + orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, + payload: req.payload, + query, + useAsTitle: collectionConfig.admin.useAsTitle, + }) + + // Only need to set `columnState` once, using the first table's column state + // This will avoid needing to generate column state explicitly for root context that wraps all tables + if (!columnState) { + columnState = newColumnState + } + + if (!Table) { + Table = [] + } + + dataByGroup[valueOrRelationshipID] = groupData + ;(Table as Array)[i] = NewTable + } + }), + ) + + return { + columnState, + data, + Table, + } +} diff --git a/packages/next/src/views/List/index.tsx b/packages/next/src/views/List/index.tsx index 59c0c3dfc5..41dbbc208e 100644 --- a/packages/next/src/views/List/index.tsx +++ b/packages/next/src/views/List/index.tsx @@ -1,10 +1,12 @@ import type { AdminViewServerProps, CollectionPreferences, + Column, ColumnPreference, ListQuery, ListViewClientProps, ListViewServerPropsOnly, + PaginatedDocs, QueryPreset, SanitizedCollectionPermission, } from 'payload' @@ -24,6 +26,7 @@ import { import React, { Fragment } from 'react' import { getDocumentPermissions } from '../Document/getDocumentPermissions.js' +import { handleGroupBy } from './handleGroupBy.js' import { renderListViewSlots } from './renderListViewSlots.js' import { resolveAllFilterOptions } from './resolveAllFilterOptions.js' @@ -74,7 +77,6 @@ export const renderListView = async ( req, req: { i18n, - locale, payload, payload: { config }, query: queryFromReq, @@ -91,11 +93,17 @@ export const renderListView = async ( const columnsFromQuery: ColumnPreference[] = transformColumnsToPreferences(query?.columns) + query.queryByGroup = + query?.queryByGroup && typeof query.queryByGroup === 'string' + ? JSON.parse(query.queryByGroup) + : query?.queryByGroup + const collectionPreferences = await upsertPreferences({ key: `collection-${collectionSlug}`, req, value: { columns: columnsFromQuery, + groupBy: query?.groupBy, limit: isNumber(query?.limit) ? Number(query.limit) : undefined, preset: query?.preset, sort: query?.sort as string, @@ -112,6 +120,8 @@ export const renderListView = async ( collectionPreferences?.sort || (typeof collectionConfig.defaultSort === 'string' ? 
collectionConfig.defaultSort : undefined) + query.groupBy = collectionPreferences?.groupBy + query.columns = transformColumnsToSearchParams(collectionPreferences?.columns || []) const { @@ -137,6 +147,12 @@ export const renderListView = async ( let queryPreset: QueryPreset | undefined let queryPresetPermissions: SanitizedCollectionPermission | undefined + const whereWithMergedSearch = mergeListSearchAndWhere({ + collectionConfig, + search: typeof query?.search === 'string' ? query.search : undefined, + where: combineWhereConstraints([query?.where, baseListFilter]), + }) + if (collectionPreferences?.preset) { try { queryPreset = (await payload.findByID({ @@ -160,41 +176,55 @@ export const renderListView = async ( } } - const data = await payload.find({ - collection: collectionSlug, - depth: 0, - draft: true, - fallbackLocale: false, - includeLockStatus: true, - limit: query.limit, - locale, - overrideAccess: false, - page: query.page, - req, - sort: query.sort, - user, - where: mergeListSearchAndWhere({ + let data: PaginatedDocs | undefined + let Table: React.ReactNode | React.ReactNode[] = null + let columnState: Column[] = [] + + if (collectionConfig.admin.groupBy && query.groupBy) { + ;({ columnState, data, Table } = await handleGroupBy({ + clientConfig, collectionConfig, - search: typeof query?.search === 'string' ? query.search : undefined, - where: combineWhereConstraints([query?.where, baseListFilter]), - }), - }) - - const clientCollectionConfig = clientConfig.collections.find((c) => c.slug === collectionSlug) - - const { columnState, Table } = renderTable({ - clientCollectionConfig, - collectionConfig, - columns: collectionPreferences?.columns, - customCellProps, - docs: data.docs, - drawerSlug, - enableRowSelections, - i18n: req.i18n, - orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, - payload, - useAsTitle: collectionConfig.admin.useAsTitle, - }) + collectionSlug, + columns: collectionPreferences?.columns, + customCellProps, + drawerSlug, + enableRowSelections, + query, + req, + user, + where: whereWithMergedSearch, + })) + } else { + data = await req.payload.find({ + collection: collectionSlug, + depth: 0, + draft: true, + fallbackLocale: false, + includeLockStatus: true, + limit: query?.limit ? Number(query.limit) : undefined, + locale: req.locale, + overrideAccess: false, + page: query?.page ? Number(query.page) : undefined, + req, + sort: query?.sort, + user, + where: whereWithMergedSearch, + }) + ;({ columnState, Table } = renderTable({ + clientCollectionConfig: clientConfig.collections.find((c) => c.slug === collectionSlug), + collectionConfig, + columns: collectionPreferences?.columns, + customCellProps, + data, + drawerSlug, + enableRowSelections, + i18n: req.i18n, + orderableFieldName: collectionConfig.orderable === true ? '_order' : undefined, + payload: req.payload, + query, + useAsTitle: collectionConfig.admin.useAsTitle, + })) + } const renderedFilters = renderFilters(collectionConfig.fields, req.payload.importMap) @@ -249,6 +279,7 @@ export const renderListView = async ( const isInDrawer = Boolean(drawerSlug) // Needed to prevent: Only plain objects can be passed to Client Components from Server Components. Objects with toJSON methods are not supported. Convert it manually to a simple value before passing it to props. + // Is there a way to avoid this? The `where` object is already seemingly plain, but is not bc it originates from the params. query.where = query?.where ? 
JSON.parse(JSON.stringify(query?.where || {})) : undefined return { diff --git a/packages/payload/src/admin/functions/index.ts b/packages/payload/src/admin/functions/index.ts index 1d42c1cca0..aaa1106fe5 100644 --- a/packages/payload/src/admin/functions/index.ts +++ b/packages/payload/src/admin/functions/index.ts @@ -45,9 +45,15 @@ export type ListQuery = { * Use `transformColumnsToPreferences` and `transformColumnsToSearchParams` to convert it back and forth */ columns?: ColumnsFromURL + /* + * A string representing the field to group by, e.g. `category` + * A leading hyphen represents descending order, e.g. `-category` + */ + groupBy?: string limit?: number page?: number preset?: number | string + queryByGroup?: Record /* When provided, is automatically injected into the `where` object */ @@ -59,6 +65,10 @@ export type ListQuery = { export type BuildTableStateArgs = { collectionSlug: string | string[] columns?: ColumnPreference[] + data?: PaginatedDocs + /** + * @deprecated Use `data` instead + */ docs?: PaginatedDocs['docs'] enableRowSelections?: boolean orderableFieldName: string diff --git a/packages/payload/src/admin/views/list.ts b/packages/payload/src/admin/views/list.ts index 7097e0bd41..6a3b320aca 100644 --- a/packages/payload/src/admin/views/list.ts +++ b/packages/payload/src/admin/views/list.ts @@ -17,7 +17,7 @@ export type ListViewSlots = { BeforeListTable?: React.ReactNode Description?: React.ReactNode listMenuItems?: React.ReactNode[] - Table: React.ReactNode + Table: React.ReactNode | React.ReactNode[] } /** diff --git a/packages/payload/src/collections/config/types.ts b/packages/payload/src/collections/config/types.ts index 4414715544..97c4004c89 100644 --- a/packages/payload/src/collections/config/types.ts +++ b/packages/payload/src/collections/config/types.ts @@ -367,6 +367,13 @@ export type CollectionAdminOptions = { * - Set to `false` to exclude the entity from the sidebar / dashboard without disabling its routes. */ group?: false | Record | string + /** + * @experimental This option is currently in beta and may change in future releases and/or contain bugs. + * Use at your own risk. + * @description Enable grouping by a field in the list view. + * Uses `payload.findDistinct` under the hood to populate the group-by options. 
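+   * @example
+   * // Minimal usage on a collection config (mirrors the example in this PR's description):
+   * admin: {
+   *   groupBy: true
+   * }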
+ */ + groupBy?: boolean /** * Exclude the collection from the admin nav and routes */ diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 55a5207224..542f18c71d 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -1208,7 +1208,6 @@ export { findVersionsOperation } from './collections/operations/findVersions.js' export { restoreVersionOperation } from './collections/operations/restoreVersion.js' export { updateOperation } from './collections/operations/update.js' export { updateByIDOperation } from './collections/operations/updateByID.js' - export { buildConfig } from './config/build.js' export { type ClientConfig, diff --git a/packages/payload/src/preferences/types.ts b/packages/payload/src/preferences/types.ts index 0e4a137a39..245fd2ad63 100644 --- a/packages/payload/src/preferences/types.ts +++ b/packages/payload/src/preferences/types.ts @@ -37,6 +37,7 @@ export type ColumnPreference = { export type CollectionPreferences = { columns?: ColumnPreference[] editViewType?: 'default' | 'live-preview' + groupBy?: string limit?: number preset?: DefaultDocumentIDType sort?: string diff --git a/packages/payload/src/utilities/transformColumnPreferences.ts b/packages/payload/src/utilities/transformColumnPreferences.ts index d0412df476..b6619c9532 100644 --- a/packages/payload/src/utilities/transformColumnPreferences.ts +++ b/packages/payload/src/utilities/transformColumnPreferences.ts @@ -13,6 +13,10 @@ export type ColumnsFromURL = string[] export const transformColumnsToPreferences = ( columns: Column[] | ColumnPreference[] | ColumnsFromURL | string | undefined, ): ColumnPreference[] | undefined => { + if (!columns) { + return undefined + } + let columnsToTransform = columns // Columns that originate from the URL are a stringified JSON array and need to be parsed first diff --git a/packages/translations/src/clientKeys.ts b/packages/translations/src/clientKeys.ts index f50aa54f8f..71e84d9b6c 100644 --- a/packages/translations/src/clientKeys.ts +++ b/packages/translations/src/clientKeys.ts @@ -185,6 +185,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:confirmReindexDescription', 'general:confirmReindexDescriptionAll', 'general:copied', + 'general:clear', 'general:clearAll', 'general:copy', 'general:copyField', @@ -232,6 +233,7 @@ export const clientTranslationKeys = createClientTranslationKeys([ 'general:filterWhere', 'general:globals', 'general:goBack', + 'general:groupByLabel', 'general:isEditing', 'general:item', 'general:items', diff --git a/packages/translations/src/languages/ar.ts b/packages/translations/src/languages/ar.ts index ce23c1780c..300a893294 100644 --- a/packages/translations/src/languages/ar.ts +++ b/packages/translations/src/languages/ar.ts @@ -224,6 +224,7 @@ export const arTranslations: DefaultTranslationsObject = { backToDashboard: 'العودة للوحة التّحكّم', cancel: 'إلغاء', changesNotSaved: 'لم يتمّ حفظ التّغييرات. 
إن غادرت الآن ، ستفقد تغييراتك.', + clear: 'واضح', clearAll: 'امسح الكل', close: 'إغلاق', collapse: 'طيّ', @@ -294,6 +295,7 @@ export const arTranslations: DefaultTranslationsObject = { filterWhere: 'تصفية {{label}} حيث', globals: 'عامة', goBack: 'العودة', + groupByLabel: 'التجميع حسب {{label}}', import: 'استيراد', isEditing: 'يحرر', item: 'عنصر', diff --git a/packages/translations/src/languages/az.ts b/packages/translations/src/languages/az.ts index bc2ecb7ab7..1efebc54dc 100644 --- a/packages/translations/src/languages/az.ts +++ b/packages/translations/src/languages/az.ts @@ -229,6 +229,8 @@ export const azTranslations: DefaultTranslationsObject = { cancel: 'Ləğv et', changesNotSaved: 'Dəyişiklikləriniz saxlanılmayıb. İndi çıxsanız, dəyişikliklərinizi itirəcəksiniz.', + clear: 'Təmizlə', clearAll: 'Hamısını təmizlə', close: 'Bağla', collapse: 'Bağla', @@ -300,6 +302,7 @@ export const azTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} filtrlə', globals: 'Qloballar', goBack: 'Geri qayıt', + groupByLabel: '{{label}} ilə qruplaşdırın', import: 'İdxal', isEditing: 'redaktə edir', item: 'əşya', diff --git a/packages/translations/src/languages/bg.ts b/packages/translations/src/languages/bg.ts index 308778b051..8f99debc9a 100644 --- a/packages/translations/src/languages/bg.ts +++ b/packages/translations/src/languages/bg.ts @@ -229,6 +229,7 @@ export const bgTranslations: DefaultTranslationsObject = { backToDashboard: 'Обратно към таблото', cancel: 'Отмени', changesNotSaved: 'Промените ти не са запазени. 
Ако напуснеш сега, ще ги загубиш.', + clear: 'Ясно', clearAll: 'Изчисти всичко', close: 'Затвори', collapse: 'Свий', @@ -300,6 +301,7 @@ export const bgTranslations: DefaultTranslationsObject = { filterWhere: 'Филтрирай {{label}} където', globals: 'Глобални', goBack: 'Върни се', + groupByLabel: 'Групирай по {{label}}', import: 'Внос', isEditing: 'редактира', item: 'артикул', diff --git a/packages/translations/src/languages/bnBd.ts b/packages/translations/src/languages/bnBd.ts index 9a41c809f9..4a1b06d22e 100644 --- a/packages/translations/src/languages/bnBd.ts +++ b/packages/translations/src/languages/bnBd.ts @@ -231,6 +231,8 @@ export const bnBdTranslations: DefaultTranslationsObject = { cancel: 'বাতিল করুন', changesNotSaved: 'আপনার পরিবর্তনগুলি সংরক্ষণ করা হয়নি। আপনি যদি এখন চলে যান, তাহলে আপনার পরিবর্তনগুলি হারিয়ে যাবে।', + clear: 'সাফ করুন', clearAll: 'সমস্ত সাফ করুন', close: 'বন্ধ করুন', collapse: 'সংকুচিত করুন', @@ -302,6 +304,7 @@ export const bnBdTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} যেখানে ফিল্টার করুন', globals: 'গ্লোবালগুলি', goBack: 'পিছনে যান', + groupByLabel: '{{label}} অনুযায়ী গ্রুপ করুন', import: 'ইম্পোর্ট করুন', isEditing: 'সম্পাদনা করছেন', item: 'আইটেম', diff --git a/packages/translations/src/languages/bnIn.ts b/packages/translations/src/languages/bnIn.ts index 8c01eb2f78..0c527627b9 100644 --- a/packages/translations/src/languages/bnIn.ts +++ b/packages/translations/src/languages/bnIn.ts @@ -231,6 +231,7 @@ export const bnInTranslations: DefaultTranslationsObject = { cancel: 'বাতিল করুন', changesNotSaved: 'আপনার পরিবর্তনগুলি সংরক্ষণ করা হয়নি। আপনি যদি এখন চলে যান, তাহলে আপনার পরিবর্তনগুলি হারিয়ে যাবে।', + clear: 'স্পষ্ট', clearAll: 'সমস্ত সাফ করুন', close: 'বন্ধ করুন', collapse: 'সংকুচিত করুন', @@ -302,6 +303,7 @@ export const bnInTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} যেখানে ফিল্টার করুন', globals: 'গ্লোবালগুলি', goBack: 'পিছনে যান', + groupByLabel: '{{label}} দ্বারা গ্রুপ করুন', import: 'ইম্পোর্ট করুন', isEditing: 'সম্পাদনা করছেন', item: 'আইটেম', diff --git a/packages/translations/src/languages/ca.ts b/packages/translations/src/languages/ca.ts index c3c2ecead5..5d15a184de 100644 --- a/packages/translations/src/languages/ca.ts +++ b/packages/translations/src/languages/ca.ts @@ -230,6 +230,7 @@ export const caTranslations: DefaultTranslationsObject = { backToDashboard: 'Torna al tauler', cancel: 'Cancel·la', changesNotSaved: 'El teu document té canvis no desats. Si continues, els canvis es perdran.', + clear: 'Clar', clearAll: 'Esborra-ho tot', close: 'Tanca', collapse: 'Replegar', @@ -301,6 +302,7 @@ export const caTranslations: DefaultTranslationsObject = { filterWhere: 'Filtra {{label}} on', globals: 'Globals', goBack: 'Torna enrere', + groupByLabel: 'Agrupa per {{label}}', import: 'Importar', isEditing: 'esta editant', item: 'element', diff --git a/packages/translations/src/languages/cs.ts b/packages/translations/src/languages/cs.ts index 7f8304f59b..6259af3519 100644 --- a/packages/translations/src/languages/cs.ts +++ b/packages/translations/src/languages/cs.ts @@ -229,6 +229,7 @@ export const csTranslations: DefaultTranslationsObject = { backToDashboard: 'Zpět na nástěnku', cancel: 'Zrušit', changesNotSaved: 'Vaše změny nebyly uloženy. 
Pokud teď odejdete, ztratíte své změny.', + clear: 'Jasný', clearAll: 'Vymazat vše', close: 'Zavřít', collapse: 'Sbalit', @@ -299,6 +300,7 @@ export const csTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrovat {{label}} kde', globals: 'Globální', goBack: 'Vrátit se', + groupByLabel: 'Seskupit podle {{label}}', import: 'Import', isEditing: 'upravuje', item: 'položka', diff --git a/packages/translations/src/languages/da.ts b/packages/translations/src/languages/da.ts index ec1ef4b6ef..ba32797276 100644 --- a/packages/translations/src/languages/da.ts +++ b/packages/translations/src/languages/da.ts @@ -228,6 +228,7 @@ export const daTranslations: DefaultTranslationsObject = { cancel: 'Anuller', changesNotSaved: 'Dine ændringer er ikke blevet gemt. Hvis du forlader siden, vil din ændringer gå tabt.', + clear: 'Klar', clearAll: 'Ryd alt', close: 'Luk', collapse: 'Skjul', @@ -298,6 +299,7 @@ export const daTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} hvor', globals: 'Globale', goBack: 'Gå tilbage', + groupByLabel: 'Gruppér efter {{label}}', import: 'Import', isEditing: 'redigerer', item: 'vare', diff --git a/packages/translations/src/languages/de.ts b/packages/translations/src/languages/de.ts index ae924bb363..5adc816669 100644 --- a/packages/translations/src/languages/de.ts +++ b/packages/translations/src/languages/de.ts @@ -235,6 +235,8 @@ export const deTranslations: DefaultTranslationsObject = { cancel: 'Abbrechen', changesNotSaved: 'Deine Änderungen wurden nicht gespeichert. Wenn du diese Seite verlässt, gehen deine Änderungen verloren.', + clear: 'Löschen', clearAll: 'Alles löschen', close: 'Schließen', collapse: 'Einklappen', @@ -306,6 +308,7 @@ export const deTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}}, wo', globals: 'Globale Dokumente', goBack: 'Zurück', + groupByLabel: 'Gruppieren nach {{label}}', import: 'Importieren', isEditing: 'bearbeitet gerade', item: 'Artikel', diff --git a/packages/translations/src/languages/en.ts b/packages/translations/src/languages/en.ts index e1600e45ae..6e4116e7fe 100644 --- a/packages/translations/src/languages/en.ts +++ b/packages/translations/src/languages/en.ts @@ -230,6 +230,7 @@ export const enTranslations = { cancel: 'Cancel', changesNotSaved: 'Your changes have not been saved. 
If you leave now, you will lose your changes.', + clear: 'Clear', clearAll: 'Clear All', close: 'Close', collapse: 'Collapse', @@ -301,6 +302,7 @@ export const enTranslations = { filterWhere: 'Filter {{label}} where', globals: 'Globals', goBack: 'Go back', + groupByLabel: 'Group by {{label}}', import: 'Import', isEditing: 'is editing', item: 'item', diff --git a/packages/translations/src/languages/es.ts b/packages/translations/src/languages/es.ts index d91a45c21e..e61ad47f6e 100644 --- a/packages/translations/src/languages/es.ts +++ b/packages/translations/src/languages/es.ts @@ -234,6 +234,7 @@ export const esTranslations: DefaultTranslationsObject = { cancel: 'Cancelar', changesNotSaved: 'Tus cambios no han sido guardados. Si te sales ahora, se perderán tus cambios.', + clear: 'Claro', clearAll: 'Limpiar todo', close: 'Cerrar', collapse: 'Contraer', @@ -305,6 +306,7 @@ export const esTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrar {{label}} donde', globals: 'Globales', goBack: 'Volver', + groupByLabel: 'Agrupar por {{label}}', import: 'Importar', isEditing: 'está editando', item: 'artículo', diff --git a/packages/translations/src/languages/et.ts b/packages/translations/src/languages/et.ts index 15c77ebea4..f15b3adc54 100644 --- a/packages/translations/src/languages/et.ts +++ b/packages/translations/src/languages/et.ts @@ -227,6 +227,7 @@ export const etTranslations: DefaultTranslationsObject = { backToDashboard: 'Tagasi töölaua juurde', cancel: 'Tühista', changesNotSaved: 'Teie muudatusi pole salvestatud. Kui lahkute praegu, kaotate oma muudatused.', + clear: 'Selge', clearAll: 'Tühjenda kõik', close: 'Sulge', collapse: 'Ahenda', @@ -297,6 +298,7 @@ export const etTranslations: DefaultTranslationsObject = { filterWhere: 'Filtreeri {{label}} kus', globals: 'Globaalsed', goBack: 'Mine tagasi', + groupByLabel: 'Rühmita {{label}} järgi', import: 'Importimine', isEditing: 'muudab', item: 'üksus', diff --git a/packages/translations/src/languages/fa.ts b/packages/translations/src/languages/fa.ts index 1284b1d942..4b6d8db058 100644 --- a/packages/translations/src/languages/fa.ts +++ b/packages/translations/src/languages/fa.ts @@ -227,6 +227,7 @@ export const faTranslations: DefaultTranslationsObject = { cancel: 'لغو', changesNotSaved: 'تغییرات شما ذخیره نشده، اگر این برگه را ترک کنید. تمام تغییرات از دست خواهد رفت.', + clear: 'روشن', clearAll: 'همه را پاک کنید', close: 'بستن', collapse: 'بستن', @@ -298,6 +299,7 @@ export const faTranslations: DefaultTranslationsObject = { filterWhere: 'علامت گذاری کردن {{label}} جایی که', globals: 'سراسری', goBack: 'برگشت', + groupByLabel: 'گروه بندی بر اساس {{label}}', import: 'واردات', isEditing: 'در حال ویرایش است', item: 'مورد', diff --git a/packages/translations/src/languages/fr.ts b/packages/translations/src/languages/fr.ts index c5eab55fda..926a9917b5 100644 --- a/packages/translations/src/languages/fr.ts +++ b/packages/translations/src/languages/fr.ts @@ -237,6 +237,7 @@ export const frTranslations: DefaultTranslationsObject = { cancel: 'Annuler', changesNotSaved: 'Vos modifications n’ont pas été enregistrées. 
Vous perdrez vos modifications si vous quittez maintenant.', + clear: 'Clair', clearAll: 'Tout effacer', close: 'Fermer', collapse: 'Réduire', @@ -308,6 +309,7 @@ export const frTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrer {{label}} où', globals: 'Globals(es)', goBack: 'Retourner', + groupByLabel: 'Regrouper par {{label}}', import: 'Importation', isEditing: 'est en train de modifier', item: 'article', diff --git a/packages/translations/src/languages/he.ts b/packages/translations/src/languages/he.ts index f7a8d4ff93..394335db64 100644 --- a/packages/translations/src/languages/he.ts +++ b/packages/translations/src/languages/he.ts @@ -222,6 +222,8 @@ export const heTranslations: DefaultTranslationsObject = { backToDashboard: 'חזרה ללוח המחוונים', cancel: 'ביטול', changesNotSaved: 'השינויים שלך לא נשמרו. אם תצא כעת, תאבד את השינויים שלך.', + clear: 'נקה', clearAll: 'נקה הכל', close: 'סגור', collapse: 'כווץ', @@ -292,6 +294,7 @@ export const heTranslations: DefaultTranslationsObject = { filterWhere: 'סנן {{label}} בהם', globals: 'גלובלים', goBack: 'חזור', + groupByLabel: 'קבץ לפי {{label}}', import: 'יבוא', isEditing: 'עורך', item: 'פריט', diff --git a/packages/translations/src/languages/hr.ts b/packages/translations/src/languages/hr.ts index 320217c8ef..5f2b7d7db2 100644 --- a/packages/translations/src/languages/hr.ts +++ b/packages/translations/src/languages/hr.ts @@ -230,6 +230,7 @@ export const hrTranslations: DefaultTranslationsObject = { backToDashboard: 'Natrag na nadzornu ploču', cancel: 'Otkaži', changesNotSaved: 'Vaše promjene nisu spremljene. Ako izađete sada, izgubit ćete promjene.', + clear: 'Jasan', clearAll: 'Očisti sve', close: 'Zatvori', collapse: 'Sažmi', @@ -301,6 +302,7 @@ export const hrTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} gdje', globals: 'Globali', goBack: 'Vrati se', + groupByLabel: 'Grupiraj po {{label}}', import: 'Uvoz', isEditing: 'uređuje', item: 'stavka', diff --git a/packages/translations/src/languages/hu.ts b/packages/translations/src/languages/hu.ts index 8aaa81144b..7cad548bb0 100644 --- a/packages/translations/src/languages/hu.ts +++ b/packages/translations/src/languages/hu.ts @@ -232,6 +232,7 @@ export const huTranslations: DefaultTranslationsObject = { cancel: 'Mégsem', changesNotSaved: 'A módosítások nem lettek mentve. 
Ha most távozik, elveszíti a változtatásokat.', + clear: 'Tiszta', clearAll: 'Törölj mindent', close: 'Bezárás', collapse: 'Összecsukás', @@ -303,6 +304,7 @@ export const huTranslations: DefaultTranslationsObject = { filterWhere: 'Szűrő {{label}} ahol', globals: 'Globálisok', goBack: 'Vissza', + groupByLabel: 'Csoportosítás {{label}} szerint', import: 'Behozatal', isEditing: 'szerkeszt', item: 'tétel', diff --git a/packages/translations/src/languages/hy.ts b/packages/translations/src/languages/hy.ts index 704b20d8e1..c35a995178 100644 --- a/packages/translations/src/languages/hy.ts +++ b/packages/translations/src/languages/hy.ts @@ -230,6 +230,8 @@ export const hyTranslations: DefaultTranslationsObject = { cancel: 'Չեղարկել', changesNotSaved: 'Ձեր փոփոխությունները չեն պահպանվել։ Եթե հիմա հեռանաք, կկորցնեք չպահպանված փոփոխությունները։', + clear: 'Մաքրել', clearAll: 'Մաքրել բոլորը', close: 'Փակել', collapse: 'Փակել', @@ -301,6 +303,7 @@ export const hyTranslations: DefaultTranslationsObject = { filterWhere: 'Ֆիլտրել {{label}}-ը, որտեղ', globals: 'Համընդհանուրներ', goBack: 'Հետ գնալ', + groupByLabel: 'Խմբավորել {{label}}-ով', import: 'Ներմուծում', isEditing: 'խմբագրում է', item: 'տարր', diff --git a/packages/translations/src/languages/it.ts b/packages/translations/src/languages/it.ts index 3a51ef09b2..86e0d42fb5 100644 --- a/packages/translations/src/languages/it.ts +++ b/packages/translations/src/languages/it.ts @@ -234,6 +234,7 @@ export const itTranslations: DefaultTranslationsObject = { backToDashboard: 'Torna alla Dashboard', cancel: 'Cancella', changesNotSaved: 'Le tue modifiche non sono state salvate. Se esci ora, verranno perse.', + clear: 'Chiara', clearAll: 'Cancella Tutto', close: 'Chiudere', collapse: 'Comprimi', @@ -304,6 +305,7 @@ export const itTranslations: DefaultTranslationsObject = { filterWhere: 'Filtra {{label}} se', globals: 'Globali', goBack: 'Torna indietro', + groupByLabel: 'Raggruppa per {{label}}', import: 'Importare', isEditing: 'sta modificando', item: 'articolo', diff --git a/packages/translations/src/languages/ja.ts b/packages/translations/src/languages/ja.ts index 024cf4e1fe..128252db06 100644 --- a/packages/translations/src/languages/ja.ts +++ b/packages/translations/src/languages/ja.ts @@ -230,6 +230,7 @@ export const jaTranslations: DefaultTranslationsObject = { backToDashboard: 'ダッシュボードに戻る', cancel: 'キャンセル', changesNotSaved: '未保存の変更があります。このまま画面を離れると内容が失われます。', + clear: 'クリア', clearAll: 'すべてクリア', close: '閉じる', collapse: '閉じる', @@ -301,6 +302,7 @@ export const jaTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} の絞り込み', globals: 'グローバル', goBack: '戻る', + groupByLabel: '{{label}}でグループ化する', import: '輸入', isEditing: '編集中', item: 'アイテム', diff --git a/packages/translations/src/languages/ko.ts b/packages/translations/src/languages/ko.ts index 0d5af0445e..f093b2d0cd 100644 --- a/packages/translations/src/languages/ko.ts +++ b/packages/translations/src/languages/ko.ts @@ -227,6 +227,8 @@ export const koTranslations: DefaultTranslationsObject = { backToDashboard: '대시보드로 돌아가기', cancel: '취소', changesNotSaved: '변경 사항이 저장되지 않았습니다. 지금 떠나면 변경 사항을 잃게 됩니다.', + clear: '지우기', clearAll: '모두 지우기', close: '닫기', collapse: '접기', @@ -297,6 +299,7 @@ export const koTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} 필터링 조건', globals: '글로벌', goBack: '돌아가기', + groupByLabel: '{{label}}로 그룹화', import: '수입', isEditing: '편집 중', item: '항목', diff --git a/packages/translations/src/languages/lt.ts b/packages/translations/src/languages/lt.ts index 94048058cd..ac2afd6c62 100644 --- a/packages/translations/src/languages/lt.ts +++ b/packages/translations/src/languages/lt.ts @@ -232,6 +232,7 @@ export const ltTranslations: DefaultTranslationsObject = { cancel: 'Atšaukti', changesNotSaved: 'Jūsų pakeitimai nebuvo išsaugoti. Jei dabar išeisite, prarasite savo pakeitimus.', + clear: 'Aišku', clearAll: 'Išvalyti viską', close: 'Uždaryti', collapse: 'Susikolimas', @@ -303,6 +304,7 @@ export const ltTranslations: DefaultTranslationsObject = { filterWhere: 'Filtruoti {{label}}, kur', globals: 'Globalai', goBack: 'Grįžkite', + groupByLabel: 'Grupuoti pagal {{label}}', import: 'Importas', isEditing: 'redaguoja', item: 'daiktas', diff --git a/packages/translations/src/languages/lv.ts b/packages/translations/src/languages/lv.ts index 0dcd973687..62c907615b 100644 --- a/packages/translations/src/languages/lv.ts +++ b/packages/translations/src/languages/lv.ts @@ -229,6 +229,8 @@ export const lvTranslations: DefaultTranslationsObject = { backToDashboard: 'Atpakaļ uz paneli', cancel: 'Atcelt', changesNotSaved: 'Jūsu izmaiņas nav saglabātas. Ja tagad pametīsiet, izmaiņas tiks zaudētas.', + clear: 'Notīrīt', clearAll: 'Notīrīt visu', close: 'Aizvērt', collapse: 'Sakļaut', @@ -300,6 +302,7 @@ export const lvTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrēt {{label}} kur', globals: 'Globālie', goBack: 'Doties atpakaļ', + groupByLabel: 'Grupēt pēc {{label}}', import: 'Imports', isEditing: 'redzē', item: 'vienība', diff --git a/packages/translations/src/languages/my.ts b/packages/translations/src/languages/my.ts index 78c87fa725..ec822f8359 100644 --- a/packages/translations/src/languages/my.ts +++ b/packages/translations/src/languages/my.ts @@ -231,6 +231,7 @@ export const myTranslations: DefaultTranslationsObject = { cancel: 'မလုပ်တော့ပါ။', changesNotSaved: 'သင်၏ပြောင်းလဲမှုများကို မသိမ်းဆည်းရသေးပါ။ ယခု စာမျက်နှာက ထွက်လိုက်ပါက သင်၏ပြောင်းလဲမှုများ အကုန် ဆုံးရှုံးသွားပါမည်။ အကုန်နော်။', + clear: 'ရှင်းလင်းပါ', clearAll: 'အားလုံးကိုရှင်းလင်းပါ', close: 'ပိတ်', collapse: 'ခေါက်သိမ်းပါ။', @@ -302,6 +303,7 @@ export const myTranslations: DefaultTranslationsObject = { filterWhere: 'နေရာတွင် စစ်ထုတ်ပါ။', globals: 'Globals', goBack: 'နောက်သို့', + groupByLabel: '{{label}} အလိုက် အုပ်စုဖွဲ့ပါ', import: 'သွင်းကုန်', isEditing: 'ပြင်ဆင်နေသည်', item: 'barang', diff --git a/packages/translations/src/languages/nb.ts b/packages/translations/src/languages/nb.ts index 291b85b6f7..c454ab3e7d 100644 --- a/packages/translations/src/languages/nb.ts +++ b/packages/translations/src/languages/nb.ts @@ -229,6 +229,7 @@ export const nbTranslations: DefaultTranslationsObject = { cancel: 'Avbryt', changesNotSaved: 'Endringene dine er ikke lagret. 
Hvis du forlater nå, vil du miste endringene dine.', + clear: 'Tydelig', clearAll: 'Tøm alt', close: 'Lukk', collapse: 'Skjul', @@ -300,6 +301,7 @@ export const nbTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrer {{label}} der', globals: 'Globale variabler', goBack: 'Gå tilbake', + groupByLabel: 'Grupper etter {{label}}', import: 'Import', isEditing: 'redigerer', item: 'vare', diff --git a/packages/translations/src/languages/nl.ts b/packages/translations/src/languages/nl.ts index 1ba7d51a26..33ab436238 100644 --- a/packages/translations/src/languages/nl.ts +++ b/packages/translations/src/languages/nl.ts @@ -233,6 +233,7 @@ export const nlTranslations: DefaultTranslationsObject = { cancel: 'Annuleren', changesNotSaved: 'Uw wijzigingen zijn niet bewaard. Als u weggaat zullen de wijzigingen verloren gaan.', + clear: 'Duidelijk', clearAll: 'Alles wissen', close: 'Dichtbij', collapse: 'Samenvouwen', @@ -304,6 +305,7 @@ export const nlTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} waar', globals: 'Globalen', goBack: 'Ga terug', + groupByLabel: 'Groepeer op {{label}}', import: 'Importeren', isEditing: 'is aan het bewerken', item: 'artikel', diff --git a/packages/translations/src/languages/pl.ts b/packages/translations/src/languages/pl.ts index 1e60b6ac79..dd5d4ab4fe 100644 --- a/packages/translations/src/languages/pl.ts +++ b/packages/translations/src/languages/pl.ts @@ -229,6 +229,7 @@ export const plTranslations: DefaultTranslationsObject = { cancel: 'Anuluj', changesNotSaved: 'Twoje zmiany nie zostały zapisane. Jeśli teraz wyjdziesz, stracisz swoje zmiany.', + clear: 'Jasne', clearAll: 'Wyczyść wszystko', close: 'Zamknij', collapse: 'Zwiń', @@ -300,6 +301,7 @@ export const plTranslations: DefaultTranslationsObject = { filterWhere: 'Filtruj gdzie', globals: 'Globalne', goBack: 'Wróć', + groupByLabel: 'Grupuj według {{label}}', import: 'Import', isEditing: 'edytuje', item: 'przedmiot', diff --git a/packages/translations/src/languages/pt.ts b/packages/translations/src/languages/pt.ts index ac01e47c00..6a26c458b1 100644 --- a/packages/translations/src/languages/pt.ts +++ b/packages/translations/src/languages/pt.ts @@ -230,6 +230,7 @@ export const ptTranslations: DefaultTranslationsObject = { cancel: 'Cancelar', changesNotSaved: 'Suas alterações não foram salvas. Se você sair agora, essas alterações serão perdidas.', + clear: 'Claro', clearAll: 'Limpar Tudo', close: 'Fechar', collapse: 'Recolher', @@ -301,6 +302,7 @@ export const ptTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrar {{label}} em que', globals: 'Globais', goBack: 'Voltar', + groupByLabel: 'Agrupar por {{label}}', import: 'Importar', isEditing: 'está editando', item: 'item', diff --git a/packages/translations/src/languages/ro.ts b/packages/translations/src/languages/ro.ts index 34bae916f4..8d58ed276b 100644 --- a/packages/translations/src/languages/ro.ts +++ b/packages/translations/src/languages/ro.ts @@ -234,6 +234,7 @@ export const roTranslations: DefaultTranslationsObject = { cancel: 'Anulați', changesNotSaved: 'Modificările dvs. nu au fost salvate. 
Dacă plecați acum, vă veți pierde modificările.', + clear: 'Clar', clearAll: 'Șterge tot', close: 'Închide', collapse: 'Colaps', @@ -305,6 +306,7 @@ export const roTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrează {{label}} unde', globals: 'Globale', goBack: 'Înapoi', + groupByLabel: 'Grupare după {{label}}', import: 'Import', isEditing: 'editează', item: 'articol', diff --git a/packages/translations/src/languages/rs.ts b/packages/translations/src/languages/rs.ts index 1f0701c3f4..d8a2e38249 100644 --- a/packages/translations/src/languages/rs.ts +++ b/packages/translations/src/languages/rs.ts @@ -230,6 +230,7 @@ export const rsTranslations: DefaultTranslationsObject = { backToDashboard: 'Назад на контролни панел', cancel: 'Откажи', changesNotSaved: 'Ваше промене нису сачуване. Ако изађете сада, изгубићете промене.', + clear: 'Jasno', clearAll: 'Obriši sve', close: 'Затвори', collapse: 'Скупи', @@ -301,6 +302,7 @@ export const rsTranslations: DefaultTranslationsObject = { filterWhere: 'Филтер {{label}} где', globals: 'Глобали', goBack: 'Врати се', + groupByLabel: 'Grupiši po {{label}}', import: 'Uvoz', isEditing: 'уређује', item: 'artikal', diff --git a/packages/translations/src/languages/rsLatin.ts b/packages/translations/src/languages/rsLatin.ts index 2ae83c93db..1207d5cfb4 100644 --- a/packages/translations/src/languages/rsLatin.ts +++ b/packages/translations/src/languages/rsLatin.ts @@ -230,6 +230,7 @@ export const rsLatinTranslations: DefaultTranslationsObject = { backToDashboard: 'Nazad na kontrolni panel', cancel: 'Otkaži', changesNotSaved: 'Vaše promene nisu sačuvane. Ako izađete sada, izgubićete promene.', + clear: 'Jasno', clearAll: 'Očisti sve', close: 'Zatvori', collapse: 'Skupi', @@ -301,6 +302,7 @@ export const rsLatinTranslations: DefaultTranslationsObject = { filterWhere: 'Filter {{label}} gde', globals: 'Globali', goBack: 'Vrati se', + groupByLabel: 'Grupiši po {{label}}', import: 'Uvoz', isEditing: 'uređuje', item: 'stavka', diff --git a/packages/translations/src/languages/ru.ts b/packages/translations/src/languages/ru.ts index b23b2ef9fd..4eba3f49cc 100644 --- a/packages/translations/src/languages/ru.ts +++ b/packages/translations/src/languages/ru.ts @@ -232,6 +232,7 @@ export const ruTranslations: DefaultTranslationsObject = { cancel: 'Отмена', changesNotSaved: 'Ваши изменения не были сохранены. Если вы сейчас уйдете, то потеряете свои изменения.', + clear: 'Четкий', clearAll: 'Очистить все', close: 'Закрыть', collapse: 'Свернуть', @@ -303,6 +304,7 @@ export const ruTranslations: DefaultTranslationsObject = { filterWhere: 'Где фильтровать', globals: 'Глобальные', goBack: 'Назад', + groupByLabel: 'Группировать по {{label}}', import: 'Импорт', isEditing: 'редактирует', item: 'предмет', diff --git a/packages/translations/src/languages/sk.ts b/packages/translations/src/languages/sk.ts index 4c24b250d3..13a6c5d5aa 100644 --- a/packages/translations/src/languages/sk.ts +++ b/packages/translations/src/languages/sk.ts @@ -232,6 +232,7 @@ export const skTranslations: DefaultTranslationsObject = { backToDashboard: 'Späť na nástenku', cancel: 'Zrušiť', changesNotSaved: 'Vaše zmeny neboli uložené. 
Ak teraz odídete, stratíte svoje zmeny.', + clear: 'Jasný', clearAll: 'Vymazať všetko', close: 'Zavrieť', collapse: 'Zbaliť', @@ -302,6 +303,7 @@ export const skTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrovat kde je {{label}}', globals: 'Globalné', goBack: 'Vrátiť sa', + groupByLabel: 'Zoskupiť podľa {{label}}', import: 'Dovoz', isEditing: 'upravuje', item: 'položka', diff --git a/packages/translations/src/languages/sl.ts b/packages/translations/src/languages/sl.ts index 02e046a58b..6880c5a6e8 100644 --- a/packages/translations/src/languages/sl.ts +++ b/packages/translations/src/languages/sl.ts @@ -230,6 +230,7 @@ export const slTranslations: DefaultTranslationsObject = { cancel: 'Prekliči', changesNotSaved: 'Vaše spremembe niso shranjene. Če zapustite zdaj, boste izgubili svoje spremembe.', + clear: 'Čisto', clearAll: 'Počisti vse', close: 'Zapri', collapse: 'Strni', @@ -300,6 +301,7 @@ export const slTranslations: DefaultTranslationsObject = { filterWhere: 'Filtriraj {{label}} kjer', globals: 'Globalne nastavitve', goBack: 'Nazaj', + groupByLabel: 'Razvrsti po {{label}}', import: 'Uvoz', isEditing: 'ureja', item: 'predmet', diff --git a/packages/translations/src/languages/sv.ts b/packages/translations/src/languages/sv.ts index caef27df3b..bad3e2435e 100644 --- a/packages/translations/src/languages/sv.ts +++ b/packages/translations/src/languages/sv.ts @@ -229,6 +229,7 @@ export const svTranslations: DefaultTranslationsObject = { cancel: 'Avbryt', changesNotSaved: 'Dina ändringar har inte sparats. Om du lämnar nu kommer du att förlora dina ändringar.', + clear: 'Tydlig', clearAll: 'Rensa alla', close: 'Stänga', collapse: 'Kollapsa', @@ -300,6 +301,7 @@ export const svTranslations: DefaultTranslationsObject = { filterWhere: 'Filtrera {{label}} där', globals: 'Globala', goBack: 'Gå tillbaka', + groupByLabel: 'Gruppera efter {{label}}', import: 'Importera', isEditing: 'redigerar', item: 'artikel', diff --git a/packages/translations/src/languages/th.ts b/packages/translations/src/languages/th.ts index 41cb9878b8..3346be921f 100644 --- a/packages/translations/src/languages/th.ts +++ b/packages/translations/src/languages/th.ts @@ -224,6 +224,8 @@ export const thTranslations: DefaultTranslationsObject = { backToDashboard: 'กลับไปหน้าแดชบอร์ด', cancel: 'ยกเลิก', changesNotSaved: 'การเปลี่ยนแปลงยังไม่ได้ถูกบันทึก ถ้าคุณออกตอนนี้ สิ่งที่แก้ไขไว้จะหายไป', + clear: 'ล้าง', clearAll: 'ล้างทั้งหมด', close: 'ปิด', collapse: 'ยุบ', @@ -295,6 +297,7 @@ export const thTranslations: DefaultTranslationsObject = { filterWhere: 'กรอง {{label}} เฉพาะ', globals: 'Globals', goBack: 'กลับไป', + groupByLabel: 'จัดกลุ่มตาม {{label}}', import: 'นำเข้า', isEditing: 'กำลังแก้ไข', item: 'รายการ', diff --git a/packages/translations/src/languages/tr.ts b/packages/translations/src/languages/tr.ts index 1daaae2925..b9d9551601 100644 --- a/packages/translations/src/languages/tr.ts +++ b/packages/translations/src/languages/tr.ts @@ -233,6 +233,7 @@ export const trTranslations: DefaultTranslationsObject = { cancel: 'İptal', changesNotSaved: 'Değişiklikleriniz henüz kaydedilmedi. 
Eğer bu sayfayı terk ederseniz değişiklikleri kaybedeceksiniz.', + clear: 'Temiz', clearAll: 'Hepsini Temizle', close: 'Kapat', collapse: 'Daralt', @@ -304,6 +305,7 @@ export const trTranslations: DefaultTranslationsObject = { filterWhere: '{{label}} filtrele:', globals: 'Globaller', goBack: 'Geri dön', + groupByLabel: "{{label}}'ye göre grupla", import: 'İthalat', isEditing: 'düzenliyor', item: 'öğe', diff --git a/packages/translations/src/languages/uk.ts b/packages/translations/src/languages/uk.ts index eb33c1daac..1d84f1eb7b 100644 --- a/packages/translations/src/languages/uk.ts +++ b/packages/translations/src/languages/uk.ts @@ -230,6 +230,7 @@ export const ukTranslations: DefaultTranslationsObject = { backToDashboard: 'Повернутись до головної сторінки', cancel: 'Скасувати', changesNotSaved: 'Ваши зміни не були збережені. Якщо ви вийдете зараз, то втратите свої зміни.', + clear: 'Чітко', clearAll: 'Очистити все', close: 'Закрити', collapse: 'Згорнути', @@ -300,6 +301,7 @@ export const ukTranslations: DefaultTranslationsObject = { filterWhere: 'Де фільтрувати {{label}}', globals: 'Глобальні', goBack: 'Повернутися', + groupByLabel: 'Групувати за {{label}}', import: 'Імпорт', isEditing: 'редагує', item: 'предмет', diff --git a/packages/translations/src/languages/vi.ts b/packages/translations/src/languages/vi.ts index 7f747ef15f..1af0493b64 100644 --- a/packages/translations/src/languages/vi.ts +++ b/packages/translations/src/languages/vi.ts @@ -228,6 +228,7 @@ export const viTranslations: DefaultTranslationsObject = { backToDashboard: 'Quay lại bảng điều khiển', cancel: 'Hủy', changesNotSaved: 'Thay đổi chưa được lưu lại. Bạn sẽ mất bản chỉnh sửa nếu thoát bây giờ.', + clear: 'Rõ ràng', clearAll: 'Xóa tất cả', close: 'Gần', collapse: 'Thu gọn', @@ -299,6 +300,7 @@ export const viTranslations: DefaultTranslationsObject = { filterWhere: 'Lọc {{label}} với điều kiện:', globals: 'Toàn thể (globals)', goBack: 'Quay lại', + groupByLabel: 'Nhóm theo {{label}}', import: 'Nhập khẩu', isEditing: 'đang chỉnh sửa', item: 'mặt hàng', diff --git a/packages/translations/src/languages/zh.ts b/packages/translations/src/languages/zh.ts index 84a477ba71..b8849294c8 100644 --- a/packages/translations/src/languages/zh.ts +++ b/packages/translations/src/languages/zh.ts @@ -218,6 +218,7 @@ export const zhTranslations: DefaultTranslationsObject = { backToDashboard: '返回到仪表板', cancel: '取消', changesNotSaved: '您的更改尚未保存。您确定要离开吗?', + clear: '清晰', clearAll: '清除全部', close: '关闭', collapse: '折叠', @@ -286,6 +287,7 @@ export const zhTranslations: DefaultTranslationsObject = { filterWhere: '过滤{{label}}', globals: '全局', goBack: '返回', + groupByLabel: '按{{label}}分组', import: '导入', isEditing: '正在编辑', item: '条目', diff --git a/packages/translations/src/languages/zhTw.ts b/packages/translations/src/languages/zhTw.ts index e659462f6b..451bc4fd21 100644 --- a/packages/translations/src/languages/zhTw.ts +++ b/packages/translations/src/languages/zhTw.ts @@ -217,6 +217,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { backToDashboard: '返回到控制面板', cancel: '取消', changesNotSaved: '您還有尚未儲存的變更。您確定要離開嗎?', + clear: '清晰', clearAll: '清除全部', close: '關閉', collapse: '折疊', @@ -285,6 +286,7 @@ export const zhTwTranslations: DefaultTranslationsObject = { filterWhere: '過濾{{label}}', globals: '全域', goBack: '返回', + groupByLabel: '按照 {{label}} 分類', import: '進口', isEditing: '正在編輯', item: '物品', diff --git a/packages/ui/src/elements/ColumnSelector/index.tsx b/packages/ui/src/elements/ColumnSelector/index.tsx index caffabe9d7..f208d87e4c 100644 
--- a/packages/ui/src/elements/ColumnSelector/index.tsx +++ b/packages/ui/src/elements/ColumnSelector/index.tsx @@ -21,7 +21,7 @@ export const ColumnSelector: React.FC = ({ collectionSlug }) => { const filteredColumns = useMemo( () => - columns.filter( + columns?.filter( (col) => !(fieldIsHiddenOrDisabled(col.field) && !fieldIsID(col.field)) && !col?.field?.admin?.disableListColumn, diff --git a/packages/ui/src/elements/DeleteMany/index.tsx b/packages/ui/src/elements/DeleteMany/index.tsx index 561cf39645..0c1a6d63c4 100644 --- a/packages/ui/src/elements/DeleteMany/index.tsx +++ b/packages/ui/src/elements/DeleteMany/index.tsx @@ -20,18 +20,23 @@ import { parseSearchParams } from '../../utilities/parseSearchParams.js' import { ConfirmationModal } from '../ConfirmationModal/index.js' import { ListSelectionButton } from '../ListSelection/index.js' -const confirmManyDeleteDrawerSlug = `confirm-delete-many-docs` - export type Props = { collection: ClientCollectionConfig + /** + * When multiple DeleteMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string + /** + * When multiple PublishMany components are rendered on the page, this will differentiate them. + */ title?: string } export const DeleteMany: React.FC = (props) => { - const { collection: { slug } = {} } = props + const { collection: { slug } = {}, modalPrefix } = props const { permissions } = useAuth() - const { count, getSelectedIds, selectAll, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() const router = useRouter() const searchParams = useSearchParams() const { clearRouteCache } = useRouteCache() @@ -39,13 +44,14 @@ export const DeleteMany: React.FC = (props) => { const collectionPermissions = permissions?.collections?.[slug] const hasDeletePermission = collectionPermissions?.delete + const selectingAll = selectAll === SelectAllStatus.AllAvailable + + const ids = selectingAll ? [] : selectedIDs + if (selectAll === SelectAllStatus.None || !hasDeletePermission) { return null } - const selectingAll = selectAll === SelectAllStatus.AllAvailable - const selectedIDs = !selectingAll ? getSelectedIds() : [] - return ( = (props) => { clearRouteCache() }} + modalPrefix={modalPrefix} search={parseSearchParams(searchParams)?.search as string} selections={{ [slug]: { all: selectAll === SelectAllStatus.AllAvailable, - ids: selectedIDs, - totalCount: selectingAll ? count : selectedIDs.length, + ids, + totalCount: selectingAll ? count : ids.length, }, }} where={parseSearchParams(searchParams)?.where as Where} @@ -91,6 +98,10 @@ type DeleteMany_v4Props = { * A callback function to be called after the delete request is completed. */ afterDelete?: (result: AfterDeleteResult) => void + /** + * When multiple DeleteMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string /** * Optionally pass a search string to filter the documents to be deleted. * @@ -126,8 +137,15 @@ type DeleteMany_v4Props = { * * If you are deleting monomorphic documents, shape your `selections` to match the polymorphic structure. 
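 * @example
 * // Editor's note: a hypothetical usage sketch showing the polymorphic
 * // `selections` shape described above; this is not part of the original file
 * // or patch, and the collection slugs, IDs, and counts are made up.
 * <DeleteMany_v4
 *   selections={{
 *     posts: { all: false, ids: [1, 2, 3], totalCount: 3 },
 *     pages: { all: true, ids: [], totalCount: 12 },
 *   }}
 *   afterDelete={(result) => {
 *     console.log(result)
 *   }}
 * />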
*/ -export function DeleteMany_v4({ afterDelete, search, selections, where }: DeleteMany_v4Props) { +export function DeleteMany_v4({ + afterDelete, + modalPrefix, + search, + selections, + where, +}: DeleteMany_v4Props) { const { t } = useTranslation() + const { config: { collections, @@ -135,15 +153,20 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete serverURL, }, } = useConfig() + const { code: locale } = useLocale() const { i18n } = useTranslation() const { openModal } = useModal() + const confirmManyDeleteDrawerSlug = `${modalPrefix ? `${modalPrefix}-` : ''}confirm-delete-many-docs` + const handleDelete = React.useCallback(async () => { const deletingOneCollection = Object.keys(selections).length === 1 const result: AfterDeleteResult = {} + for (const [relationTo, { all, ids = [] }] of Object.entries(selections)) { const collectionConfig = collections.find(({ slug }) => slug === relationTo) + if (collectionConfig) { let whereConstraint: Where @@ -153,7 +176,9 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete whereConstraint = where } else { whereConstraint = { - id: { not_equals: '' }, + id: { + not_equals: '', + }, } } } else { @@ -219,6 +244,7 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete toast.error(t('error:unknown')) result[relationTo].errors = [t('error:unknown')] } + continue } catch (_err) { toast.error(t('error:unknown')) @@ -247,7 +273,9 @@ export function DeleteMany_v4({ afterDelete, search, selections, where }: Delete value.totalCount > 1 ? collectionConfig.labels.plural : collectionConfig.labels.singular, i18n, )}` + let newLabel + if (index === array.length - 1 && index !== 0) { newLabel = `${acc.label} and ${collectionLabel}` } else if (index > 0) { diff --git a/packages/ui/src/elements/EditMany/DrawerContent.tsx b/packages/ui/src/elements/EditMany/DrawerContent.tsx index 74b6c50ff3..1a072db26f 100644 --- a/packages/ui/src/elements/EditMany/DrawerContent.tsx +++ b/packages/ui/src/elements/EditMany/DrawerContent.tsx @@ -139,7 +139,9 @@ type EditManyDrawerContentProps = { * The function to set the selected fields to bulk edit */ setSelectedFields: (fields: FieldOption[]) => void + where?: Where } & EditManyProps + export const EditManyDrawerContent: React.FC = (props) => { const { collection, @@ -151,6 +153,7 @@ export const EditManyDrawerContent: React.FC = (prop selectAll, selectedFields, setSelectedFields, + where, } = props const { permissions, user } = useAuth() @@ -220,6 +223,10 @@ export const EditManyDrawerContent: React.FC = (prop const queryString = useMemo((): string => { const whereConstraints: Where[] = [] + if (where) { + whereConstraints.push(where) + } + const queryWithSearch = mergeListSearchAndWhere({ collectionConfig: collection, search: searchParams.get('search'), @@ -234,7 +241,7 @@ export const EditManyDrawerContent: React.FC = (prop whereConstraints.push( (parseSearchParams(searchParams)?.where as Where) || { id: { - exists: true, + not_equals: '', }, }, ) @@ -254,7 +261,7 @@ export const EditManyDrawerContent: React.FC = (prop }, { addQueryPrefix: true }, ) - }, [collection, searchParams, selectAll, ids, locale]) + }, [collection, searchParams, selectAll, ids, locale, where]) const onSuccess = () => { router.replace( diff --git a/packages/ui/src/elements/EditMany/index.tsx b/packages/ui/src/elements/EditMany/index.tsx index b9aa3a11ae..6f1220bdc5 100644 --- a/packages/ui/src/elements/EditMany/index.tsx +++ 
b/packages/ui/src/elements/EditMany/index.tsx @@ -1,5 +1,5 @@ 'use client' -import type { ClientCollectionConfig } from 'payload' +import type { ClientCollectionConfig, Where } from 'payload' import { useModal } from '@faceless-ui/modal' import React, { useState } from 'react' @@ -11,9 +11,9 @@ import { EditDepthProvider } from '../../providers/EditDepth/index.js' import { SelectAllStatus, useSelection } from '../../providers/Selection/index.js' import { useTranslation } from '../../providers/Translation/index.js' import { Drawer } from '../Drawer/index.js' -import './index.scss' import { ListSelectionButton } from '../ListSelection/index.js' import { EditManyDrawerContent } from './DrawerContent.js' +import './index.scss' export const baseClass = 'edit-many' @@ -22,13 +22,14 @@ export type EditManyProps = { } export const EditMany: React.FC = (props) => { - const { count, selectAll, selected, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() + return ( toggleAll(false)} + ids={selectedIDs} + onSuccess={() => toggleAll()} selectAll={selectAll === SelectAllStatus.AllAvailable} /> ) @@ -38,10 +39,15 @@ export const EditMany_v4: React.FC< { count: number ids: (number | string)[] + /** + * When multiple EditMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string onSuccess?: () => void selectAll: boolean - } & EditManyProps -> = ({ collection, count, ids, onSuccess, selectAll }) => { + where?: Where + } & Omit +> = ({ collection, count, ids, modalPrefix, onSuccess, selectAll, where }) => { const { permissions } = useAuth() const { openModal } = useModal() @@ -51,7 +57,7 @@ export const EditMany_v4: React.FC< const collectionPermissions = permissions?.collections?.[collection.slug] - const drawerSlug = `edit-${collection.slug}` + const drawerSlug = `${modalPrefix ? 
`${modalPrefix}-` : ''}edit-${collection.slug}` if (count === 0 || !collectionPermissions?.update) { return null @@ -79,6 +85,7 @@ export const EditMany_v4: React.FC< selectAll={selectAll} selectedFields={selectedFields} setSelectedFields={setSelectedFields} + where={where} /> diff --git a/packages/ui/src/elements/GroupByBuilder/index.scss b/packages/ui/src/elements/GroupByBuilder/index.scss new file mode 100644 index 0000000000..f05f1c3fc9 --- /dev/null +++ b/packages/ui/src/elements/GroupByBuilder/index.scss @@ -0,0 +1,39 @@ +@import '../../scss/styles.scss'; + +@layer payload-default { + .group-by-builder { + background: var(--theme-elevation-50); + padding: var(--base); + display: flex; + flex-direction: column; + gap: calc(var(--base) / 2); + + &__header { + width: 100%; + display: flex; + justify-content: space-between; + } + + &__clear-button { + background: transparent; + border: none; + color: var(--theme-elevation-500); + line-height: inherit; + cursor: pointer; + font-size: inherit; + padding: 0; + text-decoration: underline; + } + + &__inputs { + width: 100%; + display: flex; + gap: base(1); + + & > * { + flex-grow: 1; + width: 50%; + } + } + } +} diff --git a/packages/ui/src/elements/GroupByBuilder/index.tsx b/packages/ui/src/elements/GroupByBuilder/index.tsx new file mode 100644 index 0000000000..390666cb97 --- /dev/null +++ b/packages/ui/src/elements/GroupByBuilder/index.tsx @@ -0,0 +1,144 @@ +'use client' +import type { ClientField, Field, SanitizedCollectionConfig } from 'payload' + +import './index.scss' + +import React, { useMemo } from 'react' + +import { SelectInput } from '../../fields/Select/Input.js' +import { useListQuery } from '../../providers/ListQuery/index.js' +import { useTranslation } from '../../providers/Translation/index.js' +import { reduceFieldsToOptions } from '../../utilities/reduceFieldsToOptions.js' +import { ReactSelect } from '../ReactSelect/index.js' + +export type Props = { + readonly collectionSlug: SanitizedCollectionConfig['slug'] + fields: ClientField[] +} + +const baseClass = 'group-by-builder' + +/** + * Note: Some fields are already omitted from the list of fields: + * - fields with nested field, e.g. `tabs`, `groups`, etc. + * - fields that don't affect data, i.e. `row`, `collapsible`, `ui`, etc. + * So we don't technically need to omit them here, but do anyway. + * But some remaining fields still need an additional check, e.g. `richText`, etc. + */ +const supportedFieldTypes: Field['type'][] = [ + 'text', + 'textarea', + 'number', + 'select', + 'relationship', + 'date', + 'checkbox', + 'radio', + 'email', + 'number', + 'upload', +] + +export const GroupByBuilder: React.FC = ({ collectionSlug, fields }) => { + const { i18n, t } = useTranslation() + + const reducedFields = useMemo(() => reduceFieldsToOptions({ fields, i18n }), [fields, i18n]) + + const { query, refineListData } = useListQuery() + + const groupByFieldName = query.groupBy?.replace(/^-/, '') + + const groupByField = reducedFields.find((field) => field.value === groupByFieldName) + + return ( +
    +
    +

    + {t('general:groupByLabel', { + label: '', + })} +

    + {query.groupBy && ( + + )} +
    +
    + + ((option?.data?.plainTextLabel as string) || option.label) + .toLowerCase() + .includes(inputValue.toLowerCase()) + } + id="group-by--field-select" + isClearable + isMulti={false} + onChange={async (v: { value: string } | null) => { + const value = v === null ? undefined : v.value + + // value is being cleared + if (v === null) { + await refineListData({ + groupBy: '', + page: 1, + }) + } + + await refineListData({ + groupBy: value ? (query.groupBy?.startsWith('-') ? `-${value}` : value) : undefined, + page: 1, + }) + }} + options={reducedFields.filter( + (field) => + !field.field.admin.disableListFilter && + field.value !== 'id' && + supportedFieldTypes.includes(field.field.type), + )} + value={{ + label: groupByField?.label || t('general:selectValue'), + value: groupByFieldName || '', + }} + /> + { + if (!groupByFieldName) { + return + } + + await refineListData({ + groupBy: value === 'asc' ? groupByFieldName : `-${groupByFieldName}`, + page: 1, + }) + }} + options={[ + { label: t('general:ascending'), value: 'asc' }, + { label: t('general:descending'), value: 'desc' }, + ]} + path="direction" + readOnly={!groupByFieldName} + value={ + !query.groupBy + ? 'asc' + : typeof query.groupBy === 'string' + ? `${query.groupBy.startsWith('-') ? 'desc' : 'asc'}` + : '' + } + /> +
    +
    + ) +} diff --git a/packages/ui/src/elements/ListControls/index.scss b/packages/ui/src/elements/ListControls/index.scss index 6443e5f162..86c2009f05 100644 --- a/packages/ui/src/elements/ListControls/index.scss +++ b/packages/ui/src/elements/ListControls/index.scss @@ -36,7 +36,8 @@ .pill-selector, .where-builder, - .sort-complex { + .sort-complex, + .group-by-builder { margin-top: base(1); } @@ -90,7 +91,8 @@ &__toggle-columns, &__toggle-where, - &__toggle-sort { + &__toggle-sort, + &__toggle-group-by { flex: 1; } } diff --git a/packages/ui/src/elements/ListControls/index.tsx b/packages/ui/src/elements/ListControls/index.tsx index a5adc88ce5..ebdb31d6ae 100644 --- a/packages/ui/src/elements/ListControls/index.tsx +++ b/packages/ui/src/elements/ListControls/index.tsx @@ -16,6 +16,7 @@ import { useListQuery } from '../../providers/ListQuery/index.js' import { useTranslation } from '../../providers/Translation/index.js' import { AnimateHeight } from '../AnimateHeight/index.js' import { ColumnSelector } from '../ColumnSelector/index.js' +import { GroupByBuilder } from '../GroupByBuilder/index.js' import { Pill } from '../Pill/index.js' import { SearchFilter } from '../SearchFilter/index.js' import { WhereBuilder } from '../WhereBuilder/index.js' @@ -97,7 +98,8 @@ export const ListControls: React.FC = (props) => { const hasWhereParam = useRef(Boolean(query?.where)) const shouldInitializeWhereOpened = validateWhereQuery(query?.where) - const [visibleDrawer, setVisibleDrawer] = useState<'columns' | 'sort' | 'where'>( + + const [visibleDrawer, setVisibleDrawer] = useState<'columns' | 'group-by' | 'sort' | 'where'>( shouldInitializeWhereOpened ? 'where' : undefined, ) @@ -140,7 +142,7 @@ export const ListControls: React.FC = (props) => { let listMenuItems: React.ReactNode[] = listMenuItemsFromProps if ( - collectionConfig?.enableQueryPresets && + collectionConfig.enableQueryPresets && !disableQueryPresets && queryPresetMenuItems?.length > 0 ) { @@ -160,7 +162,6 @@ export const ListControls: React.FC = (props) => { @@ -176,6 +177,7 @@ export const ListControls: React.FC = (props) => { aria-expanded={visibleDrawer === 'columns'} className={`${baseClass}__toggle-columns`} icon={} + id="toggle-columns" onClick={() => setVisibleDrawer(visibleDrawer !== 'columns' ? 'columns' : undefined) } @@ -191,6 +193,7 @@ export const ListControls: React.FC = (props) => { aria-expanded={visibleDrawer === 'where'} className={`${baseClass}__toggle-where`} icon={} + id="toggle-list-filters" onClick={() => setVisibleDrawer(visibleDrawer !== 'where' ? 'where' : undefined)} pillStyle="light" size="small" @@ -218,6 +221,24 @@ export const ListControls: React.FC = (props) => { resetPreset={resetPreset} /> )} + {collectionConfig.admin.groupBy && ( + } + id="toggle-group-by" + onClick={() => + setVisibleDrawer(visibleDrawer !== 'group-by' ? 'group-by' : undefined) + } + pillStyle="light" + size="small" + > + {t('general:groupByLabel', { + label: '', + })} + + )} {listMenuItems && Array.isArray(listMenuItems) && listMenuItems.length > 0 && ( } @@ -250,13 +271,25 @@ export const ListControls: React.FC = (props) => { id={`${baseClass}-where`} > + {collectionConfig.admin.groupBy && ( + + + + )}
    {PresetListDrawer} {EditPresetDrawer} diff --git a/packages/ui/src/elements/PageControls/GroupByPageControls.tsx b/packages/ui/src/elements/PageControls/GroupByPageControls.tsx new file mode 100644 index 0000000000..e9ec5c878f --- /dev/null +++ b/packages/ui/src/elements/PageControls/GroupByPageControls.tsx @@ -0,0 +1,62 @@ +'use client' +import type { ClientCollectionConfig, PaginatedDocs } from 'payload' + +import React, { useCallback } from 'react' + +import type { IListQueryContext } from '../../providers/ListQuery/types.js' + +import { useListQuery } from '../../providers/ListQuery/context.js' +import { PageControlsComponent } from './index.js' + +/** + * If `groupBy` is set in the query, multiple tables will render, one for each group. + * In this case, each table needs its own `PageControls` to handle pagination. + * These page controls, however, should not modify the global `ListQuery` state. + * Instead, they should only handle the pagination for the current group. + * To do this, build a wrapper around `PageControlsComponent` that handles the pagination logic for the current group. + */ +export const GroupByPageControls: React.FC<{ + AfterPageControls?: React.ReactNode + collectionConfig: ClientCollectionConfig + data: PaginatedDocs + groupByValue?: number | string +}> = ({ AfterPageControls, collectionConfig, data, groupByValue }) => { + const { refineListData } = useListQuery() + + const handlePageChange: IListQueryContext['handlePageChange'] = useCallback( + async (page) => { + await refineListData({ + queryByGroup: { + [groupByValue]: { + page, + }, + }, + }) + }, + [refineListData, groupByValue], + ) + + const handlePerPageChange: IListQueryContext['handlePerPageChange'] = useCallback( + async (limit) => { + await refineListData({ + queryByGroup: { + [groupByValue]: { + limit, + page: 1, + }, + }, + }) + }, + [refineListData, groupByValue], + ) + + return ( + + ) +} diff --git a/packages/ui/src/elements/PageControls/index.scss b/packages/ui/src/elements/PageControls/index.scss new file mode 100644 index 0000000000..70be0db966 --- /dev/null +++ b/packages/ui/src/elements/PageControls/index.scss @@ -0,0 +1,40 @@ +@import '../../scss/styles.scss'; + +@layer payload-default { + .page-controls { + width: 100%; + display: flex; + align-items: center; + + &__page-info { + [dir='ltr'] & { + margin-right: base(1); + margin-left: auto; + } + + [dir='rtl'] & { + margin-left: base(1); + margin-right: auto; + } + } + + @include small-break { + flex-wrap: wrap; + + &__page-info { + [dir='ltr'] & { + margin-left: base(0.5); + } + + [dir='rtl'] & { + margin-right: 0; + } + } + + .paginator { + width: 100%; + margin-bottom: base(0.5); + } + } + } +} diff --git a/packages/ui/src/elements/PageControls/index.tsx b/packages/ui/src/elements/PageControls/index.tsx new file mode 100644 index 0000000000..a0ea41745c --- /dev/null +++ b/packages/ui/src/elements/PageControls/index.tsx @@ -0,0 +1,94 @@ +import type { ClientCollectionConfig, PaginatedDocs } from 'payload' + +import { isNumber } from 'payload/shared' +import React, { Fragment } from 'react' + +import type { IListQueryContext } from '../../providers/ListQuery/types.js' + +import { Pagination } from '../../elements/Pagination/index.js' +import { PerPage } from '../../elements/PerPage/index.js' +import { useListQuery } from '../../providers/ListQuery/context.js' +import { useTranslation } from '../../providers/Translation/index.js' +import './index.scss' + +const baseClass = 'page-controls' + +export const PageControlsComponent: 
React.FC<{ + AfterPageControls?: React.ReactNode + collectionConfig: ClientCollectionConfig + data: PaginatedDocs + handlePageChange?: IListQueryContext['handlePageChange'] + handlePerPageChange?: IListQueryContext['handlePerPageChange'] + limit?: number +}> = ({ + AfterPageControls, + collectionConfig, + data, + handlePageChange, + handlePerPageChange, + limit, +}) => { + const { i18n } = useTranslation() + + return ( +
    + + {data.totalDocs > 0 && ( + +
    + {data.page * data.limit - (data.limit - 1)}- + {data.totalPages > 1 && data.totalPages !== data.page + ? data.limit * data.page + : data.totalDocs}{' '} + {i18n.t('general:of')} {data.totalDocs} +
    + + {AfterPageControls} +
    + )} +
    + ) +} + +/* + * These page controls are controlled by the global ListQuery state. + * To override this behavior, build your own wrapper around PageControlsComponent. + */ +export const PageControls: React.FC<{ + AfterPageControls?: React.ReactNode + collectionConfig: ClientCollectionConfig +}> = ({ AfterPageControls, collectionConfig }) => { + const { + data, + defaultLimit: initialLimit, + handlePageChange, + handlePerPageChange, + query, + } = useListQuery() + + return ( + + ) +} diff --git a/packages/ui/src/elements/Pagination/ClickableArrow/index.scss b/packages/ui/src/elements/Pagination/ClickableArrow/index.scss index 4cb8c6812e..e8c103d791 100644 --- a/packages/ui/src/elements/Pagination/ClickableArrow/index.scss +++ b/packages/ui/src/elements/Pagination/ClickableArrow/index.scss @@ -4,14 +4,14 @@ .clickable-arrow { cursor: pointer; @extend %btn-reset; - width: base(2); - height: base(2); + width: base(1.5); + height: base(1.5); display: flex; justify-content: center; align-content: center; align-items: center; outline: 0; - padding: base(0.5); + padding: base(0.25); color: var(--theme-elevation-800); line-height: base(1); diff --git a/packages/ui/src/elements/Pagination/index.scss b/packages/ui/src/elements/Pagination/index.scss index e7cb22ceb0..1bdac9dea2 100644 --- a/packages/ui/src/elements/Pagination/index.scss +++ b/packages/ui/src/elements/Pagination/index.scss @@ -3,7 +3,6 @@ @layer payload-default { .paginator { display: flex; - margin-bottom: $baseline; &__page { cursor: pointer; @@ -25,15 +24,16 @@ &__page { @extend %btn-reset; - width: base(2); - height: base(2); + width: base(1.5); + height: base(1.5); display: flex; justify-content: center; align-content: center; outline: 0; + border-radius: var(--style-radius-s); padding: base(0.5); color: var(--theme-elevation-800); - line-height: base(1); + line-height: 0.9; &:focus-visible { outline: var(--accessibility-outline); diff --git a/packages/ui/src/elements/Pagination/index.tsx b/packages/ui/src/elements/Pagination/index.tsx index fb6ef55a2e..e2e7ba3010 100644 --- a/packages/ui/src/elements/Pagination/index.tsx +++ b/packages/ui/src/elements/Pagination/index.tsx @@ -52,7 +52,7 @@ export const Pagination: React.FC = (props) => { totalPages = null, } = props - if (!hasNextPage && !hasPrevPage) { + if (!hasPrevPage && !hasNextPage) { return null } diff --git a/packages/ui/src/elements/PublishMany/DrawerContent.tsx b/packages/ui/src/elements/PublishMany/DrawerContent.tsx index 7d81e3dbdf..6e73afc8b8 100644 --- a/packages/ui/src/elements/PublishMany/DrawerContent.tsx +++ b/packages/ui/src/elements/PublishMany/DrawerContent.tsx @@ -22,7 +22,9 @@ type PublishManyDrawerContentProps = { ids: (number | string)[] onSuccess?: () => void selectAll: boolean + where?: Where } & PublishManyProps + export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { const { collection, @@ -31,15 +33,18 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { ids, onSuccess, selectAll, + where, } = props const { clearRouteCache } = useRouteCache() + const { config: { routes: { api }, serverURL, }, } = useConfig() + const { code: locale } = useLocale() const router = useRouter() @@ -59,6 +64,10 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { }, ] + if (where) { + whereConstraints.push(where) + } + const queryWithSearch = mergeListSearchAndWhere({ collectionConfig: collection, search: searchParams.get('search'), @@ -73,7 +82,7 @@ export function 
PublishManyDrawerContent(props: PublishManyDrawerContentProps) { whereConstraints.push( (parseSearchParams(searchParams)?.where as Where) || { id: { - exists: true, + not_equals: '', }, }, ) @@ -93,7 +102,7 @@ export function PublishManyDrawerContent(props: PublishManyDrawerContentProps) { }, { addQueryPrefix: true }, ) - }, [collection, searchParams, selectAll, ids, locale]) + }, [collection, searchParams, selectAll, ids, locale, where]) const handlePublish = useCallback(async () => { await requests diff --git a/packages/ui/src/elements/PublishMany/index.tsx b/packages/ui/src/elements/PublishMany/index.tsx index 0df29b1856..33dca8d276 100644 --- a/packages/ui/src/elements/PublishMany/index.tsx +++ b/packages/ui/src/elements/PublishMany/index.tsx @@ -1,5 +1,5 @@ 'use client' -import type { ClientCollectionConfig } from 'payload' +import type { ClientCollectionConfig, Where } from 'payload' import { useModal } from '@faceless-ui/modal' import React from 'react' @@ -15,14 +15,14 @@ export type PublishManyProps = { } export const PublishMany: React.FC = (props) => { - const { count, selectAll, selected, toggleAll } = useSelection() + const { count, selectAll, selectedIDs, toggleAll } = useSelection() return ( toggleAll(false)} + ids={selectedIDs} + onSuccess={() => toggleAll()} selectAll={selectAll === SelectAllStatus.AllAvailable} /> ) @@ -31,17 +31,25 @@ export const PublishMany: React.FC = (props) => { type PublishMany_v4Props = { count: number ids: (number | string)[] + /** + * When multiple PublishMany components are rendered on the page, this will differentiate them. + */ + modalPrefix?: string onSuccess?: () => void selectAll: boolean + where?: Where } & PublishManyProps + export const PublishMany_v4: React.FC = (props) => { const { collection, collection: { slug, versions } = {}, count, ids, + modalPrefix, onSuccess, selectAll, + where, } = props const { permissions } = useAuth() @@ -52,7 +60,7 @@ export const PublishMany_v4: React.FC = (props) => { const collectionPermissions = permissions?.collections?.[slug] const hasPermission = collectionPermissions?.update - const drawerSlug = `publish-${slug}` + const drawerSlug = `${modalPrefix ? `${modalPrefix}-` : ''}publish-${slug}` if (!versions?.drafts || count === 0 || !hasPermission) { return null @@ -74,6 +82,7 @@ export const PublishMany_v4: React.FC = (props) => { ids={ids} onSuccess={onSuccess} selectAll={selectAll} + where={where} /> ) diff --git a/packages/ui/src/elements/ReactSelect/types.ts b/packages/ui/src/elements/ReactSelect/types.ts index a2a2e7ca9d..72dd352b18 100644 --- a/packages/ui/src/elements/ReactSelect/types.ts +++ b/packages/ui/src/elements/ReactSelect/types.ts @@ -84,6 +84,7 @@ export type ReactSelectAdapterProps = { boolean, GroupBase
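
Usage sketch (illustrative only, not part of the patch): how the new `GroupByPageControls` is intended to be consumed when a `groupBy` query renders one table per group. The `GroupByPageControls` props (`collectionConfig`, `data`, `groupByValue`, optional `AfterPageControls`) come from the diff above; the surrounding `GroupedListTables` component, its `groups` prop shape, and the `@payloadcms/ui` import path are assumptions made for the example.

```tsx
'use client'
import type { ClientCollectionConfig, PaginatedDocs } from 'payload'

import React, { Fragment } from 'react'

// Assumed import path; adjust to wherever GroupByPageControls is actually exported from.
import { GroupByPageControls } from '@payloadcms/ui'

// Assumed shape: one paginated result per groupBy bucket.
type GroupedResult = {
  groupByValue: number | string
  result: PaginatedDocs
}

export const GroupedListTables: React.FC<{
  collectionConfig: ClientCollectionConfig
  groups: GroupedResult[]
}> = ({ collectionConfig, groups }) => (
  <Fragment>
    {groups.map(({ groupByValue, result }) => (
      <div key={String(groupByValue)}>
        {/* ...the table for this group renders here... */}
        <GroupByPageControls
          collectionConfig={collectionConfig}
          data={result}
          groupByValue={groupByValue}
        />
      </div>
    ))}
  </Fragment>
)
```

Each page or per-page change funnels into `refineListData({ queryByGroup: { [groupByValue]: { page } } })`, so only the targeted group's pagination changes while the global `ListQuery` pagination is left alone.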
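
A second sketch, equally hypothetical, for the extended `PublishMany_v4`: when several publish actions are rendered on one page (for example one per group), `modalPrefix` keeps their drawer slugs distinct and the optional `where` is pushed into the drawer's query alongside the selected IDs. The prop names match the diff; the wrapper component, the import path, and the example `where` clause are assumptions.

```tsx
'use client'
import type { ClientCollectionConfig, Where } from 'payload'

import React from 'react'

// Assumed import path; adjust to wherever PublishMany_v4 is actually exported from.
import { PublishMany_v4 } from '@payloadcms/ui'

export const GroupPublishButton: React.FC<{
  collection: ClientCollectionConfig
  groupValue: string
  selectedIDs: (number | string)[]
}> = ({ collection, groupValue, selectedIDs }) => {
  // Example constraint only; DrawerContent pushes it into whereConstraints when provided.
  const where: Where = { _status: { equals: 'draft' } }

  return (
    <PublishMany_v4
      collection={collection}
      count={selectedIDs.length}
      ids={selectedIDs}
      // Produces a drawer slug like `group-<value>-publish-<collection-slug>`,
      // so multiple instances on the same page never share a modal.
      modalPrefix={`group-${groupValue}`}
      selectAll={false}
      where={where}
    />
  )
}
```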