feat: adds jobs queue (#8228)

Adds a jobs queue to Payload.

- [x] Docs, with examples for Vercel Cron and additional services
- [x] Type the `job` using GeneratedTypes in `JobRunnerArgs`
(@AlessioGr)
- [x] Write the `runJobs` function 
- [x] Allow for some type of `payload.runTask` 
- [x] Open up a new bin script for running jobs
- [x] Determine a strategy for the runner endpoint: either await jobs before
responding, or return early and stay open until the job work completes
(serverless ramifications here; see the endpoint sketch after this list)
- [x] Allow the job runner to accept how many jobs to run in one
invocation
- [x] Make a strongly typed Payload Local API method for easily creating a
new job (`payload.createJob` or similar) (@AlessioGr)
- [x] Make `payload.runJobs` or similar (@AlessioGr)
- [x] Write tests for retrying a given step up to its max retries
- [x] Write tests for dynamic import of a runner
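
Here is a minimal sketch of how the runner endpoint could be invoked from a scheduler such as Vercel Cron, based on the REST behavior exercised by the int tests in this PR (the endpoint rejects unauthenticated requests with a 401 and returns a 200 on success). The URL prefix and the `CRON_JWT` env var are assumptions about a given deployment, not part of this PR:

```js
// Hypothetical scheduled handler. Assumes the Payload REST API is mounted
// under /api and that CRON_JWT holds a token for a user allowed to run jobs.
export const runQueuedJobs = async () => {
  const res = await fetch('https://example.com/api/payload-jobs/run', {
    headers: {
      Authorization: `JWT ${process.env.CRON_JWT}`,
    },
  })

  if (!res.ok) {
    throw new Error(`Job runner returned ${res.status}`)
  }

  return res.json()
}
```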

The shape of the config should permit tasks (steps) to be defined separately
from the job workflows that compose them.

```js
const config = {
  // Not sure if we need this property anymore
  queues: {
  },
  // A job is an instance of a workflow, stored in DB
  // and triggered by something at some point
  jobs: {
    // Be able to override the jobs collection
    collectionOverrides: () => {},

    // Workflows are groups of tasks that handle
    // the flow from task to task.
    // When defined on the config, they are considered predefined workflows,
    // but in the future we'll allow for UI-based workflow definition as well.
    workflows: [
      {
        slug: 'job-name',
        // Temporary name for this.
        // Should be able to pass a function
        // or a path to it for Node to dynamically import.
        controlFlowInJS: '/my-runner.js',

        // Temporary name as well.
        // Should eventually be able to define workflows
        // in the UI (meaning they need to be serialized as JSON).
        // It should not be possible to define both control flows.
        controlFlowInJSON: [
          {
            task: 'myTask',
            next: {
              // etc
            }
          }
        ],

        // Workflows take input,
        // which is a group of fields
        input: [
          {
            name: 'post',
            type: 'relationship',
            relationTo: 'posts',
            maxDepth: 0,
            required: true,
          },
          {
            name: 'message',
            type: 'text',
            required: true,
          },
        ],
      },
    ],

    // Tasks are defined separately as isolated functions
    // that can be retried on failure
    tasks: [
      {
        slug: 'myTask',
        retries: 2,
        // Each task takes input
        // Used to auto-type the task func args
        input: [
          {
            name: 'post',
            type: 'relationship',
            relationTo: 'posts',
            maxDepth: 0,
            required: true,
          },
          {
            name: 'message',
            type: 'text',
            required: true,
          },
        ],
        // Each task returns output
        // used to auto-type the handler's return value
        output: [
          {
            name: 'success',
            type: 'checkbox',
          }
        ],
        onSuccess: () => {},
        onFail: () => {},
        run: myRunner,
      },
    ]
  }
}
```
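
The `myRunner` referenced above is the task's handler function. A minimal sketch of what it might look like, modeled on the task handlers added in this PR's test config (the exact signature and return shape may differ):

```js
// Sketch of a task handler: receives the typed `input` defined on the task,
// does its work, and returns the typed `output`.
const myRunner = async ({ input, req }) => {
  // `input.post` is the post ID, because the field is defined with maxDepth: 0
  const post = await req.payload.findByID({
    collection: 'posts',
    id: input.post,
  })

  req.payload.logger.info(`Handling "${input.message}" for post ${post.id}`)

  return {
    output: {
      success: true,
    },
  }
}
```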

### `payload.createJob`

This function should allow jobs to be created from either a workflow (a group
of tasks) or an individual task.

To create a job using a workflow:

```js
const job = await payload.createJob({
  // Accept the `name` of a workflow so we can match to either a 
  // code-based workflow OR a workflow defined in the DB
  // Should auto-type the input
  workflowName: 'myWorkflow',
  input: {
    // typed to the args of the workflow by name
  }
})
```

To create a job using a task:

```js
const job = await payload.createJob({
  // Accept the `name` of a task
  task: 'myTask',
  input: {
    // typed to the args of the task by name
  }
})
```
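
For reference, the tests added in this PR show the Local API that actually shipped: jobs are queued with `payload.jobs.queue()` and processed with `payload.jobs.run()`, which accepts a `limit` (the int tests show a default of 10 jobs per invocation). A sketch using the `updatePost` workflow from the test config:

```js
// Queue a job for the `updatePost` workflow, then process queued jobs.
// `createdPost` is assumed to be a previously created post document.
const job = await payload.jobs.queue({
  workflow: 'updatePost',
  input: {
    post: createdPost.id,
    message: 'hello',
  },
})

// Runs up to 100 queued jobs in this invocation (default limit is 10).
await payload.jobs.run({ limit: 100 })
```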

---------

Co-authored-by: Alessio Gravili <alessio@gravili.de>
Co-authored-by: Dan Ribbens <dan.ribbens@gmail.com>
Authored by James Mikrut, committed via GitHub on 2024-10-30 13:56:50 -04:00
Commit 8970c6b3a6 (parent 0574155e59)
49 changed files with 6357 additions and 125 deletions


@@ -30,6 +30,10 @@ export interface Config {
user: User & {
collection: 'users';
};
jobs?: {
tasks: unknown;
workflows?: unknown;
};
}
export interface UserAuthOperations {
forgotPassword: {


@@ -1,3 +1,5 @@
/* tslint:disable */
/* eslint-disable */
/**
@@ -77,6 +79,10 @@ export interface Config {
user: User & {
collection: 'users';
};
jobs?: {
tasks: unknown;
workflows?: unknown;
};
}
export interface UserAuthOperations {
forgotPassword: {


@@ -40,7 +40,9 @@ export const reorderColumns = async (
})
.boundingBox()
if (!fromBoundingBox || !toBoundingBox) {return}
if (!fromBoundingBox || !toBoundingBox) {
return
}
// drag the "from" column to the left of the "to" column
await page.mouse.move(fromBoundingBox.x + 2, fromBoundingBox.y + 2, { steps: 10 })


@@ -44,8 +44,12 @@ import {
AdminViewComponent,
AdminViewConfig,
AdminViewProps,
baseBlockFields,
baseIDField,
BaseLocalizationConfig,
buildConfig,
Config,
defaults,
EditView,
EditViewConfig,
EmailOptions,
@@ -56,6 +60,8 @@ import {
FieldTypes,
GeneratePreviewURL,
GraphQLExtension,
hasTransport,
hasTransportOptions,
InitOptions,
LivePreviewConfig,
Locale,
@@ -64,31 +70,28 @@ import {
LocalizationConfigWithNoLabels,
PayloadHandler,
Plugin,
sanitizeConfig,
SanitizedConfig,
SanitizedLocalizationConfig,
baseBlockFields,
baseIDField,
buildConfig,
defaults,
hasTransport,
hasTransportOptions,
sanitizeConfig,
sanitizeFields,
} from 'payload/config'
import {
BaseDatabaseAdapter,
BeginTransaction,
combineQueries,
CommitTransaction,
Connect,
Count,
CountArgs,
Create,
CreateArgs,
createDatabaseAdapter,
CreateGlobal,
CreateGlobalArgs,
CreateGlobalVersion,
CreateGlobalVersionArgs,
CreateMigration,
createMigration,
CreateVersion,
CreateVersionArgs,
DBIdentifierName,
@@ -110,13 +113,24 @@ import {
FindOneArgs,
FindVersions,
FindVersionsArgs,
flattenWhereToOperators,
getLocalizedPaths,
getMigrations,
Init,
migrate,
migrateDown,
migrateRefresh,
migrateReset,
migrateStatus,
Migration,
MigrationData,
migrationsCollection,
migrationTemplate,
PaginatedDocs,
PathToQuery,
QueryDrafts,
QueryDraftsArgs,
readMigrationFiles,
RollbackTransaction,
Transaction,
TypeWithVersion,
@@ -128,20 +142,6 @@ import {
UpdateOneArgs,
UpdateVersion,
UpdateVersionArgs,
combineQueries,
createDatabaseAdapter,
createMigration,
flattenWhereToOperators,
getLocalizedPaths,
getMigrations,
migrate,
migrateDown,
migrateRefresh,
migrateReset,
migrateStatus,
migrationTemplate,
migrationsCollection,
readMigrationFiles,
validateQueryPaths,
validateSearchParam,
} from 'payload/database'
@@ -165,7 +165,7 @@ import {
QueryError,
ValidationError,
} from 'payload/errors'
import { GraphQL, buildPaginatedListType } from 'payload/graphql'
import { buildPaginatedListType, GraphQL } from 'payload/graphql'
import {
AccessArgs as AccessArgsType,
Access as AccessType,
@@ -201,21 +201,31 @@ import {
CustomSaveDraftButtonProps,
Data,
DateField,
docHasTimestamps,
Document,
EmailField,
Field,
FieldAccess,
FieldAffectingData,
fieldAffectsData,
FieldBase,
fieldHasMaxDepth,
fieldHasSubFields,
FieldHook,
FieldHookArgs,
fieldIsArrayType,
fieldIsBlockType,
fieldIsGroupType,
fieldIsLocalized,
fieldIsPresentationalOnly,
FieldPresentationalOnly,
Fields,
fieldSupportsMany,
FieldWithMany,
FieldWithMaxDepth,
FieldWithPath,
FieldWithRichTextRequiredEditor,
FieldWithSubFields,
Fields,
FileData,
FilterOptions,
FilterOptionsProps,
@@ -239,7 +249,10 @@ import {
Operation,
Operator,
Option,
optionIsObject,
optionIsValue,
OptionObject,
optionsAreObjects,
PayloadRequest,
PointField,
PolymorphicRelationshipField,
@@ -259,36 +272,23 @@ import {
SingleRelationshipField,
Tab,
TabAsField,
tabHasName,
TabsAdmin,
TabsField,
TextField,
TextareaField,
TextField,
TypeWithID,
UIField,
UnnamedTab,
UploadField,
Validate,
ValidateOptions,
validOperators,
valueIsValueWithRelation,
ValueWithRelation,
VersionOperations,
Where,
WhereField,
docHasTimestamps,
fieldAffectsData,
fieldHasMaxDepth,
fieldHasSubFields,
fieldIsArrayType,
fieldIsBlockType,
fieldIsGroupType,
fieldIsLocalized,
fieldIsPresentationalOnly,
fieldSupportsMany,
optionIsObject,
optionIsValue,
optionsAreObjects,
tabHasName,
validOperators,
valueIsValueWithRelation,
} from 'payload/types'
import {
afterReadPromise,
@@ -351,17 +351,18 @@ import {
CountryField,
Email,
FieldConfig,
FieldValues,
FieldsConfig,
FieldValues,
Form,
FormattedEmail,
CheckboxField as FormBuilderCheckboxField,
EmailField as FormBuilderEmailField,
SelectField as FormBuilderSelectField,
TextField as FormBuilderTextField,
FormFieldBlock,
FormSubmission,
FormattedEmail,
HandlePayment,
isValidBlockConfig,
MessageField,
PaymentField,
PaymentFieldConfig,
@@ -372,7 +373,6 @@ import {
StateField,
SubmissionValue,
TextAreaField,
isValidBlockConfig,
} from '@payloadcms/plugin-form-builder/types'
import nestedDocs from '@payloadcms/plugin-nested-docs'
import { createBreadcrumbsField, createParentField } from '@payloadcms/plugin-nested-docs/fields'
@@ -400,8 +400,8 @@ import {
GenerateImage,
GenerateTitle,
Meta,
PluginConfig as SeoPluginConfig,
GenerateURL as seoGenerateURL,
PluginConfig as SeoPluginConfig,
} from '@payloadcms/plugin-seo/types'
import stripePlugin from '@payloadcms/plugin-stripe'
import {
@@ -425,6 +425,10 @@ import {
$isRelationshipNode,
$isUploadNode,
AdapterProps,
addSwipeDownListener,
addSwipeLeftListener,
addSwipeRightListener,
addSwipeUpListener,
AlignFeature,
AutoLinkNode,
BlockFields,
@@ -435,30 +439,51 @@ import {
BoldTextFeature,
CAN_USE_DOM,
CheckListFeature,
cloneDeep,
consolidateHTMLConverters,
convertLexicalNodesToHTML,
convertLexicalToHTML,
convertSlateNodesToLexical,
convertSlateToLexical,
createBlockNode,
defaultEditorConfig,
defaultEditorFeatures,
defaultHTMLConverters,
defaultRichTextValue,
defaultSanitizedEditorConfig,
defaultSlateConverters,
DETAIL_TYPE_TO_DETAIL,
DOUBLE_LINE_BREAK,
EditorConfig,
EditorConfigProvider,
ELEMENT_FORMAT_TO_TYPE,
ELEMENT_TYPE_TO_FORMAT,
ENABLE_SLASH_MENU_COMMAND,
EditorConfig,
EditorConfigProvider,
Feature,
FeatureProvider,
FeatureProviderMap,
FloatingToolbarSection,
FloatingToolbarSectionEntry,
FormatSectionWithEntries,
getDOMRangeRect,
getEnabledNodes,
getSelectedNode,
HeadingFeature,
HTMLConverter,
HTMLConverterFeature,
HTMLConverterFeatureProps,
HeadingFeature,
IS_ALL_FORMATTING,
IndentFeature,
InlineCodeTextFeature,
invariant,
IS_ALL_FORMATTING,
isHTMLElement,
isPoint,
ItalicTextFeature,
LTR_REGEX,
joinClasses,
LexicalBlock,
lexicalEditor,
LexicalEditorProps,
lexicalHTML,
LexicalPluginToLexicalFeature,
LexicalRichTextAdapter,
LinebreakHTMLConverter,
@@ -466,15 +491,16 @@ import {
LinkFeatureProps,
LinkFields,
LinkNode,
NON_BREAKING_SPACE,
loadFeatures,
LTR_REGEX,
NodeFormat,
NodeValidation,
NON_BREAKING_SPACE,
OrderedListFeature,
ParagraphFeature,
ParagraphHTMLConverter,
Point,
PopulationPromise,
RTL_REGEX,
RawUploadPayload,
Rect,
RelationshipData,
@@ -482,13 +508,19 @@ import {
RelationshipNode,
ResolvedFeature,
ResolvedFeatureMap,
RTL_REGEX,
SanitizedEditorConfig,
SanitizedFeatures,
sanitizeEditorConfig,
sanitizeFeatures,
sanitizeUrl,
SerializedAutoLinkNode,
SerializedBlockNode,
SerializedLinkNode,
SerializedRelationshipNode,
SerializedUploadNode,
setFloatingElemPosition,
setFloatingElemPositionForLinkEditor,
SlashMenuGroup,
SlashMenuOption,
SlateBlockquoteConverter,
@@ -504,16 +536,17 @@ import {
SlateUnknownConverter,
SlateUnorderedListConverter,
SlateUploadConverter,
sortFeaturesForOptimalLoading,
StrikethroughTextFeature,
SubscriptTextFeature,
SuperscriptTextFeature,
TestRecorderFeature,
TEXT_MODE_TO_TYPE,
TEXT_TYPE_TO_FORMAT,
TEXT_TYPE_TO_MODE,
TOGGLE_LINK_COMMAND,
TestRecorderFeature,
TextDropdownSectionWithEntries,
TextHTMLConverter,
TOGGLE_LINK_COMMAND,
TreeViewFeature,
UnderlineTextFeature,
UnorderedListFeature,
@@ -521,48 +554,15 @@ import {
UploadFeature,
UploadFeatureProps,
UploadNode,
addSwipeDownListener,
addSwipeLeftListener,
addSwipeRightListener,
addSwipeUpListener,
cloneDeep,
consolidateHTMLConverters,
convertLexicalNodesToHTML,
convertLexicalToHTML,
convertSlateNodesToLexical,
convertSlateToLexical,
createBlockNode,
defaultEditorConfig,
defaultEditorFeatures,
defaultHTMLConverters,
defaultRichTextValue,
defaultSanitizedEditorConfig,
defaultSlateConverters,
getDOMRangeRect,
getEnabledNodes,
getSelectedNode,
invariant,
isHTMLElement,
isPoint,
joinClasses,
lexicalEditor,
lexicalHTML,
loadFeatures,
sanitizeEditorConfig,
sanitizeFeatures,
sanitizeUrl,
setFloatingElemPosition,
setFloatingElemPositionForLinkEditor,
sortFeaturesForOptimalLoading,
useEditorConfigContext,
validateUrl,
} from '@payloadcms/richtext-lexical'
import {
defaultEditorLexicalConfig,
RichTextCell,
RichTextField,
ToolbarButton,
ToolbarDropdown,
defaultEditorLexicalConfig,
} from '@payloadcms/richtext-lexical/components'
import {
AdapterArguments,
@@ -570,12 +570,12 @@ import {
ElementNode,
FieldProps,
LeafButton,
nodeIsTextNode,
RichTextCustomElement,
RichTextCustomLeaf,
RichTextElement,
RichTextLeaf,
TextNode,
nodeIsTextNode,
slateEditor,
TextNode,
toggleElement,
} from '@payloadcms/richtext-slate'

test/queues/.gitignore (new file, vendored, 2 lines)

@@ -0,0 +1,2 @@
/media
/media-gif

test/queues/config.ts (new file, 580 lines)

@@ -0,0 +1,580 @@
import type { TaskConfig, WorkflowConfig } from 'payload'
import { lexicalEditor } from '@payloadcms/richtext-lexical'
import { fileURLToPath } from 'node:url'
import path from 'path'
import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
import { devUser } from '../credentials.js'
import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js'
import { clearAndSeedEverything } from './seed.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export default buildConfigWithDefaults({
collections: [
{
slug: 'posts',
admin: {
useAsTitle: 'title',
},
hooks: {
afterChange: [
async ({ req, doc, context }) => {
await req.payload.jobs.queue({
workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost',
input: {
post: doc.id,
message: 'hello',
},
req,
})
},
],
},
fields: [
{
name: 'title',
type: 'text',
required: true,
},
{
name: 'content',
type: 'richText',
},
{
name: 'jobStep1Ran',
type: 'text',
},
{
name: 'jobStep2Ran',
type: 'text',
},
],
},
{
slug: 'simple',
admin: {
useAsTitle: 'title',
},
fields: [
{
name: 'title',
type: 'text',
required: true,
},
],
},
],
admin: {
importMap: {
baseDir: path.resolve(dirname),
},
autoLogin: {
prefillOnly: true,
email: devUser.email,
password: devUser.password,
},
},
jobs: {
jobsCollectionOverrides: ({ defaultJobsCollection }) => {
return {
...defaultJobsCollection,
admin: {
...(defaultJobsCollection?.admin || {}),
hidden: false,
},
}
},
tasks: [
{
retries: 2,
slug: 'UpdatePost',
interfaceName: 'MyUpdatePostType',
inputSchema: [
{
name: 'post',
type: 'relationship',
relationTo: 'posts',
maxDepth: 0,
required: true,
},
{
name: 'message',
type: 'text',
required: true,
},
],
outputSchema: [
{
name: 'messageTwice',
type: 'text',
required: true,
},
],
handler: updatePostStep1,
} as TaskConfig<'UpdatePost'>,
{
retries: 2,
slug: 'UpdatePostStep2',
inputSchema: [
{
name: 'post',
type: 'relationship',
relationTo: 'posts',
maxDepth: 0,
required: true,
},
{
name: 'messageTwice',
type: 'text',
required: true,
},
],
handler: updatePostStep2,
} as TaskConfig<'UpdatePostStep2'>,
{
retries: 3,
slug: 'CreateSimple',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
{
name: 'shouldFail',
type: 'checkbox',
},
],
outputSchema: [
{
name: 'simpleID',
type: 'text',
required: true,
},
],
handler: async ({ input, req }) => {
if (input.shouldFail) {
throw new Error('Failed on purpose')
}
const newSimple = await req.payload.create({
collection: 'simple',
req,
data: {
title: input.message,
},
})
return {
output: {
simpleID: newSimple.id,
},
}
},
} as TaskConfig<'CreateSimple'>,
{
retries: 2,
slug: 'CreateSimpleWithDuplicateMessage',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
{
name: 'shouldFail',
type: 'checkbox',
},
],
outputSchema: [
{
name: 'simpleID',
type: 'text',
required: true,
},
],
handler: async ({ input, req }) => {
if (input.shouldFail) {
throw new Error('Failed on purpose')
}
const newSimple = await req.payload.create({
collection: 'simple',
req,
data: {
title: input.message + input.message,
},
})
return {
output: {
simpleID: newSimple.id,
},
}
},
} as TaskConfig<'CreateSimpleWithDuplicateMessage'>,
{
retries: 2,
slug: 'ExternalTask',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
outputSchema: [
{
name: 'simpleID',
type: 'text',
required: true,
},
],
handler: path.resolve(dirname, 'runners/externalTask.ts') + '#externalTaskHandler',
} as TaskConfig<'ExternalTask'>,
],
workflows: [
{
slug: 'updatePost',
interfaceName: 'MyUpdatePostWorkflowType',
inputSchema: [
{
name: 'post',
type: 'relationship',
relationTo: 'posts',
maxDepth: 0,
required: true,
},
{
name: 'message',
type: 'text',
required: true,
},
],
handler: async ({ job, tasks }) => {
await tasks.UpdatePost('1', {
input: {
post: job.input.post,
message: job.input.message,
},
})
await tasks.UpdatePostStep2('2', {
input: {
post: job.taskStatus.UpdatePost['1'].input.post,
messageTwice: job.taskStatus.UpdatePost['1'].output.messageTwice,
},
})
},
} as WorkflowConfig<'updatePost'>,
{
slug: 'updatePostJSONWorkflow',
inputSchema: [
{
name: 'post',
type: 'relationship',
relationTo: 'posts',
maxDepth: 0,
required: true,
},
{
name: 'message',
type: 'text',
required: true,
},
],
handler: [
{
task: 'UpdatePost',
id: '1',
input: ({ job }) => ({
post: job.input.post,
message: job.input.message,
}),
},
{
task: 'UpdatePostStep2',
id: '2',
input: ({ job }) => ({
post: job.taskStatus.UpdatePost['1'].input.post,
messageTwice: job.taskStatus.UpdatePost['1'].output.messageTwice,
}),
condition({ job }) {
return job?.taskStatus?.UpdatePost?.['1']?.complete
},
completesJob: true,
},
],
} as WorkflowConfig<'updatePostJSONWorkflow'>,
{
slug: 'retriesTest',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
handler: async ({ job, tasks, req }) => {
await req.payload.update({
collection: 'payload-jobs',
data: {
input: {
...job.input,
amountRetried:
// @ts-expect-error amountRetried is new arbitrary data and not in the type
job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0,
},
},
id: job.id,
})
await tasks.CreateSimple('1', {
input: {
message: job.input.message,
},
})
// At this point there should always be one post created.
// job.input.amountRetried will go up to 2 as CreatePost has 2 retries
await tasks.CreateSimple('2', {
input: {
message: job.input.message,
shouldFail: true,
},
})
// This will never be reached
},
} as WorkflowConfig<'retriesTest'>,
{
slug: 'retriesRollbackTest',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
handler: async ({ job, inlineTask, req }) => {
await req.payload.update({
collection: 'payload-jobs',
data: {
input: {
...job.input,
amountRetried:
// @ts-expect-error amountRetried is new arbitrary data and not in the type
job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0,
},
},
id: job.id,
})
await inlineTask('1', {
task: async ({ req }) => {
const newSimple = await req.payload.create({
collection: 'simple',
req,
data: {
title: job.input.message,
},
})
return {
output: {
simpleID: newSimple.id,
},
}
},
})
await inlineTask('2', {
task: async ({ req }) => {
await req.payload.create({
collection: 'simple',
req,
data: {
title: 'should not exist',
},
})
// Fail afterwards, so that we can also test that transactions work (i.e. the job is rolled back)
throw new Error('Failed on purpose')
},
retries: {
attempts: 4,
},
})
},
} as WorkflowConfig<'retriesRollbackTest'>,
{
slug: 'retriesWorkflowLevelTest',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
retries: 2, // Even though CreateSimple has 3 retries, this workflow only has 2. Thus, it will only retry once
handler: async ({ job, tasks, req }) => {
await req.payload.update({
collection: 'payload-jobs',
data: {
input: {
...job.input,
amountRetried:
// @ts-expect-error amountRetried is new arbitrary data and not in the type
job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0,
},
},
id: job.id,
})
await tasks.CreateSimple('1', {
input: {
message: job.input.message,
},
})
// At this point there should always be one post created.
// job.input.amountRetried will go up to 2 as CreatePost has 2 retries
await tasks.CreateSimple('2', {
input: {
message: job.input.message,
shouldFail: true,
},
})
// This will never be reached
},
} as WorkflowConfig<'retriesWorkflowLevelTest'>,
{
slug: 'inlineTaskTest',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
handler: async ({ job, inlineTask }) => {
await inlineTask('1', {
task: async ({ input, req }) => {
const newSimple = await req.payload.create({
collection: 'simple',
req,
data: {
title: input.message,
},
})
return {
output: {
simpleID: newSimple.id,
},
}
},
input: {
message: job.input.message,
},
})
},
} as WorkflowConfig<'inlineTaskTest'>,
{
slug: 'externalWorkflow',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
handler: path.resolve(dirname, 'runners/externalWorkflow.ts') + '#externalWorkflowHandler',
} as WorkflowConfig<'externalWorkflow'>,
{
slug: 'retriesBackoffTest',
inputSchema: [
{
name: 'message',
type: 'text',
required: true,
},
],
handler: async ({ job, inlineTask, req }) => {
const newJob = await req.payload.update({
collection: 'payload-jobs',
data: {
input: {
...job.input,
amountRetried:
// @ts-expect-error amountRetried is new arbitrary data and not in the type
job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0,
},
},
id: job.id,
})
job.input = newJob.input as any
await inlineTask('1', {
task: async ({ req }) => {
const totalTried = job?.taskStatus?.inline?.['1']?.totalTried || 0
const { id } = await req.payload.create({
collection: 'simple',
req,
data: {
title: 'should not exist',
},
})
// @ts-expect-error timeTried is new arbitrary data and not in the type
if (!job.input.timeTried) {
// @ts-expect-error timeTried is new arbitrary data and not in the type
job.input.timeTried = {}
}
// @ts-expect-error timeTried is new arbitrary data and not in the type
job.input.timeTried[totalTried] = new Date().toISOString()
await req.payload.update({
collection: 'payload-jobs',
data: {
input: job.input,
},
id: job.id,
})
if (totalTried < 4) {
// Cleanup the post
await req.payload.delete({
collection: 'simple',
id,
req,
})
// Last try it should succeed
throw new Error('Failed on purpose')
}
return {
output: {},
}
},
retries: {
attempts: 4,
backoff: {
type: 'exponential',
// Should retry in 300ms, then 600, then 1200, then 2400, then succeed
delay: 300,
},
},
})
},
} as WorkflowConfig<'retriesBackoffTest'>,
],
},
editor: lexicalEditor(),
onInit: async (payload) => {
if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') {
await clearAndSeedEverything(payload)
}
},
typescript: {
outputFile: path.resolve(dirname, 'payload-types.ts'),
},
})

test/queues/e2e.spec.ts (new file, 65 lines)

@@ -0,0 +1,65 @@
import type { Page } from '@playwright/test'
import { expect, test } from '@playwright/test'
import * as path from 'path'
import { fileURLToPath } from 'url'
import type { PayloadTestSDK } from '../helpers/sdk/index.js'
import type { Config } from './payload-types.js'
import { ensureCompilationIsDone, initPageConsoleErrorCatch } from '../helpers.js'
import { AdminUrlUtil } from '../helpers/adminUrlUtil.js'
import { initPayloadE2ENoConfig } from '../helpers/initPayloadE2ENoConfig.js'
import { reInitializeDB } from '../helpers/reInitializeDB.js'
import { RESTClient } from '../helpers/rest.js'
import { TEST_TIMEOUT } from '../playwright.config.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
let serverURL: string
let payload: PayloadTestSDK<Config>
let client: RESTClient
test.describe('Queues', () => {
let page: Page
let url: AdminUrlUtil
test.beforeAll(async ({ browser }, testInfo) => {
testInfo.setTimeout(TEST_TIMEOUT)
process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
;({ payload, serverURL } = await initPayloadE2ENoConfig({ dirname }))
url = new AdminUrlUtil(serverURL, 'payload-jobs')
const context = await browser.newContext()
page = await context.newPage()
initPageConsoleErrorCatch(page)
await reInitializeDB({
serverURL,
snapshotKey: 'queuesTest',
})
await ensureCompilationIsDone({ page, serverURL })
})
test.beforeEach(async () => {
await reInitializeDB({
serverURL,
snapshotKey: 'fieldsTest',
uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
if (client) {
await client.logout()
}
client = new RESTClient(null, { defaultSlug: 'users', serverURL })
await client.login()
await ensureCompilationIsDone({ page, serverURL })
})
test('example test', async () => {
await page.goto(url.list)
const textCell = page.locator('.row-1 .cell-text')
await expect(textCell).toHaveText('example post')
})
})


@@ -0,0 +1,20 @@
import { rootParserOptions } from '../../eslint.config.js'
import { testEslintConfig } from '../eslint.config.js'
/** @typedef {import('eslint').Linter.FlatConfig} */
let FlatConfig
/** @type {FlatConfig[]} */
export const index = [
...testEslintConfig,
{
languageOptions: {
parserOptions: {
...rootParserOptions,
tsconfigRootDir: import.meta.dirname,
},
},
},
]
export default index

test/queues/int.spec.ts (new file, 668 lines)

@@ -0,0 +1,668 @@
import type { JobTaskStatus, Payload } from 'payload'
import path from 'path'
import { fileURLToPath } from 'url'
import type { NextRESTClient } from '../helpers/NextRESTClient.js'
import { devUser } from '../credentials.js'
import { initPayloadInt } from '../helpers/initPayloadInt.js'
import { clearAndSeedEverything } from './seed.js'
let payload: Payload
let restClient: NextRESTClient
let token: string
const { email, password } = devUser
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
describe('Queues', () => {
beforeAll(async () => {
process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
;({ payload, restClient } = await initPayloadInt(dirname))
})
afterAll(async () => {
if (typeof payload.db.destroy === 'function') {
await payload.db.destroy()
}
})
beforeEach(async () => {
await clearAndSeedEverything(payload)
const data = await restClient
.POST('/users/login', {
body: JSON.stringify({
email,
password,
}),
})
.then((res) => res.json())
if (data.token) {
token = data.token
}
})
it('will run access control on jobs runner', async () => {
const response = await restClient.GET('/payload-jobs/run', {
headers: {
// Authorization: `JWT ${token}`,
},
}) // Needs to be a rest call to test auth
expect(response.status).toBe(401)
})
it('will return 200 from jobs runner', async () => {
const response = await restClient.GET('/payload-jobs/run', {
headers: {
Authorization: `JWT ${token}`,
},
}) // Needs to be a rest call to test auth
expect(response.status).toBe(200)
})
// There used to be a bug in payload where updating the job threw the following error - only in
// postgres:
// QueryError: The following path cannot be queried: document.relationTo
// This test is to ensure that the bug is fixed
it('can create and update new jobs', async () => {
const job = await payload.create({
collection: 'payload-jobs',
data: {
input: {
message: '1',
},
},
})
// @ts-expect-error
expect(job.input.message).toBe('1')
const updatedJob = await payload.update({
collection: 'payload-jobs',
id: job.id,
data: {
input: {
message: '2',
},
},
})
// @ts-expect-error
expect(updatedJob.input.message).toBe('2')
})
it('can create new jobs', async () => {
const newPost = await payload.create({
collection: 'posts',
data: {
title: 'my post',
},
})
const retrievedPost = await payload.findByID({
collection: 'posts',
id: newPost.id,
})
expect(retrievedPost.jobStep1Ran).toBeFalsy()
expect(retrievedPost.jobStep2Ran).toBeFalsy()
await payload.jobs.run()
const postAfterJobs = await payload.findByID({
collection: 'posts',
id: newPost.id,
})
expect(postAfterJobs.jobStep1Ran).toBe('hello')
expect(postAfterJobs.jobStep2Ran).toBe('hellohellohellohello')
})
it('can create new JSON-workflow jobs', async () => {
const newPost = await payload.create({
collection: 'posts',
data: {
title: 'my post',
},
context: {
useJSONWorkflow: true,
},
})
const retrievedPost = await payload.findByID({
collection: 'posts',
id: newPost.id,
})
expect(retrievedPost.jobStep1Ran).toBeFalsy()
expect(retrievedPost.jobStep2Ran).toBeFalsy()
await payload.jobs.run()
const postAfterJobs = await payload.findByID({
collection: 'posts',
id: newPost.id,
})
expect(postAfterJobs.jobStep1Ran).toBe('hello')
expect(postAfterJobs.jobStep2Ran).toBe('hellohellohellohello')
})
it('ensure job retrying works', async () => {
const job = await payload.jobs.queue({
workflow: 'retriesTest',
input: {
message: 'hello',
},
})
let hasJobsRemaining = true
while (hasJobsRemaining) {
const response = await payload.jobs.run()
if (response.noJobsRemaining) {
hasJobsRemaining = false
}
}
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
const jobAfterRun = await payload.findByID({
collection: 'payload-jobs',
id: job.id,
})
// @ts-expect-error amountRetried is new arbitrary data and not in the type
expect(jobAfterRun.input.amountRetried).toBe(3)
})
it('ensure workflow-level retries are respected', async () => {
const job = await payload.jobs.queue({
workflow: 'retriesWorkflowLevelTest',
input: {
message: 'hello',
},
})
let hasJobsRemaining = true
while (hasJobsRemaining) {
const response = await payload.jobs.run()
if (response.noJobsRemaining) {
hasJobsRemaining = false
}
}
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
const jobAfterRun = await payload.findByID({
collection: 'payload-jobs',
id: job.id,
})
// @ts-expect-error amountRetried is new arbitrary data and not in the type
expect(jobAfterRun.input.amountRetried).toBe(2)
})
/*
// Task rollbacks are not supported in the current version of Payload. This test will be re-enabled when task rollbacks are supported once we figure out the transaction issues
it('ensure failed tasks are rolled back via transactions', async () => {
const job = await payload.jobs.queue({
workflow: 'retriesRollbackTest',
input: {
message: 'hello',
},
})
let hasJobsRemaining = true
while (hasJobsRemaining) {
const response = await payload.jobs.run()
if (response.noJobsRemaining) {
hasJobsRemaining = false
}
}
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1) // Failure happens after task creates a simple document, but still within the task => any document creation should be rolled back
const jobAfterRun = await payload.findByID({
collection: 'payload-jobs',
id: job.id,
})
// @ts-expect-error amountRetried is new arbitrary data and not in the type
expect(jobAfterRun.input.amountRetried).toBe(4)
})*/
it('ensure backoff strategy of task is respected', async () => {
const job = await payload.jobs.queue({
workflow: 'retriesBackoffTest',
input: {
message: 'hello',
},
})
let hasJobsRemaining = true
let firstGotNoJobs = null
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
// Keep running until no jobs found. If no jobs found, wait for 1.6 seconds to see if any new jobs are added
// (Specifically here we want to see if the backoff strategy is respected and thus need to wait for `waitUntil`)
while (
hasJobsRemaining ||
!firstGotNoJobs ||
new Date().getTime() - firstGotNoJobs.getTime() < 3000
) {
const response = await payload.jobs.run()
if (response.noJobsRemaining) {
if (hasJobsRemaining) {
firstGotNoJobs = new Date()
hasJobsRemaining = false
}
} else {
firstGotNoJobs = null
hasJobsRemaining = true
}
// Add a 100ms delay before the next iteration
await delay(100)
}
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
const jobAfterRun = await payload.findByID({
collection: 'payload-jobs',
id: job.id,
})
expect(jobAfterRun.totalTried).toBe(5)
expect((jobAfterRun.taskStatus as JobTaskStatus).inline['1'].totalTried).toBe(5)
// @ts-expect-error amountRetried is new arbitrary data and not in the type
expect(jobAfterRun.input.amountRetried).toBe(4)
/*
Job.input.timeTried may look something like this:
timeTried: {
'0': '2024-10-07T16:05:49.300Z',
'1': '2024-10-07T16:05:49.469Z',
'2': '2024-10-07T16:05:49.779Z',
'3': '2024-10-07T16:05:50.388Z',
'4': '2024-10-07T16:05:51.597Z'
}
Convert this into an array, each item is the duration between the fails. So this should have 4 items
*/
const timeTried: {
[key: string]: string
// @ts-expect-error timeTried is new arbitrary data and not in the type
} = jobAfterRun.input.timeTried
const durations = Object.values(timeTried)
.map((time, index, arr) => {
if (index === arr.length - 1) {
return null
}
return new Date(arr[index + 1]).getTime() - new Date(time).getTime()
})
.filter((p) => p !== null)
expect(durations).toHaveLength(4)
expect(durations[0]).toBeGreaterThan(300)
expect(durations[1]).toBeGreaterThan(600)
expect(durations[2]).toBeGreaterThan(1200)
expect(durations[3]).toBeGreaterThan(2400)
})
it('can create new inline jobs', async () => {
await payload.jobs.queue({
workflow: 'inlineTaskTest',
input: {
message: 'hello!',
},
})
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
expect(allSimples.docs[0].title).toBe('hello!')
})
it('can queue single tasks', async () => {
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
expect(allSimples.docs[0].title).toBe('from single task')
})
/*
// Task rollbacks are not supported in the current version of Payload. This test will be re-enabled when task rollbacks are supported once we figure out the transaction issues
it('transaction test against payload-jobs collection', async () => {
// This kinds of emulates what happens when multiple jobs are queued and then run in parallel.
const runWorkflowFN = async (i: number) => {
const { id } = await payload.create({
collection: 'payload-jobs',
data: {
input: {
message: 'Number ' + i,
},
taskSlug: 'CreateSimple',
},
})
const _req = await createLocalReq({}, payload)
const t1Req = isolateObjectProperty(_req, 'transactionID')
delete t1Req.transactionID
await initTransaction(t1Req)
await payload.update({
collection: 'payload-jobs',
id,
req: t1Req,
data: {
input: {
message: 'Number ' + i + ' Update 1',
},
processing: true,
taskSlug: 'CreateSimple',
},
})
/**
* T1 start
*/
/*
const t2Req = isolateObjectProperty(t1Req, 'transactionID')
delete t2Req.transactionID
//
await initTransaction(t2Req)
await payload.update({
collection: 'payload-jobs',
id,
req: t1Req,
data: {
input: {
message: 'Number ' + i + ' Update 2',
},
processing: true,
taskSlug: 'CreateSimple',
},
})
await payload.create({
collection: 'simple',
req: t2Req,
data: {
title: 'from single task',
},
})
await payload.update({
collection: 'payload-jobs',
id,
req: t1Req,
data: {
input: {
message: 'Number ' + i + ' Update 3',
},
processing: true,
taskSlug: 'CreateSimple',
},
})
await commitTransaction(t2Req)
/**
* T1 end
*/
/*
await payload.update({
collection: 'payload-jobs',
id,
req: t1Req,
data: {
input: {
message: 'Number ' + i + ' Update 4',
},
processing: true,
taskSlug: 'CreateSimple',
},
})
await commitTransaction(t1Req)
}
await Promise.all(
new Array(30).fill(0).map(async (_, i) => {
await runWorkflowFN(i)
}),
)
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(30)
})*/
it('can queue single tasks 8 times', async () => {
for (let i = 0; i < 8; i++) {
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
}
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(8)
expect(allSimples.docs[0].title).toBe('from single task')
expect(allSimples.docs[7].title).toBe('from single task')
})
it('can queue single tasks 500 times', async () => {
for (let i = 0; i < 500; i++) {
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
}
await payload.jobs.run({
limit: 1000,
})
const allSimples = await payload.find({
collection: 'simple',
limit: 1000,
})
expect(allSimples.totalDocs).toBe(500) // Default limit: 10
expect(allSimples.docs[0].title).toBe('from single task')
expect(allSimples.docs[490].title).toBe('from single task')
})
it('ensure default jobs run limit of 10 works', async () => {
for (let i = 0; i < 500; i++) {
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
}
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 1000,
})
expect(allSimples.totalDocs).toBe(10) // Default limit: 10
expect(allSimples.docs[0].title).toBe('from single task')
expect(allSimples.docs[9].title).toBe('from single task')
})
it('ensure jobs run limit can be customized', async () => {
for (let i = 0; i < 500; i++) {
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
}
await payload.jobs.run({
limit: 42,
})
const allSimples = await payload.find({
collection: 'simple',
limit: 1000,
})
expect(allSimples.totalDocs).toBe(42) // Default limit: 10
expect(allSimples.docs[0].title).toBe('from single task')
expect(allSimples.docs[30].title).toBe('from single task')
expect(allSimples.docs[41].title).toBe('from single task')
})
it('can queue different kinds of single tasks multiple times', async () => {
for (let i = 0; i < 3; i++) {
await payload.jobs.queue({
task: 'CreateSimpleWithDuplicateMessage',
input: {
message: 'hello',
},
})
await payload.jobs.queue({
task: 'CreateSimple',
input: {
message: 'from single task',
},
})
await payload.jobs.queue({
task: 'CreateSimpleWithDuplicateMessage',
input: {
message: 'hello',
},
})
}
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(9)
let amountOfCreateSimple = 0
let amountOfCreateSimpleWithDuplicateMessage = 0
for (const simple of allSimples.docs) {
if (simple.title === 'from single task') {
amountOfCreateSimple++
} else if (simple.title === 'hellohello') {
amountOfCreateSimpleWithDuplicateMessage++
}
}
expect(amountOfCreateSimple).toBe(3)
expect(amountOfCreateSimpleWithDuplicateMessage).toBe(6)
})
it('can queue external tasks', async () => {
await payload.jobs.queue({
task: 'ExternalTask',
input: {
message: 'external',
},
})
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
expect(allSimples.docs[0].title).toBe('external')
})
it('can queue external workflow that is running external task', async () => {
await payload.jobs.queue({
workflow: 'externalWorkflow',
input: {
message: 'externalWorkflow',
},
})
await payload.jobs.run()
const allSimples = await payload.find({
collection: 'simple',
limit: 100,
})
expect(allSimples.totalDocs).toBe(1)
expect(allSimples.docs[0].title).toBe('externalWorkflow')
})
})


@@ -0,0 +1,446 @@
/* tslint:disable */
/* eslint-disable */
/**
* This file was automatically generated by Payload.
* DO NOT MODIFY IT BY HAND. Instead, modify your source Payload config,
* and re-run `payload generate:types` to regenerate this file.
*/
export interface Config {
auth: {
users: UserAuthOperations;
};
collections: {
posts: Post;
simple: Simple;
users: User;
'payload-jobs': PayloadJob;
'payload-locked-documents': PayloadLockedDocument;
'payload-preferences': PayloadPreference;
'payload-migrations': PayloadMigration;
};
db: {
defaultIDType: string;
};
globals: {};
locale: null;
user: User & {
collection: 'users';
};
jobs?: {
tasks: {
UpdatePost: MyUpdatePostType;
UpdatePostStep2: TaskUpdatePostStep2;
CreateSimple: TaskCreateSimple;
CreateSimpleWithDuplicateMessage: TaskCreateSimpleWithDuplicateMessage;
ExternalTask: TaskExternalTask;
inline?: {
input: unknown;
output: unknown;
};
};
workflows?: {
updatePost?: MyUpdatePostWorkflowType;
updatePostJSONWorkflow?: WorkflowUpdatePostJSONWorkflow;
retriesTest?: WorkflowRetriesTest;
retriesRollbackTest?: WorkflowRetriesRollbackTest;
retriesWorkflowLevelTest?: WorkflowRetriesWorkflowLevelTest;
inlineTaskTest?: WorkflowInlineTaskTest;
externalWorkflow?: WorkflowExternalWorkflow;
retriesBackoffTest?: WorkflowRetriesBackoffTest;
};
};
}
export interface UserAuthOperations {
forgotPassword: {
email: string;
password: string;
};
login: {
email: string;
password: string;
};
registerFirstUser: {
email: string;
password: string;
};
unlock: {
email: string;
password: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "posts".
*/
export interface Post {
id: string;
title: string;
content?: {
root: {
type: string;
children: {
type: string;
version: number;
[k: string]: unknown;
}[];
direction: ('ltr' | 'rtl') | null;
format: 'left' | 'start' | 'center' | 'right' | 'end' | 'justify' | '';
indent: number;
version: number;
};
[k: string]: unknown;
} | null;
jobStep1Ran?: string | null;
jobStep2Ran?: string | null;
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "simple".
*/
export interface Simple {
id: string;
title: string;
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "users".
*/
export interface User {
id: string;
updatedAt: string;
createdAt: string;
email: string;
resetPasswordToken?: string | null;
resetPasswordExpiration?: string | null;
salt?: string | null;
hash?: string | null;
loginAttempts?: number | null;
lockUntil?: string | null;
password?: string | null;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "payload-jobs".
*/
export interface PayloadJob {
id: string;
input?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
taskStatus?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
completedAt?: string | null;
totalTried?: number | null;
hasError?: boolean | null;
error?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
log?:
| {
executedAt: string;
completedAt: string;
taskSlug:
| 'inline'
| 'UpdatePost'
| 'UpdatePostStep2'
| 'CreateSimple'
| 'CreateSimpleWithDuplicateMessage'
| 'ExternalTask';
taskID: string;
input?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
output?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
state: 'failed' | 'succeeded';
error?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
id?: string | null;
}[]
| null;
workflowSlug?:
| (
| 'updatePost'
| 'updatePostJSONWorkflow'
| 'retriesTest'
| 'retriesRollbackTest'
| 'retriesWorkflowLevelTest'
| 'inlineTaskTest'
| 'externalWorkflow'
| 'retriesBackoffTest'
)
| null;
taskSlug?:
| (
| 'inline'
| 'UpdatePost'
| 'UpdatePostStep2'
| 'CreateSimple'
| 'CreateSimpleWithDuplicateMessage'
| 'ExternalTask'
)
| null;
queue?: 'default' | null;
waitUntil?: string | null;
processing?: boolean | null;
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "payload-locked-documents".
*/
export interface PayloadLockedDocument {
id: string;
document?:
| ({
relationTo: 'posts';
value: string | Post;
} | null)
| ({
relationTo: 'simple';
value: string | Simple;
} | null)
| ({
relationTo: 'users';
value: string | User;
} | null)
| ({
relationTo: 'payload-jobs';
value: string | PayloadJob;
} | null);
globalSlug?: string | null;
user: {
relationTo: 'users';
value: string | User;
};
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "payload-preferences".
*/
export interface PayloadPreference {
id: string;
user: {
relationTo: 'users';
value: string | User;
};
key?: string | null;
value?:
| {
[k: string]: unknown;
}
| unknown[]
| string
| number
| boolean
| null;
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "payload-migrations".
*/
export interface PayloadMigration {
id: string;
name?: string | null;
batch?: number | null;
updatedAt: string;
createdAt: string;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "MyUpdatePostType".
*/
export interface MyUpdatePostType {
input: {
post: string | Post;
message: string;
};
output: {
messageTwice: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "TaskUpdatePostStep2".
*/
export interface TaskUpdatePostStep2 {
input: {
post: string | Post;
messageTwice: string;
};
output?: unknown;
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "TaskCreateSimple".
*/
export interface TaskCreateSimple {
input: {
message: string;
shouldFail?: boolean | null;
};
output: {
simpleID: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "TaskCreateSimpleWithDuplicateMessage".
*/
export interface TaskCreateSimpleWithDuplicateMessage {
input: {
message: string;
shouldFail?: boolean | null;
};
output: {
simpleID: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "TaskExternalTask".
*/
export interface TaskExternalTask {
input: {
message: string;
};
output: {
simpleID: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "MyUpdatePostWorkflowType".
*/
export interface MyUpdatePostWorkflowType {
input: {
post: string | Post;
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowUpdatePostJSONWorkflow".
*/
export interface WorkflowUpdatePostJSONWorkflow {
input: {
post: string | Post;
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowRetriesTest".
*/
export interface WorkflowRetriesTest {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowRetriesRollbackTest".
*/
export interface WorkflowRetriesRollbackTest {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowRetriesWorkflowLevelTest".
*/
export interface WorkflowRetriesWorkflowLevelTest {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowInlineTaskTest".
*/
export interface WorkflowInlineTaskTest {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowExternalWorkflow".
*/
export interface WorkflowExternalWorkflow {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "WorkflowRetriesBackoffTest".
*/
export interface WorkflowRetriesBackoffTest {
input: {
message: string;
};
}
/**
* This interface was referenced by `Config`'s JSON-Schema
* via the `definition` "auth".
*/
export interface Auth {
[k: string]: unknown;
}
declare module 'payload' {
// @ts-ignore
export interface GeneratedTypes extends Config {}
}


@@ -0,0 +1,16 @@
import type { TaskHandler } from 'payload'
export const externalTaskHandler: TaskHandler<'ExternalTask'> = async ({ input, req }) => {
const newSimple = await req.payload.create({
collection: 'simple',
req,
data: {
title: input.message,
},
})
return {
output: {
simpleID: newSimple.id,
},
}
}


@@ -0,0 +1,12 @@
import type { WorkflowHandler } from 'payload'
export const externalWorkflowHandler: WorkflowHandler<'externalWorkflow'> = async ({
job,
tasks,
}) => {
await tasks.ExternalTask('1', {
input: {
message: job.input.message,
},
})
}


@@ -0,0 +1,55 @@
import type { TaskHandler } from 'payload'
export const updatePostStep1: TaskHandler<'UpdatePost'> = async ({ req, input }) => {
const postID =
typeof input.post === 'string' || typeof input.post === 'number' ? input.post : input.post.id
if (!postID) {
return {
state: 'failed',
output: null,
}
}
await req.payload.update({
collection: 'posts',
id: postID,
req,
data: {
jobStep1Ran: input.message,
},
})
return {
state: 'succeeded',
output: {
messageTwice: input.message + input.message,
},
}
}
export const updatePostStep2: TaskHandler<'UpdatePostStep2'> = async ({ req, input, job }) => {
const postID =
typeof input.post === 'string' || typeof input.post === 'number' ? input.post : input.post.id
if (!postID) {
return {
state: 'failed',
output: null,
}
}
await req.payload.update({
collection: 'posts',
id: postID,
req,
data: {
jobStep2Ran: input.messageTwice + job.taskStatus.UpdatePost['1'].output.messageTwice,
},
})
return {
state: 'succeeded',
output: null,
}
}

test/queues/schema.graphql (new file, 1902 lines; diff suppressed because it is too large)

test/queues/seed.ts (new file, 30 lines)

@@ -0,0 +1,30 @@
import type { Payload } from 'payload'
import path from 'path'
import { fileURLToPath } from 'url'
import { devUser } from '../credentials.js'
import { seedDB } from '../helpers/seed.js'
const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)
export const seed = async (_payload: Payload) => {
await _payload.create({
collection: 'users',
data: {
email: devUser.email,
password: devUser.password,
},
})
}
export async function clearAndSeedEverything(_payload: Payload) {
return await seedDB({
_payload,
collectionSlugs: _payload.config.collections.map((collection) => collection.slug),
seedFunction: seed,
snapshotKey: 'fieldsTest',
uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
})
}


@@ -0,0 +1,13 @@
{
// extend your base config to share compilerOptions, etc
//"extends": "./tsconfig.json",
"compilerOptions": {
// ensure that nobody can accidentally use this config for a build
"noEmit": true
},
"include": [
// whatever paths you intend to lint
"./**/*.ts",
"./**/*.tsx"
]
}


@@ -0,0 +1,3 @@
{
"extends": "../tsconfig.json"
}