Adds a new `schedule` property to workflow and task configs that can be used to have Payload automatically _queue_ jobs following a certain _schedule_. Docs: https://payloadcms.com/docs/dynamic/jobs-queue/schedules?branch=feat/schedule-jobs ## API Example ```ts export default buildConfig({ // ... jobs: { // ... scheduler: 'manual', // Or `cron` if you're not using serverless. If `manual` is used, then the user needs to run /api/payload-jobs/handleSchedules or payload.jobs.handleSchedules at regular intervals tasks: [ { schedule: [ { cron: '* * * * * *', queue: 'autorunSecond', // Hooks are optional hooks: { // Not an array, as providing and calling `defaultBeforeSchedule` would be more error-prone if this were an array beforeSchedule: async (args) => { // Handles verifying that there are no jobs already scheduled or processing. // You can override this behavior by not calling defaultBeforeSchedule, e.g. if you wanted // to allow a maximum of 3 scheduled jobs in the queue instead of 1, or add any additional conditions const result = await args.defaultBeforeSchedule(args) return { ...result, input: { message: 'This task runs every second', }, } }, afterSchedule: async (args) => { await args.defaultAfterSchedule(args) // Handles updating the payload-jobs-stats global args.req.payload.logger.info( 'EverySecond task scheduled: ' + (args.status === 'success' ? args.job.id : 'skipped or failed to schedule'), ) }, }, }, ], slug: 'EverySecond', inputSchema: [ { name: 'message', type: 'text', required: true, }, ], handler: ({ input, req }) => { req.payload.logger.info(input.message) return { output: {}, } }, } ] } }) ``` --- - To see the specific tasks where the Asana app for GitHub is being used, see below: - https://app.asana.com/0/0/1210495300843759
64 lines
1.9 KiB
TypeScript
64 lines
1.9 KiB
TypeScript
import fs from 'fs'
|
|
import path from 'node:path'
|
|
import { fileURLToPath, pathToFileURL } from 'node:url'
|
|
import { generateImportMap, type SanitizedConfig } from 'payload'
|
|
|
|
import type { allDatabaseAdapters } from './generateDatabaseAdapter.js'
|
|
|
|
import { generateDatabaseAdapter } from './generateDatabaseAdapter.js'
|
|
import { getNextRootDir } from './helpers/getNextRootDir.js'
|
|
|
|
// Absolute path of this module — ESM has no __filename, so derive it from import.meta.url.
const filename = fileURLToPath(import.meta.url)

// Directory containing this script; test-suite config dirs are resolved relative to it below.
const dirname = path.dirname(filename)

// CLI flag (argv[2]): when it is the string 'true', the bootstrap at the bottom of this
// file invokes initDevAndTest immediately with the remaining CLI arguments.
const runImmediately = process.argv[2]
|
export async function initDevAndTest(
|
|
testSuiteArg: string,
|
|
writeDBAdapter: string,
|
|
skipGenImportMap: string,
|
|
configFile?: string,
|
|
): Promise<void> {
|
|
const importMapPath: string = path.resolve(
|
|
getNextRootDir(testSuiteArg).rootDir,
|
|
'./app/(payload)/admin/importMap.js',
|
|
)
|
|
|
|
try {
|
|
fs.writeFileSync(importMapPath, 'export const importMap = {}')
|
|
} catch (error) {
|
|
console.log('Error writing importMap.js', error)
|
|
}
|
|
|
|
if (writeDBAdapter === 'true') {
|
|
const dbAdapter: keyof typeof allDatabaseAdapters =
|
|
(process.env.PAYLOAD_DATABASE as keyof typeof allDatabaseAdapters) || 'mongodb'
|
|
generateDatabaseAdapter(dbAdapter)
|
|
}
|
|
|
|
if (skipGenImportMap === 'true') {
|
|
console.log('Done')
|
|
return
|
|
}
|
|
|
|
// Generate importMap
|
|
const testDir = path.resolve(dirname, testSuiteArg)
|
|
console.log('Generating import map for config:', testDir)
|
|
|
|
const configUrl = pathToFileURL(path.resolve(testDir, configFile ?? 'config.ts')).href
|
|
const config: SanitizedConfig = await (await import(configUrl)).default
|
|
|
|
process.env.ROOT_DIR = getNextRootDir(testSuiteArg).rootDir
|
|
|
|
await generateImportMap(config, { log: true, force: true })
|
|
|
|
console.log('Done')
|
|
}
|
|
|
|
if (runImmediately === 'true') {
|
|
const testSuiteArg = process.argv[3]
|
|
const writeDBAdapter = process.argv[4]
|
|
const skipGenImportMap = process.argv[5]
|
|
void initDevAndTest(testSuiteArg, writeDBAdapter, skipGenImportMap)
|
|
}
|