feat: add payload.jobs.runByID (#9875)

This commit is contained in:
Alessio Gravili
2024-12-10 16:37:06 -07:00
committed by GitHub
parent da6bc55b19
commit 09246a45e0
4 changed files with 99 additions and 12 deletions

View File

@@ -98,6 +98,8 @@ After the project is deployed to Vercel, the Vercel Cron job will automatically
If you want to process jobs programmatically from your server-side code, you can use the Local API:
**Run all jobs:**
```ts
const results = await payload.jobs.run()
@@ -105,6 +107,14 @@ const results = await payload.jobs.run()
await payload.jobs.run({ queue: 'nightly', limit: 100 })
```
**Run a single job:**
```ts
const results = await payload.jobs.runByID({
id: myJobID
})
```
#### Bin script
Finally, you can process jobs via the bin script that comes with Payload out of the box.

View File

@@ -80,4 +80,18 @@ export const getJobsLocalAPI = (payload: Payload) => ({
})
return result
},
/**
 * Run a single queued job by its ID via the Local API.
 *
 * @param args.id - ID of the job document to run.
 * @param args.overrideAccess - Defaults to true (Local API convention);
 *   pass `false` explicitly to enforce access control.
 * @param args.req - Optional existing request; a fresh local request is
 *   created when omitted.
 * @returns The result of `runJobs` for the single specified job.
 */
runByID: async (args: {
  id: number | string
  overrideAccess?: boolean
  req?: PayloadRequest
}): Promise<ReturnType<typeof runJobs>> => {
  // `args` is a required parameter — no optional chaining needed.
  // Fall back to a fresh local request when the caller did not supply one.
  const req: PayloadRequest = args.req ?? (await createLocalReq({}, payload))

  // Access is overridden unless the caller explicitly opts out.
  return runJobs({
    id: args.id,
    overrideAccess: args.overrideAccess !== false,
    req,
  })
},
})

View File

@@ -17,6 +17,10 @@ import { runJob } from './runJob/index.js'
import { runJSONJob } from './runJSONJob/index.js'
export type RunJobsArgs = {
/**
* ID of the job to run
*/
id?: number | string
limit?: number
overrideAccess?: boolean
queue?: string
@@ -36,6 +40,7 @@ export type RunJobsResult = {
}
export const runJobs = async ({
id,
limit = 10,
overrideAccess,
queue,
@@ -91,7 +96,25 @@ export const runJobs = async ({
// Find all jobs and ensure we set job to processing: true as early as possible to reduce the chance of
// the same job being picked up by another worker
const jobsQuery = (await req.payload.update({
const jobsQuery: {
docs: BaseJob[]
} = id
? {
docs: [
(await req.payload.update({
id,
collection: 'payload-jobs',
data: {
processing: true,
seenByWorker: true,
},
depth: req.payload.config.jobs.depth,
disableTransaction: true,
showHiddenFields: true,
})) as BaseJob,
],
}
: ((await req.payload.update({
collection: 'payload-jobs',
data: {
processing: true,
@@ -102,7 +125,7 @@ export const runJobs = async ({
limit,
showHiddenFields: true,
where,
})) as unknown as PaginatedDocs<BaseJob>
})) as unknown as PaginatedDocs<BaseJob>)
/**
* Just for logging purposes, we want to know how many jobs are new and how many are existing (= already been tried).

View File

@@ -925,4 +925,44 @@ describe('Queues', () => {
expect(allSimples.totalDocs).toBe(1)
expect(allSimples.docs[0].title).toBe('externalWorkflow')
})
it('ensure payload.jobs.runByID works and only runs the specified job', async () => {
  // Keep completed job documents around so we can assert on them below.
  payload.config.jobs.deleteJobOnComplete = false

  // Queue three identical jobs, remembering the ID of the last one queued.
  // NOTE: `string | null` (not bare `string = null`) so this compiles under strictNullChecks.
  let lastJobID: string | null = null
  for (let i = 0; i < 3; i++) {
    const job = await payload.jobs.queue({
      task: 'CreateSimple',
      input: {
        message: 'from single task',
      },
    })
    lastJobID = job.id
  }

  // Run ONLY the last job — the other two queued jobs must remain untouched.
  // The loop above always runs, so lastJobID is assigned by this point.
  await payload.jobs.runByID({
    id: lastJobID!,
  })

  // Exactly one 'simple' doc proves exactly one job executed.
  const allSimples = await payload.find({
    collection: 'simple',
    limit: 100,
  })

  expect(allSimples.totalDocs).toBe(1)
  expect(allSimples.docs[0].title).toBe('from single task')

  // The single completed job must be the one we targeted by ID.
  const allCompletedJobs = await payload.find({
    collection: 'payload-jobs',
    limit: 100,
    where: {
      completedAt: {
        exists: true,
      },
    },
  })

  expect(allCompletedJobs.totalDocs).toBe(1)
  expect(allCompletedJobs.docs[0].id).toBe(lastJobID)
})
})