@nicnocquee/dataqueue 1.25.0 → 1.26.0-beta.20260223202259
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ai/build-docs-content.ts +96 -0
- package/ai/build-llms-full.ts +42 -0
- package/ai/docs-content.json +284 -0
- package/ai/rules/advanced.md +150 -0
- package/ai/rules/basic.md +159 -0
- package/ai/rules/react-dashboard.md +83 -0
- package/ai/skills/dataqueue-advanced/SKILL.md +370 -0
- package/ai/skills/dataqueue-core/SKILL.md +234 -0
- package/ai/skills/dataqueue-react/SKILL.md +189 -0
- package/dist/cli.cjs +1149 -14
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.d.cts +66 -1
- package/dist/cli.d.ts +66 -1
- package/dist/cli.js +1146 -13
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +3236 -1237
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +697 -23
- package/dist/index.d.ts +697 -23
- package/dist/index.js +3235 -1238
- package/dist/index.js.map +1 -1
- package/dist/mcp-server.cjs +186 -0
- package/dist/mcp-server.cjs.map +1 -0
- package/dist/mcp-server.d.cts +32 -0
- package/dist/mcp-server.d.ts +32 -0
- package/dist/mcp-server.js +175 -0
- package/dist/mcp-server.js.map +1 -0
- package/migrations/1781200000004_create_cron_schedules_table.sql +33 -0
- package/migrations/1781200000005_add_retry_config_to_job_queue.sql +17 -0
- package/package.json +24 -21
- package/src/backend.ts +170 -5
- package/src/backends/postgres.ts +992 -63
- package/src/backends/redis-scripts.ts +358 -26
- package/src/backends/redis.test.ts +1532 -0
- package/src/backends/redis.ts +993 -35
- package/src/cli.test.ts +82 -6
- package/src/cli.ts +73 -10
- package/src/cron.test.ts +126 -0
- package/src/cron.ts +40 -0
- package/src/db-util.ts +1 -1
- package/src/index.test.ts +1034 -11
- package/src/index.ts +267 -39
- package/src/init-command.test.ts +449 -0
- package/src/init-command.ts +709 -0
- package/src/install-mcp-command.test.ts +216 -0
- package/src/install-mcp-command.ts +185 -0
- package/src/install-rules-command.test.ts +218 -0
- package/src/install-rules-command.ts +233 -0
- package/src/install-skills-command.test.ts +176 -0
- package/src/install-skills-command.ts +124 -0
- package/src/mcp-server.test.ts +162 -0
- package/src/mcp-server.ts +231 -0
- package/src/processor.ts +104 -113
- package/src/queue.test.ts +465 -0
- package/src/queue.ts +34 -252
- package/src/supervisor.test.ts +340 -0
- package/src/supervisor.ts +177 -0
- package/src/types.ts +476 -12
- package/LICENSE +0 -21
package/src/backends/redis.ts
CHANGED
|
@@ -9,11 +9,60 @@ import {
|
|
|
9
9
|
TagQueryMode,
|
|
10
10
|
JobType,
|
|
11
11
|
RedisJobQueueConfig,
|
|
12
|
+
CronScheduleRecord,
|
|
13
|
+
CronScheduleStatus,
|
|
14
|
+
EditCronScheduleOptions,
|
|
15
|
+
WaitpointRecord,
|
|
16
|
+
CreateTokenOptions,
|
|
17
|
+
AddJobOptions,
|
|
12
18
|
} from '../types.js';
|
|
13
|
-
import {
|
|
19
|
+
import {
|
|
20
|
+
QueueBackend,
|
|
21
|
+
JobFilters,
|
|
22
|
+
JobUpdates,
|
|
23
|
+
CronScheduleInput,
|
|
24
|
+
} from '../backend.js';
|
|
14
25
|
import { log } from '../log-context.js';
|
|
26
|
+
|
|
27
|
+
const MAX_TIMEOUT_MS = 365 * 24 * 60 * 60 * 1000;
|
|
28
|
+
|
|
29
|
+
/** Parse a timeout string like '10m', '1h', '24h', '7d' into milliseconds. */
|
|
30
|
+
function parseTimeoutString(timeout: string): number {
|
|
31
|
+
const match = timeout.match(/^(\d+)(s|m|h|d)$/);
|
|
32
|
+
if (!match) {
|
|
33
|
+
throw new Error(
|
|
34
|
+
`Invalid timeout format: "${timeout}". Expected format like "10m", "1h", "24h", "7d".`,
|
|
35
|
+
);
|
|
36
|
+
}
|
|
37
|
+
const value = parseInt(match[1], 10);
|
|
38
|
+
const unit = match[2];
|
|
39
|
+
let ms: number;
|
|
40
|
+
switch (unit) {
|
|
41
|
+
case 's':
|
|
42
|
+
ms = value * 1000;
|
|
43
|
+
break;
|
|
44
|
+
case 'm':
|
|
45
|
+
ms = value * 60 * 1000;
|
|
46
|
+
break;
|
|
47
|
+
case 'h':
|
|
48
|
+
ms = value * 60 * 60 * 1000;
|
|
49
|
+
break;
|
|
50
|
+
case 'd':
|
|
51
|
+
ms = value * 24 * 60 * 60 * 1000;
|
|
52
|
+
break;
|
|
53
|
+
default:
|
|
54
|
+
throw new Error(`Unknown timeout unit: "${unit}"`);
|
|
55
|
+
}
|
|
56
|
+
if (!Number.isFinite(ms) || ms > MAX_TIMEOUT_MS) {
|
|
57
|
+
throw new Error(
|
|
58
|
+
`Timeout value "${timeout}" is too large. Maximum allowed is 365 days.`,
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
return ms;
|
|
62
|
+
}
|
|
15
63
|
import {
|
|
16
64
|
ADD_JOB_SCRIPT,
|
|
65
|
+
ADD_JOBS_SCRIPT,
|
|
17
66
|
GET_NEXT_BATCH_SCRIPT,
|
|
18
67
|
COMPLETE_JOB_SCRIPT,
|
|
19
68
|
FAIL_JOB_SCRIPT,
|
|
@@ -21,8 +70,12 @@ import {
|
|
|
21
70
|
CANCEL_JOB_SCRIPT,
|
|
22
71
|
PROLONG_JOB_SCRIPT,
|
|
23
72
|
RECLAIM_STUCK_JOBS_SCRIPT,
|
|
24
|
-
|
|
73
|
+
CLEANUP_OLD_JOBS_BATCH_SCRIPT,
|
|
74
|
+
WAIT_JOB_SCRIPT,
|
|
75
|
+
COMPLETE_WAITPOINT_SCRIPT,
|
|
76
|
+
EXPIRE_TIMED_OUT_WAITPOINTS_SCRIPT,
|
|
25
77
|
} from './redis-scripts.js';
|
|
78
|
+
import { randomUUID } from 'crypto';
|
|
26
79
|
|
|
27
80
|
/** Helper: convert a Redis hash flat array [k,v,k,v,...] to a JS object */
|
|
28
81
|
function hashToObject(arr: string[]): Record<string, string> {
|
|
@@ -108,15 +161,60 @@ function deserializeJob<PayloadMap, T extends JobType<PayloadMap>>(
|
|
|
108
161
|
tags,
|
|
109
162
|
idempotencyKey: nullish(h.idempotencyKey) as string | null | undefined,
|
|
110
163
|
progress: numOrNull(h.progress),
|
|
164
|
+
waitUntil: dateOrNull(h.waitUntil),
|
|
165
|
+
waitTokenId: nullish(h.waitTokenId) as string | null | undefined,
|
|
166
|
+
stepData: parseStepData(h.stepData),
|
|
167
|
+
retryDelay: numOrNull(h.retryDelay),
|
|
168
|
+
retryBackoff:
|
|
169
|
+
h.retryBackoff === 'true'
|
|
170
|
+
? true
|
|
171
|
+
: h.retryBackoff === 'false'
|
|
172
|
+
? false
|
|
173
|
+
: null,
|
|
174
|
+
retryDelayMax: numOrNull(h.retryDelayMax),
|
|
111
175
|
};
|
|
112
176
|
}
|
|
113
177
|
|
|
178
|
+
/** Parse step data from a Redis hash field. */
|
|
179
|
+
function parseStepData(
|
|
180
|
+
raw: string | undefined,
|
|
181
|
+
): Record<string, any> | undefined {
|
|
182
|
+
if (!raw || raw === 'null') return undefined;
|
|
183
|
+
try {
|
|
184
|
+
return JSON.parse(raw);
|
|
185
|
+
} catch {
|
|
186
|
+
return undefined;
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
114
190
|
export class RedisBackend implements QueueBackend {
|
|
115
191
|
private client: RedisType;
|
|
116
192
|
private prefix: string;
|
|
117
193
|
|
|
118
|
-
|
|
119
|
-
|
|
194
|
+
/**
|
|
195
|
+
* Create a RedisBackend.
|
|
196
|
+
*
|
|
197
|
+
* @param configOrClient - Either `redisConfig` from the config file (the
|
|
198
|
+
* library creates a new ioredis client) or an existing ioredis client
|
|
199
|
+
* instance (bring your own).
|
|
200
|
+
* @param keyPrefix - Key prefix, only used when `configOrClient` is an
|
|
201
|
+
* external client. Ignored when `redisConfig` is passed (uses
|
|
202
|
+
* `redisConfig.keyPrefix` instead). Default: `'dq:'`.
|
|
203
|
+
*/
|
|
204
|
+
constructor(
|
|
205
|
+
configOrClient: RedisJobQueueConfig['redisConfig'] | RedisType,
|
|
206
|
+
keyPrefix?: string,
|
|
207
|
+
) {
|
|
208
|
+
if (configOrClient && typeof (configOrClient as any).eval === 'function') {
|
|
209
|
+
this.client = configOrClient as RedisType;
|
|
210
|
+
this.prefix = keyPrefix ?? 'dq:';
|
|
211
|
+
return;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
const redisConfig = configOrClient as NonNullable<
|
|
215
|
+
RedisJobQueueConfig['redisConfig']
|
|
216
|
+
>;
|
|
217
|
+
|
|
120
218
|
let IORedis: any;
|
|
121
219
|
try {
|
|
122
220
|
const _require = createRequire(import.meta.url);
|
|
@@ -194,17 +292,29 @@ export class RedisBackend implements QueueBackend {
|
|
|
194
292
|
|
|
195
293
|
// ── Job CRUD ──────────────────────────────────────────────────────────
|
|
196
294
|
|
|
197
|
-
async addJob<PayloadMap, T extends JobType<PayloadMap>>(
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
295
|
+
async addJob<PayloadMap, T extends JobType<PayloadMap>>(
|
|
296
|
+
{
|
|
297
|
+
jobType,
|
|
298
|
+
payload,
|
|
299
|
+
maxAttempts = 3,
|
|
300
|
+
priority = 0,
|
|
301
|
+
runAt = null,
|
|
302
|
+
timeoutMs = undefined,
|
|
303
|
+
forceKillOnTimeout = false,
|
|
304
|
+
tags = undefined,
|
|
305
|
+
idempotencyKey = undefined,
|
|
306
|
+
retryDelay = undefined,
|
|
307
|
+
retryBackoff = undefined,
|
|
308
|
+
retryDelayMax = undefined,
|
|
309
|
+
}: JobOptions<PayloadMap, T>,
|
|
310
|
+
options?: AddJobOptions,
|
|
311
|
+
): Promise<number> {
|
|
312
|
+
if (options?.db) {
|
|
313
|
+
throw new Error(
|
|
314
|
+
'The db option is not supported with the Redis backend. ' +
|
|
315
|
+
'Transactional job creation is only available with PostgreSQL.',
|
|
316
|
+
);
|
|
317
|
+
}
|
|
208
318
|
const now = this.nowMs();
|
|
209
319
|
const runAtMs = runAt ? runAt.getTime() : 0;
|
|
210
320
|
|
|
@@ -222,6 +332,9 @@ export class RedisBackend implements QueueBackend {
|
|
|
222
332
|
tags ? JSON.stringify(tags) : 'null',
|
|
223
333
|
idempotencyKey ?? 'null',
|
|
224
334
|
now,
|
|
335
|
+
retryDelay !== undefined ? retryDelay.toString() : 'null',
|
|
336
|
+
retryBackoff !== undefined ? retryBackoff.toString() : 'null',
|
|
337
|
+
retryDelayMax !== undefined ? retryDelayMax.toString() : 'null',
|
|
225
338
|
)) as number;
|
|
226
339
|
|
|
227
340
|
const jobId = Number(result);
|
|
@@ -237,6 +350,83 @@ export class RedisBackend implements QueueBackend {
|
|
|
237
350
|
return jobId;
|
|
238
351
|
}
|
|
239
352
|
|
|
353
|
+
/**
|
|
354
|
+
* Insert multiple jobs atomically via a single Lua script.
|
|
355
|
+
* Returns IDs in the same order as the input array.
|
|
356
|
+
*/
|
|
357
|
+
async addJobs<PayloadMap, T extends JobType<PayloadMap>>(
|
|
358
|
+
jobs: JobOptions<PayloadMap, T>[],
|
|
359
|
+
options?: AddJobOptions,
|
|
360
|
+
): Promise<number[]> {
|
|
361
|
+
if (jobs.length === 0) return [];
|
|
362
|
+
|
|
363
|
+
if (options?.db) {
|
|
364
|
+
throw new Error(
|
|
365
|
+
'The db option is not supported with the Redis backend. ' +
|
|
366
|
+
'Transactional job creation is only available with PostgreSQL.',
|
|
367
|
+
);
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
const now = this.nowMs();
|
|
371
|
+
|
|
372
|
+
const jobsPayload = jobs.map((job) => ({
|
|
373
|
+
jobType: job.jobType,
|
|
374
|
+
payload: JSON.stringify(job.payload),
|
|
375
|
+
maxAttempts: job.maxAttempts ?? 3,
|
|
376
|
+
priority: job.priority ?? 0,
|
|
377
|
+
runAtMs: job.runAt ? job.runAt.getTime() : 0,
|
|
378
|
+
timeoutMs:
|
|
379
|
+
job.timeoutMs !== undefined ? job.timeoutMs.toString() : 'null',
|
|
380
|
+
forceKillOnTimeout: job.forceKillOnTimeout ? 'true' : 'false',
|
|
381
|
+
tags: job.tags ? JSON.stringify(job.tags) : 'null',
|
|
382
|
+
idempotencyKey: job.idempotencyKey ?? 'null',
|
|
383
|
+
retryDelay:
|
|
384
|
+
job.retryDelay !== undefined ? job.retryDelay.toString() : 'null',
|
|
385
|
+
retryBackoff:
|
|
386
|
+
job.retryBackoff !== undefined ? job.retryBackoff.toString() : 'null',
|
|
387
|
+
retryDelayMax:
|
|
388
|
+
job.retryDelayMax !== undefined ? job.retryDelayMax.toString() : 'null',
|
|
389
|
+
}));
|
|
390
|
+
|
|
391
|
+
const result = (await this.client.eval(
|
|
392
|
+
ADD_JOBS_SCRIPT,
|
|
393
|
+
1,
|
|
394
|
+
this.prefix,
|
|
395
|
+
JSON.stringify(jobsPayload),
|
|
396
|
+
now,
|
|
397
|
+
)) as number[];
|
|
398
|
+
|
|
399
|
+
const ids = result.map(Number);
|
|
400
|
+
log(`Batch-inserted ${jobs.length} jobs, IDs: [${ids.join(', ')}]`);
|
|
401
|
+
|
|
402
|
+
// Record events for newly inserted jobs (skip idempotency duplicates)
|
|
403
|
+
const existingIdempotencyIds = new Set<number>();
|
|
404
|
+
for (let i = 0; i < jobs.length; i++) {
|
|
405
|
+
if (jobs[i].idempotencyKey) {
|
|
406
|
+
// If the returned ID existed before this batch, it was a duplicate.
|
|
407
|
+
// We detect this by checking if the same ID appears for a different
|
|
408
|
+
// idempotency-keyed job (unlikely) or by checking if the ID was less
|
|
409
|
+
// than what we'd expect. The simplest approach: record events for all,
|
|
410
|
+
// since the Lua script returns the existing ID for duplicates but
|
|
411
|
+
// doesn't tell us if it was newly created. We can compare: if
|
|
412
|
+
// multiple jobs have the same idempotency key in the batch and got
|
|
413
|
+
// the same ID, only record once.
|
|
414
|
+
if (existingIdempotencyIds.has(ids[i])) {
|
|
415
|
+
continue;
|
|
416
|
+
}
|
|
417
|
+
existingIdempotencyIds.add(ids[i]);
|
|
418
|
+
}
|
|
419
|
+
await this.recordJobEvent(ids[i], JobEventType.Added, {
|
|
420
|
+
jobType: jobs[i].jobType,
|
|
421
|
+
payload: jobs[i].payload,
|
|
422
|
+
tags: jobs[i].tags,
|
|
423
|
+
idempotencyKey: jobs[i].idempotencyKey,
|
|
424
|
+
});
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
return ids;
|
|
428
|
+
}
|
|
429
|
+
|
|
240
430
|
async getJob<PayloadMap, T extends JobType<PayloadMap>>(
|
|
241
431
|
id: number,
|
|
242
432
|
): Promise<JobRecord<PayloadMap, T> | null> {
|
|
@@ -314,10 +504,19 @@ export class RedisBackend implements QueueBackend {
|
|
|
314
504
|
if (filters.runAt) {
|
|
315
505
|
jobs = this.filterByRunAt(jobs, filters.runAt);
|
|
316
506
|
}
|
|
507
|
+
// Cursor-based (keyset) pagination: only return jobs with id < cursor
|
|
508
|
+
if (filters.cursor !== undefined) {
|
|
509
|
+
jobs = jobs.filter((j) => j.id < filters.cursor!);
|
|
510
|
+
}
|
|
317
511
|
}
|
|
318
512
|
|
|
319
|
-
// Sort by
|
|
320
|
-
jobs.sort((a, b) => b.
|
|
513
|
+
// Sort by id DESC for consistent keyset pagination (matches Postgres ORDER BY id DESC)
|
|
514
|
+
jobs.sort((a, b) => b.id - a.id);
|
|
515
|
+
|
|
516
|
+
// When using cursor, skip offset
|
|
517
|
+
if (filters?.cursor !== undefined) {
|
|
518
|
+
return jobs.slice(0, limit);
|
|
519
|
+
}
|
|
321
520
|
return jobs.slice(offset, offset + limit);
|
|
322
521
|
}
|
|
323
522
|
|
|
@@ -582,6 +781,31 @@ export class RedisBackend implements QueueBackend {
|
|
|
582
781
|
}
|
|
583
782
|
metadata.tags = updates.tags;
|
|
584
783
|
}
|
|
784
|
+
if (updates.retryDelay !== undefined) {
|
|
785
|
+
fields.push(
|
|
786
|
+
'retryDelay',
|
|
787
|
+
updates.retryDelay !== null ? updates.retryDelay.toString() : 'null',
|
|
788
|
+
);
|
|
789
|
+
metadata.retryDelay = updates.retryDelay;
|
|
790
|
+
}
|
|
791
|
+
if (updates.retryBackoff !== undefined) {
|
|
792
|
+
fields.push(
|
|
793
|
+
'retryBackoff',
|
|
794
|
+
updates.retryBackoff !== null
|
|
795
|
+
? updates.retryBackoff.toString()
|
|
796
|
+
: 'null',
|
|
797
|
+
);
|
|
798
|
+
metadata.retryBackoff = updates.retryBackoff;
|
|
799
|
+
}
|
|
800
|
+
if (updates.retryDelayMax !== undefined) {
|
|
801
|
+
fields.push(
|
|
802
|
+
'retryDelayMax',
|
|
803
|
+
updates.retryDelayMax !== null
|
|
804
|
+
? updates.retryDelayMax.toString()
|
|
805
|
+
: 'null',
|
|
806
|
+
);
|
|
807
|
+
metadata.retryDelayMax = updates.retryDelayMax;
|
|
808
|
+
}
|
|
585
809
|
|
|
586
810
|
if (fields.length === 0) {
|
|
587
811
|
log(`No fields to update for job ${jobId}`);
|
|
@@ -616,27 +840,115 @@ export class RedisBackend implements QueueBackend {
|
|
|
616
840
|
return count;
|
|
617
841
|
}
|
|
618
842
|
|
|
619
|
-
|
|
843
|
+
/**
|
|
844
|
+
* Delete completed jobs older than the given number of days.
|
|
845
|
+
* Uses SSCAN to iterate the completed set in batches, avoiding
|
|
846
|
+
* loading all IDs into memory and preventing long Redis blocks.
|
|
847
|
+
*
|
|
848
|
+
* @param daysToKeep - Number of days to retain completed jobs (default 30).
|
|
849
|
+
* @param batchSize - Number of IDs to scan per SSCAN iteration (default 200).
|
|
850
|
+
* @returns Total number of deleted jobs.
|
|
851
|
+
*/
|
|
852
|
+
async cleanupOldJobs(daysToKeep = 30, batchSize = 200): Promise<number> {
|
|
620
853
|
const cutoffMs = this.nowMs() - daysToKeep * 24 * 60 * 60 * 1000;
|
|
621
|
-
const
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
854
|
+
const setKey = `${this.prefix}status:completed`;
|
|
855
|
+
let totalDeleted = 0;
|
|
856
|
+
let cursor = '0';
|
|
857
|
+
|
|
858
|
+
do {
|
|
859
|
+
const [nextCursor, ids] = await this.client.sscan(
|
|
860
|
+
setKey,
|
|
861
|
+
cursor,
|
|
862
|
+
'COUNT',
|
|
863
|
+
batchSize,
|
|
864
|
+
);
|
|
865
|
+
cursor = nextCursor;
|
|
866
|
+
|
|
867
|
+
if (ids.length > 0) {
|
|
868
|
+
const result = (await this.client.eval(
|
|
869
|
+
CLEANUP_OLD_JOBS_BATCH_SCRIPT,
|
|
870
|
+
1,
|
|
871
|
+
this.prefix,
|
|
872
|
+
cutoffMs,
|
|
873
|
+
...ids,
|
|
874
|
+
)) as number;
|
|
875
|
+
totalDeleted += Number(result);
|
|
876
|
+
}
|
|
877
|
+
} while (cursor !== '0');
|
|
878
|
+
|
|
879
|
+
log(`Deleted ${totalDeleted} old jobs`);
|
|
880
|
+
return totalDeleted;
|
|
629
881
|
}
|
|
630
882
|
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
883
|
+
/**
|
|
884
|
+
* Delete job events older than the given number of days.
|
|
885
|
+
* Iterates all event lists and removes events whose createdAt is before the cutoff.
|
|
886
|
+
* Also removes orphaned event lists (where the job no longer exists).
|
|
887
|
+
*
|
|
888
|
+
* @param daysToKeep - Number of days to retain events (default 30).
|
|
889
|
+
* @param batchSize - Number of event keys to scan per SCAN iteration (default 200).
|
|
890
|
+
* @returns Total number of deleted events.
|
|
891
|
+
*/
|
|
892
|
+
async cleanupOldJobEvents(daysToKeep = 30, batchSize = 200): Promise<number> {
|
|
893
|
+
const cutoffMs = this.nowMs() - daysToKeep * 24 * 60 * 60 * 1000;
|
|
894
|
+
const pattern = `${this.prefix}events:*`;
|
|
895
|
+
let totalDeleted = 0;
|
|
896
|
+
let cursor = '0';
|
|
897
|
+
|
|
898
|
+
do {
|
|
899
|
+
const [nextCursor, keys] = await this.client.scan(
|
|
900
|
+
cursor,
|
|
901
|
+
'MATCH',
|
|
902
|
+
pattern,
|
|
903
|
+
'COUNT',
|
|
904
|
+
batchSize,
|
|
905
|
+
);
|
|
906
|
+
cursor = nextCursor;
|
|
907
|
+
|
|
908
|
+
for (const key of keys) {
|
|
909
|
+
// Check if the job still exists; if not, delete the entire event list
|
|
910
|
+
const jobIdStr = key.slice(`${this.prefix}events:`.length);
|
|
911
|
+
const jobExists = await this.client.exists(
|
|
912
|
+
`${this.prefix}job:${jobIdStr}`,
|
|
913
|
+
);
|
|
914
|
+
if (!jobExists) {
|
|
915
|
+
const len = await this.client.llen(key);
|
|
916
|
+
await this.client.del(key);
|
|
917
|
+
totalDeleted += len;
|
|
918
|
+
continue;
|
|
919
|
+
}
|
|
920
|
+
|
|
921
|
+
// Filter events by date: read all, keep recent, rewrite
|
|
922
|
+
const events = await this.client.lrange(key, 0, -1);
|
|
923
|
+
const kept: string[] = [];
|
|
924
|
+
for (const raw of events) {
|
|
925
|
+
try {
|
|
926
|
+
const e = JSON.parse(raw);
|
|
927
|
+
if (e.createdAt >= cutoffMs) {
|
|
928
|
+
kept.push(raw);
|
|
929
|
+
} else {
|
|
930
|
+
totalDeleted++;
|
|
931
|
+
}
|
|
932
|
+
} catch {
|
|
933
|
+
totalDeleted++;
|
|
934
|
+
}
|
|
935
|
+
}
|
|
936
|
+
|
|
937
|
+
if (kept.length === 0) {
|
|
938
|
+
await this.client.del(key);
|
|
939
|
+
} else if (kept.length < events.length) {
|
|
940
|
+
const pipeline = this.client.pipeline();
|
|
941
|
+
pipeline.del(key);
|
|
942
|
+
for (const raw of kept) {
|
|
943
|
+
pipeline.rpush(key, raw);
|
|
944
|
+
}
|
|
945
|
+
await pipeline.exec();
|
|
946
|
+
}
|
|
947
|
+
}
|
|
948
|
+
} while (cursor !== '0');
|
|
949
|
+
|
|
950
|
+
log(`Deleted ${totalDeleted} old job events`);
|
|
951
|
+
return totalDeleted;
|
|
640
952
|
}
|
|
641
953
|
|
|
642
954
|
async reclaimStuckJobs(maxProcessingTimeMinutes = 10): Promise<number> {
|
|
@@ -653,6 +965,230 @@ export class RedisBackend implements QueueBackend {
|
|
|
653
965
|
return Number(result);
|
|
654
966
|
}
|
|
655
967
|
|
|
968
|
+
// ── Wait / step-data support ────────────────────────────────────────
|
|
969
|
+
|
|
970
|
+
/**
|
|
971
|
+
* Transition a job from 'processing' to 'waiting' status.
|
|
972
|
+
* Persists step data so the handler can resume from where it left off.
|
|
973
|
+
*
|
|
974
|
+
* @param jobId - The job to pause.
|
|
975
|
+
* @param options - Wait configuration including optional waitUntil date, token ID, and step data.
|
|
976
|
+
*/
|
|
977
|
+
async waitJob(
|
|
978
|
+
jobId: number,
|
|
979
|
+
options: {
|
|
980
|
+
waitUntil?: Date;
|
|
981
|
+
waitTokenId?: string;
|
|
982
|
+
stepData: Record<string, any>;
|
|
983
|
+
},
|
|
984
|
+
): Promise<void> {
|
|
985
|
+
const now = this.nowMs();
|
|
986
|
+
const waitUntilMs = options.waitUntil
|
|
987
|
+
? options.waitUntil.getTime().toString()
|
|
988
|
+
: 'null';
|
|
989
|
+
const waitTokenId = options.waitTokenId ?? 'null';
|
|
990
|
+
const stepDataJson = JSON.stringify(options.stepData);
|
|
991
|
+
|
|
992
|
+
const result = await this.client.eval(
|
|
993
|
+
WAIT_JOB_SCRIPT,
|
|
994
|
+
1,
|
|
995
|
+
this.prefix,
|
|
996
|
+
jobId,
|
|
997
|
+
waitUntilMs,
|
|
998
|
+
waitTokenId,
|
|
999
|
+
stepDataJson,
|
|
1000
|
+
now,
|
|
1001
|
+
);
|
|
1002
|
+
|
|
1003
|
+
if (Number(result) === 0) {
|
|
1004
|
+
log(
|
|
1005
|
+
`Job ${jobId} could not be set to waiting (may have been reclaimed or is no longer processing)`,
|
|
1006
|
+
);
|
|
1007
|
+
return;
|
|
1008
|
+
}
|
|
1009
|
+
|
|
1010
|
+
await this.recordJobEvent(jobId, JobEventType.Waiting, {
|
|
1011
|
+
waitUntil: options.waitUntil?.toISOString() ?? null,
|
|
1012
|
+
waitTokenId: options.waitTokenId ?? null,
|
|
1013
|
+
});
|
|
1014
|
+
log(`Job ${jobId} set to waiting`);
|
|
1015
|
+
}
|
|
1016
|
+
|
|
1017
|
+
/**
|
|
1018
|
+
* Persist step data for a job. Called after each ctx.run() step completes.
|
|
1019
|
+
* Best-effort: does not throw to avoid killing the running handler.
|
|
1020
|
+
*
|
|
1021
|
+
* @param jobId - The job to update.
|
|
1022
|
+
* @param stepData - The step data to persist.
|
|
1023
|
+
*/
|
|
1024
|
+
async updateStepData(
|
|
1025
|
+
jobId: number,
|
|
1026
|
+
stepData: Record<string, any>,
|
|
1027
|
+
): Promise<void> {
|
|
1028
|
+
try {
|
|
1029
|
+
const now = this.nowMs();
|
|
1030
|
+
await this.client.hset(
|
|
1031
|
+
`${this.prefix}job:${jobId}`,
|
|
1032
|
+
'stepData',
|
|
1033
|
+
JSON.stringify(stepData),
|
|
1034
|
+
'updatedAt',
|
|
1035
|
+
now.toString(),
|
|
1036
|
+
);
|
|
1037
|
+
} catch (error) {
|
|
1038
|
+
log(`Error updating stepData for job ${jobId}: ${error}`);
|
|
1039
|
+
}
|
|
1040
|
+
}
|
|
1041
|
+
|
|
1042
|
+
/**
|
|
1043
|
+
* Create a waitpoint token.
|
|
1044
|
+
*
|
|
1045
|
+
* @param jobId - The job ID to associate with the token (null if created outside a handler).
|
|
1046
|
+
* @param options - Optional timeout string (e.g. '10m', '1h') and tags.
|
|
1047
|
+
* @returns The created waitpoint with its unique ID.
|
|
1048
|
+
*/
|
|
1049
|
+
async createWaitpoint(
|
|
1050
|
+
jobId: number | null,
|
|
1051
|
+
options?: CreateTokenOptions,
|
|
1052
|
+
): Promise<{ id: string }> {
|
|
1053
|
+
const id = `wp_${randomUUID()}`;
|
|
1054
|
+
const now = this.nowMs();
|
|
1055
|
+
let timeoutAt: number | null = null;
|
|
1056
|
+
|
|
1057
|
+
if (options?.timeout) {
|
|
1058
|
+
const ms = parseTimeoutString(options.timeout);
|
|
1059
|
+
timeoutAt = now + ms;
|
|
1060
|
+
}
|
|
1061
|
+
|
|
1062
|
+
const key = `${this.prefix}waitpoint:${id}`;
|
|
1063
|
+
const fields: string[] = [
|
|
1064
|
+
'id',
|
|
1065
|
+
id,
|
|
1066
|
+
'jobId',
|
|
1067
|
+
jobId !== null ? jobId.toString() : 'null',
|
|
1068
|
+
'status',
|
|
1069
|
+
'waiting',
|
|
1070
|
+
'output',
|
|
1071
|
+
'null',
|
|
1072
|
+
'timeoutAt',
|
|
1073
|
+
timeoutAt !== null ? timeoutAt.toString() : 'null',
|
|
1074
|
+
'createdAt',
|
|
1075
|
+
now.toString(),
|
|
1076
|
+
'completedAt',
|
|
1077
|
+
'null',
|
|
1078
|
+
'tags',
|
|
1079
|
+
options?.tags ? JSON.stringify(options.tags) : 'null',
|
|
1080
|
+
];
|
|
1081
|
+
|
|
1082
|
+
await (this.client as any).hmset(key, ...fields);
|
|
1083
|
+
|
|
1084
|
+
if (timeoutAt !== null) {
|
|
1085
|
+
await this.client.zadd(`${this.prefix}waitpoint_timeout`, timeoutAt, id);
|
|
1086
|
+
}
|
|
1087
|
+
|
|
1088
|
+
log(`Created waitpoint ${id} for job ${jobId}`);
|
|
1089
|
+
return { id };
|
|
1090
|
+
}
|
|
1091
|
+
|
|
1092
|
+
/**
|
|
1093
|
+
* Complete a waitpoint token and move the associated job back to 'pending'.
|
|
1094
|
+
*
|
|
1095
|
+
* @param tokenId - The waitpoint token ID to complete.
|
|
1096
|
+
* @param data - Optional data to pass to the waiting handler.
|
|
1097
|
+
*/
|
|
1098
|
+
async completeWaitpoint(tokenId: string, data?: any): Promise<void> {
|
|
1099
|
+
const now = this.nowMs();
|
|
1100
|
+
const outputJson = data != null ? JSON.stringify(data) : 'null';
|
|
1101
|
+
|
|
1102
|
+
const result = await this.client.eval(
|
|
1103
|
+
COMPLETE_WAITPOINT_SCRIPT,
|
|
1104
|
+
1,
|
|
1105
|
+
this.prefix,
|
|
1106
|
+
tokenId,
|
|
1107
|
+
outputJson,
|
|
1108
|
+
now,
|
|
1109
|
+
);
|
|
1110
|
+
|
|
1111
|
+
if (Number(result) === 0) {
|
|
1112
|
+
log(`Waitpoint ${tokenId} not found or already completed`);
|
|
1113
|
+
return;
|
|
1114
|
+
}
|
|
1115
|
+
|
|
1116
|
+
log(`Completed waitpoint ${tokenId}`);
|
|
1117
|
+
}
|
|
1118
|
+
|
|
1119
|
+
/**
|
|
1120
|
+
* Retrieve a waitpoint token by its ID.
|
|
1121
|
+
*
|
|
1122
|
+
* @param tokenId - The waitpoint token ID to look up.
|
|
1123
|
+
* @returns The waitpoint record, or null if not found.
|
|
1124
|
+
*/
|
|
1125
|
+
async getWaitpoint(tokenId: string): Promise<WaitpointRecord | null> {
|
|
1126
|
+
const data = await this.client.hgetall(
|
|
1127
|
+
`${this.prefix}waitpoint:${tokenId}`,
|
|
1128
|
+
);
|
|
1129
|
+
if (!data || Object.keys(data).length === 0) return null;
|
|
1130
|
+
|
|
1131
|
+
const nullish = (v: string | undefined) =>
|
|
1132
|
+
v === undefined || v === 'null' || v === '' ? null : v;
|
|
1133
|
+
const numOrNull = (v: string | undefined): number | null => {
|
|
1134
|
+
const n = nullish(v);
|
|
1135
|
+
return n === null ? null : Number(n);
|
|
1136
|
+
};
|
|
1137
|
+
const dateOrNull = (v: string | undefined): Date | null => {
|
|
1138
|
+
const n = numOrNull(v);
|
|
1139
|
+
return n === null ? null : new Date(n);
|
|
1140
|
+
};
|
|
1141
|
+
|
|
1142
|
+
let output: any = null;
|
|
1143
|
+
if (data.output && data.output !== 'null') {
|
|
1144
|
+
try {
|
|
1145
|
+
output = JSON.parse(data.output);
|
|
1146
|
+
} catch {
|
|
1147
|
+
output = data.output;
|
|
1148
|
+
}
|
|
1149
|
+
}
|
|
1150
|
+
|
|
1151
|
+
let tags: string[] | null = null;
|
|
1152
|
+
if (data.tags && data.tags !== 'null') {
|
|
1153
|
+
try {
|
|
1154
|
+
tags = JSON.parse(data.tags);
|
|
1155
|
+
} catch {
|
|
1156
|
+
/* ignore */
|
|
1157
|
+
}
|
|
1158
|
+
}
|
|
1159
|
+
|
|
1160
|
+
return {
|
|
1161
|
+
id: data.id,
|
|
1162
|
+
jobId: numOrNull(data.jobId),
|
|
1163
|
+
status: data.status as WaitpointRecord['status'],
|
|
1164
|
+
output,
|
|
1165
|
+
timeoutAt: dateOrNull(data.timeoutAt),
|
|
1166
|
+
createdAt: new Date(Number(data.createdAt)),
|
|
1167
|
+
completedAt: dateOrNull(data.completedAt),
|
|
1168
|
+
tags,
|
|
1169
|
+
};
|
|
1170
|
+
}
|
|
1171
|
+
|
|
1172
|
+
/**
|
|
1173
|
+
* Expire timed-out waitpoint tokens and move their associated jobs back to 'pending'.
|
|
1174
|
+
*
|
|
1175
|
+
* @returns The number of tokens that were expired.
|
|
1176
|
+
*/
|
|
1177
|
+
async expireTimedOutWaitpoints(): Promise<number> {
|
|
1178
|
+
const now = this.nowMs();
|
|
1179
|
+
const result = (await this.client.eval(
|
|
1180
|
+
EXPIRE_TIMED_OUT_WAITPOINTS_SCRIPT,
|
|
1181
|
+
1,
|
|
1182
|
+
this.prefix,
|
|
1183
|
+
now,
|
|
1184
|
+
)) as number;
|
|
1185
|
+
const count = Number(result);
|
|
1186
|
+
if (count > 0) {
|
|
1187
|
+
log(`Expired ${count} timed-out waitpoints`);
|
|
1188
|
+
}
|
|
1189
|
+
return count;
|
|
1190
|
+
}
|
|
1191
|
+
|
|
656
1192
|
// ── Internal helpers ──────────────────────────────────────────────────
|
|
657
1193
|
|
|
658
1194
|
async setPendingReasonForUnpickedJobs(
|
|
@@ -795,6 +1331,428 @@ export class RedisBackend implements QueueBackend {
|
|
|
795
1331
|
});
|
|
796
1332
|
}
|
|
797
1333
|
|
|
1334
|
+
// ── Cron schedules ──────────────────────────────────────────────────
|
|
1335
|
+
|
|
1336
|
+
/** Create a cron schedule and return its ID. */
|
|
1337
|
+
async addCronSchedule(input: CronScheduleInput): Promise<number> {
|
|
1338
|
+
const existingId = await this.client.get(
|
|
1339
|
+
`${this.prefix}cron_name:${input.scheduleName}`,
|
|
1340
|
+
);
|
|
1341
|
+
if (existingId !== null) {
|
|
1342
|
+
throw new Error(
|
|
1343
|
+
`Cron schedule with name "${input.scheduleName}" already exists`,
|
|
1344
|
+
);
|
|
1345
|
+
}
|
|
1346
|
+
|
|
1347
|
+
const id = await this.client.incr(`${this.prefix}cron_id_seq`);
|
|
1348
|
+
const now = this.nowMs();
|
|
1349
|
+
const key = `${this.prefix}cron:${id}`;
|
|
1350
|
+
|
|
1351
|
+
const fields: string[] = [
|
|
1352
|
+
'id',
|
|
1353
|
+
id.toString(),
|
|
1354
|
+
'scheduleName',
|
|
1355
|
+
input.scheduleName,
|
|
1356
|
+
'cronExpression',
|
|
1357
|
+
input.cronExpression,
|
|
1358
|
+
'jobType',
|
|
1359
|
+
input.jobType,
|
|
1360
|
+
'payload',
|
|
1361
|
+
JSON.stringify(input.payload),
|
|
1362
|
+
'maxAttempts',
|
|
1363
|
+
input.maxAttempts.toString(),
|
|
1364
|
+
'priority',
|
|
1365
|
+
input.priority.toString(),
|
|
1366
|
+
'timeoutMs',
|
|
1367
|
+
input.timeoutMs !== null ? input.timeoutMs.toString() : 'null',
|
|
1368
|
+
'forceKillOnTimeout',
|
|
1369
|
+
input.forceKillOnTimeout ? 'true' : 'false',
|
|
1370
|
+
'tags',
|
|
1371
|
+
input.tags ? JSON.stringify(input.tags) : 'null',
|
|
1372
|
+
'timezone',
|
|
1373
|
+
input.timezone,
|
|
1374
|
+
'allowOverlap',
|
|
1375
|
+
input.allowOverlap ? 'true' : 'false',
|
|
1376
|
+
'status',
|
|
1377
|
+
'active',
|
|
1378
|
+
'lastEnqueuedAt',
|
|
1379
|
+
'null',
|
|
1380
|
+
'lastJobId',
|
|
1381
|
+
'null',
|
|
1382
|
+
'nextRunAt',
|
|
1383
|
+
input.nextRunAt ? input.nextRunAt.getTime().toString() : 'null',
|
|
1384
|
+
'createdAt',
|
|
1385
|
+
now.toString(),
|
|
1386
|
+
'updatedAt',
|
|
1387
|
+
now.toString(),
|
|
1388
|
+
'retryDelay',
|
|
1389
|
+
input.retryDelay !== null && input.retryDelay !== undefined
|
|
1390
|
+
? input.retryDelay.toString()
|
|
1391
|
+
: 'null',
|
|
1392
|
+
'retryBackoff',
|
|
1393
|
+
input.retryBackoff !== null && input.retryBackoff !== undefined
|
|
1394
|
+
? input.retryBackoff.toString()
|
|
1395
|
+
: 'null',
|
|
1396
|
+
'retryDelayMax',
|
|
1397
|
+
input.retryDelayMax !== null && input.retryDelayMax !== undefined
|
|
1398
|
+
? input.retryDelayMax.toString()
|
|
1399
|
+
: 'null',
|
|
1400
|
+
];
|
|
1401
|
+
|
|
1402
|
+
await (this.client as any).hmset(key, ...fields);
|
|
1403
|
+
await this.client.set(
|
|
1404
|
+
`${this.prefix}cron_name:${input.scheduleName}`,
|
|
1405
|
+
id.toString(),
|
|
1406
|
+
);
|
|
1407
|
+
await this.client.sadd(`${this.prefix}crons`, id.toString());
|
|
1408
|
+
await this.client.sadd(`${this.prefix}cron_status:active`, id.toString());
|
|
1409
|
+
|
|
1410
|
+
if (input.nextRunAt) {
|
|
1411
|
+
await this.client.zadd(
|
|
1412
|
+
`${this.prefix}cron_due`,
|
|
1413
|
+
input.nextRunAt.getTime(),
|
|
1414
|
+
id.toString(),
|
|
1415
|
+
);
|
|
1416
|
+
}
|
|
1417
|
+
|
|
1418
|
+
log(`Added cron schedule ${id}: "${input.scheduleName}"`);
|
|
1419
|
+
return id;
|
|
1420
|
+
}
|
|
1421
|
+
|
|
1422
|
+
/** Get a cron schedule by ID. */
|
|
1423
|
+
async getCronSchedule(id: number): Promise<CronScheduleRecord | null> {
|
|
1424
|
+
const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
|
|
1425
|
+
if (!data || Object.keys(data).length === 0) return null;
|
|
1426
|
+
return this.deserializeCronSchedule(data);
|
|
1427
|
+
}
|
|
1428
|
+
|
|
1429
|
+
/** Get a cron schedule by its unique name. */
|
|
1430
|
+
async getCronScheduleByName(
|
|
1431
|
+
name: string,
|
|
1432
|
+
): Promise<CronScheduleRecord | null> {
|
|
1433
|
+
const id = await this.client.get(`${this.prefix}cron_name:${name}`);
|
|
1434
|
+
if (id === null) return null;
|
|
1435
|
+
return this.getCronSchedule(Number(id));
|
|
1436
|
+
}
|
|
1437
|
+
|
|
1438
|
+
/** List cron schedules, optionally filtered by status. */
|
|
1439
|
+
async listCronSchedules(
|
|
1440
|
+
status?: CronScheduleStatus,
|
|
1441
|
+
): Promise<CronScheduleRecord[]> {
|
|
1442
|
+
let ids: string[];
|
|
1443
|
+
if (status) {
|
|
1444
|
+
ids = await this.client.smembers(`${this.prefix}cron_status:${status}`);
|
|
1445
|
+
} else {
|
|
1446
|
+
ids = await this.client.smembers(`${this.prefix}crons`);
|
|
1447
|
+
}
|
|
1448
|
+
if (ids.length === 0) return [];
|
|
1449
|
+
|
|
1450
|
+
const pipeline = this.client.pipeline();
|
|
1451
|
+
for (const id of ids) {
|
|
1452
|
+
pipeline.hgetall(`${this.prefix}cron:${id}`);
|
|
1453
|
+
}
|
|
1454
|
+
const results = await pipeline.exec();
|
|
1455
|
+
const schedules: CronScheduleRecord[] = [];
|
|
1456
|
+
if (results) {
|
|
1457
|
+
for (const [err, data] of results) {
|
|
1458
|
+
if (
|
|
1459
|
+
!err &&
|
|
1460
|
+
data &&
|
|
1461
|
+
typeof data === 'object' &&
|
|
1462
|
+
Object.keys(data as object).length > 0
|
|
1463
|
+
) {
|
|
1464
|
+
schedules.push(
|
|
1465
|
+
this.deserializeCronSchedule(data as Record<string, string>),
|
|
1466
|
+
);
|
|
1467
|
+
}
|
|
1468
|
+
}
|
|
1469
|
+
}
|
|
1470
|
+
schedules.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
|
|
1471
|
+
return schedules;
|
|
1472
|
+
}
|
|
1473
|
+
|
|
1474
|
+
/** Delete a cron schedule by ID. */
|
|
1475
|
+
async removeCronSchedule(id: number): Promise<void> {
|
|
1476
|
+
const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
|
|
1477
|
+
if (!data || Object.keys(data).length === 0) return;
|
|
1478
|
+
|
|
1479
|
+
const name = data.scheduleName;
|
|
1480
|
+
const status = data.status;
|
|
1481
|
+
|
|
1482
|
+
await this.client.del(`${this.prefix}cron:${id}`);
|
|
1483
|
+
await this.client.del(`${this.prefix}cron_name:${name}`);
|
|
1484
|
+
await this.client.srem(`${this.prefix}crons`, id.toString());
|
|
1485
|
+
await this.client.srem(
|
|
1486
|
+
`${this.prefix}cron_status:${status}`,
|
|
1487
|
+
id.toString(),
|
|
1488
|
+
);
|
|
1489
|
+
await this.client.zrem(`${this.prefix}cron_due`, id.toString());
|
|
1490
|
+
log(`Removed cron schedule ${id}`);
|
|
1491
|
+
}
|
|
1492
|
+
|
|
1493
|
+
/** Pause a cron schedule. */
|
|
1494
|
+
async pauseCronSchedule(id: number): Promise<void> {
|
|
1495
|
+
const now = this.nowMs();
|
|
1496
|
+
await this.client.hset(
|
|
1497
|
+
`${this.prefix}cron:${id}`,
|
|
1498
|
+
'status',
|
|
1499
|
+
'paused',
|
|
1500
|
+
'updatedAt',
|
|
1501
|
+
now.toString(),
|
|
1502
|
+
);
|
|
1503
|
+
await this.client.srem(`${this.prefix}cron_status:active`, id.toString());
|
|
1504
|
+
await this.client.sadd(`${this.prefix}cron_status:paused`, id.toString());
|
|
1505
|
+
await this.client.zrem(`${this.prefix}cron_due`, id.toString());
|
|
1506
|
+
log(`Paused cron schedule ${id}`);
|
|
1507
|
+
}
|
|
1508
|
+
|
|
1509
|
+
/** Resume a paused cron schedule. */
|
|
1510
|
+
async resumeCronSchedule(id: number): Promise<void> {
|
|
1511
|
+
const now = this.nowMs();
|
|
1512
|
+
await this.client.hset(
|
|
1513
|
+
`${this.prefix}cron:${id}`,
|
|
1514
|
+
'status',
|
|
1515
|
+
'active',
|
|
1516
|
+
'updatedAt',
|
|
1517
|
+
now.toString(),
|
|
1518
|
+
);
|
|
1519
|
+
await this.client.srem(`${this.prefix}cron_status:paused`, id.toString());
|
|
1520
|
+
await this.client.sadd(`${this.prefix}cron_status:active`, id.toString());
|
|
1521
|
+
|
|
1522
|
+
const nextRunAt = await this.client.hget(
|
|
1523
|
+
`${this.prefix}cron:${id}`,
|
|
1524
|
+
'nextRunAt',
|
|
1525
|
+
);
|
|
1526
|
+
if (nextRunAt && nextRunAt !== 'null') {
|
|
1527
|
+
await this.client.zadd(
|
|
1528
|
+
`${this.prefix}cron_due`,
|
|
1529
|
+
Number(nextRunAt),
|
|
1530
|
+
id.toString(),
|
|
1531
|
+
);
|
|
1532
|
+
}
|
|
1533
|
+
log(`Resumed cron schedule ${id}`);
|
|
1534
|
+
}
|
|
1535
|
+
|
|
1536
|
+
/** Edit a cron schedule. */
|
|
1537
|
+
async editCronSchedule(
|
|
1538
|
+
id: number,
|
|
1539
|
+
updates: EditCronScheduleOptions,
|
|
1540
|
+
nextRunAt?: Date | null,
|
|
1541
|
+
): Promise<void> {
|
|
1542
|
+
const now = this.nowMs();
|
|
1543
|
+
const fields: string[] = [];
|
|
1544
|
+
|
|
1545
|
+
if (updates.cronExpression !== undefined) {
|
|
1546
|
+
fields.push('cronExpression', updates.cronExpression);
|
|
1547
|
+
}
|
|
1548
|
+
if (updates.payload !== undefined) {
|
|
1549
|
+
fields.push('payload', JSON.stringify(updates.payload));
|
|
1550
|
+
}
|
|
1551
|
+
if (updates.maxAttempts !== undefined) {
|
|
1552
|
+
fields.push('maxAttempts', updates.maxAttempts.toString());
|
|
1553
|
+
}
|
|
1554
|
+
if (updates.priority !== undefined) {
|
|
1555
|
+
fields.push('priority', updates.priority.toString());
|
|
1556
|
+
}
|
|
1557
|
+
if (updates.timeoutMs !== undefined) {
|
|
1558
|
+
fields.push(
|
|
1559
|
+
'timeoutMs',
|
|
1560
|
+
updates.timeoutMs !== null ? updates.timeoutMs.toString() : 'null',
|
|
1561
|
+
);
|
|
1562
|
+
}
|
|
1563
|
+
if (updates.forceKillOnTimeout !== undefined) {
|
|
1564
|
+
fields.push(
|
|
1565
|
+
'forceKillOnTimeout',
|
|
1566
|
+
updates.forceKillOnTimeout ? 'true' : 'false',
|
|
1567
|
+
);
|
|
1568
|
+
}
|
|
1569
|
+
if (updates.tags !== undefined) {
|
|
1570
|
+
fields.push(
|
|
1571
|
+
'tags',
|
|
1572
|
+
updates.tags !== null ? JSON.stringify(updates.tags) : 'null',
|
|
1573
|
+
);
|
|
1574
|
+
}
|
|
1575
|
+
if (updates.timezone !== undefined) {
|
|
1576
|
+
fields.push('timezone', updates.timezone);
|
|
1577
|
+
}
|
|
1578
|
+
if (updates.allowOverlap !== undefined) {
|
|
1579
|
+
fields.push('allowOverlap', updates.allowOverlap ? 'true' : 'false');
|
|
1580
|
+
}
|
|
1581
|
+
if (updates.retryDelay !== undefined) {
|
|
1582
|
+
fields.push(
|
|
1583
|
+
'retryDelay',
|
|
1584
|
+
updates.retryDelay !== null ? updates.retryDelay.toString() : 'null',
|
|
1585
|
+
);
|
|
1586
|
+
}
|
|
1587
|
+
if (updates.retryBackoff !== undefined) {
|
|
1588
|
+
fields.push(
|
|
1589
|
+
'retryBackoff',
|
|
1590
|
+
updates.retryBackoff !== null
|
|
1591
|
+
? updates.retryBackoff.toString()
|
|
1592
|
+
: 'null',
|
|
1593
|
+
);
|
|
1594
|
+
}
|
|
1595
|
+
if (updates.retryDelayMax !== undefined) {
|
|
1596
|
+
fields.push(
|
|
1597
|
+
'retryDelayMax',
|
|
1598
|
+
updates.retryDelayMax !== null
|
|
1599
|
+
? updates.retryDelayMax.toString()
|
|
1600
|
+
: 'null',
|
|
1601
|
+
);
|
|
1602
|
+
}
|
|
1603
|
+
if (nextRunAt !== undefined) {
|
|
1604
|
+
const val = nextRunAt !== null ? nextRunAt.getTime().toString() : 'null';
|
|
1605
|
+
fields.push('nextRunAt', val);
|
|
1606
|
+
if (nextRunAt !== null) {
|
|
1607
|
+
await this.client.zadd(
|
|
1608
|
+
`${this.prefix}cron_due`,
|
|
1609
|
+
nextRunAt.getTime(),
|
|
1610
|
+
id.toString(),
|
|
1611
|
+
);
|
|
1612
|
+
} else {
|
|
1613
|
+
await this.client.zrem(`${this.prefix}cron_due`, id.toString());
|
|
1614
|
+
}
|
|
1615
|
+
}
|
|
1616
|
+
|
|
1617
|
+
if (fields.length === 0) {
|
|
1618
|
+
log(`No fields to update for cron schedule ${id}`);
|
|
1619
|
+
return;
|
|
1620
|
+
}
|
|
1621
|
+
|
|
1622
|
+
fields.push('updatedAt', now.toString());
|
|
1623
|
+
await (this.client as any).hmset(`${this.prefix}cron:${id}`, ...fields);
|
|
1624
|
+
log(`Edited cron schedule ${id}`);
|
|
1625
|
+
}
|
|
1626
|
+
|
|
1627
|
+
/**
|
|
1628
|
+
* Fetch all active cron schedules whose nextRunAt <= now.
|
|
1629
|
+
* Uses a sorted set (cron_due) for efficient range query.
|
|
1630
|
+
*/
|
|
1631
|
+
async getDueCronSchedules(): Promise<CronScheduleRecord[]> {
|
|
1632
|
+
const now = this.nowMs();
|
|
1633
|
+
const ids = await this.client.zrangebyscore(
|
|
1634
|
+
`${this.prefix}cron_due`,
|
|
1635
|
+
0,
|
|
1636
|
+
now,
|
|
1637
|
+
);
|
|
1638
|
+
if (ids.length === 0) {
|
|
1639
|
+
log('Found 0 due cron schedules');
|
|
1640
|
+
return [];
|
|
1641
|
+
}
|
|
1642
|
+
|
|
1643
|
+
const schedules: CronScheduleRecord[] = [];
|
|
1644
|
+
for (const id of ids) {
|
|
1645
|
+
const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
|
|
1646
|
+
if (data && Object.keys(data).length > 0 && data.status === 'active') {
|
|
1647
|
+
schedules.push(this.deserializeCronSchedule(data));
|
|
1648
|
+
}
|
|
1649
|
+
}
|
|
1650
|
+
log(`Found ${schedules.length} due cron schedules`);
|
|
1651
|
+
return schedules;
|
|
1652
|
+
}
|
|
1653
|
+
|
|
1654
|
+
/**
|
|
1655
|
+
* Update a cron schedule after a job has been enqueued.
|
|
1656
|
+
* Sets lastEnqueuedAt, lastJobId, and advances nextRunAt.
|
|
1657
|
+
*/
|
|
1658
|
+
async updateCronScheduleAfterEnqueue(
|
|
1659
|
+
id: number,
|
|
1660
|
+
lastEnqueuedAt: Date,
|
|
1661
|
+
lastJobId: number,
|
|
1662
|
+
nextRunAt: Date | null,
|
|
1663
|
+
): Promise<void> {
|
|
1664
|
+
const fields: string[] = [
|
|
1665
|
+
'lastEnqueuedAt',
|
|
1666
|
+
lastEnqueuedAt.getTime().toString(),
|
|
1667
|
+
'lastJobId',
|
|
1668
|
+
lastJobId.toString(),
|
|
1669
|
+
'nextRunAt',
|
|
1670
|
+
nextRunAt ? nextRunAt.getTime().toString() : 'null',
|
|
1671
|
+
'updatedAt',
|
|
1672
|
+
this.nowMs().toString(),
|
|
1673
|
+
];
|
|
1674
|
+
|
|
1675
|
+
await (this.client as any).hmset(`${this.prefix}cron:${id}`, ...fields);
|
|
1676
|
+
|
|
1677
|
+
if (nextRunAt) {
|
|
1678
|
+
await this.client.zadd(
|
|
1679
|
+
`${this.prefix}cron_due`,
|
|
1680
|
+
nextRunAt.getTime(),
|
|
1681
|
+
id.toString(),
|
|
1682
|
+
);
|
|
1683
|
+
} else {
|
|
1684
|
+
await this.client.zrem(`${this.prefix}cron_due`, id.toString());
|
|
1685
|
+
}
|
|
1686
|
+
|
|
1687
|
+
log(
|
|
1688
|
+
`Updated cron schedule ${id}: lastJobId=${lastJobId}, nextRunAt=${nextRunAt?.toISOString() ?? 'null'}`,
|
|
1689
|
+
);
|
|
1690
|
+
}
|
|
1691
|
+
|
|
1692
|
+
  /**
   * Deserialize a Redis hash into a CronScheduleRecord.
   *
   * All hash values are strings; the writers in this class store missing
   * nullable values as the literal string 'null', so that sentinel (and
   * '' / undefined) maps back to `null` here.
   */
  private deserializeCronSchedule(
    h: Record<string, string>,
  ): CronScheduleRecord {
    // Map the 'null' sentinel, empty string, and absent field to null.
    const nullish = (v: string | undefined) =>
      v === undefined || v === 'null' || v === '' ? null : v;
    // Nullable numeric field: epoch-millis / counters stored as strings.
    const numOrNull = (v: string | undefined): number | null => {
      const n = nullish(v);
      return n === null ? null : Number(n);
    };
    // Nullable Date field: stored as epoch-millis strings.
    const dateOrNull = (v: string | undefined): Date | null => {
      const n = numOrNull(v);
      return n === null ? null : new Date(n);
    };

    // Payload is stored as JSON; fall back to the raw string if it is
    // not parseable (e.g. legacy or hand-written data).
    let payload: any;
    try {
      payload = JSON.parse(h.payload);
    } catch {
      payload = h.payload;
    }

    // Tags are a JSON-encoded string array or the 'null' sentinel;
    // unparseable values are silently dropped (tags stays undefined).
    let tags: string[] | undefined;
    try {
      const raw = h.tags;
      if (raw && raw !== 'null') {
        tags = JSON.parse(raw);
      }
    } catch {
      /* ignore */
    }

    return {
      id: Number(h.id),
      scheduleName: h.scheduleName,
      cronExpression: h.cronExpression,
      jobType: h.jobType,
      payload,
      maxAttempts: Number(h.maxAttempts),
      priority: Number(h.priority),
      timeoutMs: numOrNull(h.timeoutMs),
      forceKillOnTimeout: h.forceKillOnTimeout === 'true',
      tags,
      timezone: h.timezone,
      allowOverlap: h.allowOverlap === 'true',
      status: h.status as CronScheduleStatus,
      lastEnqueuedAt: dateOrNull(h.lastEnqueuedAt),
      lastJobId: numOrNull(h.lastJobId),
      nextRunAt: dateOrNull(h.nextRunAt),
      createdAt: new Date(Number(h.createdAt)),
      updatedAt: new Date(Number(h.updatedAt)),
      retryDelay: numOrNull(h.retryDelay),
      // retryBackoff is tri-state: 'true' / 'false' / anything else
      // (including the 'null' sentinel) maps to null.
      retryBackoff:
        h.retryBackoff === 'true'
          ? true
          : h.retryBackoff === 'false'
            ? false
            : null,
      retryDelayMax: numOrNull(h.retryDelayMax),
    };
  }
|
|
1753
|
+
|
|
1754
|
+
// ── Private helpers (filters) ─────────────────────────────────────────
|
|
1755
|
+
|
|
798
1756
|
private async applyFilters(
|
|
799
1757
|
ids: string[],
|
|
800
1758
|
filters: JobFilters,
|