@nicnocquee/dataqueue 1.24.0 → 1.26.0-beta.20260223195940

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/README.md +44 -0
  2. package/ai/build-docs-content.ts +96 -0
  3. package/ai/build-llms-full.ts +42 -0
  4. package/ai/docs-content.json +278 -0
  5. package/ai/rules/advanced.md +132 -0
  6. package/ai/rules/basic.md +159 -0
  7. package/ai/rules/react-dashboard.md +83 -0
  8. package/ai/skills/dataqueue-advanced/SKILL.md +320 -0
  9. package/ai/skills/dataqueue-core/SKILL.md +234 -0
  10. package/ai/skills/dataqueue-react/SKILL.md +189 -0
  11. package/dist/cli.cjs +1149 -14
  12. package/dist/cli.cjs.map +1 -1
  13. package/dist/cli.d.cts +66 -1
  14. package/dist/cli.d.ts +66 -1
  15. package/dist/cli.js +1146 -13
  16. package/dist/cli.js.map +1 -1
  17. package/dist/index.cjs +4630 -928
  18. package/dist/index.cjs.map +1 -1
  19. package/dist/index.d.cts +1033 -15
  20. package/dist/index.d.ts +1033 -15
  21. package/dist/index.js +4626 -929
  22. package/dist/index.js.map +1 -1
  23. package/dist/mcp-server.cjs +186 -0
  24. package/dist/mcp-server.cjs.map +1 -0
  25. package/dist/mcp-server.d.cts +32 -0
  26. package/dist/mcp-server.d.ts +32 -0
  27. package/dist/mcp-server.js +175 -0
  28. package/dist/mcp-server.js.map +1 -0
  29. package/migrations/1751131910825_add_timeout_seconds_to_job_queue.sql +2 -2
  30. package/migrations/1751186053000_add_job_events_table.sql +12 -8
  31. package/migrations/1751984773000_add_tags_to_job_queue.sql +1 -1
  32. package/migrations/1765809419000_add_force_kill_on_timeout_to_job_queue.sql +1 -1
  33. package/migrations/1771100000000_add_idempotency_key_to_job_queue.sql +7 -0
  34. package/migrations/1781200000000_add_wait_support.sql +12 -0
  35. package/migrations/1781200000001_create_waitpoints_table.sql +18 -0
  36. package/migrations/1781200000002_add_performance_indexes.sql +34 -0
  37. package/migrations/1781200000003_add_progress_to_job_queue.sql +7 -0
  38. package/migrations/1781200000004_create_cron_schedules_table.sql +33 -0
  39. package/migrations/1781200000005_add_retry_config_to_job_queue.sql +17 -0
  40. package/package.json +40 -23
  41. package/src/backend.ts +328 -0
  42. package/src/backends/postgres.ts +2040 -0
  43. package/src/backends/redis-scripts.ts +865 -0
  44. package/src/backends/redis.test.ts +1906 -0
  45. package/src/backends/redis.ts +1792 -0
  46. package/src/cli.test.ts +82 -6
  47. package/src/cli.ts +73 -10
  48. package/src/cron.test.ts +126 -0
  49. package/src/cron.ts +40 -0
  50. package/src/db-util.ts +4 -2
  51. package/src/index.test.ts +688 -1
  52. package/src/index.ts +277 -39
  53. package/src/init-command.test.ts +449 -0
  54. package/src/init-command.ts +709 -0
  55. package/src/install-mcp-command.test.ts +216 -0
  56. package/src/install-mcp-command.ts +185 -0
  57. package/src/install-rules-command.test.ts +218 -0
  58. package/src/install-rules-command.ts +233 -0
  59. package/src/install-skills-command.test.ts +176 -0
  60. package/src/install-skills-command.ts +124 -0
  61. package/src/mcp-server.test.ts +162 -0
  62. package/src/mcp-server.ts +231 -0
  63. package/src/processor.test.ts +559 -18
  64. package/src/processor.ts +456 -49
  65. package/src/queue.test.ts +682 -6
  66. package/src/queue.ts +135 -944
  67. package/src/supervisor.test.ts +340 -0
  68. package/src/supervisor.ts +162 -0
  69. package/src/test-util.ts +32 -0
  70. package/src/types.ts +726 -17
  71. package/src/wait.test.ts +698 -0
  72. package/LICENSE +0 -21
@@ -0,0 +1,1792 @@
1
+ import { createRequire } from 'module';
2
+ import type { Redis as RedisType } from 'ioredis';
3
+ import {
4
+ JobOptions,
5
+ JobRecord,
6
+ FailureReason,
7
+ JobEvent,
8
+ JobEventType,
9
+ TagQueryMode,
10
+ JobType,
11
+ RedisJobQueueConfig,
12
+ CronScheduleRecord,
13
+ CronScheduleStatus,
14
+ EditCronScheduleOptions,
15
+ WaitpointRecord,
16
+ CreateTokenOptions,
17
+ AddJobOptions,
18
+ } from '../types.js';
19
+ import {
20
+ QueueBackend,
21
+ JobFilters,
22
+ JobUpdates,
23
+ CronScheduleInput,
24
+ } from '../backend.js';
25
+ import { log } from '../log-context.js';
26
+
27
+ const MAX_TIMEOUT_MS = 365 * 24 * 60 * 60 * 1000;
28
+
29
+ /** Parse a timeout string like '10m', '1h', '24h', '7d' into milliseconds. */
30
+ function parseTimeoutString(timeout: string): number {
31
+ const match = timeout.match(/^(\d+)(s|m|h|d)$/);
32
+ if (!match) {
33
+ throw new Error(
34
+ `Invalid timeout format: "${timeout}". Expected format like "10m", "1h", "24h", "7d".`,
35
+ );
36
+ }
37
+ const value = parseInt(match[1], 10);
38
+ const unit = match[2];
39
+ let ms: number;
40
+ switch (unit) {
41
+ case 's':
42
+ ms = value * 1000;
43
+ break;
44
+ case 'm':
45
+ ms = value * 60 * 1000;
46
+ break;
47
+ case 'h':
48
+ ms = value * 60 * 60 * 1000;
49
+ break;
50
+ case 'd':
51
+ ms = value * 24 * 60 * 60 * 1000;
52
+ break;
53
+ default:
54
+ throw new Error(`Unknown timeout unit: "${unit}"`);
55
+ }
56
+ if (!Number.isFinite(ms) || ms > MAX_TIMEOUT_MS) {
57
+ throw new Error(
58
+ `Timeout value "${timeout}" is too large. Maximum allowed is 365 days.`,
59
+ );
60
+ }
61
+ return ms;
62
+ }
63
+ import {
64
+ ADD_JOB_SCRIPT,
65
+ ADD_JOBS_SCRIPT,
66
+ GET_NEXT_BATCH_SCRIPT,
67
+ COMPLETE_JOB_SCRIPT,
68
+ FAIL_JOB_SCRIPT,
69
+ RETRY_JOB_SCRIPT,
70
+ CANCEL_JOB_SCRIPT,
71
+ PROLONG_JOB_SCRIPT,
72
+ RECLAIM_STUCK_JOBS_SCRIPT,
73
+ CLEANUP_OLD_JOBS_BATCH_SCRIPT,
74
+ WAIT_JOB_SCRIPT,
75
+ COMPLETE_WAITPOINT_SCRIPT,
76
+ EXPIRE_TIMED_OUT_WAITPOINTS_SCRIPT,
77
+ } from './redis-scripts.js';
78
+ import { randomUUID } from 'crypto';
79
+
80
+ /** Helper: convert a Redis hash flat array [k,v,k,v,...] to a JS object */
81
+ function hashToObject(arr: string[]): Record<string, string> {
82
+ const obj: Record<string, string> = {};
83
+ for (let i = 0; i < arr.length; i += 2) {
84
+ obj[arr[i]] = arr[i + 1];
85
+ }
86
+ return obj;
87
+ }
88
+
89
/**
 * Deserialise a Redis hash object into a JobRecord.
 *
 * All hash values arrive as strings; the strings 'null' and '' (and a
 * missing field) are treated as SQL-style NULL. Timestamps are stored as
 * epoch milliseconds and rehydrated into Date objects.
 */
function deserializeJob<PayloadMap, T extends JobType<PayloadMap>>(
  h: Record<string, string>,
): JobRecord<PayloadMap, T> {
  // Map the storage sentinels ('null', '', missing) to a real null.
  const nullish = (v: string | undefined) =>
    v === undefined || v === 'null' || v === '' ? null : v;

  const numOrNull = (v: string | undefined): number | null => {
    const n = nullish(v);
    return n === null ? null : Number(n);
  };
  const dateOrNull = (v: string | undefined): Date | null => {
    const n = numOrNull(v);
    return n === null ? null : new Date(n);
  };

  // errorHistory is stored as a JSON array; malformed data degrades to [].
  let errorHistory: { message: string; timestamp: string }[] = [];
  try {
    const raw = h.errorHistory;
    if (raw && raw !== '[]') {
      errorHistory = JSON.parse(raw);
    }
  } catch {
    /* ignore */
  }

  // tags is a JSON array or the 'null' sentinel; malformed data → undefined.
  let tags: string[] | undefined;
  try {
    const raw = h.tags;
    if (raw && raw !== 'null') {
      tags = JSON.parse(raw);
    }
  } catch {
    /* ignore */
  }

  // Payloads are normally JSON; fall back to the raw string if parsing fails.
  let payload: any;
  try {
    payload = JSON.parse(h.payload);
  } catch {
    payload = h.payload;
  }

  return {
    id: Number(h.id),
    jobType: h.jobType as T,
    payload,
    status: h.status as any,
    createdAt: new Date(Number(h.createdAt)),
    updatedAt: new Date(Number(h.updatedAt)),
    lockedAt: dateOrNull(h.lockedAt),
    lockedBy: nullish(h.lockedBy) as string | null,
    attempts: Number(h.attempts),
    maxAttempts: Number(h.maxAttempts),
    nextAttemptAt: dateOrNull(h.nextAttemptAt),
    priority: Number(h.priority),
    runAt: new Date(Number(h.runAt)),
    pendingReason: nullish(h.pendingReason) as string | null | undefined,
    errorHistory,
    timeoutMs: numOrNull(h.timeoutMs),
    // Tri-state boolean: 'true'/'1' → true, 'false'/'0' → false, else null.
    forceKillOnTimeout:
      h.forceKillOnTimeout === 'true' || h.forceKillOnTimeout === '1'
        ? true
        : h.forceKillOnTimeout === 'false' || h.forceKillOnTimeout === '0'
          ? false
          : null,
    failureReason: (nullish(h.failureReason) as FailureReason | null) ?? null,
    completedAt: dateOrNull(h.completedAt),
    startedAt: dateOrNull(h.startedAt),
    lastRetriedAt: dateOrNull(h.lastRetriedAt),
    lastFailedAt: dateOrNull(h.lastFailedAt),
    lastCancelledAt: dateOrNull(h.lastCancelledAt),
    tags,
    idempotencyKey: nullish(h.idempotencyKey) as string | null | undefined,
    progress: numOrNull(h.progress),
    waitUntil: dateOrNull(h.waitUntil),
    waitTokenId: nullish(h.waitTokenId) as string | null | undefined,
    stepData: parseStepData(h.stepData),
    retryDelay: numOrNull(h.retryDelay),
    // NOTE(review): unlike forceKillOnTimeout, this does not accept '1'/'0'
    // — presumably only 'true'/'false' are ever written; confirm in scripts.
    retryBackoff:
      h.retryBackoff === 'true'
        ? true
        : h.retryBackoff === 'false'
          ? false
          : null,
    retryDelayMax: numOrNull(h.retryDelayMax),
  };
}
177
+
178
+ /** Parse step data from a Redis hash field. */
179
+ function parseStepData(
180
+ raw: string | undefined,
181
+ ): Record<string, any> | undefined {
182
+ if (!raw || raw === 'null') return undefined;
183
+ try {
184
+ return JSON.parse(raw);
185
+ } catch {
186
+ return undefined;
187
+ }
188
+ }
189
+
190
/** Redis implementation of the QueueBackend interface. */
export class RedisBackend implements QueueBackend {
  // Underlying ioredis connection (created here or supplied by the caller).
  private client: RedisType;
  // Key prefix prepended to every Redis key this backend touches.
  private prefix: string;

  /**
   * Create a RedisBackend.
   *
   * @param configOrClient - Either `redisConfig` from the config file (the
   *   library creates a new ioredis client) or an existing ioredis client
   *   instance (bring your own).
   * @param keyPrefix - Key prefix, only used when `configOrClient` is an
   *   external client. Ignored when `redisConfig` is passed (uses
   *   `redisConfig.keyPrefix` instead). Default: `'dq:'`.
   * @throws When a config object is given but the optional "ioredis"
   *   dependency is not installed.
   */
  constructor(
    configOrClient: RedisJobQueueConfig['redisConfig'] | RedisType,
    keyPrefix?: string,
  ) {
    // Duck-type detection of an existing client: anything with an eval()
    // method is treated as a ready-made ioredis instance.
    if (configOrClient && typeof (configOrClient as any).eval === 'function') {
      this.client = configOrClient as RedisType;
      this.prefix = keyPrefix ?? 'dq:';
      return;
    }

    const redisConfig = configOrClient as NonNullable<
      RedisJobQueueConfig['redisConfig']
    >;

    // ioredis is an optional peer dependency; load it lazily so users of
    // the Postgres backend never need it installed.
    let IORedis: any;
    try {
      const _require = createRequire(import.meta.url);
      IORedis = _require('ioredis');
    } catch {
      throw new Error(
        'Redis backend requires the "ioredis" package. Install it with: npm install ioredis',
      );
    }

    this.prefix = redisConfig.keyPrefix ?? 'dq:';

    if (redisConfig.url) {
      // Connection string form; tls/db are layered on top when provided.
      this.client = new IORedis(redisConfig.url, {
        ...(redisConfig.tls ? { tls: redisConfig.tls } : {}),
        ...(redisConfig.db !== undefined ? { db: redisConfig.db } : {}),
      });
    } else {
      // Discrete host/port form with local-Redis defaults.
      this.client = new IORedis({
        host: redisConfig.host ?? '127.0.0.1',
        port: redisConfig.port ?? 6379,
        password: redisConfig.password,
        db: redisConfig.db ?? 0,
        ...(redisConfig.tls ? { tls: redisConfig.tls } : {}),
      });
    }
  }
245
+
246
  /** Expose the raw ioredis client for advanced usage. */
  getClient(): RedisType {
    return this.client;
  }

  /** Current time in milliseconds since the Unix epoch. */
  private nowMs(): number {
    return Date.now();
  }
254
+
255
+ // ── Events ──────────────────────────────────────────────────────────
256
+
257
+ async recordJobEvent(
258
+ jobId: number,
259
+ eventType: JobEventType,
260
+ metadata?: any,
261
+ ): Promise<void> {
262
+ try {
263
+ const eventId = await this.client.incr(`${this.prefix}event_id_seq`);
264
+ const event = JSON.stringify({
265
+ id: eventId,
266
+ jobId,
267
+ eventType,
268
+ createdAt: this.nowMs(),
269
+ metadata: metadata ?? null,
270
+ });
271
+ await this.client.rpush(`${this.prefix}events:${jobId}`, event);
272
+ } catch (error) {
273
+ log(`Error recording job event for job ${jobId}: ${error}`);
274
+ // Do not throw
275
+ }
276
+ }
277
+
278
+ async getJobEvents(jobId: number): Promise<JobEvent[]> {
279
+ const raw = await this.client.lrange(
280
+ `${this.prefix}events:${jobId}`,
281
+ 0,
282
+ -1,
283
+ );
284
+ return raw.map((r: string) => {
285
+ const e = JSON.parse(r);
286
+ return {
287
+ ...e,
288
+ createdAt: new Date(e.createdAt),
289
+ };
290
+ });
291
+ }
292
+
293
+ // ── Job CRUD ──────────────────────────────────────────────────────────
294
+
295
  /**
   * Insert a single job via the ADD_JOB Lua script.
   *
   * The positional argument list below must match the script's ARGV layout
   * exactly; absent optional values are encoded as the string 'null'.
   *
   * @param options - Only `db` is checked; the Redis backend rejects it
   *   because transactional job creation is Postgres-only.
   * @returns The job's numeric ID as reported by the script.
   */
  async addJob<PayloadMap, T extends JobType<PayloadMap>>(
    {
      jobType,
      payload,
      maxAttempts = 3,
      priority = 0,
      runAt = null,
      timeoutMs = undefined,
      forceKillOnTimeout = false,
      tags = undefined,
      idempotencyKey = undefined,
      retryDelay = undefined,
      retryBackoff = undefined,
      retryDelayMax = undefined,
    }: JobOptions<PayloadMap, T>,
    options?: AddJobOptions,
  ): Promise<number> {
    if (options?.db) {
      throw new Error(
        'The db option is not supported with the Redis backend. ' +
          'Transactional job creation is only available with PostgreSQL.',
      );
    }
    const now = this.nowMs();
    // runAt of 0 means "run immediately" to the script.
    const runAtMs = runAt ? runAt.getTime() : 0;

    const result = (await this.client.eval(
      ADD_JOB_SCRIPT,
      1,
      this.prefix,
      jobType,
      JSON.stringify(payload),
      maxAttempts,
      priority,
      runAtMs.toString(),
      timeoutMs !== undefined ? timeoutMs.toString() : 'null',
      forceKillOnTimeout ? 'true' : 'false',
      tags ? JSON.stringify(tags) : 'null',
      idempotencyKey ?? 'null',
      now,
      retryDelay !== undefined ? retryDelay.toString() : 'null',
      retryBackoff !== undefined ? retryBackoff.toString() : 'null',
      retryDelayMax !== undefined ? retryDelayMax.toString() : 'null',
    )) as number;

    const jobId = Number(result);
    log(
      `Added job ${jobId}: payload ${JSON.stringify(payload)}, ${runAt ? `runAt ${runAt.toISOString()}, ` : ''}priority ${priority}, maxAttempts ${maxAttempts}, jobType ${jobType}, tags ${JSON.stringify(tags)}${idempotencyKey ? `, idempotencyKey "${idempotencyKey}"` : ''}`,
    );
    await this.recordJobEvent(jobId, JobEventType.Added, {
      jobType,
      payload,
      tags,
      idempotencyKey,
    });
    return jobId;
  }
352
+
353
  /**
   * Insert multiple jobs atomically via a single Lua script.
   * Returns IDs in the same order as the input array.
   */
  async addJobs<PayloadMap, T extends JobType<PayloadMap>>(
    jobs: JobOptions<PayloadMap, T>[],
    options?: AddJobOptions,
  ): Promise<number[]> {
    if (jobs.length === 0) return [];

    if (options?.db) {
      throw new Error(
        'The db option is not supported with the Redis backend. ' +
          'Transactional job creation is only available with PostgreSQL.',
      );
    }

    const now = this.nowMs();

    // Normalize each job into the string-encoded shape the Lua script
    // expects; 'null' is the sentinel for absent optional values.
    const jobsPayload = jobs.map((job) => ({
      jobType: job.jobType,
      payload: JSON.stringify(job.payload),
      maxAttempts: job.maxAttempts ?? 3,
      priority: job.priority ?? 0,
      runAtMs: job.runAt ? job.runAt.getTime() : 0,
      timeoutMs:
        job.timeoutMs !== undefined ? job.timeoutMs.toString() : 'null',
      forceKillOnTimeout: job.forceKillOnTimeout ? 'true' : 'false',
      tags: job.tags ? JSON.stringify(job.tags) : 'null',
      idempotencyKey: job.idempotencyKey ?? 'null',
      retryDelay:
        job.retryDelay !== undefined ? job.retryDelay.toString() : 'null',
      retryBackoff:
        job.retryBackoff !== undefined ? job.retryBackoff.toString() : 'null',
      retryDelayMax:
        job.retryDelayMax !== undefined ? job.retryDelayMax.toString() : 'null',
    }));

    const result = (await this.client.eval(
      ADD_JOBS_SCRIPT,
      1,
      this.prefix,
      JSON.stringify(jobsPayload),
      now,
    )) as number[];

    const ids = result.map(Number);
    log(`Batch-inserted ${jobs.length} jobs, IDs: [${ids.join(', ')}]`);

    // Record Added events. The Lua script returns the pre-existing ID for
    // an idempotency-key duplicate but does not report whether the job was
    // newly created, so a duplicate that predates this batch may still get
    // an extra Added event. Within this batch, jobs that share an
    // idempotency key (and therefore resolve to the same ID) are recorded
    // only once.
    const existingIdempotencyIds = new Set<number>();
    for (let i = 0; i < jobs.length; i++) {
      if (jobs[i].idempotencyKey) {
        if (existingIdempotencyIds.has(ids[i])) {
          continue;
        }
        existingIdempotencyIds.add(ids[i]);
      }
      await this.recordJobEvent(ids[i], JobEventType.Added, {
        jobType: jobs[i].jobType,
        payload: jobs[i].payload,
        tags: jobs[i].tags,
        idempotencyKey: jobs[i].idempotencyKey,
      });
    }

    return ids;
  }
429
+
430
+ async getJob<PayloadMap, T extends JobType<PayloadMap>>(
431
+ id: number,
432
+ ): Promise<JobRecord<PayloadMap, T> | null> {
433
+ const data = await this.client.hgetall(`${this.prefix}job:${id}`);
434
+ if (!data || Object.keys(data).length === 0) {
435
+ log(`Job ${id} not found`);
436
+ return null;
437
+ }
438
+ log(`Found job ${id}`);
439
+ return deserializeJob<PayloadMap, T>(data);
440
+ }
441
+
442
+ async getJobsByStatus<PayloadMap, T extends JobType<PayloadMap>>(
443
+ status: string,
444
+ limit = 100,
445
+ offset = 0,
446
+ ): Promise<JobRecord<PayloadMap, T>[]> {
447
+ const ids = await this.client.smembers(`${this.prefix}status:${status}`);
448
+ if (ids.length === 0) return [];
449
+
450
+ // Load all, sort by createdAt DESC, then paginate
451
+ const jobs = await this.loadJobsByIds<PayloadMap, T>(ids);
452
+ jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
453
+ return jobs.slice(offset, offset + limit);
454
+ }
455
+
456
+ async getAllJobs<PayloadMap, T extends JobType<PayloadMap>>(
457
+ limit = 100,
458
+ offset = 0,
459
+ ): Promise<JobRecord<PayloadMap, T>[]> {
460
+ // All jobs sorted by createdAt DESC (the 'all' sorted set is scored by createdAt ms)
461
+ const ids = await this.client.zrevrange(
462
+ `${this.prefix}all`,
463
+ offset,
464
+ offset + limit - 1,
465
+ );
466
+ if (ids.length === 0) return [];
467
+ return this.loadJobsByIds<PayloadMap, T>(ids);
468
+ }
469
+
470
+ async getJobs<PayloadMap, T extends JobType<PayloadMap>>(
471
+ filters?: JobFilters,
472
+ limit = 100,
473
+ offset = 0,
474
+ ): Promise<JobRecord<PayloadMap, T>[]> {
475
+ // Start with all job IDs
476
+ let candidateIds: string[];
477
+
478
+ if (filters?.jobType) {
479
+ candidateIds = await this.client.smembers(
480
+ `${this.prefix}type:${filters.jobType}`,
481
+ );
482
+ } else {
483
+ candidateIds = await this.client.zrevrange(`${this.prefix}all`, 0, -1);
484
+ }
485
+
486
+ if (candidateIds.length === 0) return [];
487
+
488
+ // Apply tag filter via set operations
489
+ if (filters?.tags && filters.tags.values.length > 0) {
490
+ candidateIds = await this.filterByTags(
491
+ candidateIds,
492
+ filters.tags.values,
493
+ filters.tags.mode || 'all',
494
+ );
495
+ }
496
+
497
+ // Load and filter remaining criteria in-memory
498
+ let jobs = await this.loadJobsByIds<PayloadMap, T>(candidateIds);
499
+
500
+ if (filters) {
501
+ if (filters.priority !== undefined) {
502
+ jobs = jobs.filter((j) => j.priority === filters.priority);
503
+ }
504
+ if (filters.runAt) {
505
+ jobs = this.filterByRunAt(jobs, filters.runAt);
506
+ }
507
+ // Cursor-based (keyset) pagination: only return jobs with id < cursor
508
+ if (filters.cursor !== undefined) {
509
+ jobs = jobs.filter((j) => j.id < filters.cursor!);
510
+ }
511
+ }
512
+
513
+ // Sort by id DESC for consistent keyset pagination (matches Postgres ORDER BY id DESC)
514
+ jobs.sort((a, b) => b.id - a.id);
515
+
516
+ // When using cursor, skip offset
517
+ if (filters?.cursor !== undefined) {
518
+ return jobs.slice(0, limit);
519
+ }
520
+ return jobs.slice(offset, offset + limit);
521
+ }
522
+
523
+ async getJobsByTags<PayloadMap, T extends JobType<PayloadMap>>(
524
+ tags: string[],
525
+ mode: TagQueryMode = 'all',
526
+ limit = 100,
527
+ offset = 0,
528
+ ): Promise<JobRecord<PayloadMap, T>[]> {
529
+ // Start with all IDs
530
+ const allIds = await this.client.zrevrange(`${this.prefix}all`, 0, -1);
531
+ if (allIds.length === 0) return [];
532
+
533
+ const filtered = await this.filterByTags(allIds, tags, mode);
534
+ if (filtered.length === 0) return [];
535
+
536
+ const jobs = await this.loadJobsByIds<PayloadMap, T>(filtered);
537
+ jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
538
+ return jobs.slice(offset, offset + limit);
539
+ }
540
+
541
+ // ── Processing lifecycle ──────────────────────────────────────────────
542
+
543
+ async getNextBatch<PayloadMap, T extends JobType<PayloadMap>>(
544
+ workerId: string,
545
+ batchSize = 10,
546
+ jobType?: string | string[],
547
+ ): Promise<JobRecord<PayloadMap, T>[]> {
548
+ const now = this.nowMs();
549
+ const jobTypeFilter =
550
+ jobType === undefined
551
+ ? 'null'
552
+ : Array.isArray(jobType)
553
+ ? JSON.stringify(jobType)
554
+ : jobType;
555
+
556
+ const result = (await this.client.eval(
557
+ GET_NEXT_BATCH_SCRIPT,
558
+ 1,
559
+ this.prefix,
560
+ workerId,
561
+ batchSize,
562
+ now,
563
+ jobTypeFilter,
564
+ )) as string[];
565
+
566
+ if (!result || result.length === 0) {
567
+ log('Found 0 jobs to process');
568
+ return [];
569
+ }
570
+
571
+ // Parse the flat result into jobs separated by __JOB_SEP__
572
+ const jobs: JobRecord<PayloadMap, T>[] = [];
573
+ let current: string[] = [];
574
+ for (const item of result) {
575
+ if (item === '__JOB_SEP__') {
576
+ if (current.length > 0) {
577
+ const h = hashToObject(current);
578
+ jobs.push(deserializeJob<PayloadMap, T>(h));
579
+ }
580
+ current = [];
581
+ } else {
582
+ current.push(item);
583
+ }
584
+ }
585
+
586
+ log(`Found ${jobs.length} jobs to process`);
587
+
588
+ // Record processing events
589
+ for (const job of jobs) {
590
+ await this.recordJobEvent(job.id, JobEventType.Processing);
591
+ }
592
+
593
+ return jobs;
594
+ }
595
+
596
  /** Mark a job completed via the Lua script, then record a Completed event. */
  async completeJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(COMPLETE_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Completed);
    log(`Completed job ${jobId}`);
  }
602
+
603
+ async failJob(
604
+ jobId: number,
605
+ error: Error,
606
+ failureReason?: FailureReason,
607
+ ): Promise<void> {
608
+ const now = this.nowMs();
609
+ const errorJson = JSON.stringify([
610
+ {
611
+ message: error.message || String(error),
612
+ timestamp: new Date(now).toISOString(),
613
+ },
614
+ ]);
615
+ await this.client.eval(
616
+ FAIL_JOB_SCRIPT,
617
+ 1,
618
+ this.prefix,
619
+ jobId,
620
+ errorJson,
621
+ failureReason ?? 'null',
622
+ now,
623
+ );
624
+ await this.recordJobEvent(jobId, JobEventType.Failed, {
625
+ message: error.message || String(error),
626
+ failureReason,
627
+ });
628
+ log(`Failed job ${jobId}`);
629
+ }
630
+
631
  /**
   * Refresh a job's lock timestamp via the PROLONG Lua script (heartbeat).
   * Best-effort: errors are logged and swallowed so a failed heartbeat
   * never interrupts the running handler.
   */
  async prolongJob(jobId: number): Promise<void> {
    try {
      const now = this.nowMs();
      await this.client.eval(PROLONG_JOB_SCRIPT, 1, this.prefix, jobId, now);
      await this.recordJobEvent(jobId, JobEventType.Prolonged);
      log(`Prolonged job ${jobId}`);
    } catch (error) {
      log(`Error prolonging job ${jobId}: ${error}`);
      // Best-effort, do not throw
    }
  }
642
+
643
+ // ── Progress ──────────────────────────────────────────────────────────
644
+
645
  /**
   * Persist a job's progress value (and bump updatedAt) on its hash.
   * Best-effort: errors are logged, never thrown.
   *
   * @param jobId - ID of the job being processed.
   * @param progress - Progress value reported by the handler (stored as-is).
   */
  async updateProgress(jobId: number, progress: number): Promise<void> {
    try {
      const now = this.nowMs();
      await this.client.hset(
        `${this.prefix}job:${jobId}`,
        'progress',
        progress.toString(),
        'updatedAt',
        now.toString(),
      );
      log(`Updated progress for job ${jobId}: ${progress}%`);
    } catch (error) {
      log(`Error updating progress for job ${jobId}: ${error}`);
      // Best-effort: do not throw to avoid killing the running handler
    }
  }
661
+
662
+ // ── Job management ────────────────────────────────────────────────────
663
+
664
  /** Re-queue a job for another attempt via the RETRY Lua script. */
  async retryJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(RETRY_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Retried);
    log(`Retried job ${jobId}`);
  }

  /** Cancel a single job via the CANCEL Lua script. */
  async cancelJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(CANCEL_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Cancelled);
    log(`Cancelled job ${jobId}`);
  }
677
+
678
+ async cancelAllUpcomingJobs(filters?: JobFilters): Promise<number> {
679
+ // Get all pending IDs
680
+ let ids = await this.client.smembers(`${this.prefix}status:pending`);
681
+ if (ids.length === 0) return 0;
682
+
683
+ if (filters) {
684
+ ids = await this.applyFilters(ids, filters);
685
+ }
686
+
687
+ const now = this.nowMs();
688
+ let count = 0;
689
+ for (const id of ids) {
690
+ const result = await this.client.eval(
691
+ CANCEL_JOB_SCRIPT,
692
+ 1,
693
+ this.prefix,
694
+ id,
695
+ now,
696
+ );
697
+ if (Number(result) === 1) count++;
698
+ }
699
+
700
+ log(`Cancelled ${count} jobs`);
701
+ return count;
702
+ }
703
+
704
+ async editJob(jobId: number, updates: JobUpdates): Promise<void> {
705
+ const jk = `${this.prefix}job:${jobId}`;
706
+ const status = await this.client.hget(jk, 'status');
707
+ if (status !== 'pending') {
708
+ log(`Job ${jobId} is not pending (status: ${status}), skipping edit`);
709
+ return;
710
+ }
711
+
712
+ const now = this.nowMs();
713
+ const fields: string[] = [];
714
+ const metadata: any = {};
715
+
716
+ if (updates.payload !== undefined) {
717
+ fields.push('payload', JSON.stringify(updates.payload));
718
+ metadata.payload = updates.payload;
719
+ }
720
+ if (updates.maxAttempts !== undefined) {
721
+ fields.push('maxAttempts', updates.maxAttempts.toString());
722
+ metadata.maxAttempts = updates.maxAttempts;
723
+ }
724
+ if (updates.priority !== undefined) {
725
+ fields.push('priority', updates.priority.toString());
726
+ metadata.priority = updates.priority;
727
+
728
+ // Recompute queue score
729
+ const createdAt = await this.client.hget(jk, 'createdAt');
730
+ const score = updates.priority * 1e15 + (1e15 - Number(createdAt));
731
+ // Update score in queue if present
732
+ const inQueue = await this.client.zscore(
733
+ `${this.prefix}queue`,
734
+ jobId.toString(),
735
+ );
736
+ if (inQueue !== null) {
737
+ await this.client.zadd(`${this.prefix}queue`, score, jobId.toString());
738
+ }
739
+ }
740
+ if (updates.runAt !== undefined) {
741
+ if (updates.runAt === null) {
742
+ fields.push('runAt', now.toString());
743
+ } else {
744
+ fields.push('runAt', updates.runAt.getTime().toString());
745
+ }
746
+ metadata.runAt = updates.runAt;
747
+ }
748
+ if (updates.timeoutMs !== undefined) {
749
+ fields.push(
750
+ 'timeoutMs',
751
+ updates.timeoutMs !== null ? updates.timeoutMs.toString() : 'null',
752
+ );
753
+ metadata.timeoutMs = updates.timeoutMs;
754
+ }
755
+ if (updates.tags !== undefined) {
756
+ // Update tag indexes: remove old, add new
757
+ const oldTagsJson = await this.client.hget(jk, 'tags');
758
+ if (oldTagsJson && oldTagsJson !== 'null') {
759
+ try {
760
+ const oldTags = JSON.parse(oldTagsJson) as string[];
761
+ for (const tag of oldTags) {
762
+ await this.client.srem(
763
+ `${this.prefix}tag:${tag}`,
764
+ jobId.toString(),
765
+ );
766
+ }
767
+ } catch {
768
+ /* ignore */
769
+ }
770
+ }
771
+ await this.client.del(`${this.prefix}job:${jobId}:tags`);
772
+
773
+ if (updates.tags !== null) {
774
+ for (const tag of updates.tags) {
775
+ await this.client.sadd(`${this.prefix}tag:${tag}`, jobId.toString());
776
+ await this.client.sadd(`${this.prefix}job:${jobId}:tags`, tag);
777
+ }
778
+ fields.push('tags', JSON.stringify(updates.tags));
779
+ } else {
780
+ fields.push('tags', 'null');
781
+ }
782
+ metadata.tags = updates.tags;
783
+ }
784
+ if (updates.retryDelay !== undefined) {
785
+ fields.push(
786
+ 'retryDelay',
787
+ updates.retryDelay !== null ? updates.retryDelay.toString() : 'null',
788
+ );
789
+ metadata.retryDelay = updates.retryDelay;
790
+ }
791
+ if (updates.retryBackoff !== undefined) {
792
+ fields.push(
793
+ 'retryBackoff',
794
+ updates.retryBackoff !== null
795
+ ? updates.retryBackoff.toString()
796
+ : 'null',
797
+ );
798
+ metadata.retryBackoff = updates.retryBackoff;
799
+ }
800
+ if (updates.retryDelayMax !== undefined) {
801
+ fields.push(
802
+ 'retryDelayMax',
803
+ updates.retryDelayMax !== null
804
+ ? updates.retryDelayMax.toString()
805
+ : 'null',
806
+ );
807
+ metadata.retryDelayMax = updates.retryDelayMax;
808
+ }
809
+
810
+ if (fields.length === 0) {
811
+ log(`No fields to update for job ${jobId}`);
812
+ return;
813
+ }
814
+
815
+ fields.push('updatedAt', now.toString());
816
+ await (this.client as any).hmset(jk, ...fields);
817
+
818
+ await this.recordJobEvent(jobId, JobEventType.Edited, metadata);
819
+ log(`Edited job ${jobId}: ${JSON.stringify(metadata)}`);
820
+ }
821
+
822
+ async editAllPendingJobs(
823
+ filters: JobFilters | undefined,
824
+ updates: JobUpdates,
825
+ ): Promise<number> {
826
+ let ids = await this.client.smembers(`${this.prefix}status:pending`);
827
+ if (ids.length === 0) return 0;
828
+
829
+ if (filters) {
830
+ ids = await this.applyFilters(ids, filters);
831
+ }
832
+
833
+ let count = 0;
834
+ for (const id of ids) {
835
+ await this.editJob(Number(id), updates);
836
+ count++;
837
+ }
838
+
839
+ log(`Edited ${count} pending jobs`);
840
+ return count;
841
+ }
842
+
843
+ /**
844
+ * Delete completed jobs older than the given number of days.
845
+ * Uses SSCAN to iterate the completed set in batches, avoiding
846
+ * loading all IDs into memory and preventing long Redis blocks.
847
+ *
848
+ * @param daysToKeep - Number of days to retain completed jobs (default 30).
849
+ * @param batchSize - Number of IDs to scan per SSCAN iteration (default 200).
850
+ * @returns Total number of deleted jobs.
851
+ */
852
+ async cleanupOldJobs(daysToKeep = 30, batchSize = 200): Promise<number> {
853
+ const cutoffMs = this.nowMs() - daysToKeep * 24 * 60 * 60 * 1000;
854
+ const setKey = `${this.prefix}status:completed`;
855
+ let totalDeleted = 0;
856
+ let cursor = '0';
857
+
858
+ do {
859
+ const [nextCursor, ids] = await this.client.sscan(
860
+ setKey,
861
+ cursor,
862
+ 'COUNT',
863
+ batchSize,
864
+ );
865
+ cursor = nextCursor;
866
+
867
+ if (ids.length > 0) {
868
+ const result = (await this.client.eval(
869
+ CLEANUP_OLD_JOBS_BATCH_SCRIPT,
870
+ 1,
871
+ this.prefix,
872
+ cutoffMs,
873
+ ...ids,
874
+ )) as number;
875
+ totalDeleted += Number(result);
876
+ }
877
+ } while (cursor !== '0');
878
+
879
+ log(`Deleted ${totalDeleted} old jobs`);
880
+ return totalDeleted;
881
+ }
882
+
883
  /**
   * Delete job events older than the given number of days.
   * Iterates all event lists and removes events whose createdAt is before the cutoff.
   * Also removes orphaned event lists (where the job no longer exists).
   *
   * @param daysToKeep - Number of days to retain events (default 30).
   * @param batchSize - Number of event keys to scan per SCAN iteration (default 200).
   * @returns Total number of deleted events.
   */
  async cleanupOldJobEvents(daysToKeep = 30, batchSize = 200): Promise<number> {
    const cutoffMs = this.nowMs() - daysToKeep * 24 * 60 * 60 * 1000;
    const pattern = `${this.prefix}events:*`;
    let totalDeleted = 0;
    let cursor = '0';

    do {
      const [nextCursor, keys] = await this.client.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        batchSize,
      );
      cursor = nextCursor;

      for (const key of keys) {
        // Check if the job still exists; if not, delete the entire event list
        const jobIdStr = key.slice(`${this.prefix}events:`.length);
        const jobExists = await this.client.exists(
          `${this.prefix}job:${jobIdStr}`,
        );
        if (!jobExists) {
          const len = await this.client.llen(key);
          await this.client.del(key);
          totalDeleted += len;
          continue;
        }

        // Filter events by date: read all, keep recent, rewrite
        const events = await this.client.lrange(key, 0, -1);
        const kept: string[] = [];
        for (const raw of events) {
          try {
            const e = JSON.parse(raw);
            if (e.createdAt >= cutoffMs) {
              kept.push(raw);
            } else {
              totalDeleted++;
            }
          } catch {
            // Unparseable entries are dropped and counted as deleted.
            totalDeleted++;
          }
        }

        if (kept.length === 0) {
          await this.client.del(key);
        } else if (kept.length < events.length) {
          // Rewrite the list (DEL + RPUSH) in a single pipeline round trip.
          const pipeline = this.client.pipeline();
          pipeline.del(key);
          for (const raw of kept) {
            pipeline.rpush(key, raw);
          }
          await pipeline.exec();
        }
      }
    } while (cursor !== '0');

    log(`Deleted ${totalDeleted} old job events`);
    return totalDeleted;
  }
953
+
954
+ async reclaimStuckJobs(maxProcessingTimeMinutes = 10): Promise<number> {
955
+ const maxAgeMs = maxProcessingTimeMinutes * 60 * 1000;
956
+ const now = this.nowMs();
957
+ const result = (await this.client.eval(
958
+ RECLAIM_STUCK_JOBS_SCRIPT,
959
+ 1,
960
+ this.prefix,
961
+ maxAgeMs,
962
+ now,
963
+ )) as number;
964
+ log(`Reclaimed ${result} stuck jobs`);
965
+ return Number(result);
966
+ }
967
+
968
+ // ── Wait / step-data support ────────────────────────────────────────
969
+
970
+ /**
971
+ * Transition a job from 'processing' to 'waiting' status.
972
+ * Persists step data so the handler can resume from where it left off.
973
+ *
974
+ * @param jobId - The job to pause.
975
+ * @param options - Wait configuration including optional waitUntil date, token ID, and step data.
976
+ */
977
+ async waitJob(
978
+ jobId: number,
979
+ options: {
980
+ waitUntil?: Date;
981
+ waitTokenId?: string;
982
+ stepData: Record<string, any>;
983
+ },
984
+ ): Promise<void> {
985
+ const now = this.nowMs();
986
+ const waitUntilMs = options.waitUntil
987
+ ? options.waitUntil.getTime().toString()
988
+ : 'null';
989
+ const waitTokenId = options.waitTokenId ?? 'null';
990
+ const stepDataJson = JSON.stringify(options.stepData);
991
+
992
+ const result = await this.client.eval(
993
+ WAIT_JOB_SCRIPT,
994
+ 1,
995
+ this.prefix,
996
+ jobId,
997
+ waitUntilMs,
998
+ waitTokenId,
999
+ stepDataJson,
1000
+ now,
1001
+ );
1002
+
1003
+ if (Number(result) === 0) {
1004
+ log(
1005
+ `Job ${jobId} could not be set to waiting (may have been reclaimed or is no longer processing)`,
1006
+ );
1007
+ return;
1008
+ }
1009
+
1010
+ await this.recordJobEvent(jobId, JobEventType.Waiting, {
1011
+ waitUntil: options.waitUntil?.toISOString() ?? null,
1012
+ waitTokenId: options.waitTokenId ?? null,
1013
+ });
1014
+ log(`Job ${jobId} set to waiting`);
1015
+ }
1016
+
1017
+ /**
1018
+ * Persist step data for a job. Called after each ctx.run() step completes.
1019
+ * Best-effort: does not throw to avoid killing the running handler.
1020
+ *
1021
+ * @param jobId - The job to update.
1022
+ * @param stepData - The step data to persist.
1023
+ */
1024
+ async updateStepData(
1025
+ jobId: number,
1026
+ stepData: Record<string, any>,
1027
+ ): Promise<void> {
1028
+ try {
1029
+ const now = this.nowMs();
1030
+ await this.client.hset(
1031
+ `${this.prefix}job:${jobId}`,
1032
+ 'stepData',
1033
+ JSON.stringify(stepData),
1034
+ 'updatedAt',
1035
+ now.toString(),
1036
+ );
1037
+ } catch (error) {
1038
+ log(`Error updating stepData for job ${jobId}: ${error}`);
1039
+ }
1040
+ }
1041
+
1042
+ /**
1043
+ * Create a waitpoint token.
1044
+ *
1045
+ * @param jobId - The job ID to associate with the token (null if created outside a handler).
1046
+ * @param options - Optional timeout string (e.g. '10m', '1h') and tags.
1047
+ * @returns The created waitpoint with its unique ID.
1048
+ */
1049
+ async createWaitpoint(
1050
+ jobId: number | null,
1051
+ options?: CreateTokenOptions,
1052
+ ): Promise<{ id: string }> {
1053
+ const id = `wp_${randomUUID()}`;
1054
+ const now = this.nowMs();
1055
+ let timeoutAt: number | null = null;
1056
+
1057
+ if (options?.timeout) {
1058
+ const ms = parseTimeoutString(options.timeout);
1059
+ timeoutAt = now + ms;
1060
+ }
1061
+
1062
+ const key = `${this.prefix}waitpoint:${id}`;
1063
+ const fields: string[] = [
1064
+ 'id',
1065
+ id,
1066
+ 'jobId',
1067
+ jobId !== null ? jobId.toString() : 'null',
1068
+ 'status',
1069
+ 'waiting',
1070
+ 'output',
1071
+ 'null',
1072
+ 'timeoutAt',
1073
+ timeoutAt !== null ? timeoutAt.toString() : 'null',
1074
+ 'createdAt',
1075
+ now.toString(),
1076
+ 'completedAt',
1077
+ 'null',
1078
+ 'tags',
1079
+ options?.tags ? JSON.stringify(options.tags) : 'null',
1080
+ ];
1081
+
1082
+ await (this.client as any).hmset(key, ...fields);
1083
+
1084
+ if (timeoutAt !== null) {
1085
+ await this.client.zadd(`${this.prefix}waitpoint_timeout`, timeoutAt, id);
1086
+ }
1087
+
1088
+ log(`Created waitpoint ${id} for job ${jobId}`);
1089
+ return { id };
1090
+ }
1091
+
1092
+ /**
1093
+ * Complete a waitpoint token and move the associated job back to 'pending'.
1094
+ *
1095
+ * @param tokenId - The waitpoint token ID to complete.
1096
+ * @param data - Optional data to pass to the waiting handler.
1097
+ */
1098
+ async completeWaitpoint(tokenId: string, data?: any): Promise<void> {
1099
+ const now = this.nowMs();
1100
+ const outputJson = data != null ? JSON.stringify(data) : 'null';
1101
+
1102
+ const result = await this.client.eval(
1103
+ COMPLETE_WAITPOINT_SCRIPT,
1104
+ 1,
1105
+ this.prefix,
1106
+ tokenId,
1107
+ outputJson,
1108
+ now,
1109
+ );
1110
+
1111
+ if (Number(result) === 0) {
1112
+ log(`Waitpoint ${tokenId} not found or already completed`);
1113
+ return;
1114
+ }
1115
+
1116
+ log(`Completed waitpoint ${tokenId}`);
1117
+ }
1118
+
1119
+ /**
1120
+ * Retrieve a waitpoint token by its ID.
1121
+ *
1122
+ * @param tokenId - The waitpoint token ID to look up.
1123
+ * @returns The waitpoint record, or null if not found.
1124
+ */
1125
+ async getWaitpoint(tokenId: string): Promise<WaitpointRecord | null> {
1126
+ const data = await this.client.hgetall(
1127
+ `${this.prefix}waitpoint:${tokenId}`,
1128
+ );
1129
+ if (!data || Object.keys(data).length === 0) return null;
1130
+
1131
+ const nullish = (v: string | undefined) =>
1132
+ v === undefined || v === 'null' || v === '' ? null : v;
1133
+ const numOrNull = (v: string | undefined): number | null => {
1134
+ const n = nullish(v);
1135
+ return n === null ? null : Number(n);
1136
+ };
1137
+ const dateOrNull = (v: string | undefined): Date | null => {
1138
+ const n = numOrNull(v);
1139
+ return n === null ? null : new Date(n);
1140
+ };
1141
+
1142
+ let output: any = null;
1143
+ if (data.output && data.output !== 'null') {
1144
+ try {
1145
+ output = JSON.parse(data.output);
1146
+ } catch {
1147
+ output = data.output;
1148
+ }
1149
+ }
1150
+
1151
+ let tags: string[] | null = null;
1152
+ if (data.tags && data.tags !== 'null') {
1153
+ try {
1154
+ tags = JSON.parse(data.tags);
1155
+ } catch {
1156
+ /* ignore */
1157
+ }
1158
+ }
1159
+
1160
+ return {
1161
+ id: data.id,
1162
+ jobId: numOrNull(data.jobId),
1163
+ status: data.status as WaitpointRecord['status'],
1164
+ output,
1165
+ timeoutAt: dateOrNull(data.timeoutAt),
1166
+ createdAt: new Date(Number(data.createdAt)),
1167
+ completedAt: dateOrNull(data.completedAt),
1168
+ tags,
1169
+ };
1170
+ }
1171
+
1172
+ /**
1173
+ * Expire timed-out waitpoint tokens and move their associated jobs back to 'pending'.
1174
+ *
1175
+ * @returns The number of tokens that were expired.
1176
+ */
1177
+ async expireTimedOutWaitpoints(): Promise<number> {
1178
+ const now = this.nowMs();
1179
+ const result = (await this.client.eval(
1180
+ EXPIRE_TIMED_OUT_WAITPOINTS_SCRIPT,
1181
+ 1,
1182
+ this.prefix,
1183
+ now,
1184
+ )) as number;
1185
+ const count = Number(result);
1186
+ if (count > 0) {
1187
+ log(`Expired ${count} timed-out waitpoints`);
1188
+ }
1189
+ return count;
1190
+ }
1191
+
1192
+ // ── Internal helpers ──────────────────────────────────────────────────
1193
+
1194
+ async setPendingReasonForUnpickedJobs(
1195
+ reason: string,
1196
+ jobType?: string | string[],
1197
+ ): Promise<void> {
1198
+ let ids = await this.client.smembers(`${this.prefix}status:pending`);
1199
+ if (ids.length === 0) return;
1200
+
1201
+ if (jobType) {
1202
+ const types = Array.isArray(jobType) ? jobType : [jobType];
1203
+ const typeSet = new Set<string>();
1204
+ for (const t of types) {
1205
+ const typeIds = await this.client.smembers(`${this.prefix}type:${t}`);
1206
+ for (const id of typeIds) typeSet.add(id);
1207
+ }
1208
+ ids = ids.filter((id: string) => typeSet.has(id));
1209
+ }
1210
+
1211
+ for (const id of ids) {
1212
+ await this.client.hset(
1213
+ `${this.prefix}job:${id}`,
1214
+ 'pendingReason',
1215
+ reason,
1216
+ );
1217
+ }
1218
+ }
1219
+
1220
+ // ── Private helpers ───────────────────────────────────────────────────
1221
+
1222
+ private async loadJobsByIds<PayloadMap, T extends JobType<PayloadMap>>(
1223
+ ids: string[],
1224
+ ): Promise<JobRecord<PayloadMap, T>[]> {
1225
+ const pipeline = this.client.pipeline();
1226
+ for (const id of ids) {
1227
+ pipeline.hgetall(`${this.prefix}job:${id}`);
1228
+ }
1229
+ const results = await pipeline.exec();
1230
+ const jobs: JobRecord<PayloadMap, T>[] = [];
1231
+ if (results) {
1232
+ for (const [err, data] of results) {
1233
+ if (
1234
+ !err &&
1235
+ data &&
1236
+ typeof data === 'object' &&
1237
+ Object.keys(data as object).length > 0
1238
+ ) {
1239
+ jobs.push(
1240
+ deserializeJob<PayloadMap, T>(data as Record<string, string>),
1241
+ );
1242
+ }
1243
+ }
1244
+ }
1245
+ return jobs;
1246
+ }
1247
+
1248
+ private async filterByTags(
1249
+ candidateIds: string[],
1250
+ tags: string[],
1251
+ mode: TagQueryMode,
1252
+ ): Promise<string[]> {
1253
+ const candidateSet = new Set(candidateIds.map(String));
1254
+
1255
+ if (mode === 'exact') {
1256
+ // Jobs whose tags set is exactly equal to the given tags
1257
+ const tagSet = new Set(tags);
1258
+ const result: string[] = [];
1259
+ for (const id of candidateIds) {
1260
+ const jobTags = await this.client.smembers(
1261
+ `${this.prefix}job:${id}:tags`,
1262
+ );
1263
+ if (
1264
+ jobTags.length === tagSet.size &&
1265
+ jobTags.every((t: string) => tagSet.has(t))
1266
+ ) {
1267
+ result.push(id);
1268
+ }
1269
+ }
1270
+ return result;
1271
+ }
1272
+
1273
+ if (mode === 'all') {
1274
+ // Jobs that have ALL the given tags
1275
+ let intersection = new Set(candidateIds.map(String));
1276
+ for (const tag of tags) {
1277
+ const tagMembers = await this.client.smembers(
1278
+ `${this.prefix}tag:${tag}`,
1279
+ );
1280
+ const tagSet = new Set(tagMembers.map(String));
1281
+ intersection = new Set(
1282
+ [...intersection].filter((id) => tagSet.has(id)),
1283
+ );
1284
+ }
1285
+ return [...intersection].filter((id) => candidateSet.has(id));
1286
+ }
1287
+
1288
+ if (mode === 'any') {
1289
+ // Jobs that have at least ONE of the given tags
1290
+ const union = new Set<string>();
1291
+ for (const tag of tags) {
1292
+ const tagMembers = await this.client.smembers(
1293
+ `${this.prefix}tag:${tag}`,
1294
+ );
1295
+ for (const id of tagMembers) union.add(String(id));
1296
+ }
1297
+ return [...union].filter((id) => candidateSet.has(id));
1298
+ }
1299
+
1300
+ if (mode === 'none') {
1301
+ // Jobs that have NONE of the given tags
1302
+ const exclude = new Set<string>();
1303
+ for (const tag of tags) {
1304
+ const tagMembers = await this.client.smembers(
1305
+ `${this.prefix}tag:${tag}`,
1306
+ );
1307
+ for (const id of tagMembers) exclude.add(String(id));
1308
+ }
1309
+ return candidateIds.filter((id) => !exclude.has(String(id)));
1310
+ }
1311
+
1312
+ // Default: 'all'
1313
+ return this.filterByTags(candidateIds, tags, 'all');
1314
+ }
1315
+
1316
+ private filterByRunAt<PayloadMap, T extends JobType<PayloadMap>>(
1317
+ jobs: JobRecord<PayloadMap, T>[],
1318
+ runAt: Date | { gt?: Date; gte?: Date; lt?: Date; lte?: Date; eq?: Date },
1319
+ ): JobRecord<PayloadMap, T>[] {
1320
+ if (runAt instanceof Date) {
1321
+ return jobs.filter((j) => j.runAt.getTime() === runAt.getTime());
1322
+ }
1323
+ return jobs.filter((j) => {
1324
+ const t = j.runAt.getTime();
1325
+ if (runAt.gt && !(t > runAt.gt.getTime())) return false;
1326
+ if (runAt.gte && !(t >= runAt.gte.getTime())) return false;
1327
+ if (runAt.lt && !(t < runAt.lt.getTime())) return false;
1328
+ if (runAt.lte && !(t <= runAt.lte.getTime())) return false;
1329
+ if (runAt.eq && t !== runAt.eq.getTime()) return false;
1330
+ return true;
1331
+ });
1332
+ }
1333
+
1334
+ // ── Cron schedules ──────────────────────────────────────────────────
1335
+
1336
+ /** Create a cron schedule and return its ID. */
1337
+ async addCronSchedule(input: CronScheduleInput): Promise<number> {
1338
+ const existingId = await this.client.get(
1339
+ `${this.prefix}cron_name:${input.scheduleName}`,
1340
+ );
1341
+ if (existingId !== null) {
1342
+ throw new Error(
1343
+ `Cron schedule with name "${input.scheduleName}" already exists`,
1344
+ );
1345
+ }
1346
+
1347
+ const id = await this.client.incr(`${this.prefix}cron_id_seq`);
1348
+ const now = this.nowMs();
1349
+ const key = `${this.prefix}cron:${id}`;
1350
+
1351
+ const fields: string[] = [
1352
+ 'id',
1353
+ id.toString(),
1354
+ 'scheduleName',
1355
+ input.scheduleName,
1356
+ 'cronExpression',
1357
+ input.cronExpression,
1358
+ 'jobType',
1359
+ input.jobType,
1360
+ 'payload',
1361
+ JSON.stringify(input.payload),
1362
+ 'maxAttempts',
1363
+ input.maxAttempts.toString(),
1364
+ 'priority',
1365
+ input.priority.toString(),
1366
+ 'timeoutMs',
1367
+ input.timeoutMs !== null ? input.timeoutMs.toString() : 'null',
1368
+ 'forceKillOnTimeout',
1369
+ input.forceKillOnTimeout ? 'true' : 'false',
1370
+ 'tags',
1371
+ input.tags ? JSON.stringify(input.tags) : 'null',
1372
+ 'timezone',
1373
+ input.timezone,
1374
+ 'allowOverlap',
1375
+ input.allowOverlap ? 'true' : 'false',
1376
+ 'status',
1377
+ 'active',
1378
+ 'lastEnqueuedAt',
1379
+ 'null',
1380
+ 'lastJobId',
1381
+ 'null',
1382
+ 'nextRunAt',
1383
+ input.nextRunAt ? input.nextRunAt.getTime().toString() : 'null',
1384
+ 'createdAt',
1385
+ now.toString(),
1386
+ 'updatedAt',
1387
+ now.toString(),
1388
+ 'retryDelay',
1389
+ input.retryDelay !== null && input.retryDelay !== undefined
1390
+ ? input.retryDelay.toString()
1391
+ : 'null',
1392
+ 'retryBackoff',
1393
+ input.retryBackoff !== null && input.retryBackoff !== undefined
1394
+ ? input.retryBackoff.toString()
1395
+ : 'null',
1396
+ 'retryDelayMax',
1397
+ input.retryDelayMax !== null && input.retryDelayMax !== undefined
1398
+ ? input.retryDelayMax.toString()
1399
+ : 'null',
1400
+ ];
1401
+
1402
+ await (this.client as any).hmset(key, ...fields);
1403
+ await this.client.set(
1404
+ `${this.prefix}cron_name:${input.scheduleName}`,
1405
+ id.toString(),
1406
+ );
1407
+ await this.client.sadd(`${this.prefix}crons`, id.toString());
1408
+ await this.client.sadd(`${this.prefix}cron_status:active`, id.toString());
1409
+
1410
+ if (input.nextRunAt) {
1411
+ await this.client.zadd(
1412
+ `${this.prefix}cron_due`,
1413
+ input.nextRunAt.getTime(),
1414
+ id.toString(),
1415
+ );
1416
+ }
1417
+
1418
+ log(`Added cron schedule ${id}: "${input.scheduleName}"`);
1419
+ return id;
1420
+ }
1421
+
1422
+ /** Get a cron schedule by ID. */
1423
+ async getCronSchedule(id: number): Promise<CronScheduleRecord | null> {
1424
+ const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
1425
+ if (!data || Object.keys(data).length === 0) return null;
1426
+ return this.deserializeCronSchedule(data);
1427
+ }
1428
+
1429
+ /** Get a cron schedule by its unique name. */
1430
+ async getCronScheduleByName(
1431
+ name: string,
1432
+ ): Promise<CronScheduleRecord | null> {
1433
+ const id = await this.client.get(`${this.prefix}cron_name:${name}`);
1434
+ if (id === null) return null;
1435
+ return this.getCronSchedule(Number(id));
1436
+ }
1437
+
1438
+ /** List cron schedules, optionally filtered by status. */
1439
+ async listCronSchedules(
1440
+ status?: CronScheduleStatus,
1441
+ ): Promise<CronScheduleRecord[]> {
1442
+ let ids: string[];
1443
+ if (status) {
1444
+ ids = await this.client.smembers(`${this.prefix}cron_status:${status}`);
1445
+ } else {
1446
+ ids = await this.client.smembers(`${this.prefix}crons`);
1447
+ }
1448
+ if (ids.length === 0) return [];
1449
+
1450
+ const pipeline = this.client.pipeline();
1451
+ for (const id of ids) {
1452
+ pipeline.hgetall(`${this.prefix}cron:${id}`);
1453
+ }
1454
+ const results = await pipeline.exec();
1455
+ const schedules: CronScheduleRecord[] = [];
1456
+ if (results) {
1457
+ for (const [err, data] of results) {
1458
+ if (
1459
+ !err &&
1460
+ data &&
1461
+ typeof data === 'object' &&
1462
+ Object.keys(data as object).length > 0
1463
+ ) {
1464
+ schedules.push(
1465
+ this.deserializeCronSchedule(data as Record<string, string>),
1466
+ );
1467
+ }
1468
+ }
1469
+ }
1470
+ schedules.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
1471
+ return schedules;
1472
+ }
1473
+
1474
+ /** Delete a cron schedule by ID. */
1475
+ async removeCronSchedule(id: number): Promise<void> {
1476
+ const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
1477
+ if (!data || Object.keys(data).length === 0) return;
1478
+
1479
+ const name = data.scheduleName;
1480
+ const status = data.status;
1481
+
1482
+ await this.client.del(`${this.prefix}cron:${id}`);
1483
+ await this.client.del(`${this.prefix}cron_name:${name}`);
1484
+ await this.client.srem(`${this.prefix}crons`, id.toString());
1485
+ await this.client.srem(
1486
+ `${this.prefix}cron_status:${status}`,
1487
+ id.toString(),
1488
+ );
1489
+ await this.client.zrem(`${this.prefix}cron_due`, id.toString());
1490
+ log(`Removed cron schedule ${id}`);
1491
+ }
1492
+
1493
+ /** Pause a cron schedule. */
1494
+ async pauseCronSchedule(id: number): Promise<void> {
1495
+ const now = this.nowMs();
1496
+ await this.client.hset(
1497
+ `${this.prefix}cron:${id}`,
1498
+ 'status',
1499
+ 'paused',
1500
+ 'updatedAt',
1501
+ now.toString(),
1502
+ );
1503
+ await this.client.srem(`${this.prefix}cron_status:active`, id.toString());
1504
+ await this.client.sadd(`${this.prefix}cron_status:paused`, id.toString());
1505
+ await this.client.zrem(`${this.prefix}cron_due`, id.toString());
1506
+ log(`Paused cron schedule ${id}`);
1507
+ }
1508
+
1509
+ /** Resume a paused cron schedule. */
1510
+ async resumeCronSchedule(id: number): Promise<void> {
1511
+ const now = this.nowMs();
1512
+ await this.client.hset(
1513
+ `${this.prefix}cron:${id}`,
1514
+ 'status',
1515
+ 'active',
1516
+ 'updatedAt',
1517
+ now.toString(),
1518
+ );
1519
+ await this.client.srem(`${this.prefix}cron_status:paused`, id.toString());
1520
+ await this.client.sadd(`${this.prefix}cron_status:active`, id.toString());
1521
+
1522
+ const nextRunAt = await this.client.hget(
1523
+ `${this.prefix}cron:${id}`,
1524
+ 'nextRunAt',
1525
+ );
1526
+ if (nextRunAt && nextRunAt !== 'null') {
1527
+ await this.client.zadd(
1528
+ `${this.prefix}cron_due`,
1529
+ Number(nextRunAt),
1530
+ id.toString(),
1531
+ );
1532
+ }
1533
+ log(`Resumed cron schedule ${id}`);
1534
+ }
1535
+
1536
+ /** Edit a cron schedule. */
1537
+ async editCronSchedule(
1538
+ id: number,
1539
+ updates: EditCronScheduleOptions,
1540
+ nextRunAt?: Date | null,
1541
+ ): Promise<void> {
1542
+ const now = this.nowMs();
1543
+ const fields: string[] = [];
1544
+
1545
+ if (updates.cronExpression !== undefined) {
1546
+ fields.push('cronExpression', updates.cronExpression);
1547
+ }
1548
+ if (updates.payload !== undefined) {
1549
+ fields.push('payload', JSON.stringify(updates.payload));
1550
+ }
1551
+ if (updates.maxAttempts !== undefined) {
1552
+ fields.push('maxAttempts', updates.maxAttempts.toString());
1553
+ }
1554
+ if (updates.priority !== undefined) {
1555
+ fields.push('priority', updates.priority.toString());
1556
+ }
1557
+ if (updates.timeoutMs !== undefined) {
1558
+ fields.push(
1559
+ 'timeoutMs',
1560
+ updates.timeoutMs !== null ? updates.timeoutMs.toString() : 'null',
1561
+ );
1562
+ }
1563
+ if (updates.forceKillOnTimeout !== undefined) {
1564
+ fields.push(
1565
+ 'forceKillOnTimeout',
1566
+ updates.forceKillOnTimeout ? 'true' : 'false',
1567
+ );
1568
+ }
1569
+ if (updates.tags !== undefined) {
1570
+ fields.push(
1571
+ 'tags',
1572
+ updates.tags !== null ? JSON.stringify(updates.tags) : 'null',
1573
+ );
1574
+ }
1575
+ if (updates.timezone !== undefined) {
1576
+ fields.push('timezone', updates.timezone);
1577
+ }
1578
+ if (updates.allowOverlap !== undefined) {
1579
+ fields.push('allowOverlap', updates.allowOverlap ? 'true' : 'false');
1580
+ }
1581
+ if (updates.retryDelay !== undefined) {
1582
+ fields.push(
1583
+ 'retryDelay',
1584
+ updates.retryDelay !== null ? updates.retryDelay.toString() : 'null',
1585
+ );
1586
+ }
1587
+ if (updates.retryBackoff !== undefined) {
1588
+ fields.push(
1589
+ 'retryBackoff',
1590
+ updates.retryBackoff !== null
1591
+ ? updates.retryBackoff.toString()
1592
+ : 'null',
1593
+ );
1594
+ }
1595
+ if (updates.retryDelayMax !== undefined) {
1596
+ fields.push(
1597
+ 'retryDelayMax',
1598
+ updates.retryDelayMax !== null
1599
+ ? updates.retryDelayMax.toString()
1600
+ : 'null',
1601
+ );
1602
+ }
1603
+ if (nextRunAt !== undefined) {
1604
+ const val = nextRunAt !== null ? nextRunAt.getTime().toString() : 'null';
1605
+ fields.push('nextRunAt', val);
1606
+ if (nextRunAt !== null) {
1607
+ await this.client.zadd(
1608
+ `${this.prefix}cron_due`,
1609
+ nextRunAt.getTime(),
1610
+ id.toString(),
1611
+ );
1612
+ } else {
1613
+ await this.client.zrem(`${this.prefix}cron_due`, id.toString());
1614
+ }
1615
+ }
1616
+
1617
+ if (fields.length === 0) {
1618
+ log(`No fields to update for cron schedule ${id}`);
1619
+ return;
1620
+ }
1621
+
1622
+ fields.push('updatedAt', now.toString());
1623
+ await (this.client as any).hmset(`${this.prefix}cron:${id}`, ...fields);
1624
+ log(`Edited cron schedule ${id}`);
1625
+ }
1626
+
1627
+ /**
1628
+ * Fetch all active cron schedules whose nextRunAt <= now.
1629
+ * Uses a sorted set (cron_due) for efficient range query.
1630
+ */
1631
+ async getDueCronSchedules(): Promise<CronScheduleRecord[]> {
1632
+ const now = this.nowMs();
1633
+ const ids = await this.client.zrangebyscore(
1634
+ `${this.prefix}cron_due`,
1635
+ 0,
1636
+ now,
1637
+ );
1638
+ if (ids.length === 0) {
1639
+ log('Found 0 due cron schedules');
1640
+ return [];
1641
+ }
1642
+
1643
+ const schedules: CronScheduleRecord[] = [];
1644
+ for (const id of ids) {
1645
+ const data = await this.client.hgetall(`${this.prefix}cron:${id}`);
1646
+ if (data && Object.keys(data).length > 0 && data.status === 'active') {
1647
+ schedules.push(this.deserializeCronSchedule(data));
1648
+ }
1649
+ }
1650
+ log(`Found ${schedules.length} due cron schedules`);
1651
+ return schedules;
1652
+ }
1653
+
1654
+ /**
1655
+ * Update a cron schedule after a job has been enqueued.
1656
+ * Sets lastEnqueuedAt, lastJobId, and advances nextRunAt.
1657
+ */
1658
+ async updateCronScheduleAfterEnqueue(
1659
+ id: number,
1660
+ lastEnqueuedAt: Date,
1661
+ lastJobId: number,
1662
+ nextRunAt: Date | null,
1663
+ ): Promise<void> {
1664
+ const fields: string[] = [
1665
+ 'lastEnqueuedAt',
1666
+ lastEnqueuedAt.getTime().toString(),
1667
+ 'lastJobId',
1668
+ lastJobId.toString(),
1669
+ 'nextRunAt',
1670
+ nextRunAt ? nextRunAt.getTime().toString() : 'null',
1671
+ 'updatedAt',
1672
+ this.nowMs().toString(),
1673
+ ];
1674
+
1675
+ await (this.client as any).hmset(`${this.prefix}cron:${id}`, ...fields);
1676
+
1677
+ if (nextRunAt) {
1678
+ await this.client.zadd(
1679
+ `${this.prefix}cron_due`,
1680
+ nextRunAt.getTime(),
1681
+ id.toString(),
1682
+ );
1683
+ } else {
1684
+ await this.client.zrem(`${this.prefix}cron_due`, id.toString());
1685
+ }
1686
+
1687
+ log(
1688
+ `Updated cron schedule ${id}: lastJobId=${lastJobId}, nextRunAt=${nextRunAt?.toISOString() ?? 'null'}`,
1689
+ );
1690
+ }
1691
+
1692
+ /** Deserialize a Redis hash into a CronScheduleRecord. */
1693
+ private deserializeCronSchedule(
1694
+ h: Record<string, string>,
1695
+ ): CronScheduleRecord {
1696
+ const nullish = (v: string | undefined) =>
1697
+ v === undefined || v === 'null' || v === '' ? null : v;
1698
+ const numOrNull = (v: string | undefined): number | null => {
1699
+ const n = nullish(v);
1700
+ return n === null ? null : Number(n);
1701
+ };
1702
+ const dateOrNull = (v: string | undefined): Date | null => {
1703
+ const n = numOrNull(v);
1704
+ return n === null ? null : new Date(n);
1705
+ };
1706
+
1707
+ let payload: any;
1708
+ try {
1709
+ payload = JSON.parse(h.payload);
1710
+ } catch {
1711
+ payload = h.payload;
1712
+ }
1713
+
1714
+ let tags: string[] | undefined;
1715
+ try {
1716
+ const raw = h.tags;
1717
+ if (raw && raw !== 'null') {
1718
+ tags = JSON.parse(raw);
1719
+ }
1720
+ } catch {
1721
+ /* ignore */
1722
+ }
1723
+
1724
+ return {
1725
+ id: Number(h.id),
1726
+ scheduleName: h.scheduleName,
1727
+ cronExpression: h.cronExpression,
1728
+ jobType: h.jobType,
1729
+ payload,
1730
+ maxAttempts: Number(h.maxAttempts),
1731
+ priority: Number(h.priority),
1732
+ timeoutMs: numOrNull(h.timeoutMs),
1733
+ forceKillOnTimeout: h.forceKillOnTimeout === 'true',
1734
+ tags,
1735
+ timezone: h.timezone,
1736
+ allowOverlap: h.allowOverlap === 'true',
1737
+ status: h.status as CronScheduleStatus,
1738
+ lastEnqueuedAt: dateOrNull(h.lastEnqueuedAt),
1739
+ lastJobId: numOrNull(h.lastJobId),
1740
+ nextRunAt: dateOrNull(h.nextRunAt),
1741
+ createdAt: new Date(Number(h.createdAt)),
1742
+ updatedAt: new Date(Number(h.updatedAt)),
1743
+ retryDelay: numOrNull(h.retryDelay),
1744
+ retryBackoff:
1745
+ h.retryBackoff === 'true'
1746
+ ? true
1747
+ : h.retryBackoff === 'false'
1748
+ ? false
1749
+ : null,
1750
+ retryDelayMax: numOrNull(h.retryDelayMax),
1751
+ };
1752
+ }
1753
+
1754
+ // ── Private helpers (filters) ─────────────────────────────────────────
1755
+
1756
+ private async applyFilters(
1757
+ ids: string[],
1758
+ filters: JobFilters,
1759
+ ): Promise<string[]> {
1760
+ let result = ids;
1761
+
1762
+ if (filters.jobType) {
1763
+ const typeIds = new Set(
1764
+ await this.client.smembers(`${this.prefix}type:${filters.jobType}`),
1765
+ );
1766
+ result = result.filter((id) => typeIds.has(id));
1767
+ }
1768
+
1769
+ if (filters.tags && filters.tags.values.length > 0) {
1770
+ result = await this.filterByTags(
1771
+ result,
1772
+ filters.tags.values,
1773
+ filters.tags.mode || 'all',
1774
+ );
1775
+ }
1776
+
1777
+ // For priority and runAt, we need to load job data
1778
+ if (filters.priority !== undefined || filters.runAt) {
1779
+ const jobs = await this.loadJobsByIds(result);
1780
+ let filtered = jobs;
1781
+ if (filters.priority !== undefined) {
1782
+ filtered = filtered.filter((j) => j.priority === filters.priority);
1783
+ }
1784
+ if (filters.runAt) {
1785
+ filtered = this.filterByRunAt(filtered, filters.runAt);
1786
+ }
1787
+ result = filtered.map((j) => j.id.toString());
1788
+ }
1789
+
1790
+ return result;
1791
+ }
1792
+ }